extras: re-use Projection from jaraco.collections
Jason R. Coombs
r51261:82e5a9b1 default
mercurial/thirdparty/jaraco/collections.py (new file, mode 100644)
@@ -0,0 +1,56 @@
1 # adapted from jaraco.collections 3.9
2
3 import collections
4
5
6 class Projection(collections.abc.Mapping):
7 """
8 Project a set of keys over a mapping
9
10 >>> sample = {'a': 1, 'b': 2, 'c': 3}
11 >>> prj = Projection(['a', 'c', 'd'], sample)
12 >>> prj == {'a': 1, 'c': 3}
13 True
14
15 Keys should only appear if they were specified and exist in the space.
16
17 >>> sorted(list(prj.keys()))
18 ['a', 'c']
19
20 Attempting to access a key not in the projection
21 results in a KeyError.
22
23 >>> prj['b']
24 Traceback (most recent call last):
25 ...
26 KeyError: 'b'
27
28 Use the projection to update another dict.
29
30 >>> target = {'a': 2, 'b': 2}
31 >>> target.update(prj)
32 >>> target == {'a': 1, 'b': 2, 'c': 3}
33 True
34
35 Also note that Projection keeps a reference to the original dict, so
36 if you modify the original dict, that could modify the Projection.
37
38 >>> del sample['a']
39 >>> dict(prj)
40 {'c': 3}
41 """
42
43 def __init__(self, keys, space):
44 self._keys = tuple(keys)
45 self._space = space
46
47 def __getitem__(self, key):
48 if key not in self._keys:
49 raise KeyError(key)
50 return self._space[key]
51
52 def __iter__(self):
53 return iter(set(self._keys).intersection(self._space))
54
55 def __len__(self):
56 return len(tuple(iter(self)))
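For context, a minimal usage sketch (toy data, not part of the patch) of the new class, assuming it is importable as mercurial.thirdparty.jaraco.collections, which the rest of this patch arranges:

    from mercurial.thirdparty.jaraco.collections import Projection

    extra = {b'source': b'0123abcd', b'branch': b'default'}
    retained = (b'source', b'intermediate-source')

    # Only keys that are both requested and present in the mapping survive;
    # b'intermediate-source' is simply absent from the result.
    assert dict(Projection(retained, extra)) == {b'source': b'0123abcd'}

Because Projection keeps a reference to the underlying dict rather than copying it, it is cheap to build and is materialized only when consumed, e.g. by dict.update() in hgext/rebase.py below.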
contrib/import-checker.py
@@ -1,772 +1,773 @@
1 1 #!/usr/bin/env python3
2 2
3 3
4 4 import ast
5 5 import collections
6 6 import io
7 7 import os
8 8 import sys
9 9
10 10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
11 11 # to work when run from a virtualenv. The modules were chosen empirically
12 12 # so that the return value matches the return value without virtualenv.
13 13 if True: # disable lexical sorting checks
14 14 try:
15 15 import BaseHTTPServer as basehttpserver
16 16 except ImportError:
17 17 basehttpserver = None
18 18 import zlib
19 19
20 20 import testparseutil
21 21
22 22 # Allow list of modules that symbols can be directly imported from.
23 23 allowsymbolimports = (
24 24 '__future__',
25 25 'breezy',
26 26 'concurrent',
27 27 'hgclient',
28 28 'mercurial',
29 29 'mercurial.hgweb.common',
30 30 'mercurial.hgweb.request',
31 31 'mercurial.i18n',
32 32 'mercurial.interfaces',
33 33 'mercurial.node',
34 34 'mercurial.pycompat',
35 35 # for revlog to re-export constant to extensions
36 36 'mercurial.revlogutils.constants',
37 37 'mercurial.revlogutils.flagutil',
38 38 # for cffi modules to re-export pure functions
39 39 'mercurial.pure.base85',
40 40 'mercurial.pure.bdiff',
41 41 'mercurial.pure.mpatch',
42 42 'mercurial.pure.osutil',
43 43 'mercurial.pure.parsers',
44 44 # third-party imports should be directly imported
45 45 'mercurial.thirdparty',
46 46 'mercurial.thirdparty.attr',
47 'mercurial.thirdparty.jaraco.collections',
47 48 'mercurial.thirdparty.zope',
48 49 'mercurial.thirdparty.zope.interface',
49 50 'typing',
50 51 'xml.etree.ElementTree',
51 52 )
52 53
53 54 # Allow list of symbols that can be directly imported.
54 55 directsymbols = ('demandimport',)
55 56
56 57 # Modules that must be aliased because they are commonly confused with
57 58 # common variables and can create aliasing and readability issues.
58 59 requirealias = {
59 60 'ui': 'uimod',
60 61 }
61 62
62 63
63 64 def walklocal(root):
64 65 """Recursively yield all descendant nodes but not in a different scope"""
65 66 todo = collections.deque(ast.iter_child_nodes(root))
66 67 yield root, False
67 68 while todo:
68 69 node = todo.popleft()
69 70 newscope = isinstance(node, ast.FunctionDef)
70 71 if not newscope:
71 72 todo.extend(ast.iter_child_nodes(node))
72 73 yield node, newscope
73 74
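A doctest-style sketch (not part of the patch) of the scope cut-off: a nested FunctionDef is itself yielded, but its children are not, so the import of b below never appears:

    >>> tree = ast.parse('import a\ndef f():\n    import b\n')
    >>> [type(n).__name__ for n, newscope in walklocal(tree) if newscope]
    ['FunctionDef']
    >>> any(isinstance(n, ast.Import) and n.names[0].name == 'b'
    ...     for n, _ in walklocal(tree))
    False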
74 75
75 76 def dotted_name_of_path(path):
76 77 """Given a relative path to a source file, return its dotted module name.
77 78
78 79 >>> dotted_name_of_path('mercurial/error.py')
79 80 'mercurial.error'
80 81 >>> dotted_name_of_path('zlibmodule.so')
81 82 'zlib'
82 83 """
83 84 parts = path.replace(os.sep, '/').split('/')
84 85 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
85 86 if parts[-1].endswith('module'):
86 87 parts[-1] = parts[-1][:-6]
87 88 return '.'.join(parts)
88 89
89 90
90 91 def fromlocalfunc(modulename, localmods):
91 92 """Get a function to examine which locally defined module the
92 93 target source imports via a specified name.
93 94
94 95 `modulename` is a `dotted_name_of_path()`-ed source file path of the
95 96 target source, which may have `.__init__` at the end of it.
96 97
97 98 `localmods` is a set of absolute `dotted_name_of_path()`-ed source file
98 99 paths of locally defined (= Mercurial specific) modules.
99 100
100 101 This function assumes that module names not found in
101 102 `localmods` come from the Python standard library.
102 103
103 104 This function returns a function that takes a `name` argument
104 105 and returns an `(absname, dottedpath, hassubmod)` tuple if `name`
105 106 matches a locally defined module. Otherwise, it returns
106 107 False.
107 108
108 109 It is assumed that `name` doesn't have `.__init__`.
109 110
110 111 `absname` is the absolute module name of the specified `name`
111 112 (e.g. "hgext.convert"). This can be used to compose prefixes for
112 113 submodules and the like.
113 114
114 115 `dottedpath` is the `dotted_name_of_path()`-ed source file path
115 116 (e.g. "hgext.convert.__init__") of `name`. This is used to look the
116 117 module up in `localmods` again.
117 118
118 119 `hassubmod` is whether it may have submodules under it (for
119 120 convenience, even though this is also equivalent to "absname !=
120 121 dottedpath")
121 122
122 123 >>> localmods = {'foo.__init__', 'foo.foo1',
123 124 ... 'foo.bar.__init__', 'foo.bar.bar1',
124 125 ... 'baz.__init__', 'baz.baz1'}
125 126 >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
126 127 >>> # relative
127 128 >>> fromlocal('foo1')
128 129 ('foo.foo1', 'foo.foo1', False)
129 130 >>> fromlocal('bar')
130 131 ('foo.bar', 'foo.bar.__init__', True)
131 132 >>> fromlocal('bar.bar1')
132 133 ('foo.bar.bar1', 'foo.bar.bar1', False)
133 134 >>> # absolute
134 135 >>> fromlocal('baz')
135 136 ('baz', 'baz.__init__', True)
136 137 >>> fromlocal('baz.baz1')
137 138 ('baz.baz1', 'baz.baz1', False)
138 139 >>> # unknown = maybe standard library
139 140 >>> fromlocal('os')
140 141 False
141 142 >>> fromlocal(None, 1)
142 143 ('foo', 'foo.__init__', True)
143 144 >>> fromlocal('foo1', 1)
144 145 ('foo.foo1', 'foo.foo1', False)
145 146 >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
146 147 >>> fromlocal2(None, 2)
147 148 ('foo', 'foo.__init__', True)
148 149 >>> fromlocal2('bar2', 1)
149 150 False
150 151 >>> fromlocal2('bar', 2)
151 152 ('foo.bar', 'foo.bar.__init__', True)
152 153 """
153 154 if not isinstance(modulename, str):
154 155 modulename = modulename.decode('ascii')
155 156 prefix = '.'.join(modulename.split('.')[:-1])
156 157 if prefix:
157 158 prefix += '.'
158 159
159 160 def fromlocal(name, level=0):
160 161 # name is false value when relative imports are used.
161 162 if not name:
162 163 # If relative imports are used, level must be greater than zero.
163 164 assert level > 0
164 165 candidates = ['.'.join(modulename.split('.')[:-level])]
165 166 else:
166 167 if not level:
167 168 # Check relative name first.
168 169 candidates = [prefix + name, name]
169 170 else:
170 171 candidates = [
171 172 '.'.join(modulename.split('.')[:-level]) + '.' + name
172 173 ]
173 174
174 175 for n in candidates:
175 176 if n in localmods:
176 177 return (n, n, False)
177 178 dottedpath = n + '.__init__'
178 179 if dottedpath in localmods:
179 180 return (n, dottedpath, True)
180 181 return False
181 182
182 183 return fromlocal
183 184
184 185
185 186 def populateextmods(localmods):
186 187 """Populate C extension modules based on pure modules"""
187 188 newlocalmods = set(localmods)
188 189 for n in localmods:
189 190 if n.startswith('mercurial.pure.'):
190 191 m = n[len('mercurial.pure.') :]
191 192 newlocalmods.add('mercurial.cext.' + m)
192 193 newlocalmods.add('mercurial.cffi._' + m)
193 194 return newlocalmods
194 195
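A doctest-style sketch (not part of the patch) of the expansion: each pure module also gets its C and cffi counterparts registered as local modules:

    >>> sorted(populateextmods({'mercurial.pure.parsers'}))
    ['mercurial.cext.parsers', 'mercurial.cffi._parsers', 'mercurial.pure.parsers']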
195 196
196 197 def list_stdlib_modules():
197 198 """List the modules present in the stdlib.
198 199
199 200 >>> py3 = sys.version_info[0] >= 3
200 201 >>> mods = set(list_stdlib_modules())
201 202 >>> 'BaseHTTPServer' in mods or py3
202 203 True
203 204
204 205 os.path isn't really a module, so it's missing:
205 206
206 207 >>> 'os.path' in mods
207 208 False
208 209
209 210 sys requires special treatment, because it's baked into the
210 211 interpreter, but it should still appear:
211 212
212 213 >>> 'sys' in mods
213 214 True
214 215
215 216 >>> 'collections' in mods
216 217 True
217 218
218 219 >>> 'cStringIO' in mods or py3
219 220 True
220 221
221 222 >>> 'cffi' in mods
222 223 True
223 224 """
224 225 for m in sys.builtin_module_names:
225 226 yield m
226 227 # These modules only exist on windows, but we should always
227 228 # consider them stdlib.
228 229 for m in ['msvcrt', '_winreg']:
229 230 yield m
230 231 yield '__builtin__'
231 232 yield 'builtins' # python3 only
232 233 yield 'importlib.abc' # python3 only
233 234 yield 'importlib.machinery' # python3 only
234 235 yield 'importlib.util' # python3 only
235 236 yield 'packaging.version'
236 237 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
237 238 yield m
238 239 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
239 240 yield m
240 241 for m in ['cffi']:
241 242 yield m
242 243 stdlib_prefixes = {sys.prefix, sys.exec_prefix}
243 244 # We need to supplement the list of prefixes for the search to work
244 245 # when run from within a virtualenv.
245 246 for mod in (basehttpserver, zlib):
246 247 if mod is None:
247 248 continue
248 249 try:
249 250 # Not all module objects have a __file__ attribute.
250 251 filename = mod.__file__
251 252 except AttributeError:
252 253 continue
253 254 dirname = os.path.dirname(filename)
254 255 for prefix in stdlib_prefixes:
255 256 if dirname.startswith(prefix):
256 257 # Then this directory is redundant.
257 258 break
258 259 else:
259 260 stdlib_prefixes.add(dirname)
260 261 sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
261 262 for libpath in sys.path:
262 263 # We want to walk everything in sys.path that starts with something in
263 264 # stdlib_prefixes, but not directories from the hg sources.
264 265 if os.path.abspath(libpath).startswith(sourceroot) or not any(
265 266 libpath.startswith(p) for p in stdlib_prefixes
266 267 ):
267 268 continue
268 269 for top, dirs, files in os.walk(libpath):
269 270 if 'dist-packages' in top.split(os.path.sep):
270 271 continue
271 272 for i, d in reversed(list(enumerate(dirs))):
272 273 if (
273 274 not os.path.exists(os.path.join(top, d, '__init__.py'))
274 275 or top == libpath
275 276 and d in ('hgdemandimport', 'hgext', 'mercurial')
276 277 ):
277 278 del dirs[i]
278 279 for name in files:
279 280 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
280 281 continue
281 282 if name.startswith('__init__.py'):
282 283 full_path = top
283 284 else:
284 285 full_path = os.path.join(top, name)
285 286 rel_path = full_path[len(libpath) + 1 :]
286 287 mod = dotted_name_of_path(rel_path)
287 288 yield mod
288 289
289 290
290 291 stdlib_modules = set(list_stdlib_modules())
291 292
292 293
293 294 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
294 295 """Given the source of a file as a string, yield the names
295 296 imported by that file.
296 297
297 298 Args:
298 299 source: The python source to examine as a string.
299 300 modulename: the dotted name of the specified python source (may have `__init__`)
300 301 localmods: set of locally defined module names (may have `__init__`)
301 302 ignore_nested: If true, import statements that do not start in
302 303 column zero will be ignored.
303 304
304 305 Yields:
305 306 Absolute module names imported by the given source.
306 307
307 308 >>> f = 'foo/xxx.py'
308 309 >>> modulename = 'foo.xxx'
309 310 >>> localmods = {'foo.__init__': True,
310 311 ... 'foo.foo1': True, 'foo.foo2': True,
311 312 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
312 313 ... 'baz.__init__': True, 'baz.baz1': True }
313 314 >>> # standard library (= not locally defined ones)
314 315 >>> sorted(imported_modules(
315 316 ... 'from stdlib1 import foo, bar; import stdlib2',
316 317 ... modulename, f, localmods))
317 318 []
318 319 >>> # relative importing
319 320 >>> sorted(imported_modules(
320 321 ... 'import foo1; from bar import bar1',
321 322 ... modulename, f, localmods))
322 323 ['foo.bar.bar1', 'foo.foo1']
323 324 >>> sorted(imported_modules(
324 325 ... 'from bar.bar1 import name1, name2, name3',
325 326 ... modulename, f, localmods))
326 327 ['foo.bar.bar1']
327 328 >>> # absolute importing
328 329 >>> sorted(imported_modules(
329 330 ... 'from baz import baz1, name1',
330 331 ... modulename, f, localmods))
331 332 ['baz.__init__', 'baz.baz1']
332 333 >>> # mixed importing, even though it shouldn't be recommended
333 334 >>> sorted(imported_modules(
334 335 ... 'import stdlib, foo1, baz',
335 336 ... modulename, f, localmods))
336 337 ['baz.__init__', 'foo.foo1']
337 338 >>> # ignore_nested
338 339 >>> sorted(imported_modules(
339 340 ... '''import foo
340 341 ... def wat():
341 342 ... import bar
342 343 ... ''', modulename, f, localmods))
343 344 ['foo.__init__', 'foo.bar.__init__']
344 345 >>> sorted(imported_modules(
345 346 ... '''import foo
346 347 ... def wat():
347 348 ... import bar
348 349 ... ''', modulename, f, localmods, ignore_nested=True))
349 350 ['foo.__init__']
350 351 """
351 352 fromlocal = fromlocalfunc(modulename, localmods)
352 353 for node in ast.walk(ast.parse(source, f)):
353 354 if ignore_nested and getattr(node, 'col_offset', 0) > 0:
354 355 continue
355 356 if isinstance(node, ast.Import):
356 357 for n in node.names:
357 358 found = fromlocal(n.name)
358 359 if not found:
359 360 # this should import standard library
360 361 continue
361 362 yield found[1]
362 363 elif isinstance(node, ast.ImportFrom):
363 364 found = fromlocal(node.module, node.level)
364 365 if not found:
365 366 # this should import standard library
366 367 continue
367 368
368 369 absname, dottedpath, hassubmod = found
369 370 if not hassubmod:
370 371 # "dottedpath" is not a package; must be imported
371 372 yield dottedpath
372 373 # examination of "node.names" should be redundant
373 374 # e.g.: from mercurial.node import nullid, nullrev
374 375 continue
375 376
376 377 modnotfound = False
377 378 prefix = absname + '.'
378 379 for n in node.names:
379 380 found = fromlocal(prefix + n.name)
380 381 if not found:
381 382 # this should be a function or a property of "node.module"
382 383 modnotfound = True
383 384 continue
384 385 yield found[1]
385 386 if modnotfound and dottedpath != modulename:
386 387 # "dottedpath" is a package, but imported because of non-module
387 388 # lookup
388 389 # specifically allow "from . import foo" from __init__.py
389 390 yield dottedpath
390 391
391 392
392 393 def verify_import_convention(module, source, localmods):
393 394 """Verify imports match our established coding convention."""
394 395 root = ast.parse(source)
395 396
396 397 return verify_modern_convention(module, root, localmods)
397 398
398 399
399 400 def verify_modern_convention(module, root, localmods, root_col_offset=0):
400 401 """Verify a file conforms to the modern import convention rules.
401 402
402 403 The rules of the modern convention are:
403 404
404 405 * Ordering is stdlib followed by local imports. Each group is lexically
405 406 sorted.
406 407 * Importing multiple modules via "import X, Y" is not allowed: use
407 408 separate import statements.
408 409 * Importing multiple modules via "from X import ..." is allowed if using
409 410 parenthesis and one entry per line.
410 411 * Only 1 relative import statement per import level ("from .", "from ..")
411 412 is allowed.
412 413 * Relative imports from higher levels must occur before lower levels. e.g.
413 414 "from .." must be before "from .".
414 415 * Imports from peer packages should use relative import (e.g. do not
415 416 "import mercurial.foo" from a "mercurial.*" module).
416 417 * Symbols can only be imported from specific modules (see
417 418 `allowsymbolimports`). For other modules, first import the module then
418 419 assign the symbol to a module-level variable. In addition, these imports
419 420 must be performed before other local imports. This rule only
420 421 applies to import statements outside of any blocks.
421 422 * Relative imports from the standard library are not allowed, unless that
422 423 library is also a local module.
423 424 * Certain modules must be aliased to alternate names to avoid aliasing
424 425 and readability problems. See `requirealias`.
425 426 """
426 427 if not isinstance(module, str):
427 428 module = module.decode('ascii')
428 429 topmodule = module.split('.')[0]
429 430 fromlocal = fromlocalfunc(module, localmods)
430 431
431 432 # Whether a local/non-stdlib import has been performed.
432 433 seenlocal = None
433 434 # Whether a local/non-stdlib, non-symbol import has been seen.
434 435 seennonsymbollocal = False
435 436 # The last name to be imported (for sorting).
436 437 lastname = None
437 438 laststdlib = None
438 439 # Relative import levels encountered so far.
439 440 seenlevels = set()
440 441
441 442 for node, newscope in walklocal(root):
442 443
443 444 def msg(fmt, *args):
444 445 return (fmt % args, node.lineno)
445 446
446 447 if newscope:
447 448 # Check for local imports in function
448 449 for r in verify_modern_convention(
449 450 module, node, localmods, node.col_offset + 4
450 451 ):
451 452 yield r
452 453 elif isinstance(node, ast.Import):
453 454 # Disallow "import foo, bar" and require separate imports
454 455 # for each module.
455 456 if len(node.names) > 1:
456 457 yield msg(
457 458 'multiple imported names: %s',
458 459 ', '.join(n.name for n in node.names),
459 460 )
460 461
461 462 name = node.names[0].name
462 463 asname = node.names[0].asname
463 464
464 465 stdlib = name in stdlib_modules
465 466
466 467 # Ignore sorting rules on imports inside blocks.
467 468 if node.col_offset == root_col_offset:
468 469 if lastname and name < lastname and laststdlib == stdlib:
469 470 yield msg(
470 471 'imports not lexically sorted: %s < %s', name, lastname
471 472 )
472 473
473 474 lastname = name
474 475 laststdlib = stdlib
475 476
476 477 # stdlib imports should be before local imports.
477 478 if stdlib and seenlocal and node.col_offset == root_col_offset:
478 479 yield msg(
479 480 'stdlib import "%s" follows local import: %s',
480 481 name,
481 482 seenlocal,
482 483 )
483 484
484 485 if not stdlib:
485 486 seenlocal = name
486 487
487 488 # Import of sibling modules should use relative imports.
488 489 topname = name.split('.')[0]
489 490 if topname == topmodule:
490 491 yield msg('import should be relative: %s', name)
491 492
492 493 if name in requirealias and asname != requirealias[name]:
493 494 yield msg(
494 495 '%s module must be "as" aliased to %s',
495 496 name,
496 497 requirealias[name],
497 498 )
498 499
499 500 elif isinstance(node, ast.ImportFrom):
500 501 # Resolve the full imported module name.
501 502 if node.level > 0:
502 503 fullname = '.'.join(module.split('.')[: -node.level])
503 504 if node.module:
504 505 fullname += '.%s' % node.module
505 506 else:
506 507 assert node.module
507 508 fullname = node.module
508 509
509 510 topname = fullname.split('.')[0]
510 511 if topname == topmodule:
511 512 yield msg('import should be relative: %s', fullname)
512 513
513 514 # __future__ is special since it needs to come first and use
514 515 # symbol import.
515 516 if fullname != '__future__':
516 517 if not fullname or (
517 518 fullname in stdlib_modules
518 519 # allow standard 'from typing import ...' style
519 520 and fullname.startswith('.')
520 521 and fullname not in localmods
521 522 and fullname + '.__init__' not in localmods
522 523 ):
523 524 yield msg('relative import of stdlib module')
524 525 else:
525 526 seenlocal = fullname
526 527
527 528 # Direct symbol import is only allowed from certain modules and
528 529 # must occur before non-symbol imports.
529 530 found = fromlocal(node.module, node.level)
530 531 if found and found[2]: # node.module is a package
531 532 prefix = found[0] + '.'
532 533 symbols = (
533 534 n.name for n in node.names if not fromlocal(prefix + n.name)
534 535 )
535 536 else:
536 537 symbols = (n.name for n in node.names)
537 538 symbols = [sym for sym in symbols if sym not in directsymbols]
538 539 if node.module and node.col_offset == root_col_offset:
539 540 if symbols and fullname not in allowsymbolimports:
540 541 yield msg(
541 542 'direct symbol import %s from %s',
542 543 ', '.join(symbols),
543 544 fullname,
544 545 )
545 546
546 547 if symbols and seennonsymbollocal:
547 548 yield msg(
548 549 'symbol import follows non-symbol import: %s', fullname
549 550 )
550 551 if not symbols and fullname not in stdlib_modules:
551 552 seennonsymbollocal = True
552 553
553 554 if not node.module:
554 555 assert node.level
555 556
556 557 # Only allow 1 group per level.
557 558 if (
558 559 node.level in seenlevels
559 560 and node.col_offset == root_col_offset
560 561 ):
561 562 yield msg(
562 563 'multiple "from %s import" statements', '.' * node.level
563 564 )
564 565
565 566 # Higher-level groups come before lower-level groups.
566 567 if any(node.level > l for l in seenlevels):
567 568 yield msg(
568 569 'higher-level import should come first: %s', fullname
569 570 )
570 571
571 572 seenlevels.add(node.level)
572 573
573 574 # Entries in "from .X import ( ... )" lists must be lexically
574 575 # sorted.
575 576 lastentryname = None
576 577
577 578 for n in node.names:
578 579 if lastentryname and n.name < lastentryname:
579 580 yield msg(
580 581 'imports from %s not lexically sorted: %s < %s',
581 582 fullname,
582 583 n.name,
583 584 lastentryname,
584 585 )
585 586
586 587 lastentryname = n.name
587 588
588 589 if n.name in requirealias and n.asname != requirealias[n.name]:
589 590 yield msg(
590 591 '%s from %s must be "as" aliased to %s',
591 592 n.name,
592 593 fullname,
593 594 requirealias[n.name],
594 595 )
595 596
596 597
597 598 class CircularImport(Exception):
598 599 pass
599 600
600 601
601 602 def checkmod(mod, imports):
602 603 shortest = {}
603 604 visit = [[mod]]
604 605 while visit:
605 606 path = visit.pop(0)
606 607 for i in sorted(imports.get(path[-1], [])):
607 608 if len(path) < shortest.get(i, 1000):
608 609 shortest[i] = len(path)
609 610 if i in path:
610 611 if i == path[0]:
611 612 raise CircularImport(path)
612 613 continue
613 614 visit.append(path + [i])
614 615
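checkmod() is a breadth-first walk over the import graph that raises only when a path leads back to the module it started from; a doctest-style sketch with a toy graph (not part of the patch):

    >>> graph = {'a': ['b'], 'b': ['a'], 'c': ['a']}
    >>> checkmod('c', graph)  # 'c' merely reaches the a/b cycle: no error
    >>> try:
    ...     checkmod('a', graph)
    ... except CircularImport as e:
    ...     print(e.args[0])
    ['a', 'b']

Note the cycle is reported without repeating the starting module; rotatecycle() below closes and canonicalizes it for display.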
615 616
616 617 def rotatecycle(cycle):
617 618 """arrange a cycle so that the lexicographically first module listed first
618 619
619 620 >>> rotatecycle(['foo', 'bar'])
620 621 ['bar', 'foo', 'bar']
621 622 """
622 623 lowest = min(cycle)
623 624 idx = cycle.index(lowest)
624 625 return cycle[idx:] + cycle[:idx] + [lowest]
625 626
626 627
627 628 def find_cycles(imports):
628 629 """Find cycles in an already-loaded import graph.
629 630
630 631 All module names recorded in `imports` should be absolute ones.
631 632
632 633 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
633 634 ... 'top.bar': ['top.baz', 'sys'],
634 635 ... 'top.baz': ['top.foo'],
635 636 ... 'top.qux': ['top.foo']}
636 637 >>> print('\\n'.join(sorted(find_cycles(imports))))
637 638 top.bar -> top.baz -> top.foo -> top.bar
638 639 top.foo -> top.qux -> top.foo
639 640 """
640 641 cycles = set()
641 642 for mod in sorted(imports.keys()):
642 643 try:
643 644 checkmod(mod, imports)
644 645 except CircularImport as e:
645 646 cycle = e.args[0]
646 647 cycles.add(" -> ".join(rotatecycle(cycle)))
647 648 return cycles
648 649
649 650
650 651 def _cycle_sortkey(c):
651 652 return len(c), c
652 653
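The sort key orders cycles by length first, then lexicographically, which is why main() below reports the shortest representative cycles first; a one-line sketch (not part of the patch):

    >>> sorted(['top.a -> top.b -> top.a', 'a -> b -> a'], key=_cycle_sortkey)
    ['a -> b -> a', 'top.a -> top.b -> top.a']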
653 654
654 655 def embedded(f, modname, src):
655 656 """Extract embedded python code
656 657
657 658 >>> def _forcestr(thing):
658 659 ... if not isinstance(thing, str):
659 660 ... return thing.decode('ascii')
660 661 ... return thing
661 662 >>> def test(fn, lines):
662 663 ... for s, m, f, l in embedded(fn, b"example", lines):
663 664 ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
664 665 ... print(repr(_forcestr(s)))
665 666 >>> lines = [
666 667 ... 'comment',
667 668 ... ' >>> from __future__ import print_function',
668 669 ... " >>> ' multiline",
669 670 ... " ... string'",
670 671 ... ' ',
671 672 ... 'comment',
672 673 ... ' $ cat > foo.py <<EOF',
673 674 ... ' > from __future__ import print_function',
674 675 ... ' > EOF',
675 676 ... ]
676 677 >>> test(b"example.t", lines)
677 678 example[2] doctest.py 1
678 679 "from __future__ import print_function\\n' multiline\\nstring'\\n\\n"
679 680 example[8] foo.py 7
680 681 'from __future__ import print_function\\n'
681 682 """
682 683 errors = []
683 684 for name, starts, ends, code in testparseutil.pyembedded(f, src, errors):
684 685 if not name:
685 686 # use 'doctest.py', in order to make already existing
686 687 # doctest above pass instantly
687 688 name = 'doctest.py'
688 689 # "starts" is "line number" (1-origin), but embedded() is
689 690 # expected to return "line offset" (0-origin). Therefore, this
690 691 # yields "starts - 1".
691 692 if not isinstance(modname, str):
692 693 modname = modname.decode('utf8')
693 694 yield code, "%s[%d]" % (modname, starts), name, starts - 1
694 695
695 696
696 697 def sources(f, modname):
697 698 """Yields possibly multiple sources from a filepath
698 699
699 700 input: filepath, modulename
700 701 yields: script(string), modulename, filepath, linenumber
701 702
702 703 For embedded scripts, the modulename and filepath will be different
703 704 from the function arguments. linenumber is an offset relative to
704 705 the input file.
705 706 """
706 707 py = False
707 708 if not f.endswith('.t'):
708 709 with open(f, 'rb') as src:
709 710 yield src.read(), modname, f, 0
710 711 py = True
711 712 if py or f.endswith('.t'):
712 713 # Strictly speaking we should sniff for the magic header that denotes
713 714 # Python source file encoding. But in reality we don't use anything
714 715 # other than ASCII (mainly) and UTF-8 (in a few exceptions), so
715 716 # simplicity is fine.
716 717 with io.open(f, 'r', encoding='utf-8') as src:
717 718 for script, modname, t, line in embedded(f, modname, src):
718 719 yield script, modname.encode('utf8'), t, line
719 720
720 721
721 722 def main(argv):
722 723 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
723 724 print('Usage: %s {-|file [file] [file] ...}' % argv[0])
724 725 return 1
725 726 if argv[1] == '-':
726 727 argv = argv[:1]
727 728 argv.extend(l.rstrip() for l in sys.stdin.readlines())
728 729 localmodpaths = {}
729 730 used_imports = {}
730 731 any_errors = False
731 732 for source_path in argv[1:]:
732 733 modname = dotted_name_of_path(source_path)
733 734 localmodpaths[modname] = source_path
734 735 localmods = populateextmods(localmodpaths)
735 736 for localmodname, source_path in sorted(localmodpaths.items()):
736 737 if not isinstance(localmodname, bytes):
737 738 # This is only safe because all hg's files are ascii
738 739 localmodname = localmodname.encode('ascii')
739 740 for src, modname, name, line in sources(source_path, localmodname):
740 741 try:
741 742 used_imports[modname] = sorted(
742 743 imported_modules(
743 744 src, modname, name, localmods, ignore_nested=True
744 745 )
745 746 )
746 747 for error, lineno in verify_import_convention(
747 748 modname, src, localmods
748 749 ):
749 750 any_errors = True
750 751 print('%s:%d: %s' % (source_path, lineno + line, error))
751 752 except SyntaxError as e:
752 753 print(
753 754 '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e)
754 755 )
755 756 cycles = find_cycles(used_imports)
756 757 if cycles:
757 758 firstmods = set()
758 759 for c in sorted(cycles, key=_cycle_sortkey):
759 760 first = c.split()[0]
760 761 # As a rough cut, ignore any cycle that starts with the
761 762 # same module as some other cycle. Otherwise we see lots
762 763 # of cycles that are effectively duplicates.
763 764 if first in firstmods:
764 765 continue
765 766 print('Import cycle:', c)
766 767 firstmods.add(first)
767 768 any_errors = True
768 769 return any_errors != 0
769 770
770 771
771 772 if __name__ == '__main__':
772 773 sys.exit(int(main(sys.argv)))
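The only functional change to this file is the new allowsymbolimports entry. With 'mercurial.thirdparty.jaraco.collections' on the allow list, verify_modern_convention() accepts the direct symbol import that hgext/rebase.py introduces below:

    # now accepted by the checker:
    from mercurial.thirdparty.jaraco.collections import Projection

    # whereas a direct symbol import from a module that is not on the list
    # would still be reported, e.g. (hypothetical):
    #   from mercurial.util import sortdict   # 'direct symbol import ...'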
hgext/rebase.py
@@ -1,2299 +1,2290 @@
1 1 # rebase.py - rebasing feature for mercurial
2 2 #
3 3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to move sets of revisions to a different ancestor
9 9
10 10 This extension lets you rebase changesets in an existing Mercurial
11 11 repository.
12 12
13 13 For more information:
14 14 https://mercurial-scm.org/wiki/RebaseExtension
15 15 '''
16 16
17 17
18 18 import os
19 19
20 20 from mercurial.i18n import _
21 21 from mercurial.node import (
22 22 nullrev,
23 23 short,
24 24 wdirrev,
25 25 )
26 26 from mercurial.pycompat import open
27 from mercurial.thirdparty.jaraco.collections import Projection
27 28 from mercurial import (
28 29 bookmarks,
29 30 cmdutil,
30 31 commands,
31 32 copies,
32 33 destutil,
33 34 error,
34 35 extensions,
35 36 logcmdutil,
36 37 merge as mergemod,
37 38 mergestate as mergestatemod,
38 39 mergeutil,
39 40 obsolete,
40 41 obsutil,
41 42 patch,
42 43 phases,
43 44 pycompat,
44 45 registrar,
45 46 repair,
46 47 revset,
47 48 revsetlang,
48 49 rewriteutil,
49 50 scmutil,
50 51 smartset,
51 52 state as statemod,
52 53 util,
53 54 )
54 55
56
55 57 # The following constants are used throughout the rebase module. The ordering of
56 58 # their values must be maintained.
57 59
58 60 # Indicates that a revision needs to be rebased
59 61 revtodo = -1
60 62 revtodostr = b'-1'
61 63
62 64 # legacy revstates no longer needed in current code
63 65 # -2: nullmerge, -3: revignored, -4: revprecursor, -5: revpruned
64 66 legacystates = {b'-2', b'-3', b'-4', b'-5'}
65 67
66 68 cmdtable = {}
67 69 command = registrar.command(cmdtable)
68 70
69 71 configtable = {}
70 72 configitem = registrar.configitem(configtable)
71 73 configitem(
72 74 b'devel',
73 75 b'rebase.force-in-memory-merge',
74 76 default=False,
75 77 )
76 78 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 79 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 80 # be specifying the version(s) of Mercurial they are tested with, or
79 81 # leave the attribute unspecified.
80 82 testedwith = b'ships-with-hg-core'
81 83
82 84
83 85 def _nothingtorebase():
84 86 return 1
85 87
86 88
87 89 def retained_extras():
88 90 """
89 91 Yield the names of the extras to be retained.
90 92 """
91 93 # graft
92 94 yield b'source'
93 95 yield b'intermediate-source'
94 96
95 97
96 def _project(orig, names):
97 """Project a subset of names from orig."""
98 names_saved = tuple(names)
99 values = (orig.get(name, None) for name in names_saved)
100 return {
101 name: value
102 for name, value in zip(names_saved, values)
103 if value is not None
104 }
105
106
107 98 def _save_extras(ctx, extra):
108 extra.update(_project(ctx.extra(), retained_extras()))
99 extra.update(Projection(retained_extras(), ctx.extra()))
109 100
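The removed _project() helper and the new Projection-based update agree whenever no retained key maps to None, which holds for changeset extras (values are byte strings); a toy equivalence check (not part of the patch), assuming the import added at the top of this file:

    from mercurial.thirdparty.jaraco.collections import Projection

    orig = {b'source': b'abc', b'branch': b'default'}
    names = (b'source', b'intermediate-source')

    # what the old helper computed:
    old = {k: orig.get(k) for k in names if orig.get(k) is not None}
    # what the Mapping-based replacement yields:
    new = dict(Projection(names, orig))
    assert old == new == {b'source': b'abc'}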
110 101
111 102 def _savebranch(ctx, extra):
112 103 extra[b'branch'] = ctx.branch()
113 104
114 105
115 106 def _destrebase(repo, sourceset, destspace=None):
116 107 """small wrapper around destmerge to pass the right extra args
117 108
118 109 Please wrap destutil.destmerge instead."""
119 110 return destutil.destmerge(
120 111 repo,
121 112 action=b'rebase',
122 113 sourceset=sourceset,
123 114 onheadcheck=False,
124 115 destspace=destspace,
125 116 )
126 117
127 118
128 119 revsetpredicate = registrar.revsetpredicate()
129 120
130 121
131 122 @revsetpredicate(b'_destrebase')
132 123 def _revsetdestrebase(repo, subset, x):
133 124 # ``_rebasedefaultdest()``
134 125
135 126 # default destination for rebase.
136 127 # # XXX: Currently private because I expect the signature to change.
137 128 # # XXX: - bailing out in case of ambiguity vs returning all data.
138 129 # i18n: "_rebasedefaultdest" is a keyword
139 130 sourceset = None
140 131 if x is not None:
141 132 sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
142 133 return subset & smartset.baseset([_destrebase(repo, sourceset)])
143 134
144 135
145 136 @revsetpredicate(b'_destautoorphanrebase')
146 137 def _revsetdestautoorphanrebase(repo, subset, x):
147 138 # ``_destautoorphanrebase()``
148 139
149 140 # automatic rebase destination for a single orphan revision.
150 141 unfi = repo.unfiltered()
151 142 obsoleted = unfi.revs(b'obsolete()')
152 143
153 144 src = revset.getset(repo, subset, x).first()
154 145
155 146 # Empty src or already obsoleted - Do not return a destination
156 147 if not src or src in obsoleted:
157 148 return smartset.baseset()
158 149 dests = destutil.orphanpossibledestination(repo, src)
159 150 if len(dests) > 1:
160 151 raise error.StateError(
161 152 _(b"ambiguous automatic rebase: %r could end up on any of %r")
162 153 % (src, dests)
163 154 )
164 155 # We have zero or one destination, so we can just return here.
165 156 return smartset.baseset(dests)
166 157
167 158
168 159 def _ctxdesc(ctx):
169 160 """short description for a context"""
170 161 return cmdutil.format_changeset_summary(
171 162 ctx.repo().ui, ctx, command=b'rebase'
172 163 )
173 164
174 165
175 166 class rebaseruntime:
176 167 """This class is a container for rebase runtime state"""
177 168
178 169 def __init__(self, repo, ui, inmemory=False, dryrun=False, opts=None):
179 170 if opts is None:
180 171 opts = {}
181 172
182 173 # prepared: whether we have rebasestate prepared or not. Currently it
183 174 # decides whether "self.repo" is unfiltered or not.
184 175 # The rebasestate has explicit hash to hash instructions not depending
185 176 # on visibility. If rebasestate exists (in-memory or on-disk), use
186 177 # unfiltered repo to avoid visibility issues.
187 178 # Before knowing rebasestate (i.e. when starting a new rebase (not
188 179 # --continue or --abort)), the original repo should be used so
189 180 # visibility-dependent revsets are correct.
190 181 self.prepared = False
191 182 self.resume = False
192 183 self._repo = repo
193 184
194 185 self.ui = ui
195 186 self.opts = opts
196 187 self.originalwd = None
197 188 self.external = nullrev
198 189 # Mapping from each old revision id to either its new rebased
199 190 # revision or an instruction for what to do with the old revision.
200 191 # This state dict holds most of the rebase progress state.
201 192 self.state = {}
202 193 self.activebookmark = None
203 194 self.destmap = {}
204 195 self.skipped = set()
205 196
206 197 self.collapsef = opts.get('collapse', False)
207 198 self.collapsemsg = cmdutil.logmessage(ui, pycompat.byteskwargs(opts))
208 199 self.date = opts.get('date', None)
209 200
210 201 e = opts.get('extrafn') # internal, used by e.g. hgsubversion
211 202 self.extrafns = [_save_extras]
212 203 if e:
213 204 self.extrafns = [e]
214 205
215 206 self.backupf = ui.configbool(b'rewrite', b'backup-bundle')
216 207 self.keepf = opts.get('keep', False)
217 208 self.keepbranchesf = opts.get('keepbranches', False)
218 209 self.skipemptysuccessorf = rewriteutil.skip_empty_successor(
219 210 repo.ui, b'rebase'
220 211 )
221 212 self.obsolete_with_successor_in_destination = {}
222 213 self.obsolete_with_successor_in_rebase_set = set()
223 214 self.inmemory = inmemory
224 215 self.dryrun = dryrun
225 216 self.stateobj = statemod.cmdstate(repo, b'rebasestate')
226 217
227 218 @property
228 219 def repo(self):
229 220 if self.prepared:
230 221 return self._repo.unfiltered()
231 222 else:
232 223 return self._repo
233 224
234 225 def storestatus(self, tr=None):
235 226 """Store the current status to allow recovery"""
236 227 if tr:
237 228 tr.addfilegenerator(
238 229 b'rebasestate',
239 230 (b'rebasestate',),
240 231 self._writestatus,
241 232 location=b'plain',
242 233 )
243 234 else:
244 235 with self.repo.vfs(b"rebasestate", b"w") as f:
245 236 self._writestatus(f)
246 237
247 238 def _writestatus(self, f):
248 239 repo = self.repo
249 240 assert repo.filtername is None
250 241 f.write(repo[self.originalwd].hex() + b'\n')
251 242 # was "dest". we now write dest per src root below.
252 243 f.write(b'\n')
253 244 f.write(repo[self.external].hex() + b'\n')
254 245 f.write(b'%d\n' % int(self.collapsef))
255 246 f.write(b'%d\n' % int(self.keepf))
256 247 f.write(b'%d\n' % int(self.keepbranchesf))
257 248 f.write(b'%s\n' % (self.activebookmark or b''))
258 249 destmap = self.destmap
259 250 for d, v in self.state.items():
260 251 oldrev = repo[d].hex()
261 252 if v >= 0:
262 253 newrev = repo[v].hex()
263 254 else:
264 255 newrev = b"%d" % v
265 256 destnode = repo[destmap[d]].hex()
266 257 f.write(b"%s:%s:%s\n" % (oldrev, newrev, destnode))
267 258 repo.ui.debug(b'rebase status stored\n')
268 259
269 260 def restorestatus(self):
270 261 """Restore a previously stored status"""
271 262 if not self.stateobj.exists():
272 263 cmdutil.wrongtooltocontinue(self.repo, _(b'rebase'))
273 264
274 265 data = self._read()
275 266 self.repo.ui.debug(b'rebase status resumed\n')
276 267
277 268 self.originalwd = data[b'originalwd']
278 269 self.destmap = data[b'destmap']
279 270 self.state = data[b'state']
280 271 self.skipped = data[b'skipped']
281 272 self.collapsef = data[b'collapse']
282 273 self.keepf = data[b'keep']
283 274 self.keepbranchesf = data[b'keepbranches']
284 275 self.external = data[b'external']
285 276 self.activebookmark = data[b'activebookmark']
286 277
287 278 def _read(self):
288 279 self.prepared = True
289 280 repo = self.repo
290 281 assert repo.filtername is None
291 282 data = {
292 283 b'keepbranches': None,
293 284 b'collapse': None,
294 285 b'activebookmark': None,
295 286 b'external': nullrev,
296 287 b'keep': None,
297 288 b'originalwd': None,
298 289 }
299 290 legacydest = None
300 291 state = {}
301 292 destmap = {}
302 293
303 294 if True:
304 295 f = repo.vfs(b"rebasestate")
305 296 for i, l in enumerate(f.read().splitlines()):
306 297 if i == 0:
307 298 data[b'originalwd'] = repo[l].rev()
308 299 elif i == 1:
309 300 # this line should be empty in newer version. but legacy
310 301 # clients may still use it
311 302 if l:
312 303 legacydest = repo[l].rev()
313 304 elif i == 2:
314 305 data[b'external'] = repo[l].rev()
315 306 elif i == 3:
316 307 data[b'collapse'] = bool(int(l))
317 308 elif i == 4:
318 309 data[b'keep'] = bool(int(l))
319 310 elif i == 5:
320 311 data[b'keepbranches'] = bool(int(l))
321 312 elif i == 6 and not (len(l) == 81 and b':' in l):
322 313 # line 6 is a recent addition, so for backwards
323 314 # compatibility check that the line doesn't look like the
324 315 # oldrev:newrev lines
325 316 data[b'activebookmark'] = l
326 317 else:
327 318 args = l.split(b':')
328 319 oldrev = repo[args[0]].rev()
329 320 newrev = args[1]
330 321 if newrev in legacystates:
331 322 continue
332 323 if len(args) > 2:
333 324 destrev = repo[args[2]].rev()
334 325 else:
335 326 destrev = legacydest
336 327 destmap[oldrev] = destrev
337 328 if newrev == revtodostr:
338 329 state[oldrev] = revtodo
339 330 # Legacy compat special case
340 331 else:
341 332 state[oldrev] = repo[newrev].rev()
342 333
343 334 if data[b'keepbranches'] is None:
344 335 raise error.Abort(_(b'.hg/rebasestate is incomplete'))
345 336
346 337 data[b'destmap'] = destmap
347 338 data[b'state'] = state
348 339 skipped = set()
349 340 # recompute the set of skipped revs
350 341 if not data[b'collapse']:
351 342 seen = set(destmap.values())
352 343 for old, new in sorted(state.items()):
353 344 if new != revtodo and new in seen:
354 345 skipped.add(old)
355 346 seen.add(new)
356 347 data[b'skipped'] = skipped
357 348 repo.ui.debug(
358 349 b'computed skipped revs: %s\n'
359 350 % (b' '.join(b'%d' % r for r in sorted(skipped)) or b'')
360 351 )
361 352
362 353 return data
363 354
364 355 def _handleskippingobsolete(self):
365 356 """Compute structures necessary for skipping obsolete revisions"""
366 357 if self.keepf:
367 358 return
368 359 if not self.ui.configbool(b'experimental', b'rebaseskipobsolete'):
369 360 return
370 361 obsoleteset = {r for r in self.state if self.repo[r].obsolete()}
371 362 (
372 363 self.obsolete_with_successor_in_destination,
373 364 self.obsolete_with_successor_in_rebase_set,
374 365 ) = _compute_obsolete_sets(self.repo, obsoleteset, self.destmap)
375 366 skippedset = set(self.obsolete_with_successor_in_destination)
376 367 skippedset.update(self.obsolete_with_successor_in_rebase_set)
377 368 _checkobsrebase(self.repo, self.ui, obsoleteset, skippedset)
378 369 if obsolete.isenabled(self.repo, obsolete.allowdivergenceopt):
379 370 self.obsolete_with_successor_in_rebase_set = set()
380 371 else:
381 372 for rev in self.repo.revs(
382 373 b'descendants(%ld) and not %ld',
383 374 self.obsolete_with_successor_in_rebase_set,
384 375 self.obsolete_with_successor_in_rebase_set,
385 376 ):
386 377 self.state.pop(rev, None)
387 378 self.destmap.pop(rev, None)
388 379
389 380 def _prepareabortorcontinue(
390 381 self, isabort, backup=True, suppwarns=False, dryrun=False, confirm=False
391 382 ):
392 383 self.resume = True
393 384 try:
394 385 self.restorestatus()
395 386 # Calculate self.obsolete_* sets
396 387 self._handleskippingobsolete()
397 388 self.collapsemsg = restorecollapsemsg(self.repo, isabort)
398 389 except error.RepoLookupError:
399 390 if isabort:
400 391 clearstatus(self.repo)
401 392 clearcollapsemsg(self.repo)
402 393 self.repo.ui.warn(
403 394 _(
404 395 b'rebase aborted (no revision is removed,'
405 396 b' only broken state is cleared)\n'
406 397 )
407 398 )
408 399 return 0
409 400 else:
410 401 msg = _(b'cannot continue inconsistent rebase')
411 402 hint = _(b'use "hg rebase --abort" to clear broken state')
412 403 raise error.Abort(msg, hint=hint)
413 404
414 405 if isabort:
415 406 backup = backup and self.backupf
416 407 return self._abort(
417 408 backup=backup,
418 409 suppwarns=suppwarns,
419 410 dryrun=dryrun,
420 411 confirm=confirm,
421 412 )
422 413
423 414 def _preparenewrebase(self, destmap):
424 415 if not destmap:
425 416 return _nothingtorebase()
426 417
427 418 result = buildstate(self.repo, destmap, self.collapsef)
428 419
429 420 if not result:
430 421 # Empty state built, nothing to rebase
431 422 self.ui.status(_(b'nothing to rebase\n'))
432 423 return _nothingtorebase()
433 424
434 425 (self.originalwd, self.destmap, self.state) = result
435 426 if self.collapsef:
436 427 dests = set(self.destmap.values())
437 428 if len(dests) != 1:
438 429 raise error.InputError(
439 430 _(b'--collapse does not work with multiple destinations')
440 431 )
441 432 destrev = next(iter(dests))
442 433 destancestors = self.repo.changelog.ancestors(
443 434 [destrev], inclusive=True
444 435 )
445 436 self.external = externalparent(self.repo, self.state, destancestors)
446 437
447 438 for destrev in sorted(set(destmap.values())):
448 439 dest = self.repo[destrev]
449 440 if dest.closesbranch() and not self.keepbranchesf:
450 441 self.ui.status(_(b'reopening closed branch head %s\n') % dest)
451 442
452 443 # Calculate self.obsolete_* sets
453 444 self._handleskippingobsolete()
454 445
455 446 if not self.keepf:
456 447 rebaseset = set(destmap.keys())
457 448 rebaseset -= set(self.obsolete_with_successor_in_destination)
458 449 rebaseset -= self.obsolete_with_successor_in_rebase_set
459 450 # We have our own divergence-checking in the rebase extension
460 451 overrides = {}
461 452 if obsolete.isenabled(self.repo, obsolete.createmarkersopt):
462 453 overrides = {
463 454 (b'experimental', b'evolution.allowdivergence'): b'true'
464 455 }
465 456 try:
466 457 with self.ui.configoverride(overrides):
467 458 rewriteutil.precheck(self.repo, rebaseset, action=b'rebase')
468 459 except error.Abort as e:
469 460 if e.hint is None:
470 461 e.hint = _(b'use --keep to keep original changesets')
471 462 raise e
472 463
473 464 self.prepared = True
474 465
475 466 def _assignworkingcopy(self):
476 467 if self.inmemory:
477 468 from mercurial.context import overlayworkingctx
478 469
479 470 self.wctx = overlayworkingctx(self.repo)
480 471 self.repo.ui.debug(b"rebasing in memory\n")
481 472 else:
482 473 self.wctx = self.repo[None]
483 474 self.repo.ui.debug(b"rebasing on disk\n")
484 475 self.repo.ui.log(
485 476 b"rebase",
486 477 b"using in-memory rebase: %r\n",
487 478 self.inmemory,
488 479 rebase_imm_used=self.inmemory,
489 480 )
490 481
491 482 def _performrebase(self, tr):
492 483 self._assignworkingcopy()
493 484 repo, ui = self.repo, self.ui
494 485 if self.keepbranchesf:
495 486 # insert _savebranch at the start of extrafns so if
496 487 # there's a user-provided extrafn it can clobber branch if
497 488 # desired
498 489 self.extrafns.insert(0, _savebranch)
499 490 if self.collapsef:
500 491 branches = set()
501 492 for rev in self.state:
502 493 branches.add(repo[rev].branch())
503 494 if len(branches) > 1:
504 495 raise error.InputError(
505 496 _(b'cannot collapse multiple named branches')
506 497 )
507 498
508 499 # Keep track of the active bookmarks in order to reset them later
509 500 self.activebookmark = self.activebookmark or repo._activebookmark
510 501 if self.activebookmark:
511 502 bookmarks.deactivate(repo)
512 503
513 504 # Store the state before we begin so users can run 'hg rebase --abort'
514 505 # if we fail before the transaction closes.
515 506 self.storestatus()
516 507 if tr:
517 508 # When using single transaction, store state when transaction
518 509 # commits.
519 510 self.storestatus(tr)
520 511
521 512 cands = [k for k, v in self.state.items() if v == revtodo]
522 513 p = repo.ui.makeprogress(
523 514 _(b"rebasing"), unit=_(b'changesets'), total=len(cands)
524 515 )
525 516
526 517 def progress(ctx):
527 518 p.increment(item=(b"%d:%s" % (ctx.rev(), ctx)))
528 519
529 520 for subset in sortsource(self.destmap):
530 521 sortedrevs = self.repo.revs(b'sort(%ld, -topo)', subset)
531 522 for rev in sortedrevs:
532 523 self._rebasenode(tr, rev, progress)
533 524 p.complete()
534 525 ui.note(_(b'rebase merging completed\n'))
535 526
536 527 def _concludenode(self, rev, editor, commitmsg=None):
537 528 """Commit the wd changes with parents p1 and p2.
538 529
539 530 Reuse commit info from rev but also store useful information in extra.
540 531 Return node of committed revision."""
541 532 repo = self.repo
542 533 ctx = repo[rev]
543 534 if commitmsg is None:
544 535 commitmsg = ctx.description()
545 536
546 537 # Skip replacement if collapsing, as that degenerates to p1 for all
547 538 # nodes.
548 539 if not self.collapsef:
549 540 cl = repo.changelog
550 541 commitmsg = rewriteutil.update_hash_refs(
551 542 repo,
552 543 commitmsg,
553 544 {
554 545 cl.node(oldrev): [cl.node(newrev)]
555 546 for oldrev, newrev in self.state.items()
556 547 if newrev != revtodo
557 548 },
558 549 )
559 550
560 551 date = self.date
561 552 if date is None:
562 553 date = ctx.date()
563 554 extra = {}
564 555 if repo.ui.configbool(b'rebase', b'store-source'):
565 556 extra = {b'rebase_source': ctx.hex()}
566 557 for c in self.extrafns:
567 558 c(ctx, extra)
568 559 destphase = max(ctx.phase(), phases.draft)
569 560 overrides = {
570 561 (b'phases', b'new-commit'): destphase,
571 562 (b'ui', b'allowemptycommit'): not self.skipemptysuccessorf,
572 563 }
573 564 with repo.ui.configoverride(overrides, b'rebase'):
574 565 if self.inmemory:
575 566 newnode = commitmemorynode(
576 567 repo,
577 568 wctx=self.wctx,
578 569 extra=extra,
579 570 commitmsg=commitmsg,
580 571 editor=editor,
581 572 user=ctx.user(),
582 573 date=date,
583 574 )
584 575 else:
585 576 newnode = commitnode(
586 577 repo,
587 578 extra=extra,
588 579 commitmsg=commitmsg,
589 580 editor=editor,
590 581 user=ctx.user(),
591 582 date=date,
592 583 )
593 584
594 585 return newnode
595 586
596 587 def _rebasenode(self, tr, rev, progressfn):
597 588 repo, ui, opts = self.repo, self.ui, self.opts
598 589 ctx = repo[rev]
599 590 desc = _ctxdesc(ctx)
600 591 if self.state[rev] == rev:
601 592 ui.status(_(b'already rebased %s\n') % desc)
602 593 elif rev in self.obsolete_with_successor_in_rebase_set:
603 594 msg = (
604 595 _(
605 596 b'note: not rebasing %s and its descendants as '
606 597 b'this would cause divergence\n'
607 598 )
608 599 % desc
609 600 )
610 601 repo.ui.status(msg)
611 602 self.skipped.add(rev)
612 603 elif rev in self.obsolete_with_successor_in_destination:
613 604 succ = self.obsolete_with_successor_in_destination[rev]
614 605 if succ is None:
615 606 msg = _(b'note: not rebasing %s, it has no successor\n') % desc
616 607 else:
617 608 succdesc = _ctxdesc(repo[succ])
618 609 msg = _(
619 610 b'note: not rebasing %s, already in destination as %s\n'
620 611 ) % (desc, succdesc)
621 612 repo.ui.status(msg)
622 613 # Make clearrebased aware that state[rev] is not a true successor
623 614 self.skipped.add(rev)
624 615 # Record rev as moved to its desired destination in self.state.
625 616 # This helps bookmark and working parent movement.
626 617 dest = max(
627 618 adjustdest(repo, rev, self.destmap, self.state, self.skipped)
628 619 )
629 620 self.state[rev] = dest
630 621 elif self.state[rev] == revtodo:
631 622 ui.status(_(b'rebasing %s\n') % desc)
632 623 progressfn(ctx)
633 624 p1, p2, base = defineparents(
634 625 repo,
635 626 rev,
636 627 self.destmap,
637 628 self.state,
638 629 self.skipped,
639 630 self.obsolete_with_successor_in_destination,
640 631 )
641 632 if self.resume and self.wctx.p1().rev() == p1:
642 633 repo.ui.debug(b'resuming interrupted rebase\n')
643 634 self.resume = False
644 635 else:
645 636 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
646 637 with ui.configoverride(overrides, b'rebase'):
647 638 try:
648 639 rebasenode(
649 640 repo,
650 641 rev,
651 642 p1,
652 643 p2,
653 644 base,
654 645 self.collapsef,
655 646 wctx=self.wctx,
656 647 )
657 648 except error.InMemoryMergeConflictsError:
658 649 if self.dryrun:
659 650 raise error.ConflictResolutionRequired(b'rebase')
660 651 if self.collapsef:
661 652 # TODO: Make the overlayworkingctx reflected
662 653 # in the working copy here instead of re-raising
663 654 # so the entire rebase operation is retried.
664 655 raise
665 656 ui.status(
666 657 _(
667 658 b"hit merge conflicts; rebasing that "
668 659 b"commit again in the working copy\n"
669 660 )
670 661 )
671 662 try:
672 663 cmdutil.bailifchanged(repo)
673 664 except error.Abort:
674 665 clearstatus(repo)
675 666 clearcollapsemsg(repo)
676 667 raise
677 668 self.inmemory = False
678 669 self._assignworkingcopy()
679 670 mergemod.update(repo[p1], wc=self.wctx)
680 671 rebasenode(
681 672 repo,
682 673 rev,
683 674 p1,
684 675 p2,
685 676 base,
686 677 self.collapsef,
687 678 wctx=self.wctx,
688 679 )
689 680 if not self.collapsef:
690 681 merging = p2 != nullrev
691 682 editform = cmdutil.mergeeditform(merging, b'rebase')
692 683 editor = cmdutil.getcommiteditor(editform=editform, **opts)
693 684 # We need to set parents again here just in case we're continuing
694 685 # a rebase started with an old hg version (before 9c9cfecd4600),
695 686 # because those old versions would have left us with two dirstate
696 687 # parents, and we don't want to create a merge commit here (unless
697 688 # we're rebasing a merge commit).
698 689 self.wctx.setparents(repo[p1].node(), repo[p2].node())
699 690 newnode = self._concludenode(rev, editor)
700 691 else:
701 692 # Skip commit if we are collapsing
702 693 newnode = None
703 694 # Update the state
704 695 if newnode is not None:
705 696 self.state[rev] = repo[newnode].rev()
706 697 ui.debug(b'rebased as %s\n' % short(newnode))
707 698 if repo[newnode].isempty():
708 699 ui.warn(
709 700 _(
710 701 b'note: created empty successor for %s, its '
711 702 b'destination already has all its changes\n'
712 703 )
713 704 % desc
714 705 )
715 706 else:
716 707 if not self.collapsef:
717 708 ui.warn(
718 709 _(
719 710 b'note: not rebasing %s, its destination already '
720 711 b'has all its changes\n'
721 712 )
722 713 % desc
723 714 )
724 715 self.skipped.add(rev)
725 716 self.state[rev] = p1
726 717 ui.debug(b'next revision set to %d\n' % p1)
727 718 else:
728 719 ui.status(
729 720 _(b'already rebased %s as %s\n') % (desc, repo[self.state[rev]])
730 721 )
731 722 if not tr:
732 723 # When not using single transaction, store state after each
733 724 # commit is completely done. On InterventionRequired, we thus
734 725 # won't store the status. Instead, we'll hit the "len(parents) == 2"
735 726 # case and realize that the commit was in progress.
736 727 self.storestatus()
737 728
738 729 def _finishrebase(self):
739 730 repo, ui, opts = self.repo, self.ui, self.opts
740 731 fm = ui.formatter(b'rebase', pycompat.byteskwargs(opts))
741 732 fm.startitem()
742 733 if self.collapsef:
743 734 p1, p2, _base = defineparents(
744 735 repo,
745 736 min(self.state),
746 737 self.destmap,
747 738 self.state,
748 739 self.skipped,
749 740 self.obsolete_with_successor_in_destination,
750 741 )
751 742 editopt = opts.get('edit')
752 743 editform = b'rebase.collapse'
753 744 if self.collapsemsg:
754 745 commitmsg = self.collapsemsg
755 746 else:
756 747 commitmsg = b'Collapsed revision'
757 748 for rebased in sorted(self.state):
758 749 if rebased not in self.skipped:
759 750 commitmsg += b'\n* %s' % repo[rebased].description()
760 751 editopt = True
761 752 editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
762 753 revtoreuse = max(self.state)
763 754
764 755 self.wctx.setparents(repo[p1].node(), repo[self.external].node())
765 756 newnode = self._concludenode(
766 757 revtoreuse, editor, commitmsg=commitmsg
767 758 )
768 759
769 760 if newnode is not None:
770 761 newrev = repo[newnode].rev()
771 762 for oldrev in self.state:
772 763 self.state[oldrev] = newrev
773 764
774 765 if b'qtip' in repo.tags():
775 766 updatemq(repo, self.state, self.skipped, **opts)
776 767
777 768 # restore original working directory
778 769 # (we do this before stripping)
779 770 newwd = self.state.get(self.originalwd, self.originalwd)
780 771 if newwd < 0:
781 772 # original directory is a parent of rebase set root or ignored
782 773 newwd = self.originalwd
783 774 if newwd not in [c.rev() for c in repo[None].parents()]:
784 775 ui.note(_(b"update back to initial working directory parent\n"))
785 776 mergemod.update(repo[newwd])
786 777
787 778 collapsedas = None
788 779 if self.collapsef and not self.keepf:
789 780 collapsedas = newnode
790 781 clearrebased(
791 782 ui,
792 783 repo,
793 784 self.destmap,
794 785 self.state,
795 786 self.skipped,
796 787 collapsedas,
797 788 self.keepf,
798 789 fm=fm,
799 790 backup=self.backupf,
800 791 )
801 792
802 793 clearstatus(repo)
803 794 clearcollapsemsg(repo)
804 795
805 796 ui.note(_(b"rebase completed\n"))
806 797 util.unlinkpath(repo.sjoin(b'undo'), ignoremissing=True)
807 798 if self.skipped:
808 799 skippedlen = len(self.skipped)
809 800 ui.note(_(b"%d revisions have been skipped\n") % skippedlen)
810 801 fm.end()
811 802
812 803 if (
813 804 self.activebookmark
814 805 and self.activebookmark in repo._bookmarks
815 806 and repo[b'.'].node() == repo._bookmarks[self.activebookmark]
816 807 ):
817 808 bookmarks.activate(repo, self.activebookmark)
818 809
819 810 def _abort(self, backup=True, suppwarns=False, dryrun=False, confirm=False):
820 811 '''Restore the repository to its original state.'''
821 812
822 813 repo = self.repo
823 814 try:
824 815 # If the first commits in the rebased set get skipped during the
825 816 # rebase, their values within the state mapping will be the dest
826 817 # rev id. The rebased list must not contain the dest rev
827 818 # (issue4896)
828 819 rebased = [
829 820 s
830 821 for r, s in self.state.items()
831 822 if s >= 0 and s != r and s != self.destmap[r]
832 823 ]
833 824 immutable = [d for d in rebased if not repo[d].mutable()]
834 825 cleanup = True
835 826 if immutable:
836 827 repo.ui.warn(
837 828 _(b"warning: can't clean up public changesets %s\n")
838 829 % b', '.join(bytes(repo[r]) for r in immutable),
839 830 hint=_(b"see 'hg help phases' for details"),
840 831 )
841 832 cleanup = False
842 833
843 834 descendants = set()
844 835 if rebased:
845 836 descendants = set(repo.changelog.descendants(rebased))
846 837 if descendants - set(rebased):
847 838 repo.ui.warn(
848 839 _(
849 840 b"warning: new changesets detected on "
850 841 b"destination branch, can't strip\n"
851 842 )
852 843 )
853 844 cleanup = False
854 845
855 846 if cleanup:
856 847 if rebased:
857 848 strippoints = [
858 849 c.node() for c in repo.set(b'roots(%ld)', rebased)
859 850 ]
860 851
861 852 updateifonnodes = set(rebased)
862 853 updateifonnodes.update(self.destmap.values())
863 854
864 855 if not dryrun and not confirm:
865 856 updateifonnodes.add(self.originalwd)
866 857
867 858 shouldupdate = repo[b'.'].rev() in updateifonnodes
868 859
869 860 # Update away from the rebase if necessary
870 861 if shouldupdate:
871 862 mergemod.clean_update(repo[self.originalwd])
872 863
873 864 # Strip from the first rebased revision
874 865 if rebased:
875 866 repair.strip(repo.ui, repo, strippoints, backup=backup)
876 867
877 868 if self.activebookmark and self.activebookmark in repo._bookmarks:
878 869 bookmarks.activate(repo, self.activebookmark)
879 870
880 871 finally:
881 872 clearstatus(repo)
882 873 clearcollapsemsg(repo)
883 874 if not suppwarns:
884 875 repo.ui.warn(_(b'rebase aborted\n'))
885 876 return 0
886 877
887 878
888 879 @command(
889 880 b'rebase',
890 881 [
891 882 (
892 883 b's',
893 884 b'source',
894 885 [],
895 886 _(b'rebase the specified changesets and their descendants'),
896 887 _(b'REV'),
897 888 ),
898 889 (
899 890 b'b',
900 891 b'base',
901 892 [],
902 893 _(b'rebase everything from branching point of specified changeset'),
903 894 _(b'REV'),
904 895 ),
905 896 (b'r', b'rev', [], _(b'rebase these revisions'), _(b'REV')),
906 897 (
907 898 b'd',
908 899 b'dest',
909 900 b'',
910 901 _(b'rebase onto the specified changeset'),
911 902 _(b'REV'),
912 903 ),
913 904 (b'', b'collapse', False, _(b'collapse the rebased changesets')),
914 905 (
915 906 b'm',
916 907 b'message',
917 908 b'',
918 909 _(b'use text as collapse commit message'),
919 910 _(b'TEXT'),
920 911 ),
921 912 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
922 913 (
923 914 b'l',
924 915 b'logfile',
925 916 b'',
926 917 _(b'read collapse commit message from file'),
927 918 _(b'FILE'),
928 919 ),
929 920 (b'k', b'keep', False, _(b'keep original changesets')),
930 921 (b'', b'keepbranches', False, _(b'keep original branch names')),
931 922 (b'D', b'detach', False, _(b'(DEPRECATED)')),
932 923 (b'i', b'interactive', False, _(b'(DEPRECATED)')),
933 924 (b't', b'tool', b'', _(b'specify merge tool')),
934 925 (b'', b'stop', False, _(b'stop interrupted rebase')),
935 926 (b'c', b'continue', False, _(b'continue an interrupted rebase')),
936 927 (b'a', b'abort', False, _(b'abort an interrupted rebase')),
937 928 (
938 929 b'',
939 930 b'auto-orphans',
940 931 b'',
941 932 _(
942 933 b'automatically rebase orphan revisions '
943 934 b'in the specified revset (EXPERIMENTAL)'
944 935 ),
945 936 ),
946 937 ]
947 938 + cmdutil.dryrunopts
948 939 + cmdutil.formatteropts
949 940 + cmdutil.confirmopts,
950 941 _(b'[[-s REV]... | [-b REV]... | [-r REV]...] [-d REV] [OPTION]...'),
951 942 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
952 943 )
953 944 def rebase(ui, repo, **opts):
954 945 """move changeset (and descendants) to a different branch
955 946
956 947 Rebase uses repeated merging to graft changesets from one part of
957 948 history (the source) onto another (the destination). This can be
958 949 useful for linearizing *local* changes relative to a master
959 950 development tree.
960 951
961 952 Published commits cannot be rebased (see :hg:`help phases`).
962 953 To copy commits, see :hg:`help graft`.
963 954
964 955 If you don't specify a destination changeset (``-d/--dest``), rebase
965 956 will use the same logic as :hg:`merge` to pick a destination. If
966 957 the current branch contains exactly one other head, the other head
967 958 is merged with by default. Otherwise, an explicit revision with
968 959 which to merge must be provided. (The destination changeset is not
969 960 modified by rebasing, but new changesets are added as its
970 961 descendants.)
971 962
972 963 Here are the ways to select changesets:
973 964
974 965 1. Explicitly select them using ``--rev``.
975 966
976 967 2. Use ``--source`` to select a root changeset and include all of its
977 968 descendants.
978 969
979 970 3. Use ``--base`` to select a changeset; rebase will find ancestors
980 971 and their descendants which are not also ancestors of the destination.
981 972
982 973 4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
983 974 rebase will use ``--base .`` as above.
984 975
985 976 If ``--source`` or ``--rev`` is used, special names ``SRC`` and ``ALLSRC``
986 977 can be used in ``--dest``. Destination would be calculated per source
987 978 revision with ``SRC`` substituted by that single source revision and
988 979 ``ALLSRC`` substituted by all source revisions.
989 980
990 981 Rebase will destroy original changesets unless you use ``--keep``.
991 982 It will also move your bookmarks (even if you use ``--keep``).
992 983
993 984 Some changesets may be dropped if they do not contribute changes
994 985 (e.g. merges from the destination branch).
995 986
996 987 Unlike ``merge``, rebase will do nothing if you are at the branch tip of
997 988 a named branch with two heads. You will need to explicitly specify source
998 989 and/or destination.
999 990
1000 991 If you need to use a tool to automate merge/conflict decisions, you
1001 992 can specify one with ``--tool``, see :hg:`help merge-tools`.
1002 993 As a caveat: the tool will not be used to mediate when a file was
1003 994 deleted; there is no hook presently available for this.
1004 995
1005 996 If a rebase is interrupted to manually resolve a conflict, it can be
1006 997 continued with --continue/-c, aborted with --abort/-a, or stopped with
1007 998 --stop.
1008 999
1009 1000 .. container:: verbose
1010 1001
1011 1002 Examples:
1012 1003
1013 1004 - move "local changes" (current commit back to branching point)
1014 1005 to the current branch tip after a pull::
1015 1006
1016 1007 hg rebase
1017 1008
1018 1009 - move a single changeset to the stable branch::
1019 1010
1020 1011 hg rebase -r 5f493448 -d stable
1021 1012
1022 1013 - splice a commit and all its descendants onto another part of history::
1023 1014
1024 1015 hg rebase --source c0c3 --dest 4cf9
1025 1016
1026 1017 - rebase everything on a branch marked by a bookmark onto the
1027 1018 default branch::
1028 1019
1029 1020 hg rebase --base myfeature --dest default
1030 1021
1031 1022 - collapse a sequence of changes into a single commit::
1032 1023
1033 1024 hg rebase --collapse -r 1520:1525 -d .
1034 1025
1035 1026 - move a named branch while preserving its name::
1036 1027
1037 1028 hg rebase -r "branch(featureX)" -d 1.3 --keepbranches
1038 1029
1039 1030 - stabilize orphaned changesets so history looks linear::
1040 1031
1041 1032 hg rebase -r 'orphan()-obsolete()'\
1042 1033 -d 'first(max((successors(max(roots(ALLSRC) & ::SRC)^)-obsolete())::) +\
1043 1034 max(::((roots(ALLSRC) & ::SRC)^)-obsolete()))'
1044 1035
1045 1036 Configuration Options:
1046 1037
1047 1038 You can make rebase require a destination if you set the following config
1048 1039 option::
1049 1040
1050 1041 [commands]
1051 1042 rebase.requiredest = True
1052 1043
1053 1044 By default, rebase will close the transaction after each commit. For
1054 1045 performance purposes, you can configure rebase to use a single transaction
1055 1046 across the entire rebase. WARNING: This setting introduces a significant
1056 1047 risk of losing the work you've done in a rebase if the rebase aborts
1057 1048 unexpectedly::
1058 1049
1059 1050 [rebase]
1060 1051 singletransaction = True
1061 1052
1062 1053 By default, rebase writes to the working copy, but you can configure it to
1063 1054 run in-memory for better performance. When the rebase is not moving the
1064 1055 parent(s) of the working copy (AKA the "currently checked out changesets"),
1065 1056 this may also allow it to run even if the working copy is dirty::
1066 1057
1067 1058 [rebase]
1068 1059 experimental.inmemory = True
1069 1060
1070 1061 Return Values:
1071 1062
1072 1063 Returns 0 on success, 1 if nothing to rebase or there are
1073 1064 unresolved conflicts.
1074 1065
1075 1066 """
1076 1067 inmemory = ui.configbool(b'rebase', b'experimental.inmemory')
1077 1068 action = cmdutil.check_at_most_one_arg(opts, 'abort', 'stop', 'continue')
1078 1069 if action:
1079 1070 cmdutil.check_incompatible_arguments(
1080 1071 opts, action, ['confirm', 'dry_run']
1081 1072 )
1082 1073 cmdutil.check_incompatible_arguments(
1083 1074 opts, action, ['rev', 'source', 'base', 'dest']
1084 1075 )
1085 1076 cmdutil.check_at_most_one_arg(opts, 'confirm', 'dry_run')
1086 1077 cmdutil.check_at_most_one_arg(opts, 'rev', 'source', 'base')
1087 1078
1088 1079 if action or repo.currenttransaction() is not None:
1089 1080 # in-memory rebase is not compatible with resuming rebases.
1090 1081 # (Nor with running within a transaction, since the restart logic
1091 1082 # can fail the entire transaction.)
1092 1083 inmemory = False
1093 1084
1094 1085 if opts.get('auto_orphans'):
1095 1086 disallowed_opts = set(opts) - {'auto_orphans'}
1096 1087 cmdutil.check_incompatible_arguments(
1097 1088 opts, 'auto_orphans', disallowed_opts
1098 1089 )
1099 1090
1100 1091 userrevs = list(repo.revs(opts.get('auto_orphans')))
1101 1092 opts['rev'] = [revsetlang.formatspec(b'%ld and orphan()', userrevs)]
1102 1093 opts['dest'] = b'_destautoorphanrebase(SRC)'
1103 1094
1104 1095 if opts.get('dry_run') or opts.get('confirm'):
1105 1096 return _dryrunrebase(ui, repo, action, opts)
1106 1097 elif action == 'stop':
1107 1098 rbsrt = rebaseruntime(repo, ui)
1108 1099 with repo.wlock(), repo.lock():
1109 1100 rbsrt.restorestatus()
1110 1101 if rbsrt.collapsef:
1111 1102 raise error.StateError(_(b"cannot stop in --collapse session"))
1112 1103 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1113 1104 if not (rbsrt.keepf or allowunstable):
1114 1105 raise error.StateError(
1115 1106 _(
1116 1107 b"cannot remove original changesets with"
1117 1108 b" unrebased descendants"
1118 1109 ),
1119 1110 hint=_(
1120 1111 b'either enable obsmarkers to allow unstable '
1121 1112 b'revisions or use --keep to keep original '
1122 1113 b'changesets'
1123 1114 ),
1124 1115 )
1125 1116 # update to the current working revision
1126 1117 # to clear interrupted merge
1127 1118 mergemod.clean_update(repo[rbsrt.originalwd])
1128 1119 rbsrt._finishrebase()
1129 1120 return 0
1130 1121 elif inmemory:
1131 1122 try:
1132 1123 # in-memory merge doesn't support conflicts, so if we hit any, abort
1133 1124 # and re-run as an on-disk merge.
1134 1125 overrides = {(b'rebase', b'singletransaction'): True}
1135 1126 with ui.configoverride(overrides, b'rebase'):
1136 1127 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
1137 1128 except error.InMemoryMergeConflictsError:
1138 1129 if ui.configbool(b'devel', b'rebase.force-in-memory-merge'):
1139 1130 raise
1140 1131 ui.warn(
1141 1132 _(
1142 1133 b'hit merge conflicts; re-running rebase without in-memory'
1143 1134 b' merge\n'
1144 1135 )
1145 1136 )
1146 1137 clearstatus(repo)
1147 1138 clearcollapsemsg(repo)
1148 1139 return _dorebase(ui, repo, action, opts, inmemory=False)
1149 1140 else:
1150 1141 return _dorebase(ui, repo, action, opts)
1151 1142
1152 1143
1153 1144 def _dryrunrebase(ui, repo, action, opts):
1154 1145 rbsrt = rebaseruntime(repo, ui, inmemory=True, dryrun=True, opts=opts)
1155 1146 confirm = opts.get('confirm')
1156 1147 if confirm:
1157 1148 ui.status(_(b'starting in-memory rebase\n'))
1158 1149 else:
1159 1150 ui.status(
1160 1151 _(b'starting dry-run rebase; repository will not be changed\n')
1161 1152 )
1162 1153 with repo.wlock(), repo.lock():
1163 1154 needsabort = True
1164 1155 try:
1165 1156 overrides = {(b'rebase', b'singletransaction'): True}
1166 1157 with ui.configoverride(overrides, b'rebase'):
1167 1158 res = _origrebase(
1168 1159 ui,
1169 1160 repo,
1170 1161 action,
1171 1162 opts,
1172 1163 rbsrt,
1173 1164 )
1174 1165 if res == _nothingtorebase():
1175 1166 needsabort = False
1176 1167 return res
1177 1168 except error.ConflictResolutionRequired:
1178 1169 ui.status(_(b'hit a merge conflict\n'))
1179 1170 return 1
1180 1171 except error.Abort:
1181 1172 needsabort = False
1182 1173 raise
1183 1174 else:
1184 1175 if confirm:
1185 1176 ui.status(_(b'rebase completed successfully\n'))
1186 1177 if not ui.promptchoice(_(b'apply changes (yn)?$$ &Yes $$ &No')):
1187 1178 # finish unfinished rebase
1188 1179 rbsrt._finishrebase()
1189 1180 else:
1190 1181 rbsrt._prepareabortorcontinue(
1191 1182 isabort=True,
1192 1183 backup=False,
1193 1184 suppwarns=True,
1194 1185 confirm=confirm,
1195 1186 )
1196 1187 needsabort = False
1197 1188 else:
1198 1189 ui.status(
1199 1190 _(
1200 1191 b'dry-run rebase completed successfully; run without'
1201 1192 b' -n/--dry-run to perform this rebase\n'
1202 1193 )
1203 1194 )
1204 1195 return 0
1205 1196 finally:
1206 1197 if needsabort:
1207 1198 # no need to store backup in case of dryrun
1208 1199 rbsrt._prepareabortorcontinue(
1209 1200 isabort=True,
1210 1201 backup=False,
1211 1202 suppwarns=True,
1212 1203 dryrun=opts.get('dry_run'),
1213 1204 )
1214 1205
1215 1206
1216 1207 def _dorebase(ui, repo, action, opts, inmemory=False):
1217 1208 rbsrt = rebaseruntime(repo, ui, inmemory, opts=opts)
1218 1209 return _origrebase(ui, repo, action, opts, rbsrt)
1219 1210
1220 1211
1221 1212 def _origrebase(ui, repo, action, opts, rbsrt):
1222 1213 assert action != 'stop'
1223 1214 with repo.wlock(), repo.lock():
1224 1215 if opts.get('interactive'):
1225 1216 try:
1226 1217 if extensions.find(b'histedit'):
1227 1218 enablehistedit = b''
1228 1219 except KeyError:
1229 1220 enablehistedit = b" --config extensions.histedit="
1230 1221 help = b"hg%s help -e histedit" % enablehistedit
1231 1222 msg = (
1232 1223 _(
1233 1224 b"interactive history editing is supported by the "
1234 1225 b"'histedit' extension (see \"%s\")"
1235 1226 )
1236 1227 % help
1237 1228 )
1238 1229 raise error.InputError(msg)
1239 1230
1240 1231 if rbsrt.collapsemsg and not rbsrt.collapsef:
1241 1232 raise error.InputError(
1242 1233 _(b'message can only be specified with collapse')
1243 1234 )
1244 1235
1245 1236 if action:
1246 1237 if rbsrt.collapsef:
1247 1238 raise error.InputError(
1248 1239 _(b'cannot use collapse with continue or abort')
1249 1240 )
1250 1241 if action == 'abort' and opts.get('tool', False):
1251 1242 ui.warn(_(b'tool option will be ignored\n'))
1252 1243 if action == 'continue':
1253 1244 ms = mergestatemod.mergestate.read(repo)
1254 1245 mergeutil.checkunresolved(ms)
1255 1246
1256 1247 retcode = rbsrt._prepareabortorcontinue(isabort=(action == 'abort'))
1257 1248 if retcode is not None:
1258 1249 return retcode
1259 1250 else:
1260 1251 # search default destination in this space
1261 1252 # used in the 'hg pull --rebase' case, see issue 5214.
1262 1253 destspace = opts.get('_destspace')
1263 1254 destmap = _definedestmap(
1264 1255 ui,
1265 1256 repo,
1266 1257 rbsrt.inmemory,
1267 1258 opts.get('dest', None),
1268 1259 opts.get('source', []),
1269 1260 opts.get('base', []),
1270 1261 opts.get('rev', []),
1271 1262 destspace=destspace,
1272 1263 )
1273 1264 retcode = rbsrt._preparenewrebase(destmap)
1274 1265 if retcode is not None:
1275 1266 return retcode
1276 1267 storecollapsemsg(repo, rbsrt.collapsemsg)
1277 1268
1278 1269 tr = None
1279 1270
1280 1271 singletr = ui.configbool(b'rebase', b'singletransaction')
1281 1272 if singletr:
1282 1273 tr = repo.transaction(b'rebase')
1283 1274
1284 1275 # If `rebase.singletransaction` is enabled, wrap the entire operation in
1285 1276 # one transaction here. Otherwise, transactions are obtained when
1286 1277 # committing each node, which is slower but allows partial success.
1287 1278 with util.acceptintervention(tr):
1288 1279 rbsrt._performrebase(tr)
1289 1280 if not rbsrt.dryrun:
1290 1281 rbsrt._finishrebase()
1291 1282
1292 1283
1293 1284 def _definedestmap(ui, repo, inmemory, destf, srcf, basef, revf, destspace):
1294 1285 """use revisions argument to define destmap {srcrev: destrev}"""
1295 1286 if revf is None:
1296 1287 revf = []
1297 1288
1298 1289 # destspace is here to work around issues with `hg pull --rebase`; see
1299 1290 # issue5214 for details.
1300 1291
1301 1292 cmdutil.checkunfinished(repo)
1302 1293 if not inmemory:
1303 1294 cmdutil.bailifchanged(repo)
1304 1295
1305 1296 if ui.configbool(b'commands', b'rebase.requiredest') and not destf:
1306 1297 raise error.InputError(
1307 1298 _(b'you must specify a destination'),
1308 1299 hint=_(b'use: hg rebase -d REV'),
1309 1300 )
1310 1301
1311 1302 dest = None
1312 1303
1313 1304 if revf:
1314 1305 rebaseset = logcmdutil.revrange(repo, revf)
1315 1306 if not rebaseset:
1316 1307 ui.status(_(b'empty "rev" revision set - nothing to rebase\n'))
1317 1308 return None
1318 1309 elif srcf:
1319 1310 src = logcmdutil.revrange(repo, srcf)
1320 1311 if not src:
1321 1312 ui.status(_(b'empty "source" revision set - nothing to rebase\n'))
1322 1313 return None
1323 1314 # `+ (%ld)` to work around `wdir()::` being empty
1324 1315 rebaseset = repo.revs(b'(%ld):: + (%ld)', src, src)
1325 1316 else:
1326 1317 base = logcmdutil.revrange(repo, basef or [b'.'])
1327 1318 if not base:
1328 1319 ui.status(
1329 1320 _(b'empty "base" revision set - ' b"can't compute rebase set\n")
1330 1321 )
1331 1322 return None
1332 1323 if destf:
1333 1324 # --base does not support multiple destinations
1334 1325 dest = logcmdutil.revsingle(repo, destf)
1335 1326 else:
1336 1327 dest = repo[_destrebase(repo, base, destspace=destspace)]
1337 1328 destf = bytes(dest)
1338 1329
1339 1330 roots = [] # selected children of branching points
1340 1331 bpbase = {} # {branchingpoint: [origbase]}
1341 1332 for b in base: # group bases by branching points
1342 1333 bp = repo.revs(b'ancestor(%d, %d)', b, dest.rev()).first()
1343 1334 bpbase[bp] = bpbase.get(bp, []) + [b]
1344 1335 if None in bpbase:
1345 1336 # emulate the old behavior, showing "nothing to rebase" (a better
1346 1337 # behavior might be to abort with a "cannot find branching point" error)
1347 1338 bpbase.clear()
1348 1339 for bp, bs in bpbase.items(): # calculate roots
1349 1340 roots += list(repo.revs(b'children(%d) & ancestors(%ld)', bp, bs))
1350 1341
1351 1342 rebaseset = repo.revs(b'%ld::', roots)
1352 1343
1353 1344 if not rebaseset:
1354 1345 # transform to list because smartsets are not comparable to
1355 1346 # lists. This should be improved to honor laziness of
1356 1347 # smartset.
1357 1348 if list(base) == [dest.rev()]:
1358 1349 if basef:
1359 1350 ui.status(
1360 1351 _(
1361 1352 b'nothing to rebase - %s is both "base"'
1362 1353 b' and destination\n'
1363 1354 )
1364 1355 % dest
1365 1356 )
1366 1357 else:
1367 1358 ui.status(
1368 1359 _(
1369 1360 b'nothing to rebase - working directory '
1370 1361 b'parent is also destination\n'
1371 1362 )
1372 1363 )
1373 1364 elif not repo.revs(b'%ld - ::%d', base, dest.rev()):
1374 1365 if basef:
1375 1366 ui.status(
1376 1367 _(
1377 1368 b'nothing to rebase - "base" %s is '
1378 1369 b'already an ancestor of destination '
1379 1370 b'%s\n'
1380 1371 )
1381 1372 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1382 1373 )
1383 1374 else:
1384 1375 ui.status(
1385 1376 _(
1386 1377 b'nothing to rebase - working '
1387 1378 b'directory parent is already an '
1388 1379 b'ancestor of destination %s\n'
1389 1380 )
1390 1381 % dest
1391 1382 )
1392 1383 else: # can it happen?
1393 1384 ui.status(
1394 1385 _(b'nothing to rebase from %s to %s\n')
1395 1386 % (b'+'.join(bytes(repo[r]) for r in base), dest)
1396 1387 )
1397 1388 return None
1398 1389
1399 1390 if wdirrev in rebaseset:
1400 1391 raise error.InputError(_(b'cannot rebase the working copy'))
1401 1392 rebasingwcp = repo[b'.'].rev() in rebaseset
1402 1393 ui.log(
1403 1394 b"rebase",
1404 1395 b"rebasing working copy parent: %r\n",
1405 1396 rebasingwcp,
1406 1397 rebase_rebasing_wcp=rebasingwcp,
1407 1398 )
1408 1399 if inmemory and rebasingwcp:
1409 1400 # Check these since we did not before.
1410 1401 cmdutil.checkunfinished(repo)
1411 1402 cmdutil.bailifchanged(repo)
1412 1403
1413 1404 if not destf:
1414 1405 dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
1415 1406 destf = bytes(dest)
1416 1407
1417 1408 allsrc = revsetlang.formatspec(b'%ld', rebaseset)
1418 1409 alias = {b'ALLSRC': allsrc}
1419 1410
1420 1411 if dest is None:
1421 1412 try:
1422 1413 # fast path: try to resolve dest without SRC alias
1423 1414 dest = scmutil.revsingle(repo, destf, localalias=alias)
1424 1415 except error.RepoLookupError:
1425 1416 # multi-dest path: resolve dest for each SRC separately
1426 1417 destmap = {}
1427 1418 for r in rebaseset:
1428 1419 alias[b'SRC'] = revsetlang.formatspec(b'%d', r)
1429 1420 # use repo.anyrevs instead of scmutil.revsingle because we
1430 1421 # don't want to abort if destset is empty.
1431 1422 destset = repo.anyrevs([destf], user=True, localalias=alias)
1432 1423 size = len(destset)
1433 1424 if size == 1:
1434 1425 destmap[r] = destset.first()
1435 1426 elif size == 0:
1436 1427 ui.note(_(b'skipping %s - empty destination\n') % repo[r])
1437 1428 else:
1438 1429 raise error.InputError(
1439 1430 _(b'rebase destination for %s is not unique') % repo[r]
1440 1431 )
1441 1432
1442 1433 if dest is not None:
1443 1434 # single-dest case: assign dest to each rev in rebaseset
1444 1435 destrev = dest.rev()
1445 1436 destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
1446 1437
1447 1438 if not destmap:
1448 1439 ui.status(_(b'nothing to rebase - empty destination\n'))
1449 1440 return None
1450 1441
1451 1442 return destmap
1452 1443
1453 1444
1454 1445 def externalparent(repo, state, destancestors):
1455 1446 """Return the revision that should be used as the second parent
1456 1447 when the revisions in state are collapsed on top of destancestors.
1457 1448 Abort if there is more than one parent.
1458 1449 """
1459 1450 parents = set()
1460 1451 source = min(state)
1461 1452 for rev in state:
1462 1453 if rev == source:
1463 1454 continue
1464 1455 for p in repo[rev].parents():
1465 1456 if p.rev() not in state and p.rev() not in destancestors:
1466 1457 parents.add(p.rev())
1467 1458 if not parents:
1468 1459 return nullrev
1469 1460 if len(parents) == 1:
1470 1461 return parents.pop()
1471 1462 raise error.StateError(
1472 1463 _(
1473 1464 b'unable to collapse on top of %d, there is more '
1474 1465 b'than one external parent: %s'
1475 1466 )
1476 1467 % (max(destancestors), b', '.join(b"%d" % p for p in sorted(parents)))
1477 1468 )
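# Illustration (hypothetical revs, not in the original source): when
# collapsing a set whose merges pull in exactly one outside parent X,
# X becomes the second parent of the collapsed commit; two or more
# distinct outside parents make the collapse ambiguous, hence the
# StateError above.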
1478 1469
1479 1470
1480 1471 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1481 1472 """Commit the memory changes with parents p1 and p2.
1482 1473 Return node of committed revision."""
1483 1474 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1484 1475 # ``branch`` (used when passing ``--keepbranches``).
1485 1476 branch = None
1486 1477 if b'branch' in extra:
1487 1478 branch = extra[b'branch']
1488 1479
1489 1480 # FIXME: We call _compact() because it's required to correctly detect
1490 1481 # changed files. This was added to fix a regression shortly before the 5.5
1491 1482 # release. A proper fix will be done in the default branch.
1492 1483 wctx._compact()
1493 1484 memctx = wctx.tomemctx(
1494 1485 commitmsg,
1495 1486 date=date,
1496 1487 extra=extra,
1497 1488 user=user,
1498 1489 branch=branch,
1499 1490 editor=editor,
1500 1491 )
1501 1492 if memctx.isempty() and not repo.ui.configbool(b'ui', b'allowemptycommit'):
1502 1493 return None
1503 1494 commitres = repo.commitctx(memctx)
1504 1495 wctx.clean() # Might be reused
1505 1496 return commitres
1506 1497
1507 1498
1508 1499 def commitnode(repo, editor, extra, user, date, commitmsg):
1509 1500 """Commit the wd changes with parents p1 and p2.
1510 1501 Return node of committed revision."""
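# When rebase.singletransaction is set, _origrebase already holds one
# transaction for the whole rebase, so use a no-op context manager here;
# otherwise each commit opens (and commits) its own transaction.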
1511 1502 tr = util.nullcontextmanager
1512 1503 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1513 1504 tr = lambda: repo.transaction(b'rebase')
1514 1505 with tr():
1515 1506 # Commit might fail if unresolved files exist
1516 1507 newnode = repo.commit(
1517 1508 text=commitmsg, user=user, date=date, extra=extra, editor=editor
1518 1509 )
1519 1510
1520 1511 repo.dirstate.setbranch(
1521 1512 repo[newnode].branch(), repo.currenttransaction()
1522 1513 )
1523 1514 return newnode
1524 1515
1525 1516
1526 1517 def rebasenode(repo, rev, p1, p2, base, collapse, wctx):
1527 1518 """Rebase a single revision rev on top of p1 using base as merge ancestor"""
1528 1519 # Merge phase
1529 1520 # Update to destination and merge it with local
1530 1521 p1ctx = repo[p1]
1531 1522 if wctx.isinmemory():
1532 1523 wctx.setbase(p1ctx)
1533 1524 scope = util.nullcontextmanager
1534 1525 else:
1535 1526 if repo[b'.'].rev() != p1:
1536 1527 repo.ui.debug(b" update to %d:%s\n" % (p1, p1ctx))
1537 1528 mergemod.clean_update(p1ctx)
1538 1529 else:
1539 1530 repo.ui.debug(b" already in destination\n")
1540 1531 scope = lambda: repo.dirstate.changing_parents(repo)
1541 1532 # This is, alas, necessary to invalidate workingctx's manifest cache,
1542 1533 # as well as other data we litter on it in other places.
1543 1534 wctx = repo[None]
1544 1535 repo.dirstate.write(repo.currenttransaction())
1545 1536 ctx = repo[rev]
1546 1537 repo.ui.debug(b" merge against %d:%s\n" % (rev, ctx))
1547 1538 if base is not None:
1548 1539 repo.ui.debug(b" detach base %d:%s\n" % (base, repo[base]))
1549 1540
1550 1541 with scope():
1551 1542 # See explanation in merge.graft()
1552 1543 mergeancestor = repo.changelog.isancestor(p1ctx.node(), ctx.node())
1553 1544 stats = mergemod._update(
1554 1545 repo,
1555 1546 rev,
1556 1547 branchmerge=True,
1557 1548 force=True,
1558 1549 ancestor=base,
1559 1550 mergeancestor=mergeancestor,
1560 1551 labels=[b'dest', b'source', b'parent of source'],
1561 1552 wc=wctx,
1562 1553 )
1563 1554 wctx.setparents(p1ctx.node(), repo[p2].node())
1564 1555 if collapse:
1565 1556 copies.graftcopies(wctx, ctx, p1ctx)
1566 1557 else:
1567 1558 # If we're not using --collapse, we need to
1568 1559 # duplicate copies between the revision we're
1569 1560 # rebasing and its first parent.
1570 1561 copies.graftcopies(wctx, ctx, ctx.p1())
1571 1562
1572 1563 if stats.unresolvedcount > 0:
1573 1564 if wctx.isinmemory():
1574 1565 raise error.InMemoryMergeConflictsError()
1575 1566 else:
1576 1567 raise error.ConflictResolutionRequired(b'rebase')
1577 1568
1578 1569
1579 1570 def adjustdest(repo, rev, destmap, state, skipped):
1580 1571 r"""adjust rebase destination given the current rebase state
1581 1572
1582 1573 rev is what is being rebased. Return a list of two revs, which are the
1583 1574 adjusted destinations for rev's p1 and p2, respectively. If a parent is
1584 1575 nullrev, return dest without adjustment for it.
1585 1576
1586 1577 For example, when rebasing B+E to F and C to G, rebase will first move B
1587 1578 to B1, and E's destination will be adjusted from F to B1.
1588 1579
1589 1580 B1 <- written during rebasing B
1590 1581 |
1591 1582 F <- original destination of B, E
1592 1583 |
1593 1584 | E <- rev, which is being rebased
1594 1585 | |
1595 1586 | D <- prev, one parent of rev being checked
1596 1587 | |
1597 1588 | x <- skipped, ex. no successor or successor in (::dest)
1598 1589 | |
1599 1590 | C <- rebased as C', different destination
1600 1591 | |
1601 1592 | B <- rebased as B1 C'
1602 1593 |/ |
1603 1594 A G <- destination of C, different
1604 1595
1605 1596 Another example with a merge changeset: for rebase -r C+G+H -d K, rebase
1606 1597 will first move C to C1 and G to G1; when it checks H, the adjusted
1607 1598 destinations will be [C1, G1].
1608 1599
1609 1600 H C1 G1
1610 1601 /| | /
1611 1602 F G |/
1612 1603 K | | -> K
1613 1604 | C D |
1614 1605 | |/ |
1615 1606 | B | ...
1616 1607 |/ |/
1617 1608 A A
1618 1609
1619 1610 In addition, dest is adjusted according to existing rebase information. For example,
1620 1611
1621 1612 B C D B needs to be rebased on top of C, C needs to be rebased on top
1622 1613 \|/ of D. We will rebase C first.
1623 1614 A
1624 1615
1625 1616 C' After rebasing C, when considering B's destination, use C'
1626 1617 | instead of the original C.
1627 1618 B D
1628 1619 \ /
1629 1620 A
1630 1621 """
1631 1622 # pick already rebased revs with same dest from state as interesting source
1632 1623 dest = destmap[rev]
1633 1624 source = [
1634 1625 s
1635 1626 for s, d in state.items()
1636 1627 if d > 0 and destmap[s] == dest and s not in skipped
1637 1628 ]
1638 1629
1639 1630 result = []
1640 1631 for prev in repo.changelog.parentrevs(rev):
1641 1632 adjusted = dest
1642 1633 if prev != nullrev:
1643 1634 candidate = repo.revs(b'max(%ld and (::%d))', source, prev).first()
1644 1635 if candidate is not None:
1645 1636 adjusted = state[candidate]
1646 1637 if adjusted == dest and dest in state:
1647 1638 adjusted = state[dest]
1648 1639 if adjusted == revtodo:
1649 1640 # sortsource should produce an order that makes this impossible
1650 1641 raise error.ProgrammingError(
1651 1642 b'rev %d should be rebased already at this time' % dest
1652 1643 )
1653 1644 result.append(adjusted)
1654 1645 return result
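# Worked illustration (hypothetical revs, following the first docstring
# example): once B has been rebased to B1 (state[B] == B1), calling
# adjustdest(repo, E, destmap, state, skipped) yields B1 rather than the
# original destination F for the parent of E that descends from B.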
1655 1646
1656 1647
1657 1648 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
1658 1649 """
1659 1650 Abort if rebase will create divergence or rebase is a no-op because of markers
1660 1651
1661 1652 `rebaseobsrevs`: set of obsolete revisions in source
1662 1653 `rebaseobsskipped`: set of revisions from source skipped because they have
1663 1654 successors in destination or no non-obsolete successor.
1664 1655 """
1665 1656 # Obsolete node with successors not in dest leads to divergence
1666 1657 divergenceok = obsolete.isenabled(repo, obsolete.allowdivergenceopt)
1667 1658 divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
1668 1659
1669 1660 if divergencebasecandidates and not divergenceok:
1670 1661 divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
1671 1662 msg = _(b"this rebase will cause divergences from: %s")
1672 1663 h = _(
1673 1664 b"to force the rebase please set "
1674 1665 b"experimental.evolution.allowdivergence=True"
1675 1666 )
1676 1667 raise error.StateError(msg % (b",".join(divhashes),), hint=h)
1677 1668
1678 1669
1679 1670 def successorrevs(unfi, rev):
1680 1671 """yield revision numbers for successors of rev"""
1681 1672 assert unfi.filtername is None
1682 1673 get_rev = unfi.changelog.index.get_rev
1683 1674 for s in obsutil.allsuccessors(unfi.obsstore, [unfi[rev].node()]):
1684 1675 r = get_rev(s)
1685 1676 if r is not None:
1686 1677 yield r
1687 1678
1688 1679
1689 1680 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
1690 1681 """Return new parents and optionally a merge base for rev being rebased
1691 1682
1692 1683 The destination specified by "dest" cannot always be used directly because
1693 1684 a previous rebase result could affect the destination. For example,
1694 1685
1695 1686 D E rebase -r C+D+E -d B
1696 1687 |/ C will be rebased to C'
1697 1688 B C D's new destination will be C' instead of B
1698 1689 |/ E's new destination will be C' instead of B
1699 1690 A
1700 1691
1701 1692 The new parents of a merge are slightly more complicated. See the comment
1702 1693 block below.
1703 1694 """
1704 1695 # use unfiltered changelog since successorrevs may return filtered nodes
1705 1696 assert repo.filtername is None
1706 1697 cl = repo.changelog
1707 1698 isancestor = cl.isancestorrev
1708 1699
1709 1700 dest = destmap[rev]
1710 1701 oldps = repo.changelog.parentrevs(rev) # old parents
1711 1702 newps = [nullrev, nullrev] # new parents
1712 1703 dests = adjustdest(repo, rev, destmap, state, skipped)
1713 1704 bases = list(oldps) # merge base candidates, initially just old parents
1714 1705
1715 1706 if all(r == nullrev for r in oldps[1:]):
1716 1707 # For non-merge changeset, just move p to adjusted dest as requested.
1717 1708 newps[0] = dests[0]
1718 1709 else:
1719 1710 # For merge changeset, if we move p to dests[i] unconditionally, both
1720 1711 # parents may change and the end result looks like "the merge loses a
1721 1712 # parent", which is a surprise. This is a limit because "--dest" only
1722 1713 # accepts one dest per src.
1723 1714 #
1724 1715 # Therefore, only move p with reasonable conditions (in this order):
1725 1716 # 1. use dest, if dest is a descendant of (p or one of p's successors)
1726 1717 # 2. use p's rebased result, if p is rebased (state[p] > 0)
1727 1718 #
1728 1719 # Comparing with adjustdest, the logic here does some additional work:
1729 1720 # 1. decide which parents will not be moved towards dest
1730 1721 # 2. if the above decision is "no", should a parent still be moved
1731 1722 # because it was rebased?
1732 1723 #
1733 1724 # For example:
1734 1725 #
1735 1726 # C # "rebase -r C -d D" is an error since none of the parents
1736 1727 # /| # can be moved. "rebase -r B+C -d D" will move C's parent
1737 1728 # A B D # B (using rule "2."), since B will be rebased.
1738 1729 #
1739 1730 # The loop tries not to rely on the fact that a Mercurial node has
1740 1731 # at most 2 parents.
1741 1732 for i, p in enumerate(oldps):
1742 1733 np = p # new parent
1743 1734 if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
1744 1735 np = dests[i]
1745 1736 elif p in state and state[p] > 0:
1746 1737 np = state[p]
1747 1738
1748 1739 # If one parent becomes an ancestor of the other, drop the ancestor
1749 1740 for j, x in enumerate(newps[:i]):
1750 1741 if x == nullrev:
1751 1742 continue
1752 1743 if isancestor(np, x): # CASE-1
1753 1744 np = nullrev
1754 1745 elif isancestor(x, np): # CASE-2
1755 1746 newps[j] = np
1756 1747 np = nullrev
1757 1748 # New parents forming an ancestor relationship does not
1758 1749 # mean the old parents have a similar relationship. Do not
1759 1750 # set bases[x] to nullrev.
1760 1751 bases[j], bases[i] = bases[i], bases[j]
1761 1752
1762 1753 newps[i] = np
1763 1754
1764 1755 # "rebasenode" updates to new p1, and the old p1 will be used as merge
1765 1756 # base. If only p2 changes, merging using unchanged p1 as merge base is
1766 1757 # suboptimal. Therefore swap parents to make the merge sane.
1767 1758 if newps[1] != nullrev and oldps[0] == newps[0]:
1768 1759 assert len(newps) == 2 and len(oldps) == 2
1769 1760 newps.reverse()
1770 1761 bases.reverse()
1771 1762
1772 1763 # No parent change might be an error because we fail to make rev a
1773 1764 # descendant of the requested dest. This can happen, for example:
1774 1765 #
1775 1766 # C # rebase -r C -d D
1776 1767 # /| # None of A and B will be changed to D and rebase fails.
1777 1768 # A B D
1778 1769 if set(newps) == set(oldps) and dest not in newps:
1779 1770 raise error.InputError(
1780 1771 _(
1781 1772 b'cannot rebase %d:%s without '
1782 1773 b'moving at least one of its parents'
1783 1774 )
1784 1775 % (rev, repo[rev])
1785 1776 )
1786 1777
1787 1778 # Source should not be ancestor of dest. The check here guarantees it's
1788 1779 # impossible. With multi-dest, the initial check does not cover complex
1789 1780 # cases since we don't have abstractions to dry-run rebase cheaply.
1790 1781 if any(p != nullrev and isancestor(rev, p) for p in newps):
1791 1782 raise error.InputError(_(b'source is ancestor of destination'))
1792 1783
1793 1784 # Check if the merge will contain unwanted changes. That may happen if
1794 1785 # there are multiple special (non-changelog ancestor) merge bases, which
1795 1786 # cannot be handled well by the 3-way merge algorithm. For example:
1796 1787 #
1797 1788 # F
1798 1789 # /|
1799 1790 # D E # "rebase -r D+E+F -d Z", when rebasing F, if "D" was chosen
1800 1791 # | | # as merge base, the difference between D and F will include
1801 1792 # B C # C, so the rebased F will contain C surprisingly. If "E" was
1802 1793 # |/ # chosen, the rebased F will contain B.
1803 1794 # A Z
1804 1795 #
1805 1796 # But our merge base candidates (D and E in above case) could still be
1806 1797 # better than the default (ancestor(F, Z) == null). Therefore still
1807 1798 # pick one (so choose p1 above).
1808 1799 if sum(1 for b in set(bases) if b != nullrev and b not in newps) > 1:
1809 1800 unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
1810 1801 for i, base in enumerate(bases):
1811 1802 if base == nullrev or base in newps:
1812 1803 continue
1813 1804 # Revisions in the side (not chosen as merge base) branch that
1814 1805 # might contain "surprising" contents
1815 1806 other_bases = set(bases) - {base}
1816 1807 siderevs = list(
1817 1808 repo.revs(b'(%ld %% (%d+%d))', other_bases, base, dest)
1818 1809 )
1819 1810
1820 1811 # If those revisions are covered by rebaseset, the result is good.
1821 1812 # A merge in rebaseset would be considered to cover its ancestors.
1822 1813 if siderevs:
1823 1814 rebaseset = [
1824 1815 r for r, d in state.items() if d > 0 and r not in obsskipped
1825 1816 ]
1826 1817 merges = [
1827 1818 r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
1828 1819 ]
1829 1820 unwanted[i] = list(
1830 1821 repo.revs(
1831 1822 b'%ld - (::%ld) - %ld', siderevs, merges, rebaseset
1832 1823 )
1833 1824 )
1834 1825
1835 1826 if any(revs is not None for revs in unwanted):
1836 1827 # Choose a merge base that has a minimal number of unwanted revs.
1837 1828 l, i = min(
1838 1829 (len(revs), i)
1839 1830 for i, revs in enumerate(unwanted)
1840 1831 if revs is not None
1841 1832 )
1842 1833
1843 1834 # The merge will include unwanted revisions. Abort now. Revisit this if
1844 1835 # we have a more advanced merge algorithm that handles multiple bases.
1845 1836 if l > 0:
1846 1837 unwanteddesc = _(b' or ').join(
1847 1838 (
1848 1839 b', '.join(b'%d:%s' % (r, repo[r]) for r in revs)
1849 1840 for revs in unwanted
1850 1841 if revs is not None
1851 1842 )
1852 1843 )
1853 1844 raise error.InputError(
1854 1845 _(b'rebasing %d:%s will include unwanted changes from %s')
1855 1846 % (rev, repo[rev], unwanteddesc)
1856 1847 )
1857 1848
1858 1849 # newps[0] should match merge base if possible. Currently, if newps[i]
1859 1850 # is nullrev, the only case is newps[i] and newps[j] (j < i), one is
1860 1851 # the other's ancestor. In that case, it's fine to not swap newps here.
1861 1852 # (see CASE-1 and CASE-2 above)
1862 1853 if i != 0:
1863 1854 if newps[i] != nullrev:
1864 1855 newps[0], newps[i] = newps[i], newps[0]
1865 1856 bases[0], bases[i] = bases[i], bases[0]
1866 1857
1867 1858 # "rebasenode" updates to new p1, use the corresponding merge base.
1868 1859 base = bases[0]
1869 1860
1870 1861 repo.ui.debug(b" future parents are %d and %d\n" % tuple(newps))
1871 1862
1872 1863 return newps[0], newps[1], base
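# Worked illustration (hypothetical revs, per the docstring diagram): for
# "rebase -r C+D+E -d B", once C has been rebased to C', a non-merge D
# gets (C', nullrev, C): its new p1 is C' and its old p1 C becomes the
# merge base.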
1873 1864
1874 1865
1875 1866 def isagitpatch(repo, patchname):
1876 1867 """Return true if the given patch is in git format"""
1877 1868 mqpatch = os.path.join(repo.mq.path, patchname)
1878 1869 for line in patch.linereader(open(mqpatch, b'rb')):
1879 1870 if line.startswith(b'diff --git'):
1880 1871 return True
1881 1872 return False
1882 1873
1883 1874
1884 1875 def updatemq(repo, state, skipped, **opts):
1885 1876 """Update rebased mq patches - finalize and then import them"""
1886 1877 mqrebase = {}
1887 1878 mq = repo.mq
1888 1879 original_series = mq.fullseries[:]
1889 1880 skippedpatches = set()
1890 1881
1891 1882 for p in mq.applied:
1892 1883 rev = repo[p.node].rev()
1893 1884 if rev in state:
1894 1885 repo.ui.debug(
1895 1886 b'revision %d is an mq patch (%s), finalize it.\n'
1896 1887 % (rev, p.name)
1897 1888 )
1898 1889 mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
1899 1890 else:
1900 1891 # Applied but not rebased; not sure this should happen
1901 1892 skippedpatches.add(p.name)
1902 1893
1903 1894 if mqrebase:
1904 1895 mq.finish(repo, mqrebase.keys())
1905 1896
1906 1897 # We must start import from the newest revision
1907 1898 for rev in sorted(mqrebase, reverse=True):
1908 1899 if rev not in skipped:
1909 1900 name, isgit = mqrebase[rev]
1910 1901 repo.ui.note(
1911 1902 _(b'updating mq patch %s to %d:%s\n')
1912 1903 % (name, state[rev], repo[state[rev]])
1913 1904 )
1914 1905 mq.qimport(
1915 1906 repo,
1916 1907 (),
1917 1908 patchname=name,
1918 1909 git=isgit,
1919 1910 rev=[b"%d" % state[rev]],
1920 1911 )
1921 1912 else:
1922 1913 # Rebased and skipped
1923 1914 skippedpatches.add(mqrebase[rev][0])
1924 1915
1925 1916 # Patches were either applied and rebased and imported in
1926 1917 # order, applied and removed, or unapplied. Discard the removed
1927 1918 # ones while preserving the original series order and guards.
1928 1919 newseries = [
1929 1920 s
1930 1921 for s in original_series
1931 1922 if mq.guard_re.split(s, 1)[0] not in skippedpatches
1932 1923 ]
1933 1924 mq.fullseries[:] = newseries
1934 1925 mq.seriesdirty = True
1935 1926 mq.savedirty()
1936 1927
1937 1928
1938 1929 def storecollapsemsg(repo, collapsemsg):
1939 1930 """Store the collapse message to allow recovery"""
1940 1931 collapsemsg = collapsemsg or b''
1941 1932 f = repo.vfs(b"last-message.txt", b"w")
1942 1933 f.write(b"%s\n" % collapsemsg)
1943 1934 f.close()
1944 1935
1945 1936
1946 1937 def clearcollapsemsg(repo):
1947 1938 """Remove collapse message file"""
1948 1939 repo.vfs.unlinkpath(b"last-message.txt", ignoremissing=True)
1949 1940
1950 1941
1951 1942 def restorecollapsemsg(repo, isabort):
1952 1943 """Restore previously stored collapse message"""
1953 1944 try:
1954 1945 f = repo.vfs(b"last-message.txt")
1955 1946 collapsemsg = f.readline().strip()
1956 1947 f.close()
1957 1948 except FileNotFoundError:
1958 1949 if isabort:
1959 1950 # Oh well, just abort like normal
1960 1951 collapsemsg = b''
1961 1952 else:
1962 1953 raise error.Abort(_(b'missing .hg/last-message.txt for rebase'))
1963 1954 return collapsemsg
1964 1955
1965 1956
1966 1957 def clearstatus(repo):
1967 1958 """Remove the status files"""
1968 1959 # Make sure the active transaction won't write the state file
1969 1960 tr = repo.currenttransaction()
1970 1961 if tr:
1971 1962 tr.removefilegenerator(b'rebasestate')
1972 1963 repo.vfs.unlinkpath(b"rebasestate", ignoremissing=True)
1973 1964
1974 1965
1975 1966 def sortsource(destmap):
1976 1967 """yield source revisions in an order that we only rebase things once
1977 1968
1978 1969 If source and destination overlap, we should filter out revisions
1979 1970 depending on other revisions which haven't been rebased yet.
1980 1971
1981 1972 Yield a sorted list of revisions each time.
1982 1973
1983 1974 For example, when rebasing A to B and B to C, this function yields [B],
1984 1975 then [A], indicating B needs to be rebased first.
1985 1976
1986 1977 Raise if there is a cycle so the rebase is impossible.
1987 1978 """
1988 1979 srcset = set(destmap)
1989 1980 while srcset:
1990 1981 srclist = sorted(srcset)
1991 1982 result = []
1992 1983 for r in srclist:
1993 1984 if destmap[r] not in srcset:
1994 1985 result.append(r)
1995 1986 if not result:
1996 1987 raise error.InputError(_(b'source and destination form a cycle'))
1997 1988 srcset -= set(result)
1998 1989 yield result
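# A minimal sketch of the batching (plain integer revs; assume the
# docstring example of rebasing A to B and B to C with A=1, B=2, C=3):
#
#   list(sortsource({1: 2, 2: 3}))  ->  [[2], [1]]
#
# i.e. rev 2 (B) is rebased before rev 1 (A), and a map such as
# {1: 2, 2: 1} forms a cycle and raises InputError.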
1999 1990
2000 1991
2001 1992 def buildstate(repo, destmap, collapse):
2002 1993 """Define which revisions are going to be rebased and where
2003 1994
2004 1995 repo: repo
2005 1996 destmap: {srcrev: destrev}
2006 1997 """
2007 1998 rebaseset = destmap.keys()
2008 1999 originalwd = repo[b'.'].rev()
2009 2000
2010 2001 # This check isn't strictly necessary, since mq detects commits over an
2011 2002 # applied patch. But it prevents messing up the working directory when
2012 2003 # a partially completed rebase is blocked by mq.
2013 2004 if b'qtip' in repo.tags():
2014 2005 mqapplied = {repo[s.node].rev() for s in repo.mq.applied}
2015 2006 if set(destmap.values()) & mqapplied:
2016 2007 raise error.StateError(_(b'cannot rebase onto an applied mq patch'))
2017 2008
2018 2009 # Get "cycle" error early by exhausting the generator.
2019 2010 sortedsrc = list(sortsource(destmap)) # a list of sorted revs
2020 2011 if not sortedsrc:
2021 2012 raise error.InputError(_(b'no matching revisions'))
2022 2013
2023 2014 # Only check the first batch of revisions to rebase not depending on other
2024 2015 # rebaseset. This means the "source is ancestor of destination" check for
2025 2016 # the second (and following) batches of revisions is not done here. We rely on
2026 2017 # "defineparents" to do that check.
2027 2018 roots = list(repo.set(b'roots(%ld)', sortedsrc[0]))
2028 2019 if not roots:
2029 2020 raise error.InputError(_(b'no matching revisions'))
2030 2021
2031 2022 def revof(r):
2032 2023 return r.rev()
2033 2024
2034 2025 roots = sorted(roots, key=revof)
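# Every source rev starts out mapped to the revtodo sentinel; entries are
# rewritten to the new rev (or marked done) as the rebase proceeds.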
2035 2026 state = dict.fromkeys(rebaseset, revtodo)
2036 2027 emptyrebase = len(sortedsrc) == 1
2037 2028 for root in roots:
2038 2029 dest = repo[destmap[root.rev()]]
2039 2030 commonbase = root.ancestor(dest)
2040 2031 if commonbase == root:
2041 2032 raise error.InputError(_(b'source is ancestor of destination'))
2042 2033 if commonbase == dest:
2043 2034 wctx = repo[None]
2044 2035 if dest == wctx.p1():
2045 2036 # when rebasing to '.', it will use the current wd branch name
2046 2037 samebranch = root.branch() == wctx.branch()
2047 2038 else:
2048 2039 samebranch = root.branch() == dest.branch()
2049 2040 if not collapse and samebranch and dest in root.parents():
2050 2041 # mark the revision as done by setting its new revision
2051 2042 # equal to its old (current) revisions
2052 2043 state[root.rev()] = root.rev()
2053 2044 repo.ui.debug(b'source is a child of destination\n')
2054 2045 continue
2055 2046
2056 2047 emptyrebase = False
2057 2048 repo.ui.debug(b'rebase onto %s starting from %s\n' % (dest, root))
2058 2049 if emptyrebase:
2059 2050 return None
2060 2051 for rev in sorted(state):
2061 2052 parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev]
2062 2053 # if all parents of this revision are done, then so is this revision
2063 2054 if parents and all(state.get(p) == p for p in parents):
2064 2055 state[rev] = rev
2065 2056 return originalwd, destmap, state
2066 2057
2067 2058
2068 2059 def clearrebased(
2069 2060 ui,
2070 2061 repo,
2071 2062 destmap,
2072 2063 state,
2073 2064 skipped,
2074 2065 collapsedas=None,
2075 2066 keepf=False,
2076 2067 fm=None,
2077 2068 backup=True,
2078 2069 ):
2079 2070 """dispose of rebased revision at the end of the rebase
2080 2071
2081 2072 If `collapsedas` is not None, the rebase was a collapse whose result is the
2082 2073 `collapsedas` node.
2083 2074
2084 2075 If `keepf` is True, the rebase has --keep set and no nodes should be
2085 2076 removed (but bookmarks still need to be moved).
2086 2077
2087 2078 If `backup` is False, no backup will be stored when stripping rebased
2088 2079 revisions.
2089 2080 """
2090 2081 tonode = repo.changelog.node
2091 2082 replacements = {}
2092 2083 moves = {}
2093 2084 stripcleanup = not obsolete.isenabled(repo, obsolete.createmarkersopt)
2094 2085
2095 2086 collapsednodes = []
2096 2087 for rev, newrev in sorted(state.items()):
2097 2088 if newrev >= 0 and newrev != rev:
2098 2089 oldnode = tonode(rev)
2099 2090 newnode = collapsedas or tonode(newrev)
2100 2091 moves[oldnode] = newnode
2101 2092 succs = None
2102 2093 if rev in skipped:
2103 2094 if stripcleanup or not repo[rev].obsolete():
2104 2095 succs = ()
2105 2096 elif collapsedas:
2106 2097 collapsednodes.append(oldnode)
2107 2098 else:
2108 2099 succs = (newnode,)
2109 2100 if succs is not None:
2110 2101 replacements[(oldnode,)] = succs
2111 2102 if collapsednodes:
2112 2103 replacements[tuple(collapsednodes)] = (collapsedas,)
2113 2104 if fm:
2114 2105 hf = fm.hexfunc
2115 2106 fl = fm.formatlist
2116 2107 fd = fm.formatdict
2117 2108 changes = {}
2118 2109 for oldns, newn in replacements.items():
2119 2110 for oldn in oldns:
2120 2111 changes[hf(oldn)] = fl([hf(n) for n in newn], name=b'node')
2121 2112 nodechanges = fd(changes, key=b"oldnode", value=b"newnodes")
2122 2113 fm.data(nodechanges=nodechanges)
2123 2114 if keepf:
2124 2115 replacements = {}
2125 2116 scmutil.cleanupnodes(repo, replacements, b'rebase', moves, backup=backup)
2126 2117
2127 2118
2128 2119 def pullrebase(orig, ui, repo, *args, **opts):
2129 2120 """Call rebase after pull if the latter has been invoked with --rebase"""
2130 2121 if opts.get('rebase'):
2131 2122 if ui.configbool(b'commands', b'rebase.requiredest'):
2132 2123 msg = _(b'rebase destination required by configuration')
2133 2124 hint = _(b'use hg pull followed by hg rebase -d DEST')
2134 2125 raise error.InputError(msg, hint=hint)
2135 2126
2136 2127 with repo.wlock(), repo.lock():
2137 2128 if opts.get('update'):
2138 2129 del opts['update']
2139 2130 ui.debug(
2140 2131 b'--update and --rebase are not compatible, ignoring '
2141 2132 b'the update flag\n'
2142 2133 )
2143 2134
2144 2135 cmdutil.checkunfinished(repo, skipmerge=True)
2145 2136 cmdutil.bailifchanged(
2146 2137 repo,
2147 2138 hint=_(
2148 2139 b'cannot pull with rebase: '
2149 2140 b'please commit or shelve your changes first'
2150 2141 ),
2151 2142 )
2152 2143
2153 2144 revsprepull = len(repo)
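# Temporarily replace commands.postincoming with a no-op so the wrapped
# pull skips its usual post-pull update/report; the original hook is
# restored in the finally block below.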
2154 2145 origpostincoming = commands.postincoming
2155 2146
2156 2147 def _dummy(*args, **kwargs):
2157 2148 pass
2158 2149
2159 2150 commands.postincoming = _dummy
2160 2151 try:
2161 2152 ret = orig(ui, repo, *args, **opts)
2162 2153 finally:
2163 2154 commands.postincoming = origpostincoming
2164 2155 revspostpull = len(repo)
2165 2156 if revspostpull > revsprepull:
2166 2157 # the --rev option from pull conflicts with rebase's own --rev;
2167 2158 # drop it
2168 2159 if 'rev' in opts:
2169 2160 del opts['rev']
2170 2161 # positional argument from pull conflicts with rebase's own
2171 2162 # --source.
2172 2163 if 'source' in opts:
2173 2164 del opts['source']
2174 2165 # revsprepull is the len of the repo, not revnum of tip.
2175 2166 destspace = list(repo.changelog.revs(start=revsprepull))
2176 2167 opts['_destspace'] = destspace
2177 2168 try:
2178 2169 rebase(ui, repo, **opts)
2179 2170 except error.NoMergeDestAbort:
2180 2171 # we can maybe update instead
2181 2172 rev, _a, _b = destutil.destupdate(repo)
2182 2173 if rev == repo[b'.'].rev():
2183 2174 ui.status(_(b'nothing to rebase\n'))
2184 2175 else:
2185 2176 ui.status(_(b'nothing to rebase - updating instead\n'))
2186 2177 # not passing argument to get the bare update behavior
2187 2178 # with warning and trumpets
2188 2179 commands.update(ui, repo)
2189 2180 else:
2190 2181 if opts.get('tool'):
2191 2182 raise error.InputError(_(b'--tool can only be used with --rebase'))
2192 2183 ret = orig(ui, repo, *args, **opts)
2193 2184
2194 2185 return ret
2195 2186
2196 2187
2197 2188 def _compute_obsolete_sets(repo, rebaseobsrevs, destmap):
2198 2189 """Figure out what to do about about obsolete revisions
2199 2190
2200 2191 `obsolete_with_successor_in_destination` is a mapping of obsolete => successor
2201 2192 for all obsolete nodes to be rebased given in `rebaseobsrevs`.
2202 2193
2203 2194 `obsolete_with_successor_in_rebase_set` is a set of obsolete revisions,
2204 2195 without a successor in destination, that would cause divergence.
2205 2196 """
2206 2197 obsolete_with_successor_in_destination = {}
2207 2198 obsolete_with_successor_in_rebase_set = set()
2208 2199
2209 2200 cl = repo.changelog
2210 2201 get_rev = cl.index.get_rev
2211 2202 extinctrevs = set(repo.revs(b'extinct()'))
2212 2203 for srcrev in rebaseobsrevs:
2213 2204 srcnode = cl.node(srcrev)
2214 2205 # XXX: more advanced APIs are required to handle split correctly
2215 2206 successors = set(obsutil.allsuccessors(repo.obsstore, [srcnode]))
2216 2207 # obsutil.allsuccessors includes node itself
2217 2208 successors.remove(srcnode)
2218 2209 succrevs = {get_rev(s) for s in successors}
2219 2210 succrevs.discard(None)
2220 2211 if not successors or succrevs.issubset(extinctrevs):
2221 2212 # no successor, or all successors are extinct
2222 2213 obsolete_with_successor_in_destination[srcrev] = None
2223 2214 else:
2224 2215 dstrev = destmap[srcrev]
2225 2216 for succrev in succrevs:
2226 2217 if cl.isancestorrev(succrev, dstrev):
2227 2218 obsolete_with_successor_in_destination[srcrev] = succrev
2228 2219 break
2229 2220 else:
2230 2221 # If 'srcrev' has a successor in rebase set but none in
2231 2222 # destination (which would be caught above), we shall skip it
2232 2223 # and its descendants to avoid divergence.
2233 2224 if srcrev in extinctrevs or any(s in destmap for s in succrevs):
2234 2225 obsolete_with_successor_in_rebase_set.add(srcrev)
2235 2226
2236 2227 return (
2237 2228 obsolete_with_successor_in_destination,
2238 2229 obsolete_with_successor_in_rebase_set,
2239 2230 )
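# Sketch of the two buckets (hypothetical revs): if obsolete rev X has a
# successor that is already an ancestor of X's destination, X lands in the
# first mapping and can simply be skipped; if X's successors instead live
# in the rebase set itself, X lands in the second set so it and its
# descendants can be skipped to avoid divergence.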
2240 2231
2241 2232
2242 2233 def abortrebase(ui, repo):
2243 2234 with repo.wlock(), repo.lock():
2244 2235 rbsrt = rebaseruntime(repo, ui)
2245 2236 rbsrt._prepareabortorcontinue(isabort=True)
2246 2237
2247 2238
2248 2239 def continuerebase(ui, repo):
2249 2240 with repo.wlock(), repo.lock():
2250 2241 rbsrt = rebaseruntime(repo, ui)
2251 2242 ms = mergestatemod.mergestate.read(repo)
2252 2243 mergeutil.checkunresolved(ms)
2253 2244 retcode = rbsrt._prepareabortorcontinue(isabort=False)
2254 2245 if retcode is not None:
2255 2246 return retcode
2256 2247 rbsrt._performrebase(None)
2257 2248 rbsrt._finishrebase()
2258 2249
2259 2250
2260 2251 def summaryhook(ui, repo):
2261 2252 if not repo.vfs.exists(b'rebasestate'):
2262 2253 return
2263 2254 try:
2264 2255 rbsrt = rebaseruntime(repo, ui, {})
2265 2256 rbsrt.restorestatus()
2266 2257 state = rbsrt.state
2267 2258 except error.RepoLookupError:
2268 2259 # i18n: column positioning for "hg summary"
2269 2260 msg = _(b'rebase: (use "hg rebase --abort" to clear broken state)\n')
2270 2261 ui.write(msg)
2271 2262 return
2272 2263 numrebased = len([i for i in state.values() if i >= 0])
2273 2264 # i18n: column positioning for "hg summary"
2274 2265 ui.write(
2275 2266 _(b'rebase: %s, %s (rebase --continue)\n')
2276 2267 % (
2277 2268 ui.label(_(b'%d rebased'), b'rebase.rebased') % numrebased,
2278 2269 ui.label(_(b'%d remaining'), b'rebase.remaining')
2279 2270 % (len(state) - numrebased),
2280 2271 )
2281 2272 )
2282 2273
2283 2274
2284 2275 def uisetup(ui):
2285 2276 # Replace pull with a wrapper to provide --rebase option
2286 2277 entry = extensions.wrapcommand(commands.table, b'pull', pullrebase)
2287 2278 entry[1].append(
2288 2279 (b'', b'rebase', None, _(b"rebase working directory to branch head"))
2289 2280 )
2290 2281 entry[1].append((b't', b'tool', b'', _(b"specify merge tool for rebase")))
2291 2282 cmdutil.summaryhooks.add(b'rebase', summaryhook)
2292 2283 statemod.addunfinished(
2293 2284 b'rebase',
2294 2285 fname=b'rebasestate',
2295 2286 stopflag=True,
2296 2287 continueflag=True,
2297 2288 abortfunc=abortrebase,
2298 2289 continuefunc=continuerebase,
2299 2290 )
@@ -1,1803 +1,1804 b''
1 1 #
2 2 # This is the mercurial setup script.
3 3 #
4 4 # 'python setup.py install', or
5 5 # 'python setup.py --help' for more options
6 6 import os
7 7
8 8 # Mercurial can't work on 3.6.0 or 3.6.1 due to a bug in % formatting
9 9 # in bytestrings.
10 10 supportedpy = ','.join(
11 11 [
12 12 '>=3.6.2',
13 13 ]
14 14 )
15 15
16 16 import sys, platform
17 17 import sysconfig
18 18
19 19
20 20 def sysstr(s):
21 21 return s.decode('latin-1')
22 22
23 23
24 24 def eprint(*args, **kwargs):
25 25 kwargs['file'] = sys.stderr
26 26 print(*args, **kwargs)
27 27
28 28
29 29 import ssl
30 30
31 31 # ssl.HAS_TLSv1* are preferred to check support but they were added in Python
32 32 # 3.7. Prior to CPython commit 6e8cda91d92da72800d891b2fc2073ecbc134d98
33 33 # (backported to the 3.7 branch), ssl.PROTOCOL_TLSv1_1 / ssl.PROTOCOL_TLSv1_2
34 34 # were defined only if compiled against an OpenSSL version with TLS 1.1 / 1.2
35 35 # support. At the mentioned commit, they were unconditionally defined.
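# A unique sentinel object distinguishes "attribute missing" from
# attributes that are present but falsy when probing the ssl module below.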
36 36 _notset = object()
37 37 has_tlsv1_1 = getattr(ssl, 'HAS_TLSv1_1', _notset)
38 38 if has_tlsv1_1 is _notset:
39 39 has_tlsv1_1 = getattr(ssl, 'PROTOCOL_TLSv1_1', _notset) is not _notset
40 40 has_tlsv1_2 = getattr(ssl, 'HAS_TLSv1_2', _notset)
41 41 if has_tlsv1_2 is _notset:
42 42 has_tlsv1_2 = getattr(ssl, 'PROTOCOL_TLSv1_2', _notset) is not _notset
43 43 if not (has_tlsv1_1 or has_tlsv1_2):
44 44 error = """
45 45 The `ssl` module does not advertise support for TLS 1.1 or TLS 1.2.
46 46 Please make sure that your Python installation was compiled against an OpenSSL
47 47 version enabling these features (likely this requires the OpenSSL version to
48 48 be at least 1.0.1).
49 49 """
50 50 print(error, file=sys.stderr)
51 51 sys.exit(1)
52 52
53 53 DYLIB_SUFFIX = sysconfig.get_config_vars()['EXT_SUFFIX']
54 54
55 55 # Solaris Python packaging brain damage
56 56 try:
57 57 import hashlib
58 58
59 59 sha = hashlib.sha1()
60 60 except ImportError:
61 61 try:
62 62 import sha
63 63
64 64 sha.sha # silence unused import warning
65 65 except ImportError:
66 66 raise SystemExit(
67 67 "Couldn't import standard hashlib (incomplete Python install)."
68 68 )
69 69
70 70 try:
71 71 import zlib
72 72
73 73 zlib.compressobj # silence unused import warning
74 74 except ImportError:
75 75 raise SystemExit(
76 76 "Couldn't import standard zlib (incomplete Python install)."
77 77 )
78 78
79 79 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
80 80 isironpython = False
81 81 try:
82 82 isironpython = (
83 83 platform.python_implementation().lower().find("ironpython") != -1
84 84 )
85 85 except AttributeError:
86 86 pass
87 87
88 88 if isironpython:
89 89 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
90 90 else:
91 91 try:
92 92 import bz2
93 93
94 94 bz2.BZ2Compressor # silence unused import warning
95 95 except ImportError:
96 96 raise SystemExit(
97 97 "Couldn't import standard bz2 (incomplete Python install)."
98 98 )
99 99
100 100 ispypy = "PyPy" in sys.version
101 101
102 102 import ctypes
103 103 import stat, subprocess, time
104 104 import re
105 105 import shutil
106 106 import tempfile
107 107
108 108 # We have issues with setuptools on some platforms and builders. Until
109 109 # those are resolved, setuptools is opt-in except for platforms where
110 110 # we don't have issues.
111 111 issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
112 112 if issetuptools:
113 113 from setuptools import setup
114 114 else:
115 115 from distutils.core import setup
116 116 from distutils.ccompiler import new_compiler
117 117 from distutils.core import Command, Extension
118 118 from distutils.dist import Distribution
119 119 from distutils.command.build import build
120 120 from distutils.command.build_ext import build_ext
121 121 from distutils.command.build_py import build_py
122 122 from distutils.command.build_scripts import build_scripts
123 123 from distutils.command.install import install
124 124 from distutils.command.install_lib import install_lib
125 125 from distutils.command.install_scripts import install_scripts
126 126 from distutils import log
127 127 from distutils.spawn import spawn, find_executable
128 128 from distutils import file_util
129 129 from distutils.errors import (
130 130 CCompilerError,
131 131 DistutilsError,
132 132 DistutilsExecError,
133 133 )
134 134 from distutils.sysconfig import get_python_inc
135 135
136 136
137 137 def write_if_changed(path, content):
138 138 """Write content to a file iff the content hasn't changed."""
139 139 if os.path.exists(path):
140 140 with open(path, 'rb') as fh:
141 141 current = fh.read()
142 142 else:
143 143 current = b''
144 144
145 145 if current != content:
146 146 with open(path, 'wb') as fh:
147 147 fh.write(content)
148 148
149 149
150 150 scripts = ['hg']
151 151 if os.name == 'nt':
152 152 # We remove hg.bat if we are able to build hg.exe.
153 153 scripts.append('contrib/win32/hg.bat')
154 154
155 155
156 156 def cancompile(cc, code):
157 157 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
158 158 devnull = oldstderr = None
159 159 try:
160 160 fname = os.path.join(tmpdir, 'testcomp.c')
161 161 f = open(fname, 'w')
162 162 f.write(code)
163 163 f.close()
164 164 # Redirect stderr to /dev/null to hide any error messages
165 165 # from the compiler.
166 166 # This will have to be changed if we ever have to check
167 167 # for a function on Windows.
168 168 devnull = open('/dev/null', 'w')
169 169 oldstderr = os.dup(sys.stderr.fileno())
170 170 os.dup2(devnull.fileno(), sys.stderr.fileno())
171 171 objects = cc.compile([fname], output_dir=tmpdir)
172 172 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
173 173 return True
174 174 except Exception:
175 175 return False
176 176 finally:
177 177 if oldstderr is not None:
178 178 os.dup2(oldstderr, sys.stderr.fileno())
179 179 if devnull is not None:
180 180 devnull.close()
181 181 shutil.rmtree(tmpdir)
182 182
183 183
184 184 # simplified version of distutils.ccompiler.CCompiler.has_function
185 185 # that actually removes its temporary files.
186 186 def hasfunction(cc, funcname):
187 187 code = 'int main(void) { %s(); }\n' % funcname
188 188 return cancompile(cc, code)
189 189
190 190
191 191 def hasheader(cc, headername):
192 192 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
193 193 return cancompile(cc, code)
194 194
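For context, these two probes feed the platform-specific macro section near the end of this file. A standalone use looks like the sketch below (POSIX only, since cancompile() redirects stderr to /dev/null); both helpers simply return booleans.

    from distutils.ccompiler import new_compiler

    cc = new_compiler()
    print(hasfunction(cc, 'setproctitle'))  # True on BSDs exposing the call
    print(hasheader(cc, 'sys/vfs.h'))       # True on Linux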
195 195
196 196 # py2exe needs to be installed to work
197 197 try:
198 198 import py2exe
199 199
200 200 py2exe.patch_distutils()
201 201 py2exeloaded = True
202 202 # import py2exe's patched Distribution class
203 203 from distutils.core import Distribution
204 204 except ImportError:
205 205 py2exeloaded = False
206 206
207 207
208 208 def runcmd(cmd, env, cwd=None):
209 209 p = subprocess.Popen(
210 210 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
211 211 )
212 212 out, err = p.communicate()
213 213 return p.returncode, out, err
214 214
215 215
216 216 class hgcommand:
217 217 def __init__(self, cmd, env):
218 218 self.cmd = cmd
219 219 self.env = env
220 220
221 221 def run(self, args):
222 222 cmd = self.cmd + args
223 223 returncode, out, err = runcmd(cmd, self.env)
224 224 err = filterhgerr(err)
225 225 if err:
226 226 print("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
227 227 print(err, file=sys.stderr)
228 228 if returncode != 0:
229 229 return b''
230 230 return out
231 231
232 232
233 233 def filterhgerr(err):
234 234 # If root is executing setup.py, but the repository is owned by
235 235 # another user (as in "sudo python setup.py install") we will get
236 236 # trust warnings since the .hg/hgrc file is untrusted. That is
237 237 # fine, we don't want to load it anyway. Python may warn about
238 238 # a missing __init__.py in mercurial/locale, we also ignore that.
239 239 err = [
240 240 e
241 241 for e in err.splitlines()
242 242 if (
243 243 not e.startswith(b'not trusting file')
244 244 and not e.startswith(b'warning: Not importing')
245 245 and not e.startswith(b'obsolete feature not enabled')
246 246 and not e.startswith(b'*** failed to import extension')
247 247 and not e.startswith(b'devel-warn:')
248 248 and not (
249 249 e.startswith(b'(third party extension')
250 250 and e.endswith(b'or newer of Mercurial; disabling)')
251 251 )
252 252 )
253 253 ]
254 254 return b'\n'.join(b' ' + e for e in err)
255 255
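A worked example of the filtering above; filterhgerr() is pure, so this holds in a REPL (the error text is illustrative):

    noise = b'not trusting file .hg/hgrc from untrusted user root\n'
    real = b'abort: something went wrong\n'
    assert filterhgerr(noise) == b''
    assert filterhgerr(noise + real) == b' abort: something went wrong'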
256 256
257 257 def findhg():
258 258 """Try to figure out how we should invoke hg for examining the local
259 259 repository contents.
260 260
261 261 Returns an hgcommand object."""
262 262 # By default, prefer the "hg" command in the user's path. This was
263 263 # presumably the hg command that the user used to create this repository.
264 264 #
265 265 # This repository may require extensions or other settings that would not
266 266 # be enabled by running the hg script directly from this local repository.
267 267 hgenv = os.environ.copy()
268 268 # Use HGPLAIN to disable hgrc settings that would change output formatting,
269 269 # and disable localization for the same reasons.
270 270 hgenv['HGPLAIN'] = '1'
271 271 hgenv['LANGUAGE'] = 'C'
272 272 hgcmd = ['hg']
273 273 # Run a simple "hg log" command just to see if using hg from the user's
274 274 # path works and can successfully interact with this repository. Windows
275 275 # gives precedence to hg.exe in the current directory, so fall back to the
276 276 # python invocation of local hg, where pythonXY.dll can always be found.
277 277 check_cmd = ['log', '-r.', '-Ttest']
278 278 if os.name != 'nt' or not os.path.exists("hg.exe"):
279 279 try:
280 280 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
281 281 except EnvironmentError:
282 282 retcode = -1
283 283 if retcode == 0 and not filterhgerr(err):
284 284 return hgcommand(hgcmd, hgenv)
285 285
286 286 # Fall back to trying the local hg installation.
287 287 hgenv = localhgenv()
288 288 hgcmd = [sys.executable, 'hg']
289 289 try:
290 290 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
291 291 except EnvironmentError:
292 292 retcode = -1
293 293 if retcode == 0 and not filterhgerr(err):
294 294 return hgcommand(hgcmd, hgenv)
295 295
296 296 eprint("/!\\")
297 297 eprint(r"/!\ Unable to find a working hg binary")
298 298 eprint(r"/!\ Version cannot be extract from the repository")
299 299 eprint(r"/!\ Re-run the setup once a first version is built")
300 300 return None
301 301
302 302
303 303 def localhgenv():
304 304 """Get an environment dictionary to use for invoking or importing
305 305 mercurial from the local repository."""
306 306 # Execute hg out of this directory with a custom environment which takes
307 307 # care to not use any hgrc files and do no localization.
308 308 env = {
309 309 'HGMODULEPOLICY': 'py',
310 310 'HGRCPATH': '',
311 311 'LANGUAGE': 'C',
312 312 'PATH': '',
313 313 } # make pypi modules that use os.environ['PATH'] happy
314 314 if 'LD_LIBRARY_PATH' in os.environ:
315 315 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
316 316 if 'SystemRoot' in os.environ:
317 317 # SystemRoot is required by Windows to load various DLLs. See:
318 318 # https://bugs.python.org/issue13524#msg148850
319 319 env['SystemRoot'] = os.environ['SystemRoot']
320 320 return env
321 321
322 322
323 323 version = ''
324 324
325 325
326 326 def _try_get_version():
327 327 hg = findhg()
328 328 if hg is None:
329 329 return ''
330 330 hgid = None
331 331 numerictags = []
332 332 cmd = ['log', '-r', '.', '--template', '{tags}\n']
333 333 pieces = sysstr(hg.run(cmd)).split()
334 334 numerictags = [t for t in pieces if t[0:1].isdigit()]
335 335 hgid = sysstr(hg.run(['id', '-i'])).strip()
336 336 if hgid.count('+') == 2:
337 337 hgid = hgid.replace("+", ".", 1)
338 338 if not hgid:
339 339 eprint("/!\\")
340 340 eprint(r"/!\ Unable to determine hg version from local repository")
341 341 eprint(r"/!\ Failed to retrieve current revision tags")
342 342 return ''
343 343 if numerictags: # tag(s) found
344 344 version = numerictags[-1]
345 345 if hgid.endswith('+'): # propagate the dirty status to the tag
346 346 version += '+'
347 347 else: # no tag found on the checked out revision
348 348 ltagcmd = ['log', '--rev', 'wdir()', '--template', '{latesttag}']
349 349 ltag = sysstr(hg.run(ltagcmd))
350 350 if not ltag:
351 351 eprint("/!\\")
352 352 eprint(r"/!\ Unable to determine hg version from local repository")
353 353 eprint(
354 354 r"/!\ Failed to retrieve current revision distance to lated tag"
355 355 )
356 356 return ''
357 357 changessincecmd = [
358 358 'log',
359 359 '-T',
360 360 'x\n',
361 361 '-r',
362 362 "only(parents(),'%s')" % ltag,
363 363 ]
364 364 changessince = len(hg.run(changessincecmd).splitlines())
365 365 version = '%s+hg%s.%s' % (ltag, changessince, hgid)
366 366 if version.endswith('+'):
367 367 version = version[:-1] + 'local' + time.strftime('%Y%m%d')
368 368 return version
369 369
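To make the branches above concrete, the returned string takes one of these shapes (all values illustrative):

    # tagged, clean checkout:                    '6.4'
    # tagged, dirty working copy:                '6.4+'  ->  '6.4local20240101'
    # 14 commits past tag 6.4, id abcdef123456:  '6.4+hg14.abcdef123456'
    # the same with a dirty working copy:
    #   '6.4+hg14.abcdef123456+'  ->  '6.4+hg14.abcdef123456local20240101'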
370 370
371 371 if os.path.isdir('.hg'):
372 372 version = _try_get_version()
373 373 elif os.path.exists('.hg_archival.txt'):
374 374 kw = dict(
375 375 [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
376 376 )
377 377 if 'tag' in kw:
378 378 version = kw['tag']
379 379 elif 'latesttag' in kw:
380 380 if 'changessincelatesttag' in kw:
381 381 version = (
382 382 '%(latesttag)s+hg%(changessincelatesttag)s.%(node).12s' % kw
383 383 )
384 384 else:
385 385 version = '%(latesttag)s+hg%(latesttagdistance)s.%(node).12s' % kw
386 386 else:
387 387 version = '0+hg' + kw.get('node', '')[:12]
388 388 elif os.path.exists('mercurial/__version__.py'):
389 389 with open('mercurial/__version__.py') as f:
390 390 data = f.read()
391 391 version = re.search('version = b"(.*)"', data).group(1)
392 392 if not version:
393 393 if os.environ.get("MERCURIAL_SETUP_MAKE_LOCAL") == "1":
394 394 version = "0.0+0"
395 395 eprint("/!\\")
396 396 eprint(r"/!\ Using '0.0+0' as the default version")
397 397 eprint(r"/!\ Re-run make local once that first version is built")
398 398 eprint("/!\\")
399 399 else:
400 400 eprint("/!\\")
401 401 eprint(r"/!\ Could not determine the Mercurial version")
402 402 eprint(r"/!\ You need to build a local version first")
403 403 eprint(r"/!\ Run `make local` and try again")
404 404 eprint("/!\\")
405 405 msg = "Run `make local` first to get a working local version"
406 406 raise SystemExit(msg)
407 407
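For reference, the `.hg_archival.txt` branch above consumes a file that `hg archive` writes at the archive root. Illustrative contents (all values made up) and the version they produce:

    # repo: 0123456789abcdef0123456789abcdef01234567
    # node: abcdef123456abcdef123456abcdef123456abcd
    # branch: default
    # latesttag: 6.4
    # latesttagdistance: 12
    # changessincelatesttag: 14
    #
    # -> version == '6.4+hg14.abcdef123456'  (node truncated to 12 hex digits)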
408 408 versionb = version
409 409 if not isinstance(versionb, bytes):
410 410 versionb = versionb.encode('ascii')
411 411
412 412 write_if_changed(
413 413 'mercurial/__version__.py',
414 414 b''.join(
415 415 [
416 416 b'# this file is autogenerated by setup.py\n'
417 417 b'version = b"%s"\n' % versionb,
418 418 ]
419 419 ),
420 420 )
421 421
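The generated mercurial/__version__.py then has exactly two lines, e.g. (version illustrative):

    # this file is autogenerated by setup.py
    version = b"6.4+hg14.abcdef123456"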
422 422
423 423 class hgbuild(build):
424 424 # Insert hgbuildmo first so that files in mercurial/locale/ are found
425 425 # when build_py is run next.
426 426 sub_commands = [('build_mo', None)] + build.sub_commands
427 427
428 428
429 429 class hgbuildmo(build):
430 430
431 431 description = "build translations (.mo files)"
432 432
433 433 def run(self):
434 434 if not find_executable('msgfmt'):
435 435 self.warn(
436 436 "could not find msgfmt executable, no translations "
437 437 "will be built"
438 438 )
439 439 return
440 440
441 441 podir = 'i18n'
442 442 if not os.path.isdir(podir):
443 443 self.warn("could not find %s/ directory" % podir)
444 444 return
445 445
446 446 join = os.path.join
447 447 for po in os.listdir(podir):
448 448 if not po.endswith('.po'):
449 449 continue
450 450 pofile = join(podir, po)
451 451 modir = join('locale', po[:-3], 'LC_MESSAGES')
452 452 mofile = join(modir, 'hg.mo')
453 453 mobuildfile = join('mercurial', mofile)
454 454 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
455 455 if sys.platform != 'sunos5':
456 456 # msgfmt on Solaris does not know about -c
457 457 cmd.append('-c')
458 458 self.mkpath(join('mercurial', modir))
459 459 self.make_file([pofile], mobuildfile, spawn, (cmd,))
460 460
461 461
462 462 class hgdist(Distribution):
463 463 pure = False
464 464 rust = False
465 465 no_rust = False
466 466 cffi = ispypy
467 467
468 468 global_options = Distribution.global_options + [
469 469 ('pure', None, "use pure (slow) Python code instead of C extensions"),
470 470 ('rust', None, "use Rust extensions in addition to C extensions"),
471 471 (
472 472 'no-rust',
473 473 None,
474 474 "do not use Rust extensions additionally to C extensions",
475 475 ),
476 476 ]
477 477
478 478 negative_opt = Distribution.negative_opt.copy()
479 479 boolean_options = ['pure', 'rust', 'no-rust']
480 480 negative_opt['no-rust'] = 'rust'
481 481
482 482 def _set_command_options(self, command_obj, option_dict=None):
483 483 # Not all distutils versions in the wild have boolean_options.
484 484 # This should be cleaned up when we're Python 3 only.
485 485 command_obj.boolean_options = (
486 486 getattr(command_obj, 'boolean_options', []) + self.boolean_options
487 487 )
488 488 return Distribution._set_command_options(
489 489 self, command_obj, option_dict=option_dict
490 490 )
491 491
492 492 def parse_command_line(self):
493 493 ret = Distribution.parse_command_line(self)
494 494 if not (self.rust or self.no_rust):
495 495 hgrustext = os.environ.get('HGWITHRUSTEXT')
496 496 # TODO record it for proper rebuild upon changes
497 497 # (see mercurial/__modulepolicy__.py)
498 498 if hgrustext != 'cpython' and hgrustext is not None:
499 499 if hgrustext:
500 500 msg = 'unknown HGWITHRUSTEXT value: %s' % hgrustext
501 501 print(msg, file=sys.stderr)
502 502 hgrustext = None
503 503 self.rust = hgrustext is not None
504 504 self.no_rust = not self.rust
505 505 return ret
506 506
507 507 def has_ext_modules(self):
508 508 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
509 509 # too late for some cases
510 510 return not self.pure and Distribution.has_ext_modules(self)
511 511
512 512
513 513 # This is ugly as a one-liner. So use a variable.
514 514 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
515 515 buildextnegops['no-zstd'] = 'zstd'
516 516 buildextnegops['no-rust'] = 'rust'
517 517
518 518
519 519 class hgbuildext(build_ext):
520 520 user_options = build_ext.user_options + [
521 521 ('zstd', None, 'compile zstd bindings [default]'),
522 522 ('no-zstd', None, 'do not compile zstd bindings'),
523 523 (
524 524 'rust',
525 525 None,
526 526 'compile Rust extensions if they are in use '
527 527 '(requires Cargo) [default]',
528 528 ),
529 529 ('no-rust', None, 'do not compile Rust extensions'),
530 530 ]
531 531
532 532 boolean_options = build_ext.boolean_options + ['zstd', 'rust']
533 533 negative_opt = buildextnegops
534 534
535 535 def initialize_options(self):
536 536 self.zstd = True
537 537 self.rust = True
538 538
539 539 return build_ext.initialize_options(self)
540 540
541 541 def finalize_options(self):
542 542 # Unless overridden by the end user, build extensions in parallel.
543 543 # Only influences behavior on Python 3.5+.
544 544 if getattr(self, 'parallel', None) is None:
545 545 self.parallel = True
546 546
547 547 return build_ext.finalize_options(self)
548 548
549 549 def build_extensions(self):
550 550 ruststandalones = [
551 551 e for e in self.extensions if isinstance(e, RustStandaloneExtension)
552 552 ]
553 553 self.extensions = [
554 554 e for e in self.extensions if e not in ruststandalones
555 555 ]
556 556 # Filter out zstd if disabled via argument.
557 557 if not self.zstd:
558 558 self.extensions = [
559 559 e for e in self.extensions if e.name != 'mercurial.zstd'
560 560 ]
561 561
562 562 # Build Rust standalone extensions if they'll be used
563 563 # and their build is not explicitly disabled (e.g. for an
564 564 # external build, as Linux distributions would do)
565 565 if self.distribution.rust and self.rust:
566 566 if not sys.platform.startswith('linux'):
567 567 self.warn(
568 568 "rust extensions have only been tested on Linux "
569 569 "and may not behave correctly on other platforms"
570 570 )
571 571
572 572 for rustext in ruststandalones:
573 573 rustext.build('' if self.inplace else self.build_lib)
574 574
575 575 return build_ext.build_extensions(self)
576 576
577 577 def build_extension(self, ext):
578 578 if (
579 579 self.distribution.rust
580 580 and self.rust
581 581 and isinstance(ext, RustExtension)
582 582 ):
583 583 ext.rustbuild()
584 584 try:
585 585 build_ext.build_extension(self, ext)
586 586 except CCompilerError:
587 587 if not getattr(ext, 'optional', False):
588 588 raise
589 589 log.warn(
590 590 "Failed to build optional extension '%s' (skipping)", ext.name
591 591 )
592 592
593 593
594 594 class hgbuildscripts(build_scripts):
595 595 def run(self):
596 596 if os.name != 'nt' or self.distribution.pure:
597 597 return build_scripts.run(self)
598 598
599 599 exebuilt = False
600 600 try:
601 601 self.run_command('build_hgexe')
602 602 exebuilt = True
603 603 except (DistutilsError, CCompilerError):
604 604 log.warn('failed to build optional hg.exe')
605 605
606 606 if exebuilt:
607 607 # Copying hg.exe to the scripts build directory ensures it is
608 608 # installed by the install_scripts command.
609 609 hgexecommand = self.get_finalized_command('build_hgexe')
610 610 dest = os.path.join(self.build_dir, 'hg.exe')
611 611 self.mkpath(self.build_dir)
612 612 self.copy_file(hgexecommand.hgexepath, dest)
613 613
614 614 # Remove hg.bat because it is redundant with hg.exe.
615 615 self.scripts.remove('contrib/win32/hg.bat')
616 616
617 617 return build_scripts.run(self)
618 618
619 619
620 620 class hgbuildpy(build_py):
621 621 def finalize_options(self):
622 622 build_py.finalize_options(self)
623 623
624 624 if self.distribution.pure:
625 625 self.distribution.ext_modules = []
626 626 elif self.distribution.cffi:
627 627 from mercurial.cffi import (
628 628 bdiffbuild,
629 629 mpatchbuild,
630 630 )
631 631
632 632 exts = [
633 633 mpatchbuild.ffi.distutils_extension(),
634 634 bdiffbuild.ffi.distutils_extension(),
635 635 ]
636 636 # cffi modules go here
637 637 if sys.platform == 'darwin':
638 638 from mercurial.cffi import osutilbuild
639 639
640 640 exts.append(osutilbuild.ffi.distutils_extension())
641 641 self.distribution.ext_modules = exts
642 642 else:
643 643 h = os.path.join(get_python_inc(), 'Python.h')
644 644 if not os.path.exists(h):
645 645 raise SystemExit(
646 646 'Python headers are required to build '
647 647 'Mercurial but weren\'t found in %s' % h
648 648 )
649 649
650 650 def run(self):
651 651 basepath = os.path.join(self.build_lib, 'mercurial')
652 652 self.mkpath(basepath)
653 653
654 654 rust = self.distribution.rust
655 655 if self.distribution.pure:
656 656 modulepolicy = 'py'
657 657 elif self.build_lib == '.':
658 658 # in-place build should run without rebuilding the C and Rust extensions
659 659 modulepolicy = 'rust+c-allow' if rust else 'allow'
660 660 else:
661 661 modulepolicy = 'rust+c' if rust else 'c'
662 662
663 663 content = b''.join(
664 664 [
665 665 b'# this file is autogenerated by setup.py\n',
666 666 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
667 667 ]
668 668 )
669 669 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)
670 670
671 671 build_py.run(self)
672 672
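hgbuildpy's generated mercurial/__modulepolicy__.py has the same two-line shape; for a default C build it reads:

    # this file is autogenerated by setup.py
    modulepolicy = b"c"

The values produced above are b"py" (--pure), b"allow" or b"rust+c-allow" (in-place builds), and b"c" or b"rust+c" otherwise.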
673 673
674 674 class buildhgextindex(Command):
675 675 description = 'generate prebuilt index of hgext (for frozen package)'
676 676 user_options = []
677 677 _indexfilename = 'hgext/__index__.py'
678 678
679 679 def initialize_options(self):
680 680 pass
681 681
682 682 def finalize_options(self):
683 683 pass
684 684
685 685 def run(self):
686 686 if os.path.exists(self._indexfilename):
687 687 with open(self._indexfilename, 'w') as f:
688 688 f.write('# empty\n')
689 689
690 690 # no extensions are enabled here, so disabled() lists everything
691 691 code = (
692 692 'import pprint; from mercurial import extensions; '
693 693 'ext = extensions.disabled();'
694 694 'ext.pop("__index__", None);'
695 695 'pprint.pprint(ext)'
696 696 )
697 697 returncode, out, err = runcmd(
698 698 [sys.executable, '-c', code], localhgenv()
699 699 )
700 700 if err or returncode != 0:
701 701 raise DistutilsExecError(err)
702 702
703 703 with open(self._indexfilename, 'wb') as f:
704 704 f.write(b'# this file is autogenerated by setup.py\n')
705 705 f.write(b'docs = ')
706 706 f.write(out)
707 707
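The generated hgext/__index__.py is then a pprint'ed dict mapping extension names to their one-line help summaries, roughly (entries abridged and illustrative):

    # this file is autogenerated by setup.py
    docs = {'acl': '...',
            'churn': '...',
            'rebase': '...'}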
708 708
709 709 class buildhgexe(build_ext):
710 710 description = 'compile hg.exe from mercurial/exewrapper.c'
711 711
712 712 LONG_PATHS_MANIFEST = """\
713 713 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
714 714 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
715 715 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
716 716 <security>
717 717 <requestedPrivileges>
718 718 <requestedExecutionLevel
719 719 level="asInvoker"
720 720 uiAccess="false"
721 721 />
722 722 </requestedPrivileges>
723 723 </security>
724 724 </trustInfo>
725 725 <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
726 726 <application>
727 727 <!-- Windows Vista -->
728 728 <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
729 729 <!-- Windows 7 -->
730 730 <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
731 731 <!-- Windows 8 -->
732 732 <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
733 733 <!-- Windows 8.1 -->
734 734 <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
735 735 <!-- Windows 10 and Windows 11 -->
736 736 <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
737 737 </application>
738 738 </compatibility>
739 739 <application xmlns="urn:schemas-microsoft-com:asm.v3">
740 740 <windowsSettings
741 741 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
742 742 <ws2:longPathAware>true</ws2:longPathAware>
743 743 </windowsSettings>
744 744 </application>
745 745 <dependency>
746 746 <dependentAssembly>
747 747 <assemblyIdentity type="win32"
748 748 name="Microsoft.Windows.Common-Controls"
749 749 version="6.0.0.0"
750 750 processorArchitecture="*"
751 751 publicKeyToken="6595b64144ccf1df"
752 752 language="*" />
753 753 </dependentAssembly>
754 754 </dependency>
755 755 </assembly>
756 756 """
757 757
758 758 def initialize_options(self):
759 759 build_ext.initialize_options(self)
760 760
761 761 def build_extensions(self):
762 762 if os.name != 'nt':
763 763 return
764 764 if isinstance(self.compiler, HackedMingw32CCompiler):
765 765 self.compiler.compiler_so = self.compiler.compiler # no -mdll
766 766 self.compiler.dll_libraries = [] # no -lmsvcr90
767 767
768 768 pythonlib = None
769 769
770 770 dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
771 771 self.hgtarget = os.path.join(dirname, 'hg')
772 772
773 773 if getattr(sys, 'dllhandle', None):
774 774 # Different Python installs can have different Python library
775 775 # names. e.g. the official CPython distribution uses pythonXY.dll
776 776 # and MinGW uses libpythonX.Y.dll.
777 777 _kernel32 = ctypes.windll.kernel32
778 778 _kernel32.GetModuleFileNameA.argtypes = [
779 779 ctypes.c_void_p,
780 780 ctypes.c_void_p,
781 781 ctypes.c_ulong,
782 782 ]
783 783 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
784 784 size = 1000
785 785 buf = ctypes.create_string_buffer(size + 1)
786 786 filelen = _kernel32.GetModuleFileNameA(
787 787 sys.dllhandle, ctypes.byref(buf), size
788 788 )
789 789
790 790 if filelen > 0 and filelen != size:
791 791 dllbasename = os.path.basename(buf.value)
792 792 if not dllbasename.lower().endswith(b'.dll'):
793 793 raise SystemExit(
794 794 'Python DLL does not end with .dll: %s' % dllbasename
795 795 )
796 796 pythonlib = dllbasename[:-4]
797 797
798 798 # Copy the pythonXY.dll next to the binary so that it runs
799 799 # without tampering with PATH.
800 800 dest = os.path.join(
801 801 os.path.dirname(self.hgtarget),
802 802 os.fsdecode(dllbasename),
803 803 )
804 804
805 805 if not os.path.exists(dest):
806 806 shutil.copy(buf.value, dest)
807 807
808 808 # Also overwrite python3.dll so that hgext.git is usable.
809 809 # TODO: also handle the MSYS flavor
810 810 python_x = os.path.join(
811 811 os.path.dirname(os.fsdecode(buf.value)),
812 812 "python3.dll",
813 813 )
814 814
815 815 if os.path.exists(python_x):
816 816 dest = os.path.join(
817 817 os.path.dirname(self.hgtarget),
818 818 os.path.basename(python_x),
819 819 )
820 820
821 821 shutil.copy(python_x, dest)
822 822
823 823 if not pythonlib:
824 824 log.warn(
825 825 'could not determine Python DLL filename; assuming pythonXY'
826 826 )
827 827
828 828 hv = sys.hexversion
829 829 pythonlib = b'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)
830 830
831 831 log.info('using %s as Python library name' % pythonlib)
832 832 with open('mercurial/hgpythonlib.h', 'wb') as f:
833 833 f.write(b'/* this file is autogenerated by setup.py */\n')
834 834 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
835 835
836 836 objects = self.compiler.compile(
837 837 ['mercurial/exewrapper.c'],
838 838 output_dir=self.build_temp,
839 839 macros=[('_UNICODE', None), ('UNICODE', None)],
840 840 )
841 841 self.compiler.link_executable(
842 842 objects, self.hgtarget, libraries=[], output_dir=self.build_temp
843 843 )
844 844
845 845 self.addlongpathsmanifest()
846 846
847 847 def addlongpathsmanifest(self):
848 848 """Add manifest pieces so that hg.exe understands long paths
849 849
850 850 Why resource #1 must be used for .exe manifests is unclear; no
851 851 explanation intelligible to mortals turned up, but it seems to work.
852 852 """
853 853 exefname = self.compiler.executable_filename(self.hgtarget)
854 854 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
855 855 os.close(fdauto)
856 856 with open(manfname, 'w', encoding="UTF-8") as f:
857 857 f.write(self.LONG_PATHS_MANIFEST)
858 858 log.info("long paths manifest is written to '%s'" % manfname)
859 859 outputresource = '-outputresource:%s;#1' % exefname
860 860 log.info("running mt.exe to update hg.exe's manifest in-place")
861 861
862 862 self.spawn(
863 863 [
864 864 self.compiler.mt,
865 865 '-nologo',
866 866 '-manifest',
867 867 manfname,
868 868 outputresource,
869 869 ]
870 870 )
871 871 log.info("done updating hg.exe's manifest")
872 872 os.remove(manfname)
873 873
874 874 @property
875 875 def hgexepath(self):
876 876 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
877 877 return os.path.join(self.build_temp, dir, 'hg.exe')
878 878
879 879
880 880 class hgbuilddoc(Command):
881 881 description = 'build documentation'
882 882 user_options = [
883 883 ('man', None, 'generate man pages'),
884 884 ('html', None, 'generate html pages'),
885 885 ]
886 886
887 887 def initialize_options(self):
888 888 self.man = None
889 889 self.html = None
890 890
891 891 def finalize_options(self):
892 892 # If --man or --html are set, only generate what we're told to.
893 893 # Otherwise generate everything.
894 894 have_subset = self.man is not None or self.html is not None
895 895
896 896 if have_subset:
897 897 self.man = True if self.man else False
898 898 self.html = True if self.html else False
899 899 else:
900 900 self.man = True
901 901 self.html = True
902 902
903 903 def run(self):
904 904 def normalizecrlf(p):
905 905 with open(p, 'rb') as fh:
906 906 orig = fh.read()
907 907
908 908 if b'\r\n' not in orig:
909 909 return
910 910
911 911 log.info('normalizing %s to LF line endings' % p)
912 912 with open(p, 'wb') as fh:
913 913 fh.write(orig.replace(b'\r\n', b'\n'))
914 914
915 915 def gentxt(root):
916 916 txt = 'doc/%s.txt' % root
917 917 log.info('generating %s' % txt)
918 918 res, out, err = runcmd(
919 919 [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
920 920 )
921 921 if res:
922 922 raise SystemExit(
923 923 'error running gendoc.py: %s'
924 924 % '\n'.join([sysstr(out), sysstr(err)])
925 925 )
926 926
927 927 with open(txt, 'wb') as fh:
928 928 fh.write(out)
929 929
930 930 def gengendoc(root):
931 931 gendoc = 'doc/%s.gendoc.txt' % root
932 932
933 933 log.info('generating %s' % gendoc)
934 934 res, out, err = runcmd(
935 935 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
936 936 os.environ,
937 937 cwd='doc',
938 938 )
939 939 if res:
940 940 raise SystemExit(
941 941 'error running gendoc: %s'
942 942 % '\n'.join([sysstr(out), sysstr(err)])
943 943 )
944 944
945 945 with open(gendoc, 'wb') as fh:
946 946 fh.write(out)
947 947
948 948 def genman(root):
949 949 log.info('generating doc/%s' % root)
950 950 res, out, err = runcmd(
951 951 [
952 952 sys.executable,
953 953 'runrst',
954 954 'hgmanpage',
955 955 '--halt',
956 956 'warning',
957 957 '--strip-elements-with-class',
958 958 'htmlonly',
959 959 '%s.txt' % root,
960 960 root,
961 961 ],
962 962 os.environ,
963 963 cwd='doc',
964 964 )
965 965 if res:
966 966 raise SystemExit(
967 967 'error running runrst: %s'
968 968 % '\n'.join([sysstr(out), sysstr(err)])
969 969 )
970 970
971 971 normalizecrlf('doc/%s' % root)
972 972
973 973 def genhtml(root):
974 974 log.info('generating doc/%s.html' % root)
975 975 res, out, err = runcmd(
976 976 [
977 977 sys.executable,
978 978 'runrst',
979 979 'html',
980 980 '--halt',
981 981 'warning',
982 982 '--link-stylesheet',
983 983 '--stylesheet-path',
984 984 'style.css',
985 985 '%s.txt' % root,
986 986 '%s.html' % root,
987 987 ],
988 988 os.environ,
989 989 cwd='doc',
990 990 )
991 991 if res:
992 992 raise SystemExit(
993 993 'error running runrst: %s'
994 994 % '\n'.join([sysstr(out), sysstr(err)])
995 995 )
996 996
997 997 normalizecrlf('doc/%s.html' % root)
998 998
999 999 # This logic is duplicated in doc/Makefile.
1000 1000 sources = {
1001 1001 f
1002 1002 for f in os.listdir('mercurial/helptext')
1003 1003 if re.search(r'[0-9]\.txt$', f)
1004 1004 }
1005 1005
1006 1006 # common.txt is a one-off.
1007 1007 gentxt('common')
1008 1008
1009 1009 for source in sorted(sources):
1010 1010 assert source[-4:] == '.txt'
1011 1011 root = source[:-4]
1012 1012
1013 1013 gentxt(root)
1014 1014 gengendoc(root)
1015 1015
1016 1016 if self.man:
1017 1017 genman(root)
1018 1018 if self.html:
1019 1019 genhtml(root)
1020 1020
1021 1021
1022 1022 class hginstall(install):
1023 1023
1024 1024 user_options = install.user_options + [
1025 1025 (
1026 1026 'old-and-unmanageable',
1027 1027 None,
1028 1028 'noop, present for eggless setuptools compat',
1029 1029 ),
1030 1030 (
1031 1031 'single-version-externally-managed',
1032 1032 None,
1033 1033 'noop, present for eggless setuptools compat',
1034 1034 ),
1035 1035 ]
1036 1036
1037 1037 sub_commands = install.sub_commands + [
1038 1038 ('install_completion', lambda self: True)
1039 1039 ]
1040 1040
1041 1041 # Also helps setuptools not be sad while we refuse to create eggs.
1042 1042 single_version_externally_managed = True
1043 1043
1044 1044 def get_sub_commands(self):
1045 1045 # Screen out egg related commands to prevent egg generation. But allow
1046 1046 # mercurial.egg-info generation, since that is part of modern
1047 1047 # packaging.
1048 1048 excl = {'bdist_egg'}
1049 1049 return filter(lambda x: x not in excl, install.get_sub_commands(self))
1050 1050
1051 1051
1052 1052 class hginstalllib(install_lib):
1053 1053 """
1054 1054 This is a specialization of install_lib that replaces the copy_file used
1055 1055 there so that it supports setting the mode of files after copying them,
1056 1056 instead of just preserving the mode that the files originally had. If your
1057 1057 system has a umask of something like 027, preserving the permissions when
1058 1058 copying will lead to a broken install.
1059 1059
1060 1060 Note that just passing keep_permissions=False to copy_file would be
1061 1061 insufficient, as it might still be applying a umask.
1062 1062 """
1063 1063
1064 1064 def run(self):
1065 1065 realcopyfile = file_util.copy_file
1066 1066
1067 1067 def copyfileandsetmode(*args, **kwargs):
1068 1068 src, dst = args[0], args[1]
1069 1069 dst, copied = realcopyfile(*args, **kwargs)
1070 1070 if copied:
1071 1071 st = os.stat(src)
1072 1072 # Persist executable bit (apply it to group and other if user
1073 1073 # has it)
1074 1074 if st[stat.ST_MODE] & stat.S_IXUSR:
1075 1075 setmode = int('0755', 8)
1076 1076 else:
1077 1077 setmode = int('0644', 8)
1078 1078 m = stat.S_IMODE(st[stat.ST_MODE])
1079 1079 m = (m & ~int('0777', 8)) | setmode
1080 1080 os.chmod(dst, m)
1081 1081
1082 1082 file_util.copy_file = copyfileandsetmode
1083 1083 try:
1084 1084 install_lib.run(self)
1085 1085 finally:
1086 1086 file_util.copy_file = realcopyfile
1087 1087
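A distilled, runnable restatement of the mode fixup above:

    import stat

    def installed_mode(src_mode):
        # executable files become 0755, everything else 0644,
        # regardless of the packager's umask
        setmode = 0o755 if src_mode & stat.S_IXUSR else 0o644
        return (stat.S_IMODE(src_mode) & ~0o777) | setmode

    assert installed_mode(0o700) == 0o755
    assert installed_mode(0o600) == 0o644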
1088 1088
1089 1089 class hginstallscripts(install_scripts):
1090 1090 """
1091 1091 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1092 1092 the configured directory for modules. If possible, the path is made relative
1093 1093 to the directory for scripts.
1094 1094 """
1095 1095
1096 1096 def initialize_options(self):
1097 1097 install_scripts.initialize_options(self)
1098 1098
1099 1099 self.install_lib = None
1100 1100
1101 1101 def finalize_options(self):
1102 1102 install_scripts.finalize_options(self)
1103 1103 self.set_undefined_options('install', ('install_lib', 'install_lib'))
1104 1104
1105 1105 def run(self):
1106 1106 install_scripts.run(self)
1107 1107
1108 1108 # It only makes sense to replace @LIBDIR@ with the install path if
1109 1109 # the install path is known. For wheels, the logic below calculates
1110 1110 # the libdir to be "../..". This is because the internal layout of a
1111 1111 # wheel archive looks like:
1112 1112 #
1113 1113 # mercurial-3.6.1.data/scripts/hg
1114 1114 # mercurial/__init__.py
1115 1115 #
1116 1116 # When installing wheels, the subdirectories of the "<pkg>.data"
1117 1117 # directory are translated to system local paths and files therein
1118 1118 # are copied in place. The mercurial/* files are installed into the
1119 1119 # site-packages directory. However, the site-packages directory
1120 1120 # isn't known until wheel install time. This means we have no clue
1121 1121 # at wheel generation time what the installed site-packages directory
1122 1122 # will be. And, wheels don't appear to provide the ability to register
1123 1123 # custom code to run during wheel installation. This all means that
1124 1124 # we can't reliably set the libdir in wheels: the default behavior
1125 1125 # of looking in sys.path must do.
1126 1126
1127 1127 if (
1128 1128 os.path.splitdrive(self.install_dir)[0]
1129 1129 != os.path.splitdrive(self.install_lib)[0]
1130 1130 ):
1131 1131 # can't make relative paths from one drive to another, so use an
1132 1132 # absolute path instead
1133 1133 libdir = self.install_lib
1134 1134 else:
1135 1135 libdir = os.path.relpath(self.install_lib, self.install_dir)
1136 1136
1137 1137 for outfile in self.outfiles:
1138 1138 with open(outfile, 'rb') as fp:
1139 1139 data = fp.read()
1140 1140
1141 1141 # skip binary files
1142 1142 if b'\0' in data:
1143 1143 continue
1144 1144
1145 1145 # During local installs, the shebang will be rewritten to the final
1146 1146 # install path. During wheel packaging, the shebang has a special
1147 1147 # value.
1148 1148 if data.startswith(b'#!python'):
1149 1149 log.info(
1150 1150 'not rewriting @LIBDIR@ in %s because install path '
1151 1151 'not known' % outfile
1152 1152 )
1153 1153 continue
1154 1154
1155 1155 data = data.replace(b'@LIBDIR@', libdir.encode('unicode_escape'))
1156 1156 with open(outfile, 'wb') as fp:
1157 1157 fp.write(data)
1158 1158
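For a concrete sense of the rewrite, assume an installed script shipping a placeholder line (the in-tree `hg` script carries something along these lines):

    # before:  libdir = '@LIBDIR@'
    # after a prefix install with scripts in <prefix>/bin and modules in
    # <prefix>/lib/python3.11/site-packages:
    #     libdir = '../lib/python3.11/site-packages'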
1159 1159
1160 1160 class hginstallcompletion(Command):
1161 1161 description = 'Install shell completion'
1162 1162
1163 1163 def initialize_options(self):
1164 1164 self.install_dir = None
1165 1165 self.outputs = []
1166 1166
1167 1167 def finalize_options(self):
1168 1168 self.set_undefined_options(
1169 1169 'install_data', ('install_dir', 'install_dir')
1170 1170 )
1171 1171
1172 1172 def get_outputs(self):
1173 1173 return self.outputs
1174 1174
1175 1175 def run(self):
1176 1176 for src, dir_path, dest in (
1177 1177 (
1178 1178 'bash_completion',
1179 1179 ('share', 'bash-completion', 'completions'),
1180 1180 'hg',
1181 1181 ),
1182 1182 ('zsh_completion', ('share', 'zsh', 'site-functions'), '_hg'),
1183 1183 ):
1184 1184 dir = os.path.join(self.install_dir, *dir_path)
1185 1185 self.mkpath(dir)
1186 1186
1187 1187 dest = os.path.join(dir, dest)
1188 1188 self.outputs.append(dest)
1189 1189 self.copy_file(os.path.join('contrib', src), dest)
1190 1190
1191 1191
1192 1192 # virtualenv installs custom distutils/__init__.py and
1193 1193 # distutils/distutils.cfg files which essentially proxy back to the
1194 1194 # "real" distutils in the main Python install. The presence of this
1195 1195 # directory causes py2exe to pick up the "hacked" distutils package
1196 1196 # from the virtualenv and "import distutils" will fail from the py2exe
1197 1197 # build because the "real" distutils files can't be located.
1198 1198 #
1199 1199 # We work around this by monkeypatching the py2exe code finding Python
1200 1200 # modules to replace the found virtualenv distutils modules with the
1201 1201 # original versions via filesystem scanning. This is a bit hacky. But
1202 1202 # it allows us to use virtualenvs for py2exe packaging, which is more
1203 1203 # deterministic and reproducible.
1204 1204 #
1205 1205 # It's worth noting that the common StackOverflow suggestions for this
1206 1206 # problem involve copying the original distutils files into the
1207 1207 # virtualenv or into the staging directory after setup() is invoked.
1208 1208 # The former is very brittle and can easily break setup(). Our hacking
1209 1209 # of the found modules routine has a similar result as copying the files
1210 1210 # manually. But it makes fewer assumptions about how py2exe works and
1211 1211 # is less brittle.
1212 1212
1213 1213 # This only catches virtualenvs made with virtualenv (as opposed to
1214 1214 # venv, which is likely what Python 3 uses).
1215 1215 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
1216 1216
1217 1217 if py2exehacked:
1218 1218 from distutils.command.py2exe import py2exe as buildpy2exe
1219 1219 from py2exe.mf import Module as py2exemodule
1220 1220
1221 1221 class hgbuildpy2exe(buildpy2exe):
1222 1222 def find_needed_modules(self, mf, files, modules):
1223 1223 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
1224 1224
1225 1225 # Replace virtualenv's distutils modules with the real ones.
1226 1226 modules = {}
1227 1227 for k, v in res.modules.items():
1228 1228 if k != 'distutils' and not k.startswith('distutils.'):
1229 1229 modules[k] = v
1230 1230
1231 1231 res.modules = modules
1232 1232
1233 1233 import opcode
1234 1234
1235 1235 distutilsreal = os.path.join(
1236 1236 os.path.dirname(opcode.__file__), 'distutils'
1237 1237 )
1238 1238
1239 1239 for root, dirs, files in os.walk(distutilsreal):
1240 1240 for f in sorted(files):
1241 1241 if not f.endswith('.py'):
1242 1242 continue
1243 1243
1244 1244 full = os.path.join(root, f)
1245 1245
1246 1246 parents = ['distutils']
1247 1247
1248 1248 if root != distutilsreal:
1249 1249 rel = os.path.relpath(root, distutilsreal)
1250 1250 parents.extend(p for p in rel.split(os.sep))
1251 1251
1252 1252 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1253 1253
1254 1254 if modname.startswith('distutils.tests.'):
1255 1255 continue
1256 1256
1257 1257 if modname.endswith('.__init__'):
1258 1258 modname = modname[: -len('.__init__')]
1259 1259 path = os.path.dirname(full)
1260 1260 else:
1261 1261 path = None
1262 1262
1263 1263 res.modules[modname] = py2exemodule(
1264 1264 modname, full, path=path
1265 1265 )
1266 1266
1267 1267 if 'distutils' not in res.modules:
1268 1268 raise SystemExit('could not find distutils modules')
1269 1269
1270 1270 return res
1271 1271
1272 1272
1273 1273 cmdclass = {
1274 1274 'build': hgbuild,
1275 1275 'build_doc': hgbuilddoc,
1276 1276 'build_mo': hgbuildmo,
1277 1277 'build_ext': hgbuildext,
1278 1278 'build_py': hgbuildpy,
1279 1279 'build_scripts': hgbuildscripts,
1280 1280 'build_hgextindex': buildhgextindex,
1281 1281 'install': hginstall,
1282 1282 'install_completion': hginstallcompletion,
1283 1283 'install_lib': hginstalllib,
1284 1284 'install_scripts': hginstallscripts,
1285 1285 'build_hgexe': buildhgexe,
1286 1286 }
1287 1287
1288 1288 if py2exehacked:
1289 1289 cmdclass['py2exe'] = hgbuildpy2exe
1290 1290
1291 1291 packages = [
1292 1292 'mercurial',
1293 1293 'mercurial.cext',
1294 1294 'mercurial.cffi',
1295 1295 'mercurial.defaultrc',
1296 1296 'mercurial.dirstateutils',
1297 1297 'mercurial.helptext',
1298 1298 'mercurial.helptext.internals',
1299 1299 'mercurial.hgweb',
1300 1300 'mercurial.interfaces',
1301 1301 'mercurial.pure',
1302 1302 'mercurial.templates',
1303 1303 'mercurial.thirdparty',
1304 1304 'mercurial.thirdparty.attr',
1305 'mercurial.thirdparty.jaraco',
1305 1306 'mercurial.thirdparty.zope',
1306 1307 'mercurial.thirdparty.zope.interface',
1307 1308 'mercurial.upgrade_utils',
1308 1309 'mercurial.utils',
1309 1310 'mercurial.revlogutils',
1310 1311 'mercurial.testing',
1311 1312 'hgext',
1312 1313 'hgext.convert',
1313 1314 'hgext.fsmonitor',
1314 1315 'hgext.fastannotate',
1315 1316 'hgext.fsmonitor.pywatchman',
1316 1317 'hgext.git',
1317 1318 'hgext.highlight',
1318 1319 'hgext.hooklib',
1319 1320 'hgext.infinitepush',
1320 1321 'hgext.largefiles',
1321 1322 'hgext.lfs',
1322 1323 'hgext.narrow',
1323 1324 'hgext.remotefilelog',
1324 1325 'hgext.zeroconf',
1325 1326 'hgext3rd',
1326 1327 'hgdemandimport',
1327 1328 ]
1328 1329
1329 1330 for name in os.listdir(os.path.join('mercurial', 'templates')):
1330 1331 if name != '__pycache__' and os.path.isdir(
1331 1332 os.path.join('mercurial', 'templates', name)
1332 1333 ):
1333 1334 packages.append('mercurial.templates.%s' % name)
1334 1335
1335 1336 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1336 1337 # py2exe can't cope with namespace packages very well, so we have to
1337 1338 # install any hgext3rd.* extensions that we want in the final py2exe
1338 1339 # image here. This is gross, but you gotta do what you gotta do.
1339 1340 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1340 1341
1341 1342 common_depends = [
1342 1343 'mercurial/bitmanipulation.h',
1343 1344 'mercurial/compat.h',
1344 1345 'mercurial/cext/util.h',
1345 1346 ]
1346 1347 common_include_dirs = ['mercurial']
1347 1348
1348 1349 common_cflags = []
1349 1350
1350 1351 # MSVC 2008 still needs declarations at the top of the scope, but Python 3.9
1351 1352 # makes declarations not at the top of a scope in the headers.
1352 1353 if os.name != 'nt' and sys.version_info[1] < 9:
1353 1354 common_cflags = ['-Werror=declaration-after-statement']
1354 1355
1355 1356 osutil_cflags = []
1356 1357 osutil_ldflags = []
1357 1358
1358 1359 # platform specific macros
1359 1360 for plat, func in [('bsd', 'setproctitle')]:
1360 1361 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1361 1362 osutil_cflags.append('-DHAVE_%s' % func.upper())
1362 1363
1363 1364 for plat, macro, code in [
1364 1365 (
1365 1366 'bsd|darwin',
1366 1367 'BSD_STATFS',
1367 1368 '''
1368 1369 #include <sys/param.h>
1369 1370 #include <sys/mount.h>
1370 1371 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1371 1372 ''',
1372 1373 ),
1373 1374 (
1374 1375 'linux',
1375 1376 'LINUX_STATFS',
1376 1377 '''
1377 1378 #include <linux/magic.h>
1378 1379 #include <sys/vfs.h>
1379 1380 int main() { struct statfs s; return sizeof(s.f_type); }
1380 1381 ''',
1381 1382 ),
1382 1383 ]:
1383 1384 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1384 1385 osutil_cflags.append('-DHAVE_%s' % macro)
1385 1386
1386 1387 if sys.platform == 'darwin':
1387 1388 osutil_ldflags += ['-framework', 'ApplicationServices']
1388 1389
1389 1390 if sys.platform == 'sunos5':
1390 1391 osutil_ldflags += ['-lsocket']
1391 1392
1392 1393 xdiff_srcs = [
1393 1394 'mercurial/thirdparty/xdiff/xdiffi.c',
1394 1395 'mercurial/thirdparty/xdiff/xprepare.c',
1395 1396 'mercurial/thirdparty/xdiff/xutils.c',
1396 1397 ]
1397 1398
1398 1399 xdiff_headers = [
1399 1400 'mercurial/thirdparty/xdiff/xdiff.h',
1400 1401 'mercurial/thirdparty/xdiff/xdiffi.h',
1401 1402 'mercurial/thirdparty/xdiff/xinclude.h',
1402 1403 'mercurial/thirdparty/xdiff/xmacros.h',
1403 1404 'mercurial/thirdparty/xdiff/xprepare.h',
1404 1405 'mercurial/thirdparty/xdiff/xtypes.h',
1405 1406 'mercurial/thirdparty/xdiff/xutils.h',
1406 1407 ]
1407 1408
1408 1409
1409 1410 class RustCompilationError(CCompilerError):
1410 1411 """Exception class for Rust compilation errors."""
1411 1412
1412 1413
1413 1414 class RustExtension(Extension):
1414 1415 """Base classes for concrete Rust Extension classes."""
1415 1416
1416 1417 rusttargetdir = os.path.join('rust', 'target', 'release')
1417 1418
1418 1419 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1419 1420 Extension.__init__(self, mpath, sources, **kw)
1420 1421 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1421 1422
1422 1423 # adding Rust source and control files to depends so that the extension
1423 1424 # gets rebuilt if they've changed
1424 1425 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1425 1426 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1426 1427 if os.path.exists(cargo_lock):
1427 1428 self.depends.append(cargo_lock)
1428 1429 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1429 1430 self.depends.extend(
1430 1431 os.path.join(dirpath, fname)
1431 1432 for fname in fnames
1432 1433 if os.path.splitext(fname)[1] == '.rs'
1433 1434 )
1434 1435
1435 1436 @staticmethod
1436 1437 def rustdylibsuffix():
1437 1438 """Return the suffix for shared libraries produced by rustc.
1438 1439
1439 1440 See also: https://doc.rust-lang.org/reference/linkage.html
1440 1441 """
1441 1442 if sys.platform == 'darwin':
1442 1443 return '.dylib'
1443 1444 elif os.name == 'nt':
1444 1445 return '.dll'
1445 1446 else:
1446 1447 return '.so'
1447 1448
1448 1449 def rustbuild(self):
1449 1450 env = os.environ.copy()
1450 1451 if 'HGTEST_RESTOREENV' in env:
1451 1452 # Mercurial tests change HOME to a temporary directory,
1452 1453 # but, if installed with rustup, the Rust toolchain needs
1453 1454 # HOME to be correct (otherwise the 'no default toolchain'
1454 1455 # error message is issued and the build fails).
1455 1456 # This happens currently with test-hghave.t, which does
1456 1457 # invoke this build.
1457 1458
1458 1459 # Unix only fix (os.path.expanduser not really reliable if
1459 1460 # HOME is shadowed like this)
1460 1461 import pwd
1461 1462
1462 1463 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1463 1464
1464 1465 cargocmd = ['cargo', 'rustc', '--release']
1465 1466
1466 1467 rust_features = env.get("HG_RUST_FEATURES")
1467 1468 if rust_features:
1468 1469 cargocmd.extend(('--features', rust_features))
1469 1470
1470 1471 cargocmd.append('--')
1471 1472 if sys.platform == 'darwin':
1472 1473 cargocmd.extend(
1473 1474 ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
1474 1475 )
1475 1476 try:
1476 1477 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1477 1478 except FileNotFoundError:
1478 1479 raise RustCompilationError("Cargo not found")
1479 1480 except PermissionError:
1480 1481 raise RustCompilationError(
1481 1482 "Cargo found, but permission to execute it is denied"
1482 1483 )
1483 1484 except subprocess.CalledProcessError:
1484 1485 raise RustCompilationError(
1485 1486 "Cargo failed. Working directory: %r, "
1486 1487 "command: %r, environment: %r"
1487 1488 % (self.rustsrcdir, cargocmd, env)
1488 1489 )
1489 1490
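Spelled out, the invocation assembled above is (bracketed parts optional, run with cwd=rust/<subcrate>):

    # cargo rustc --release [--features $HG_RUST_FEATURES] -- \
    #     [-C link-arg=-undefined -C link-arg=dynamic_lookup]   # macOS only
    #
    # The compiled dylib lands in rust/target/release/, where
    # RustStandaloneExtension.build() picks it up.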
1490 1491
1491 1492 class RustStandaloneExtension(RustExtension):
1492 1493 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1493 1494 RustExtension.__init__(
1494 1495 self, pydottedname, [], dylibname, rustcrate, **kw
1495 1496 )
1496 1497 self.dylibname = dylibname
1497 1498
1498 1499 def build(self, target_dir):
1499 1500 self.rustbuild()
1500 1501 target = [target_dir]
1501 1502 target.extend(self.name.split('.'))
1502 1503 target[-1] += DYLIB_SUFFIX
1503 1504 target = os.path.join(*target)
1504 1505 os.makedirs(os.path.dirname(target), exist_ok=True)
1505 1506 shutil.copy2(
1506 1507 os.path.join(
1507 1508 self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
1508 1509 ),
1509 1510 target,
1510 1511 )
1511 1512
1512 1513
1513 1514 extmodules = [
1514 1515 Extension(
1515 1516 'mercurial.cext.base85',
1516 1517 ['mercurial/cext/base85.c'],
1517 1518 include_dirs=common_include_dirs,
1518 1519 extra_compile_args=common_cflags,
1519 1520 depends=common_depends,
1520 1521 ),
1521 1522 Extension(
1522 1523 'mercurial.cext.bdiff',
1523 1524 ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1524 1525 include_dirs=common_include_dirs,
1525 1526 extra_compile_args=common_cflags,
1526 1527 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
1527 1528 ),
1528 1529 Extension(
1529 1530 'mercurial.cext.mpatch',
1530 1531 ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
1531 1532 include_dirs=common_include_dirs,
1532 1533 extra_compile_args=common_cflags,
1533 1534 depends=common_depends,
1534 1535 ),
1535 1536 Extension(
1536 1537 'mercurial.cext.parsers',
1537 1538 [
1538 1539 'mercurial/cext/charencode.c',
1539 1540 'mercurial/cext/dirs.c',
1540 1541 'mercurial/cext/manifest.c',
1541 1542 'mercurial/cext/parsers.c',
1542 1543 'mercurial/cext/pathencode.c',
1543 1544 'mercurial/cext/revlog.c',
1544 1545 ],
1545 1546 include_dirs=common_include_dirs,
1546 1547 extra_compile_args=common_cflags,
1547 1548 depends=common_depends
1548 1549 + [
1549 1550 'mercurial/cext/charencode.h',
1550 1551 'mercurial/cext/revlog.h',
1551 1552 ],
1552 1553 ),
1553 1554 Extension(
1554 1555 'mercurial.cext.osutil',
1555 1556 ['mercurial/cext/osutil.c'],
1556 1557 include_dirs=common_include_dirs,
1557 1558 extra_compile_args=common_cflags + osutil_cflags,
1558 1559 extra_link_args=osutil_ldflags,
1559 1560 depends=common_depends,
1560 1561 ),
1561 1562 Extension(
1562 1563 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
1563 1564 [
1564 1565 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1565 1566 ],
1566 1567 extra_compile_args=common_cflags,
1567 1568 ),
1568 1569 Extension(
1569 1570 'mercurial.thirdparty.sha1dc',
1570 1571 [
1571 1572 'mercurial/thirdparty/sha1dc/cext.c',
1572 1573 'mercurial/thirdparty/sha1dc/lib/sha1.c',
1573 1574 'mercurial/thirdparty/sha1dc/lib/ubc_check.c',
1574 1575 ],
1575 1576 extra_compile_args=common_cflags,
1576 1577 ),
1577 1578 Extension(
1578 1579 'hgext.fsmonitor.pywatchman.bser',
1579 1580 ['hgext/fsmonitor/pywatchman/bser.c'],
1580 1581 extra_compile_args=common_cflags,
1581 1582 ),
1582 1583 RustStandaloneExtension(
1583 1584 'mercurial.rustext',
1584 1585 'hg-cpython',
1585 1586 'librusthg',
1586 1587 ),
1587 1588 ]
1588 1589
1589 1590
1590 1591 sys.path.insert(0, 'contrib/python-zstandard')
1591 1592 import setup_zstd
1592 1593
1593 1594 zstd = setup_zstd.get_c_extension(
1594 1595 name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
1595 1596 )
1596 1597 zstd.extra_compile_args += common_cflags
1597 1598 extmodules.append(zstd)
1598 1599
1599 1600 try:
1600 1601 from distutils import cygwinccompiler
1601 1602
1602 1603 # the -mno-cygwin option has been deprecated for years
1603 1604 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1604 1605
1605 1606 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1606 1607 def __init__(self, *args, **kwargs):
1607 1608 mingw32compilerclass.__init__(self, *args, **kwargs)
1608 1609 for i in 'compiler compiler_so linker_exe linker_so'.split():
1609 1610 try:
1610 1611 getattr(self, i).remove('-mno-cygwin')
1611 1612 except ValueError:
1612 1613 pass
1613 1614
1614 1615 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1615 1616 except ImportError:
1616 1617 # the cygwinccompiler package is not available on some Python
1617 1618 # distributions like the ones from the optware project for Synology
1618 1619 # DiskStation boxes
1619 1620 class HackedMingw32CCompiler:
1620 1621 pass
1621 1622
1622 1623
1623 1624 if os.name == 'nt':
1624 1625 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1625 1626 # extra_link_args to distutils.extensions.Extension() doesn't have any
1626 1627 # effect.
1627 1628 from distutils import msvccompiler
1628 1629
1629 1630 msvccompilerclass = msvccompiler.MSVCCompiler
1630 1631
1631 1632 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1632 1633 def initialize(self):
1633 1634 msvccompilerclass.initialize(self)
1634 1635 # "warning LNK4197: export 'func' specified multiple times"
1635 1636 self.ldflags_shared.append('/ignore:4197')
1636 1637 self.ldflags_shared_debug.append('/ignore:4197')
1637 1638
1638 1639 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1639 1640
1640 1641 packagedata = {
1641 1642 'mercurial': [
1642 1643 'locale/*/LC_MESSAGES/hg.mo',
1643 1644 'dummycert.pem',
1644 1645 ],
1645 1646 'mercurial.defaultrc': [
1646 1647 '*.rc',
1647 1648 ],
1648 1649 'mercurial.helptext': [
1649 1650 '*.txt',
1650 1651 ],
1651 1652 'mercurial.helptext.internals': [
1652 1653 '*.txt',
1653 1654 ],
1654 1655 'mercurial.thirdparty.attr': [
1655 1656 '*.pyi',
1656 1657 'py.typed',
1657 1658 ],
1658 1659 }
1659 1660
1660 1661
1661 1662 def ordinarypath(p):
1662 1663 return p and p[0] != '.' and p[-1] != '~'
1663 1664
1664 1665
1665 1666 for root in ('templates',):
1666 1667 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1667 1668 packagename = curdir.replace(os.sep, '.')
1668 1669 packagedata[packagename] = list(filter(ordinarypath, files))

datafiles = []

# distutils expects version to be str/unicode. Converting it to
# unicode on Python 2 still works because it won't contain any
# non-ascii bytes and will be implicitly converted back to bytes
# when operated on.
assert isinstance(version, str)
setupversion = version

extra = {}

py2exepackages = [
    'hgdemandimport',
    'hgext3rd',
    'hgext',
    'email',
    # implicitly imported per module policy
    # (cffi wouldn't be used as a frozen exe)
    'mercurial.cext',
    #'mercurial.cffi',
    'mercurial.pure',
]

py2exe_includes = []

py2exeexcludes = []
py2exedllexcludes = ['crypt32.dll']

if issetuptools:
    extra['python_requires'] = supportedpy

if py2exeloaded:
    extra['console'] = [
        {
            'script': 'hg',
            'copyright': 'Copyright (C) 2005-2023 Olivia Mackall and others',
            'product_version': version,
        }
    ]
    # Sub command of 'build' because 'py2exe' does not handle sub_commands.
    # Need to override hgbuild because it has a private copy of
    # build.sub_commands.
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
    # put DLLs in a subdirectory so that they won't pollute PATH
    extra['zipfile'] = 'lib/library.zip'
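    # Resulting layout, for illustration: hg.exe sits at the install root
    # while the bundled code lands under lib/ (e.g. lib/library.zip), so
    # only the executable's directory needs to be on PATH.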

    # We allow some configuration to be supplemented via environment
    # variables. This is better than setup.cfg files because it allows
    # supplementing configs instead of replacing them.
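    # For example (hypothetical values), each variable holds a
    # space-separated list, so the following build would append two
    # packages to the defaults above instead of replacing the list:
    #   HG_PY2EXE_EXTRA_PACKAGES="win32com pythoncom" python setup.py py2exe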
    extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
    if extrapackages:
        py2exepackages.extend(extrapackages.split(' '))

    extra_includes = os.environ.get('HG_PY2EXE_EXTRA_INCLUDES')
    if extra_includes:
        py2exe_includes.extend(extra_includes.split(' '))

    excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
    if excludes:
        py2exeexcludes.extend(excludes.split(' '))

    dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
    if dllexcludes:
        py2exedllexcludes.extend(dllexcludes.split(' '))

if os.environ.get('PYOXIDIZER'):
    hgbuild.sub_commands.insert(0, ('build_hgextindex', None))

if os.name == 'nt':
    # Windows binary file versions for exe/dll files must have the
    # form W.X.Y.Z, where W, X, Y, Z are numbers in the range 0..65535
    setupversion = setupversion.split(r'+', 1)[0]
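    # e.g. a local version such as '6.5.1+hg20.abc123' (hypothetical) is
    # trimmed to '6.5.1' so it satisfies the W.X.Y.Z constraint above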

setup(
    name='mercurial',
    version=setupversion,
    author='Olivia Mackall and many others',
    author_email='mercurial@mercurial-scm.org',
    url='https://mercurial-scm.org/',
    download_url='https://mercurial-scm.org/release/',
    description=(
        'Fast scalable distributed SCM (revision control, version '
        'control) system'
    ),
    long_description=(
        'Mercurial is a distributed SCM tool written in Python.'
        ' It is used by a number of large projects that require'
        ' fast, reliable distributed revision control, such as '
        'Mozilla.'
    ),
    license='GNU GPLv2 or any later version',
    classifiers=[
        'Development Status :: 6 - Mature',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU General Public License (GPL)',
        'Natural Language :: Danish',
        'Natural Language :: English',
        'Natural Language :: German',
        'Natural Language :: Italian',
        'Natural Language :: Japanese',
        'Natural Language :: Portuguese (Brazilian)',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: OS Independent',
        'Operating System :: POSIX',
        'Programming Language :: C',
        'Programming Language :: Python',
        'Topic :: Software Development :: Version Control',
    ],
    scripts=scripts,
    packages=packages,
    ext_modules=extmodules,
    data_files=datafiles,
    package_data=packagedata,
    cmdclass=cmdclass,
    distclass=hgdist,
    options={
        'py2exe': {
            'bundle_files': 3,
            'dll_excludes': py2exedllexcludes,
            'includes': py2exe_includes,
            'excludes': py2exeexcludes,
            'packages': py2exepackages,
        },
        'bdist_mpkg': {
            'zipdist': False,
            'license': 'COPYING',
            'readme': 'contrib/packaging/macosx/Readme.html',
            'welcome': 'contrib/packaging/macosx/Welcome.html',
        },
    },
    **extra
)