##// END OF EJS Templates
evolution: rename divergent to content-divergent...
Boris Feld -
r33651:2194a872 default
parent child Browse files
Show More
@@ -1,5444 +1,5444 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import os
13 13 import re
14 14 import sys
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23 from . import (
24 24 archival,
25 25 bookmarks,
26 26 bundle2,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 dirstateguard,
33 33 discovery,
34 34 encoding,
35 35 error,
36 36 exchange,
37 37 extensions,
38 38 formatter,
39 39 graphmod,
40 40 hbisect,
41 41 help,
42 42 hg,
43 43 lock as lockmod,
44 44 merge as mergemod,
45 45 obsolete,
46 46 patch,
47 47 phases,
48 48 pycompat,
49 49 rcutil,
50 50 registrar,
51 51 revsetlang,
52 52 scmutil,
53 53 server,
54 54 sshserver,
55 55 streamclone,
56 56 tags as tagsmod,
57 57 templatekw,
58 58 ui as uimod,
59 59 util,
60 60 )
61 61
# Convenience alias: release() unlocks a sequence of repository locks.
release = lockmod.release

# The global command table maps command specs to their implementations.
# It is seeded with the debug commands so 'hg debug*' commands register
# alongside the regular commands defined in this module.
table = {}
table.update(debugcommandsmod.command._table)

# Decorator used below to register each command into the shared table.
command = registrar.command(table)

# common command options

# Options accepted by every hg command (repository selection, verbosity,
# encoding, debugging, paging, ...).  Each entry is
# (shortname, longname, default, help text[, value label]).
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'color', '',
     # i18n: 'always', 'auto', 'never', and 'debug' are keywords
     # and should not be translated
     _("when to colorize (boolean, always, auto, never, or debug)"),
     _('TYPE')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
    ('', 'pager', 'auto',
     _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]

# Re-export the shared option lists from cmdutil so the command tables in
# this module can reference them by short name.
dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
commitopts = cmdutil.commitopts
commitopts2 = cmdutil.commitopts2
formatteropts = cmdutil.formatteropts
templateopts = cmdutil.templateopts
logopts = cmdutil.logopts
diffopts = cmdutil.diffopts
diffwsopts = cmdutil.diffwsopts
diffopts2 = cmdutil.diffopts2
mergetoolopts = cmdutil.mergetoolopts
similarityopts = cmdutil.similarityopts
subrepoopts = cmdutil.subrepoopts
debugrevlogopts = cmdutil.debugrevlogopts
121 121 # Commands start here, listed alphabetically
122 122
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c  foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    # option keys must be bytes for scmutil.match under Python 3
    matcher = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    bad = cmdutil.add(ui, repo, matcher, "", False, **opts)
    # non-zero exit when any file was rejected
    return 1 if bad else 0
173 173
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    # --similarity arrives as a string; absent/empty means the default 100
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if not 0 <= sim <= 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    # scmutil.addremove expects the ratio as a 0.0-1.0 fraction
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
249 249
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance')),
    ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    rootfm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        # annotating the working directory: lines introduced by the
        # uncommitted changes have no node/rev yet, so the formatting
        # helpers below fall back to the first parent with a marker suffix
        def hexfn(node):
            if node is None:
                return None
            else:
                return rootfm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % rootfm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        # annotating a committed revision: no fallbacks needed
        hexfn = rootfm.hexfunc
        formatrev = formathex = pycompat.bytestr

    # one entry per displayable column:
    # (option name, separator, value extractor, plain-text formatter)
    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    # template/JSON output uses different field names for these columns
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    ui.pager('annotate')

    if rootfm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        # structured output (templates/JSON) gets raw values, not text
        def makefunc(get, fmt):
            return get
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    skiprevs = opts.get('skip')
    if skiprevs:
        skiprevs = scmutil.revrange(repo, skiprevs)

    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(abspath=abs, path=m.rel(abs))
        if not opts.get('text') and fctx.isbinary():
            rootfm.plain(_("%s: binary file\n")
                         % ((pats and m.rel(abs)) or abs))
            continue

        fm = rootfm.nested('lines')
        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              skiprevs=skiprevs, diffopts=diffopts)
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        # compute per-column values, right-aligned to the column width
        # when emitting plain text
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if not lines[-1][1].endswith('\n'):
            fm.plain('\n')
        fm.end()

    rootfm.end()
410 410
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # expand format-string placeholders (e.g. %h) in the destination name
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    archivetype = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # streaming to stdout: a plain 'files' directory is impossible there
        if archivetype == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        prefix = prefix or (os.path.basename(repo.root) + '-%h')

    prefix = cmdutil.makefilename(repo, prefix, node)
    matcher = scmutil.match(ctx, [], opts)
    decode = not opts.get('no_decode')
    archival.archive(repo, dest, node, archivetype, decode, matcher, prefix,
                     subrepos=opts.get('subrepos'))
483 483
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # Thin locking wrapper: take the working-directory lock before the
    # store lock (required ordering), delegate to _dobackout, and release
    # in reverse order.
    workinglock = storelock = None
    try:
        workinglock = repo.wlock()
        storelock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(storelock, workinglock)
554 554
def _dobackout(ui, repo, node=None, rev=None, **opts):
    # Implementation of 'hg backout'; caller (backout) holds wlock and lock.
    # Returns 0 on success, 1 if nothing changed or merges remain unresolved.
    opts = pycompat.byteskwargs(opts)
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    # 'node' is the positional argument form, 'rev' the -r form; accept
    # exactly one of them
    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires --parent to pick which side to keep
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        # reverse-merge the target out of the working directory; the
        # dirstateguard lets us restore dirstate if anything fails
        dsguard = dirstateguard.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            # stats[3] is the count of unresolved files
            if stats[3]:
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        # simple case: revert the working directory to the backed-out
        # revision's parent
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform,
                                    **pycompat.strkwargs(opts))
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        # "rev:shortnode" display form
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        # --merge: merge the backout changeset with the old dirstate parent
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0
661 661
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
               reset=None, good=None, bad=None, skip=None, extend=None,
               noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        # old CLI form: 'hg bisect good [REV]' etc.; map onto the flags
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra:
        raise error.Abort(_('incompatible arguments'))

    # at most one of these mutually-exclusive modes may be requested
    incompatibles = {
        '--bad': bad,
        '--command': bool(command),
        '--extend': extend,
        '--good': good,
        '--reset': reset,
        '--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        raise error.Abort(_('%s and %s are incompatible') %
                          tuple(sorted(enabled)[0:2]))

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
        else:
            # no revision given: mark the working directory parent
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        # bisection cannot proceed until both a good and a bad node exist
        if not (state['good'] and state['bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """common used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = cmdutil.show_changeset(ui, repo, {})

    if command:
        # automatic mode: repeatedly run the user command and mark
        # revisions from its exit status until the culprit is isolated
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)},
                                   blockedtag='bisect_check')
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            # always persist the last tested node so the bisection can resume
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)
893 893
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    # opts still has native-str keys here (no byteskwargs), hence the r''
    # prefixes; the raw dict is forwarded as-is to printbookmarks below.
    force = opts.get(r'force')
    rev = opts.get(r'rev')
    delete = opts.get(r'delete')
    rename = opts.get(r'rename')
    inactive = opts.get(r'inactive')

    # reject nonsensical option combinations up front
    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
    if rev and delete:
        raise error.Abort(_("--rev is incompatible with --delete"))
    if rev and rename:
        raise error.Abort(_("--rev is incompatible with --rename"))
    if (delete or rev) and not names:
        raise error.Abort(_("bookmark name required"))

    if not (delete or rename or names or inactive):
        # no action requested: list the existing bookmarks
        bookmarks.printbookmarks(ui, repo, **opts)
        return

    # every mutating action runs under both locks and a transaction
    with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
        if delete:
            bookmarks.delete(repo, tr, names)
        elif rename:
            if not names:
                raise error.Abort(_("new bookmark name required"))
            if len(names) > 1:
                raise error.Abort(_("only one new bookmark name allowed"))
            bookmarks.rename(repo, tr, rename, names[0], force, inactive)
        elif names:
            bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
        else:
            # only --inactive was given: deactivate the current bookmark
            if not len(repo._bookmarks):
                ui.status(_("no bookmarks set\n"))
            elif not repo._activebookmark:
                ui.status(_("no active bookmark\n"))
            else:
                bookmarks.deactivate(repo)
983 983
@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    label = label.strip() if label else label

    clean = opts.get('clean')
    if not label and not clean:
        # no argument and no --clean: just report the current branch
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if clean:
            # reset to the branch of the working directory's first parent
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            if label in repo.branchmap() and not opts.get('force'):
                parentbranches = [p.branch() for p in repo[None].parents()]
                if label not in parentbranches:
                    raise error.Abort(_('a branch of the same name already'
                                        ' exists'),
                                      # i18n: "it" refers to an existing branch
                                      hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            others = [n for n, h, t, c in repo.branchmap().iterbranches()
                      if n != "default" and not c]
            if not others:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
1048 1048
@command('branches',
    [('a', 'active', False,
      _('show only branches that have unmerged heads (DEPRECATED)')),
     ('c', 'closed', False, _('show normal and closed branches')),
    ] + formatteropts,
    _('[-c]'))
def branches(ui, repo, active=False, closed=False, **opts):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('branches')
    fm = ui.formatter('branches', opts)
    hexfunc = fm.hexfunc

    # collect one (name, tip context, active?, open?) entry per branch;
    # a branch is active when it still owns a repository head
    allheads = set(repo.heads())
    entries = []
    for name, heads, tip, isclosed in repo.branchmap().iterbranches():
        ownshead = bool(set(heads) & allheads)
        entries.append((name, repo[tip], not isclosed and ownshead,
                        not isclosed))
    entries.sort(key=lambda e: (e[2], e[1].rev(), e[0], e[3]),
                 reverse=True)

    currentbranch = repo.dirstate.branch()
    for name, ctx, isactive, isopen in entries:
        # honor the --active and --closed filters
        if active and not isactive:
            continue
        if not isactive and not isopen and not closed:
            continue

        if isactive:
            label = 'branches.active'
            notice = ''
        elif not isopen:
            label = 'branches.closed'
            notice = _(' (closed)')
        else:
            label = 'branches.inactive'
            notice = _(' (inactive)')
        current = (name == currentbranch)
        if current:
            label = 'branches.current'

        fm.startitem()
        fm.write('branch', '%s', name, label=label)
        rev = ctx.rev()
        # pad so the "rev:node" column lines up across rows
        padding = max(31 - len(str(rev)) - encoding.colwidth(name), 0)
        fm.condwrite(not ui.quiet, 'rev node', ' ' * padding + ' %d:%s',
                     rev, hexfunc(ctx.node()),
                     label='log.changeset changeset.%s' % ctx.phasestr())
        fm.context(ctx=ctx)
        fm.data(active=isactive, closed=not isopen, current=current)
        if not ui.quiet:
            fm.plain(notice)
        fm.plain('\n')
    fm.end()
1111 1111
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a bundle file

    Generate a bundle file containing data to be added to a repository.

    To create a bundle containing all changesets, use -a/--all
    (or --base null). Otherwise, hg assumes the destination will have
    all the nodes you specify with --base parameters. Otherwise, hg
    will assume the repository has all the nodes in destination, or
    default-push/default if no destination is specified.

    You can change bundle format with the -t/--type option. See
    :hg:`help bundlespec` for documentation on this format. By default,
    the most appropriate format is used and compression defaults to
    bzip2.

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    opts = pycompat.byteskwargs(opts)
    # -r/--rev limits the heads of the outgoing set; an explicit -r that
    # resolves to nothing is an error rather than an empty bundle
    revs = None
    if 'rev' in opts:
        revstrings = opts['rev']
        revs = scmutil.revrange(repo, revstrings)
        if revstrings and not revs:
            raise error.Abort(_('no commits to bundle'))

    # parse -t/--type into (compression, changegroup version, params);
    # strict=False allows a bare compression name like "bzip2"
    bundletype = opts.get('type', 'bzip2').lower()
    try:
        bcompression, cgversion, params = exchange.parsebundlespec(
            repo, bundletype, strict=False)
    except error.UnsupportedBundleSpecification as e:
        raise error.Abort(str(e),
                          hint=_("see 'hg help bundlespec' for supported "
                                 "values for --type"))

    # Packed bundles are a pseudo bundle format for now.
    if cgversion == 's1':
        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
                          hint=_("use 'hg debugcreatestreamclonebundle'"))

    # --all is sugar for --base null; both are exclusive with a DEST arg
    if opts.get('all'):
        if dest:
            raise error.Abort(_("--all is incompatible with specifying "
                                "a destination"))
        if opts.get('base'):
            ui.warn(_("ignoring --base because --all was specified\n"))
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    if cgversion not in changegroup.supportedoutgoingversions(repo):
        raise error.Abort(_("repository does not support bundle version %s") %
                          cgversion)

    if base:
        # explicit --base: compute outgoing locally, no peer contact
        if dest:
            raise error.Abort(_("--base is incompatible with specifying "
                                "a destination"))
        common = [repo.lookup(rev) for rev in base]
        # py2 idiom: map() returns a list; None means "all heads"
        heads = revs and map(repo.lookup, revs) or None
        outgoing = discovery.outgoing(repo, common, heads)
    else:
        # no --base: discover the common set against the destination peer
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)

    # nothing to send: report and exit with status 1 (per docstring)
    if not outgoing.missing:
        scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
        return 1

    # map changegroup version to on-disk bundle header; bundle1 encodes
    # the compression in the header itself, bundle2 negotiates it
    if cgversion == '01': #bundle1
        if bcompression is None:
            bcompression = 'UN'
        bversion = 'HG10' + bcompression
        bcompression = None
    elif cgversion in ('02', '03'):
        bversion = 'HG20'
    else:
        raise error.ProgrammingError(
            'bundle: unexpected changegroup version %s' % cgversion)

    # TODO compression options should be derived from bundlespec parsing.
    # This is a temporary hack to allow adjusting bundle compression
    # level without a) formalizing the bundlespec changes to declare it
    # b) introducing a command flag.
    compopts = {}
    complevel = ui.configint('experimental', 'bundlecomplevel')
    if complevel is not None:
        compopts['level'] = complevel


    # optionally embed obsolescence markers and phase information,
    # gated on experimental config knobs
    contentopts = {'cg.version': cgversion}
    if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
        contentopts['obsolescence'] = True
    if repo.ui.configbool('experimental', 'bundle-phases'):
        contentopts['phases'] = True
    bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
                           contentopts, compression=bcompression,
                           compopts=compopts)
1237 1237
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts + formatteropts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    # Normalize **opts to bytes keys like the sibling commands (branch,
    # bundle, clone, config) do, so the literal-key lookups below behave
    # the same on py3; behavior-neutral on py2 where bytes == str.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    fntemplate = opts.pop('output', '')
    if cmdutil.isstdiofilename(fntemplate):
        fntemplate = ''

    if fntemplate:
        fm = formatter.nullformatter(ui, 'cat')
    else:
        ui.pager('cat')
        fm = ui.formatter('cat', opts)
    with fm:
        # ** re-expansion needs native-str keys on py3; convert back at
        # the call boundary (same pattern as _docommit's strkwargs use)
        return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
                           **pycompat.strkwargs(opts))
1280 1280
@command('^clone',
    [('U', 'noupdate', None, _('the clone will include an empty working '
                               'directory (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
     _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL. When this is done,
      hooks operating on incoming changesets and changegroups may fire twice,
      once for the bundle fetched from the URL and another for any additional
      data not fetched from this URL. In addition, if an error occurs, the
      repository may be rolled back to a partial clone. This behavior may
      change in future releases. See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    # hg.clone returns None on failure; map that to exit status 1
    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=updaterev or not noupdate,
                      branch=opts.get('branch'),
                      shareopts=opts.get('shareopts'))

    return result is None
1413 1413
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch head as closed')),
    ('', 'amend', None, _('amend the parent of the working directory')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    # acquire wlock then lock, release in reverse order on exit (same
    # pattern the bookmark command uses); the real work is in _docommit
    with repo.wlock(), repo.lock():
        return _docommit(ui, repo, *pats, **opts)
1489 1489
def _docommit(ui, repo, *pats, **opts):
    """Implementation of :hg:`commit`; caller (commit) holds wlock and lock.

    Returns 1 if nothing changed, otherwise the result of the underlying
    commit machinery (None on success).
    """
    # -i/--interactive delegates entirely to the record machinery, which
    # re-enters commit() for the selected hunks
    if opts.get(r'interactive'):
        opts.pop(r'interactive')
        ret = cmdutil.dorecord(ui, repo, commit, None, False,
                               cmdutil.recordfilter, *pats,
                               **opts)
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    opts = pycompat.byteskwargs(opts)
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    # capture branch state before the commit for the status report below
    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        # closing is only meaningful on a branch head; with --amend the
        # check is against the branch of the commit being amended
        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo[None].parents()[0].p1().branch() != branch and \
               repo[None].parents()[0].p2().branch() != branch:
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        # amending must not rewrite public history, a merge in progress,
        # or (without the unstable-allowed option) a changeset with children
        old = repo['.']
        if not old.mutable():
            raise error.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise error.Abort(_('cannot amend changeset with children'))

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            # user/date fall back to the amended commit's values
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            # amend produced an identical changeset: nothing to do
            ui.status(_("nothing changed\n"))
            return 1
    else:
        # normal (non-amend) commit path
        def commitfunc(ui, repo, message, match, opts):
            # -s/--secret: commit in the secret phase via a config override
            overrides = {}
            if opts.get('secret'):
                overrides[('phases', 'new-commit')] = 'secret'

            baseui = repo.baseui
            with baseui.configoverride(overrides, 'commit'):
                with ui.configoverride(overrides, 'commit'):
                    editform = cmdutil.mergeeditform(repo[None],
                                                     'commit.normal')
                    editor = cmdutil.getcommiteditor(
                        editform=editform, **pycompat.strkwargs(opts))
                    return repo.commit(message,
                                       opts.get('user'),
                                       opts.get('date'),
                                       match,
                                       editor=editor,
                                       extra=extra)

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # no commit created: explain why if files were missing
            stat = cmdutil.postcommitstatus(repo, pats, opts)
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1589 1589
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
    ('e', 'edit', None, _('edit user config')),
    ('l', 'local', None, _('edit repository config')),
    ('g', 'global', None, _('edit global config'))] + formatteropts,
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    # --edit/--local/--global: open the appropriate hgrc in an editor
    # instead of printing configuration
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        # edit the first candidate path that exists...
        for f in paths:
            if os.path.exists(f):
                break
        else:
            # ...or seed the first candidate with a sample config
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            fp = open(f, "wb")
            fp.write(util.tonativeeol(samplehgrc))
            fp.close()

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    # with --debug, report where each config file / item came from
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # arguments are either bare section names or section.name items;
        # at most one item may be requested, and not mixed with sections
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise error.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            # keep plain output one-line-per-item
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # section match: print name=value
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('name value', '%s=%s\n', entryname, value)
                    matched = True
                elif v == entryname:
                    # exact item match: print the value only
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('value', '%s\n', value)
                    fm.data(name=entryname)
                    matched = True
        else:
            # no arguments: print everything
            fm.startitem()
            fm.condwrite(ui.debugflag, 'source', '%s: ', source)
            fm.write('name value', '%s=%s\n', entryname, value)
            matched = True
    fm.end()
    # exit status 1 when the requested NAME did not exist (per docstring)
    if matched:
        return 0
    return 1
1702 1702
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
     ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # normalize keyword argument names to bytes for internal use
    byteopts = pycompat.byteskwargs(opts)
    # hold the working directory lock while cmdutil.copy does the work
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, byteopts)
    finally:
        release(wlock)
1727 1727
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # Walk the command table in sorted order, emitting one line per
    # command: the canonical name followed by its long option names.
    for name, entry in sorted(table.iteritems()):
        # drop aliases ("cmd|alias") and the '^' short-list marker
        name = name.split('|')[0].strip('^')
        longopts = ', '.join(opt[1] for opt in entry[1])
        ui.write('%s: %s\n' % (name, longopts))
1735 1735
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts.get('options'):
        # Default mode: complete command names matching the prefix.
        matches, unused_allcmds = cmdutil.findpossible(cmd, table)
        if ui.verbose:
            matches = [' '.join(m[0]) for m in matches.values()]
        ui.write("%s\n" % "\n".join(sorted(matches)))
        return

    # -o/--options mode: list flag names instead of command names.
    opttables = [globalopts]
    if cmd:
        unused_aliases, entry = cmdutil.findcmd(cmd, table, False)
        opttables.append(entry[1])
    flags = []
    for opttable in opttables:
        for opt in opttable:
            # hide options whose description marks them as deprecated
            if "(DEPRECATED)" in opt[3]:
                continue
            if opt[0]:
                flags.append('-%s' % opt[0])
            flags.append('--%s' % opt[1])
    ui.write("%s\n" % "\n".join(flags))
1763 1763
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revspec = opts.get('rev')
    changespec = opts.get('change')

    # --rev and --change are mutually exclusive ways of naming the
    # endpoints of the comparison.
    if revspec and changespec:
        raise error.Abort(
            _('cannot specify --rev and --change at the same time'))
    if changespec:
        # -c REV: compare REV against its first parent
        newnode = scmutil.revsingle(repo, changespec, None).node()
        oldnode = repo[newnode].p1().node()
    else:
        # zero, one or two -r arguments; revpair fills in the defaults
        oldnode, newnode = scmutil.revpair(repo, revspec)

    if opts.get('reverse'):
        oldnode, newnode = newnode, oldnode

    diffopts = patch.diffallopts(ui, opts)
    matcher = scmutil.match(repo[newnode], pats, opts)
    ui.pager('diff')
    cmdutil.diffordiffstat(ui, repo, diffopts, oldnode, newnode, matcher,
                           stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
1852 1852
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # Positional arguments and -r/--rev are interchangeable; fall back
    # to the working directory parent when neither is given.
    specs = changesets + tuple(opts.get('rev', []))
    if not specs:
        specs = ('.',)
    revs = scmutil.revrange(repo, specs)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        msg = _('exporting patches:\n')
    else:
        msg = _('exporting patch:\n')
    ui.note(msg)
    ui.pager('export')
    cmdutil.export(repo, revs, fntemplate=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
1935 1935
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # with -0, terminate each filename with NUL instead of newline
    terminator = '\0' if opts.get('print0') else '\n'
    fmt = '%s' + terminator

    matcher = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, matcher, fm, fmt, opts.get('subrepos'))
1998 1998
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    # guard clause: at least one pattern is required
    if not pats:
        raise error.Abort(_('no files specified'))

    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget returns (rejected, forgotten); a non-empty rejected
    # list maps to exit status 1
    rejected = cmdutil.forget(ui, repo, matcher, prefix="",
                              explicitonly=False)[0]
    return 1 if rejected else 0
2036 2036
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     # Note: -U is a boolean flag like -D above; it must not carry a
     # value label. The previous _('DATE') metavar here was a
     # copy-paste error that made help render -U as taking an argument.
     ('U', 'currentuser', False,
      _('record the current user as committer'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    # Hold the working directory lock for the whole operation; the
    # actual work (including --continue handling) lives in _dograft.
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
2112 2112
def _dograft(ui, repo, *revs, **opts):
    """Implementation of :hg:`graft`; caller holds the wlock.

    Returns 0 on success and -1 when every requested revision was
    skipped (merges, ancestors of the destination, or revisions that
    were already grafted).
    """
    opts = pycompat.byteskwargs(opts)
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    # positional revisions and -r/--rev are combined into one list
    revs = list(revs)
    revs.extend(opts.get('rev'))

    # -U/-D only take effect when --user/--date was not given explicitly
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            # no graftstate file: graft is not the interrupted operation
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        # a fresh graft requires a clean working directory and no other
        # unfinished multi-step operation in progress
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        # ids maps both a changeset hex and its recorded graft 'source'
        # back to the revision number being grafted
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        # record provenance in extra: 'source' always points at the
        # original changeset, 'intermediate-source' at the changeset we
        # actually copied from (when grafting a graft)
        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                # rebuild the suggested command line so --continue keeps
                # the user's --user/--date/--log choices; note 'extra' is
                # deliberately rebound from dict to str here — safe only
                # because we raise Abort below
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0
2295 2295
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # multiline matching; -i adds case insensitivity
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # -0 switches both the field separator and the line terminator to NUL
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog objects; the same file is read at many revisions
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # Yield (linenum, colstart, colend, line) for every regexp match
        # in body; columns are relative to the start of the line.
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # One matching line: the line text plus the position of the
        # first match on it. Hash/equality are based on text (and line
        # number for the hash) so difflinestates can diff two revisions'
        # match lists.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def findpos(self):
            """Iterate all (start, end) indices of matches"""
            yield self.colstart, self.colend
            p = self.colend
            while p < len(self.line):
                m = regexp.search(self.line, p)
                if not m:
                    break
                yield m.span()
                p = m.end()

    # matches: rev -> filename -> [linestate]; copies: rev -> fn -> source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record all matching lines of fn@rev into the matches table
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # diff two lists of linestates, yielding ('+'/'-', state) pairs
        # for --all mode (match appeared / disappeared)
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fm, fn, ctx, pstates, states):
        # Write the matches for one file at one revision to the
        # formatter; returns True if anything was emitted.
        rev = ctx.rev()
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = str
        if ui.quiet:
            datefmt = '%Y-%m-%d'
        else:
            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
        found = False
        @util.cachefunc
        def binary():
            # lazily (and at most once) decide whether the file is binary
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        # template field names differ from the internal column names
        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            fm.startitem()
            fm.data(node=fm.hexfunc(ctx.node()))
            # (name, data, condition) triples; only conditional columns
            # are separated/printed
            cols = [
                ('filename', fn, True),
                ('rev', rev, True),
                ('linenumber', l.linenum, opts.get('line_number')),
            ]
            if opts.get('all'):
                cols.append(('change', change, True))
            cols.extend([
                ('user', formatuser(ctx.user()), opts.get('user')),
                ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
            ])
            # last enabled column gets no trailing separator
            lastcol = next(name for name, data, cond in reversed(cols) if cond)
            for name, data, cond in cols:
                field = fieldnamemap.get(name, name)
                fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
                if cond and name != lastcol:
                    fm.plain(sep, label='grep.sep')
            if not opts.get('files_with_matches'):
                fm.plain(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    fm.plain(_(" Binary file matches"))
                else:
                    displaymatches(fm.nested('texts'), l)
            fm.plain(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one line per file is enough
                break
        return found

    def displaymatches(fm, l):
        # split the line into alternating unmatched/matched segments so
        # matched text can carry the grep.match label
        p = 0
        for s, e in l.findpos():
            if p < s:
                fm.startitem()
                fm.write('text', '%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write('text', '%s', l.line[s:e], label='grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write('text', '%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    # skip: files (or their copy sources) already reported and done with
    skip = {}
    # revfiles: rev -> list of files touched at that rev that we searched
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs callback: collect match data for ctx and its
        # first parent so display() can compare the two
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # with --follow, compare against the copy source in the parent
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    ui.pager('grep')
    fm = ui.formatter('grep', opts)
    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fm, fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all we only report the newest match
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state as soon as it has been displayed
        del matches[rev]
        del revfiles[rev]
    fm.end()

    return not found
2548 2548
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    start = None
    # Only resolve --rev when a value was actually given.  After
    # byteskwargs() the 'rev' key is always present (its default is ''),
    # so testing "'rev' in opts" is always true and would make us call
    # revsingle() with an empty spec for nothing; revsingle('') with a
    # None default yields a None start node anyway, so this is
    # behavior-compatible.
    if opts.get('rev'):
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    if opts.get('topo'):
        # topological heads: ignore named branches entirely
        heads = [repo[h] for h in repo.heads(start)]
    else:
        heads = []
        for branch in repo.branchmap():
            heads += repo.branchheads(branch, start, opts.get('closed'))
        heads = [repo[h] for h in heads]

    if branchrevs:
        # restrict to the branches of the given revisions
        branches = set(repo[br].branch() for br in branchrevs)
        heads = [h for h in heads if h.branch() in branches]

    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        heads = [h for h in heads if h.node() in dagheads]

    if branchrevs:
        # warn about requested branches that yielded no heads
        haveheads = set(h.branch() for h in heads)
        if branches - haveheads:
            headless = ', '.join(b for b in branches - haveheads)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not heads:
        return 1

    ui.pager('heads')
    # newest first
    heads = sorted(heads, key=lambda x: -x.rev())
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in heads:
        displayer.show(ctx)
    displayer.close()
2621 2621
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
    ('c', 'command', None, _('show only help for commands')),
    ('k', 'keyword', None, _('show topics matching keyword')),
    ('s', 'system', [], _('show help for specific platform(s)')),
    ],
    _('[-ecks] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    keep = opts.get(r'system') or []
    if not keep:
        # no explicit platform filter: derive one from the running platform
        plat = pycompat.sysplatform
        if plat.startswith('win'):
            keep.append('windows')
        elif plat == 'OpenVMS':
            keep.append('vms')
        elif plat == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(plat.lower())
    if ui.verbose:
        keep.append('verbose')

    # render the help text for this module's command table and page it
    thismodule = sys.modules[__name__]
    text = help.formattedhelp(ui, thismodule, name, keep=keep, **opts)
    ui.pager('help')
    ui.write(text)
2659 2659
2660 2660
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts + formatteropts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))

    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # 'default' output (hash, branch, tags, bookmarks) is used when no
    # specific field was requested
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    fm = ui.formatter('identify', opts)
    fm.startitem()

    if not repo:
        # remote-only peer: we can resolve the id and bookmarks, but not
        # local-only data such as revision numbers, branch or tags
        if num or branch or tags:
            raise error.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        hexrev = hexfunc(remoterev)
        if default or id:
            output = [hexrev]
        fm.data(id=hexrev)

        def getbms():
            # bookmarks of the remote revision, sorted; empty if the
            # peer does not expose the bookmarks namespace
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        bms = getbms()
        if bookmarks:
            output.extend(bms)
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(bms)
            if bm:
                output.append(bm)

        fm.data(node=hex(remoterev))
        fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
    else:
        ctx = scmutil.revsingle(repo, rev, None)

        if ctx.rev() is None:
            # working directory: identify via its parent(s), with a '+'
            # suffix when there are uncommitted changes
            ctx = repo[None]
            parents = ctx.parents()
            taglist = []
            for p in parents:
                taglist.extend(p.tags())

            dirty = ""
            if ctx.dirty(missing=True, merge=False, branch=False):
                dirty = '+'
            fm.data(dirty=dirty)

            hexoutput = [hexfunc(p.node()) for p in parents]
            if default or id:
                output = ["%s%s" % ('+'.join(hexoutput), dirty)]
            fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))

            if num:
                numoutput = ["%d" % p.rev() for p in parents]
                output.append("%s%s" % ('+'.join(numoutput), dirty))

            fn = fm.nested('parents')
            for p in parents:
                fn.startitem()
                fn.data(rev=p.rev())
                fn.data(node=p.hex())
                fn.context(ctx=p)
            fn.end()
        else:
            # a concrete revision was resolved
            hexoutput = hexfunc(ctx.node())
            if default or id:
                output = [hexoutput]
            fm.data(id=hexoutput)

            if num:
                output.append(pycompat.bytestr(ctx.rev()))
            taglist = ctx.tags()

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(taglist)
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(taglist)

            if bookmarks:
                output.extend(ctx.bookmarks())

        fm.data(node=ctx.hex())
        fm.data(branch=ctx.branch())
        fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
        fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
        fm.context(ctx=ctx)

    fm.plain("%s\n" % ' '.join(output))
    fm.end()
2840 2840
@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('abort if patch would apply lossily')),
    ('', 'prefix', '',
     _('apply patch to subdirectory'), _('DIR')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    To read a patch from standard input (stdin), use "-" as the patch
    name. If a URL is specified, the patch will be downloaded from
    there.

    Import first applies changes to the working directory (unless
    --bypass is specified), import will abort if there are outstanding
    changes.

    Use --bypass to apply and commit patches directly to the
    repository, without affecting the working directory. Without
    --exact, patches will be applied on top of the working directory
    parent revision.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to the commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This will guard against various ways that portable
    patch formats and mail systems might fail to transfer Mercurial
    data or metadata. See :hg:`bundle` for lossless transmission.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...).

    .. note::

       When no hunks apply cleanly, :hg:`import --partial` will create
       an empty changeset, importing only the patch metadata.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    It is possible to use external patch programs to perform the patch
    by setting the ``ui.patch`` configuration option. For the default
    internal tool, the fuzz can also be configured via ``patch.fuzz``.
    See :hg:`help config` for more information about configuration
    files and how to use these options.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - import patches from stdin::

          hg import -

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      - use an external tool to apply a patch which is too fuzzy for
        the default internal tool.

          hg import --config ui.patch="patch --merge" fuzzy.patch

      - change the default fuzzing from 2 to a less strict 7

          hg import --config ui.fuzz=7 fuzz.patch

      Returns 0 on success, 1 on partial success (see --partial).
    """

    opts = pycompat.byteskwargs(opts)
    if not patch1:
        raise error.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # validate option combinations up front, before taking any locks
    exact = opts.get('exact')
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise error.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise error.Abort(_('cannot use --similarity with --bypass'))
    if exact:
        if opts.get('edit'):
            raise error.Abort(_('cannot use --exact with --edit'))
        if opts.get('prefix'):
            raise error.Abort(_('cannot use --exact with --prefix'))

    base = opts["base"]
    wlock = dsguard = lock = tr = None
    msgs = []
    ret = 0


    try:
        wlock = repo.wlock()

        if update:
            cmdutil.checkunfinished(repo)
            if (exact or not opts.get('force')):
                cmdutil.bailifchanged(repo)

        if not opts.get('no_commit'):
            # committing: take the store lock and open one transaction
            # covering all imported patches
            lock = repo.lock()
            tr = repo.transaction('import')
        else:
            # --no-commit: only guard the dirstate so a failure rolls
            # the working directory state back
            dsguard = dirstateguard.dirstateguard(repo, 'import')
        parents = repo[None].parents()
        for patchurl in patches:
            if patchurl == '-':
                ui.status(_('applying patch from stdin\n'))
                patchfile = ui.fin
                patchurl = 'stdin'      # for error message
            else:
                patchurl = os.path.join(base, patchurl)
                ui.status(_('applying %s\n') % patchurl)
                patchfile = hg.openpath(ui, patchurl)

            haspatch = False
            # a single source may contain several patches (e.g. an mbox)
            for hunk in patch.split(patchfile):
                (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                        parents, opts,
                                                        msgs, hg.clean)
                if msg:
                    haspatch = True
                    ui.note(msg + '\n')
                # subsequent patches apply on top of what was just imported
                if update or exact:
                    parents = repo[None].parents()
                else:
                    parents = [repo[node]]
                if rej:
                    ui.write_err(_("patch applied partially\n"))
                    ui.write_err(_("(fix the .rej files and run "
                                   "`hg commit --amend`)\n"))
                    ret = 1
                    break

            if not haspatch:
                raise error.Abort(_('%s: no diffs found') % patchurl)

        if tr:
            tr.close()
        if msgs:
            repo.savecommitmessage('\n* * *\n'.join(msgs))
        if dsguard:
            dsguard.close()
        return ret
    finally:
        # tr.release() aborts the transaction if it was not closed above
        if tr:
            tr.release()
        release(lock, dsguard, wlock)
3053 3053
@command('incoming|in',
    [('f', 'force', None,
     _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        # --graph has its own rendering path via hg._incoming()
        cmdutil.checkunsupportedgraphflags([], opts)
        def display(other, chlist, displayer):
            # render the incoming changesets as an ASCII DAG
            revdag = cmdutil.graphrevs(other, chlist, opts)
            cmdutil.displaygraph(ui, repo, revdag, displayer,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise error.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        # -B/--bookmarks: compare bookmarks instead of changesets
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.pager('incoming')
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.incoming(ui, repo, other)

    # let subrepos know where their incoming should come from, and make
    # sure the attribute is removed again afterwards
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3150 3150
3151 3151
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # hg.peer() with create=True performs the actual repository creation,
    # handling both local paths and remote (e.g. ssh://) destinations
    path = ui.expandpath(dest)
    hg.peer(ui, opts, path, create=True)
3169 3169
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # NUL terminator for xargs -0 compatibility, newline otherwise
    end = '\0' if opts.get('print0') else '\n'
    rev = scmutil.revsingle(repo, opts.get('rev'), None).node()

    found = False
    ctx = repo[rev]
    m = scmutil.match(ctx, pats, opts, default='relglob',
                      badfn=lambda x, y: False)

    ui.pager('locate')
    fullpath = opts.get('fullpath')
    for abs in ctx.matches(m):
        if fullpath:
            ui.write(repo.wjoin(abs), end)
        else:
            # print relative to the cwd when patterns were given,
            # otherwise the repo-relative path
            ui.write(((pats and m.rel(abs)) or abs), end)
        found = True

    return 0 if found else 1
3219 3219
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    # --follow combined with --rev: rewrite the revset to the ancestors
    # of the given revisions, newest first, and drop the follow flag
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        # --graph output is handled entirely by cmdutil.graphlog()
        return cmdutil.graphlog(ui, repo, pats, opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    getrenamed = None
    if opts.get('copies'):
        # -C/--copies: set up a rename lookup, bounded by the highest
        # requested revision when --rev was given
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # the displayer is buffered: only count revisions that actually
        # produced output, so --limit interacts correctly with filtering
        if displayer.flush(ctx):
            count += 1

    displayer.close()
3384 3384
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    # **opts arrives with native-str keys; convert to bytes for internal use
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        # --all: list every path ever tracked, derived from the store's
        # filelog names instead of any single revision's manifest
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        res = []
        # filelogs live in the store as "data/<path>.i"; strip that
        # wrapper to recover the tracked path name
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        # hold the store lock so datafiles() iterates a stable view
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                # skip empty filelogs (no revision was ever stored)
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        ui.pager('manifest')
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    # the positional argument and -r/--rev are interchangeable
    if not node:
        node = rev

    # flag -> display decoration ('@' = symlink, '*' = executable) and
    # flag -> permission string, used by the verbose output below
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager('manifest')
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        # --debug adds the file revision hash; -v adds mode and type
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()
3446 3446
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
     ('r', 'rev', '', _('revision to merge'), _('REV')),
     ('P', 'preview', None,
      _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    # the revision to merge may be given positionally or via -r, not both
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    # no revision given: let destutil pick the merge destination
    # (aborts with a hint when no unambiguous destination exists)
    if not node:
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        # -P only displays what would be merged; no merge is performed
        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels)
    finally:
        # always clear the temporary forcemerge override set above
        ui.setconfig('ui', 'forcemerge', '', 'merge')
3517 3517
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'), _('REV')),
     ('n', 'newest-first', None, _('show newest record first')),
     ('B', 'bookmarks', False, _('compare bookmarks')),
     ('b', 'branch', [], _('a specific branch you would like to push'),
      _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        # --graph takes a dedicated code path; reject log flags that the
        # graph renderer does not support
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            # no outgoing changesets: exit with 1, as documented above and
            # as the non-graph path does (hg.outgoing() returns 1 when
            # nothing is found); previously this returned None (exit 0)
            return 1

        revdag = cmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        # -B compares bookmarks instead of changesets
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    # record the destination so subrepos pushed/compared recursively
    # resolve their own relative paths against it
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
3595 3595
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    # base context: -r/--rev if given, else the working directory parent
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if not file_:
        # no file argument: simply show the context's parents
        nodes = [pctx.node() for pctx in ctx.parents()]
    else:
        # with a file argument, show the revision(s) that last touched
        # the file on each parent side
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                # skip the null parent
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except error.LookupError:
                # file does not exist in this parent; ignore it
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        # map each file node back to the changeset that introduced it
        nodes = [repo.filectx(file_, fileid=fn).node() for fn in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in nodes:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
3652 3652
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    # either the single entry whose name matches exactly, or all entries
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    # plain output hides passwords embedded in URLs; structured output
    # (templates etc.) passes the raw location through str()
    hidepassword = util.hidepassword if fm.isplain() else str
    # quiet mode prints bare names, one per line
    namefmt = '%s\n' if ui.quiet else '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            # 'name' and 'url' are reserved formatter fields
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    return 0
3723 3723
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        # change phases under the repository lock, within a transaction
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # snapshot phases of every revision before the move so the
            # number of actually-changed changesets can be reported below
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force also allows moving the boundary backward
                # (e.g. public -> draft)
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # re-read phases after the transaction (still on the unfiltered
        # repo, since the move may have hidden some revisions)
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        # nodes whose final phase is still lower than requested could not
        # be advanced (would have required --force)
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                # partial success: make the message visible despite the
                # non-zero exit status
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret
3816 3816
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Finish up after changesets were added via pull/unbundle.

    :modheads: change of heads caused by the pull/unbundle
    :optupdate: whether the working directory should be updated
    :checkout: revision to update to (None means the default destination)
    :brev: a name, possibly a bookmark, to activate after updating
    """
    if modheads == 0:
        # nothing came in; nothing to do or suggest
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # re-raise with a message explaining why the update was skipped
            msg = _("not updating: %s") % str(inst)
            raise error.UpdateAbort(msg, hint=inst.hint)
    # no update requested: print a hint about what to run next
    if modheads <= 1:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
        return
    currentbranchheads = len(repo.branchheads())
    if currentbranchheads == modheads:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    elif currentbranchheads > 1:
        ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                    "merge)\n"))
    else:
        ui.status(_("(run 'hg heads' to see heads)\n"))
3847 3847
@command('^pull',
    [('u', 'update', None,
      _('update to new branch head if changesets were pulled')),
     ('f', 'force', None, _('run even when remote repository is unrelated')),
     ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
     ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
     ('b', 'branch', [], _('a specific branch you would like to pull'),
      _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    # refuse -u when configuration demands an explicit update destination
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    # open a peer for the source; closed in the outer finally below
    other = hg.peer(repo, opts, source)
    try:
        # 'checkout' is the revision -u should update to, if any
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmark used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                # expandname resolves '.' to the active bookmark's name
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

            # order below depends on implementation of
            # hg.addbranchrevs(). opts['bookmark'] is ignored,
            # because 'checkout' is determined without it.
            if opts.get('rev'):
                brev = opts['rev'][0]
            elif opts.get('branch'):
                brev = opts['branch'][0]
            else:
                brev = branches[0]
        # let subrepos resolve their relative paths against the pull source
        repo._subtoppath = source
        try:
            # postincoming performs the optional update and prints hints;
            # its return value is this command's exit status
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret
3963 3963
@command('^push',
    [('f', 'force', None, _('force push')),
     ('r', 'rev', [],
      _('a changeset intended to be included in the destination'),
      _('REV')),
     ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
     ('b', 'branch', [],
      _('a specific branch you would like to push'), _('BRANCH')),
     ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        # record the bookmarks being pushed so later stages can see them
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    # a path may declare a distinct push location (pushurl)
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        # resolve -r arguments to nodes
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # no -r given but the path declares a default push revset
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            # NOTE(review): a subrepo push result of 0 aborts the outer
            # push here with exit status 1 (not result == True) —
            # presumably 0 signals the subrepo push did not succeed
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    # cgresult truthy means changesets were pushed -> exit status 0
    result = not pushop.cgresult

    # bookmark push outcome can override the exit status with 2
    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
4076 4076
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # nothing recovered -> report failure; otherwise the verify result
    # (0 on success) becomes this command's exit status
    recovered = repo.recover()
    if not recovered:
        return 1
    return hg.verify(repo)
4092 4092
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
     ('f', 'force', None,
      _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after = opts.get('after')
    force = opts.get('force')
    # without -A, at least one file pattern must be given
    if not (pats or after):
        raise error.Abort(_('no files specified'))

    # delegate the actual state handling (see the table above) to cmdutil
    matcher = scmutil.match(repo[None], pats, opts)
    return cmdutil.remove(ui, repo, matcher, "", after, force,
                          opts.get('subrepos'))
4147 4147
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    # A rename is just a copy with the source scheduled for removal;
    # hold the working-directory lock (non-blocking) for the duration.
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()
4172 4172
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    opts = pycompat.byteskwargs(opts)
    # Pull the mutually-constrained mode flags out in one pass.  Note
    # that 'all' shadows the builtin for the rest of this function.
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    # -l is incompatible with -m/-u, and -m/-u with each other.
    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if show:
        # --list: read-only display of the merge state, no lock needed.
        ui.pager('resolve')
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            # ms[f] is one of 'u'/'r'/'d'; map it to a color label.
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    # Everything below mutates the merge state and working directory,
    # so hold the working-directory lock.
    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        # Let a configured merge driver preprocess before any re-merge;
        # a failed preprocess aborts re-merging (but not -m/-u).
        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        # Two-phase re-merge: preresolve everything first, then finish
        # the files that need an interactive/external merge tool.
        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    # a missing working copy is fine; anything else is not
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    # always drop the tool override and persist progress
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        # Second phase: run the actual merge tool on files preresolve
        # could not finish on its own.
        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            # Nothing matched: if the patterns had no explicit kind
            # prefix, retry them as 'path:' patterns purely to build a
            # helpful "(try: hg resolve ...)" hint.
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                             flags,
                             ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

        # Nudge users into finishing an unfinished operation
        unresolvedf = list(ms.unresolved())
        driverresolvedf = list(ms.driverresolved())
        if not unresolvedf and not driverresolvedf:
            ui.status(_('(no more unresolved files)\n'))
            cmdutil.checkafterresolved(repo)
        elif not unresolvedf:
            ui.status(_('(no more unresolved files -- '
                        'run "hg resolve --all" to conclude)\n'))

    return ret
4396 4396
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    # --date is translated into an equivalent --rev; the two options
    # are mutually exclusive.
    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    # Resolve the target revision (defaults to the working directory
    # parent when --rev was not given).
    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        # Nothing selected at all: abort, picking the most useful hint
        # based on merge state, target revision and local modifications.
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    # The heavy lifting (file walking, backups, interactive mode) lives
    # in cmdutil.revert.
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4479 4479
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # Honor the config kill-switch before touching anything.
    if not ui.configbool('ui', 'rollback'):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    dryrun = opts.get(r'dry_run')
    force = opts.get(r'force')
    return repo.rollback(dryrun=dryrun, force=force)
4532 4532
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write("%s\n" % repo.root)
4542 4542
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    stdio = opts["stdio"]
    if stdio and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if stdio:
        # --stdio serves the wire protocol over stdin/stdout and needs
        # an actual repository to serve.
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        sshserver.sshserver(ui, repo).serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)
4609 4609
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      The -t/--terse option abbreviates the output by showing directory name
      if all the files in it share the same status. The option expects a value
      which can be a string formed by using 'm', 'a', 'r', 'd', 'u', 'i', 'c'
      where, 'm' stands for 'modified', 'a' for 'added', 'r' for 'removed',
      'd' for 'deleted', 'u' for 'unknown', 'i' for 'ignored' and 'c' for clean.

      It terses the output of only those status which are passed. The ignored
      files are not considered while tersing until 'i' is there in --terse value
      or the --ignored option is used.

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    terse = opts.get('terse')

    # --rev/--change/--terse are mutually constrained; --change compares
    # a revision against its first parent.
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif revs and terse:
        msg = _('cannot use --terse with --rev')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # Paths are printed relative to cwd when patterns were given or the
    # commands.status.relative config is set; otherwise repo-relative.
    if pats or ui.configbool('commands', 'status.relative'):
        cwd = repo.getcwd()
    else:
        cwd = ''

    # -0/--print0 terminates entries with NUL for xargs consumption.
    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    # Order matters: it must match both the 'MAR!?IC' status characters
    # and the tuple returned by repo.status() (zipped below).
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        # --all: everything; in quiet mode, skip unknown/ignored.
        show += ui.quiet and (states[:4] + ['clean']) or states

    if not show:
        # Default selection: -mard (quiet) or -mardu.
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    if terse:
        stat = cmdutil.tersestatus(repo.root, stat, terse,
                                   repo.dirstate._ignore, opts.get('ignored'))
    changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)

    # Copy sources are only computed when they will be displayed.
    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    ui.pager('status')
    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    # '  src' line after the entry, labelled as a copy
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()
4766 4766
4767 4767 @command('^summary|sum',
4768 4768 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
4769 4769 def summary(ui, repo, **opts):
4770 4770 """summarize working directory state
4771 4771
4772 4772 This generates a brief summary of the working directory state,
4773 4773 including parents, branch, commit status, phase and available updates.
4774 4774
4775 4775 With the --remote option, this will check the default paths for
4776 4776 incoming and outgoing changes. This can be time-consuming.
4777 4777
4778 4778 Returns 0 on success.
4779 4779 """
4780 4780
4781 4781 opts = pycompat.byteskwargs(opts)
4782 4782 ui.pager('summary')
4783 4783 ctx = repo[None]
4784 4784 parents = ctx.parents()
4785 4785 pnode = parents[0].node()
4786 4786 marks = []
4787 4787
4788 4788 ms = None
4789 4789 try:
4790 4790 ms = mergemod.mergestate.read(repo)
4791 4791 except error.UnsupportedMergeRecords as e:
4792 4792 s = ' '.join(e.recordtypes)
4793 4793 ui.warn(
4794 4794 _('warning: merge state has unsupported record types: %s\n') % s)
4795 4795 unresolved = []
4796 4796 else:
4797 4797 unresolved = list(ms.unresolved())
4798 4798
4799 4799 for p in parents:
4800 4800 # label with log.changeset (instead of log.parent) since this
4801 4801 # shows a working directory parent *changeset*:
4802 4802 # i18n: column positioning for "hg summary"
4803 4803 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4804 4804 label=cmdutil._changesetlabels(p))
4805 4805 ui.write(' '.join(p.tags()), label='log.tag')
4806 4806 if p.bookmarks():
4807 4807 marks.extend(p.bookmarks())
4808 4808 if p.rev() == -1:
4809 4809 if not len(repo):
4810 4810 ui.write(_(' (empty repository)'))
4811 4811 else:
4812 4812 ui.write(_(' (no revision checked out)'))
4813 4813 if p.obsolete():
4814 4814 ui.write(_(' (obsolete)'))
4815 4815 if p.troubled():
4816 4816 ui.write(' ('
4817 4817 + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
4818 4818 for trouble in p.troubles())
4819 4819 + ')')
4820 4820 ui.write('\n')
4821 4821 if p.description():
4822 4822 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
4823 4823 label='log.summary')
4824 4824
4825 4825 branch = ctx.branch()
4826 4826 bheads = repo.branchheads(branch)
4827 4827 # i18n: column positioning for "hg summary"
4828 4828 m = _('branch: %s\n') % branch
4829 4829 if branch != 'default':
4830 4830 ui.write(m, label='log.branch')
4831 4831 else:
4832 4832 ui.status(m, label='log.branch')
4833 4833
4834 4834 if marks:
4835 4835 active = repo._activebookmark
4836 4836 # i18n: column positioning for "hg summary"
4837 4837 ui.write(_('bookmarks:'), label='log.bookmark')
4838 4838 if active is not None:
4839 4839 if active in marks:
4840 4840 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
4841 4841 marks.remove(active)
4842 4842 else:
4843 4843 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
4844 4844 for m in marks:
4845 4845 ui.write(' ' + m, label='log.bookmark')
4846 4846 ui.write('\n', label='log.bookmark')
4847 4847
4848 4848 status = repo.status(unknown=True)
4849 4849
4850 4850 c = repo.dirstate.copies()
4851 4851 copied, renamed = [], []
4852 4852 for d, s in c.iteritems():
4853 4853 if s in status.removed:
4854 4854 status.removed.remove(s)
4855 4855 renamed.append(d)
4856 4856 else:
4857 4857 copied.append(d)
4858 4858 if d in status.added:
4859 4859 status.added.remove(d)
4860 4860
4861 4861 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
4862 4862
4863 4863 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
4864 4864 (ui.label(_('%d added'), 'status.added'), status.added),
4865 4865 (ui.label(_('%d removed'), 'status.removed'), status.removed),
4866 4866 (ui.label(_('%d renamed'), 'status.copied'), renamed),
4867 4867 (ui.label(_('%d copied'), 'status.copied'), copied),
4868 4868 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
4869 4869 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
4870 4870 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
4871 4871 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
4872 4872 t = []
4873 4873 for l, s in labels:
4874 4874 if s:
4875 4875 t.append(l % len(s))
4876 4876
4877 4877 t = ', '.join(t)
4878 4878 cleanworkdir = False
4879 4879
4880 4880 if repo.vfs.exists('graftstate'):
4881 4881 t += _(' (graft in progress)')
4882 4882 if repo.vfs.exists('updatestate'):
4883 4883 t += _(' (interrupted update)')
4884 4884 elif len(parents) > 1:
4885 4885 t += _(' (merge)')
4886 4886 elif branch != parents[0].branch():
4887 4887 t += _(' (new branch)')
4888 4888 elif (parents[0].closesbranch() and
4889 4889 pnode in repo.branchheads(branch, closed=True)):
4890 4890 t += _(' (head closed)')
4891 4891 elif not (status.modified or status.added or status.removed or renamed or
4892 4892 copied or subs):
4893 4893 t += _(' (clean)')
4894 4894 cleanworkdir = True
4895 4895 elif pnode not in bheads:
4896 4896 t += _(' (new branch head)')
4897 4897
4898 4898 if parents:
4899 4899 pendingphase = max(p.phase() for p in parents)
4900 4900 else:
4901 4901 pendingphase = phases.public
4902 4902
4903 4903 if pendingphase > phases.newcommitphase(ui):
4904 4904 t += ' (%s)' % phases.phasenames[pendingphase]
4905 4905
4906 4906 if cleanworkdir:
4907 4907 # i18n: column positioning for "hg summary"
4908 4908 ui.status(_('commit: %s\n') % t.strip())
4909 4909 else:
4910 4910 # i18n: column positioning for "hg summary"
4911 4911 ui.write(_('commit: %s\n') % t.strip())
4912 4912
4913 4913 # all ancestors of branch heads - all ancestors of parent = new csets
4914 4914 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
4915 4915 bheads))
4916 4916
4917 4917 if new == 0:
4918 4918 # i18n: column positioning for "hg summary"
4919 4919 ui.status(_('update: (current)\n'))
4920 4920 elif pnode not in bheads:
4921 4921 # i18n: column positioning for "hg summary"
4922 4922 ui.write(_('update: %d new changesets (update)\n') % new)
4923 4923 else:
4924 4924 # i18n: column positioning for "hg summary"
4925 4925 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
4926 4926 (new, len(bheads)))
4927 4927
4928 4928 t = []
4929 4929 draft = len(repo.revs('draft()'))
4930 4930 if draft:
4931 4931 t.append(_('%d draft') % draft)
4932 4932 secret = len(repo.revs('secret()'))
4933 4933 if secret:
4934 4934 t.append(_('%d secret') % secret)
4935 4935
4936 4936 if draft or secret:
4937 4937 ui.status(_('phases: %s\n') % ', '.join(t))
4938 4938
4939 4939 if obsolete.isenabled(repo, obsolete.createmarkersopt):
4940 4940 for trouble in ("unstable", "divergent", "bumped"):
4941 4941 numtrouble = len(repo.revs(trouble + "()"))
4942 4942 # We write all the possibilities to ease translation
4943 4943 troublemsg = {
4944 4944 "unstable": _("orphan: %d changesets"),
4945 "divergent": _("divergent: %d changesets"),
4945 "divergent": _("content-divergent: %d changesets"),
4946 4946 "bumped": _("bumped: %d changesets"),
4947 4947 }
4948 4948 if numtrouble > 0:
4949 4949 ui.status(troublemsg[trouble] % numtrouble + "\n")
4950 4950
4951 4951 cmdutil.summaryhooks(ui, repo)
4952 4952
4953 4953 if opts.get('remote'):
4954 4954 needsincoming, needsoutgoing = True, True
4955 4955 else:
4956 4956 needsincoming, needsoutgoing = False, False
4957 4957 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
4958 4958 if i:
4959 4959 needsincoming = True
4960 4960 if o:
4961 4961 needsoutgoing = True
4962 4962 if not needsincoming and not needsoutgoing:
4963 4963 return
4964 4964
4965 4965 def getincoming():
4966 4966 source, branches = hg.parseurl(ui.expandpath('default'))
4967 4967 sbranch = branches[0]
4968 4968 try:
4969 4969 other = hg.peer(repo, {}, source)
4970 4970 except error.RepoError:
4971 4971 if opts.get('remote'):
4972 4972 raise
4973 4973 return source, sbranch, None, None, None
4974 4974 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
4975 4975 if revs:
4976 4976 revs = [other.lookup(rev) for rev in revs]
4977 4977 ui.debug('comparing with %s\n' % util.hidepassword(source))
4978 4978 repo.ui.pushbuffer()
4979 4979 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
4980 4980 repo.ui.popbuffer()
4981 4981 return source, sbranch, other, commoninc, commoninc[1]
4982 4982
4983 4983 if needsincoming:
4984 4984 source, sbranch, sother, commoninc, incoming = getincoming()
4985 4985 else:
4986 4986 source = sbranch = sother = commoninc = incoming = None
4987 4987
4988 4988 def getoutgoing():
4989 4989 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
4990 4990 dbranch = branches[0]
4991 4991 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
4992 4992 if source != dest:
4993 4993 try:
4994 4994 dother = hg.peer(repo, {}, dest)
4995 4995 except error.RepoError:
4996 4996 if opts.get('remote'):
4997 4997 raise
4998 4998 return dest, dbranch, None, None
4999 4999 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5000 5000 elif sother is None:
5001 5001 # there is no explicit destination peer, but source one is invalid
5002 5002 return dest, dbranch, None, None
5003 5003 else:
5004 5004 dother = sother
5005 5005 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5006 5006 common = None
5007 5007 else:
5008 5008 common = commoninc
5009 5009 if revs:
5010 5010 revs = [repo.lookup(rev) for rev in revs]
5011 5011 repo.ui.pushbuffer()
5012 5012 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5013 5013 commoninc=common)
5014 5014 repo.ui.popbuffer()
5015 5015 return dest, dbranch, dother, outgoing
5016 5016
5017 5017 if needsoutgoing:
5018 5018 dest, dbranch, dother, outgoing = getoutgoing()
5019 5019 else:
5020 5020 dest = dbranch = dother = outgoing = None
5021 5021
5022 5022 if opts.get('remote'):
5023 5023 t = []
5024 5024 if incoming:
5025 5025 t.append(_('1 or more incoming'))
5026 5026 o = outgoing.missing
5027 5027 if o:
5028 5028 t.append(_('%d outgoing') % len(o))
5029 5029 other = dother or sother
5030 5030 if 'bookmarks' in other.listkeys('namespaces'):
5031 5031 counts = bookmarks.summary(repo, other)
5032 5032 if counts[0] > 0:
5033 5033 t.append(_('%d incoming bookmarks') % counts[0])
5034 5034 if counts[1] > 0:
5035 5035 t.append(_('%d outgoing bookmarks') % counts[1])
5036 5036
5037 5037 if t:
5038 5038 # i18n: column positioning for "hg summary"
5039 5039 ui.write(_('remote: %s\n') % (', '.join(t)))
5040 5040 else:
5041 5041 # i18n: column positioning for "hg summary"
5042 5042 ui.status(_('remote: (synced)\n'))
5043 5043
5044 5044 cmdutil.summaryremotehooks(ui, repo, opts,
5045 5045 ((source, sbranch, sother, commoninc),
5046 5046 (dest, dbranch, dother, outgoing)))
5047 5047
@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    wlock = lock = None
    try:
        # global tags create a commit, so both the working-copy lock and
        # the store lock are needed for the whole operation
        wlock = repo.wlock()
        lock = repo.lock()
        # default target revision: working directory parent
        rev_ = "."
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise error.Abort(_('tag names must be unique'))
        for n in names:
            # rejects names that collide with existing labels or use
            # reserved characters
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise error.Abort(_('tag names cannot consist entirely of '
                                    'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise error.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # a removal must match the scope (local vs global) of the
            # existing tag, otherwise the wrong store would be edited
            if opts.get('local'):
                expectedtype = 'local'
            else:
                expectedtype = 'global'

            for n in names:
                if not repo.tagtype(n):
                    raise error.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise error.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise error.Abort(_("tag '%s' is not a local tag") % n)
            # removing a tag means re-pointing it at the null revision
            rev_ = 'null'
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise error.Abort(_("tag '%s' already exists "
                                        "(use -f to force)") % n)
        if not opts.get('local'):
            # global tags commit to .hgtags; refuse during an uncommitted
            # merge or away from a branch head (unless forced)
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise error.Abort(_('working directory is not at a branch head '
                                    '(use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('remove'):
            editform = 'tag.remove'
        else:
            editform = 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform,
                                         **pycompat.strkwargs(opts))

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise error.Abort(_("cannot tag null revision"))

        tagsmod.tag(repo, names, r, message, opts.get('local'),
                    opts.get('user'), date, editor=editor)
    finally:
        release(lock, wlock)
5167 5167
@command('tags', formatteropts, '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    When the -q/--quiet switch is used, only the tag name is printed.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('tags')
    fm = ui.formatter('tags', opts)
    tohex = fm.hexfunc

    # newest tags first
    for tagname, node in reversed(repo.tagslist()):
        # local tags get their own label and an extra "type" column
        islocal = repo.tagtype(tagname) == 'local'
        label = 'tags.local' if islocal else 'tags.normal'
        tagtype = 'local' if islocal else ''

        fm.startitem()
        fm.write('tag', '%s', tagname, label=label)
        # pad the name to a 30-column field before "rev:node"
        fmt = " " * (30 - encoding.colwidth(tagname)) + ' %5d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt,
                     repo.changelog.rev(node), tohex(node), label=label)
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
5202 5202
@command('tip',
    [('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ] + templateopts,
    _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # render the single 'tip' changeset with the standard log displayer
    shower = cmdutil.show_changeset(ui, repo, opts)
    shower.show(repo['tip'])
    shower.close()
5228 5228
@command('unbundle',
    [('u', 'update', None,
     _('update to new branch head if changesets were unbundled'))],
    _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more bundle files

    Apply one or more bundle files generated by :hg:`bundle`.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    with repo.lock():
        # each bundle is applied in its own transaction
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            if isinstance(gen, streamclone.streamcloneapplier):
                # stream-clone bundles bypass the changegroup machinery and
                # need the dedicated debug command to be applied
                raise error.Abort(
                    _('packed bundles cannot be applied with '
                      '"hg unbundle"'),
                    hint=_('use "hg debugapplystreamclonebundle"'))
            url = 'bundle:' + fname
            try:
                txnname = 'unbundle'
                if not isinstance(gen, bundle2.unbundle20):
                    # legacy bundle formats record the (password-stripped)
                    # source url in the transaction name
                    txnname = 'unbundle\n%s' % util.hidepassword(url)
                with repo.transaction(txnname) as tr:
                    op = bundle2.applybundle(repo, gen, tr, source='unbundle',
                                             url=url)
            except error.BundleUnknownFeatureError as exc:
                raise error.Abort(
                    _('%s: unknown bundle feature, %s') % (fname, exc),
                    hint=_("see https://mercurial-scm.org/"
                           "wiki/BundleFeature for more "
                           "information"))
            # only the last bundle's head count is reported to postincoming
            modheads = bundle2.combinechangegroupresults(op)

    return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5268 5268
@command('^update|up|checkout|co',
    [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
    ('c', 'check', None, _('require clean working directory')),
    ('m', 'merge', None, _('merge uncommitted changes')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revision'), _('REV'))
    ] + mergetoolopts,
    _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           merge=None, tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the active bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent and there are uncommitted changes, the update is
    aborted. With the -c/--check option, the working directory is checked
    for uncommitted changes; if none are found, the working directory is
    updated to the specified changeset.

    .. container:: verbose

      The -C/--clean, -c/--check, and -m/--merge options control what
      happens if the working directory contains uncommitted changes.
      At most of one of them can be specified.

      1. If no option is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -m/--merge option, the update is allowed even if the
         requested changeset is not an ancestor or descendant of
         the working directory's parent.

      3. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      4. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # the target may come in either as a positional arg or as -r/--rev,
    # but never both
    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if ui.configbool('commands', 'update.requiredest'):
        if not node and not rev and not date:
            raise error.Abort(_('you must specify a destination'),
                              hint=_('for example: hg update ".::"'))

    if rev is None or rev == '':
        rev = node

    if date and rev is not None:
        raise error.Abort(_("you can't specify a revision and a date"))

    # -C/--clean, -c/--check and -m/--merge are mutually exclusive
    if len([x for x in (clean, check, merge) if x]) > 1:
        raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
                            "or -m/merge"))

    # translate the flags into hg.updatetotally's updatecheck policy
    updatecheck = None
    if check:
        updatecheck = 'abort'
    elif merge:
        updatecheck = 'none'

    with repo.wlock():
        cmdutil.clearunfinished(repo)

        # --date is resolved to a revision only after taking the lock
        if date:
            rev = cmdutil.finddate(ui, repo, date)

        # if we defined a bookmark, we have to remember the original name
        brev = rev
        rev = scmutil.revsingle(repo, rev, rev).rev()

        repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

        return hg.updatetotally(ui, repo, rev, brev, clean=clean,
                                updatecheck=updatecheck)
5371 5371
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see https://mercurial-scm.org/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # all of the checking work is delegated to the verify machinery
    return hg.verify(repo)
5390 5390
@command('version', [] + formatteropts, norepo=True)
def version_(ui, **opts):
    """output version and copyright information"""
    opts = pycompat.byteskwargs(opts)
    if ui.verbose:
        ui.pager('version')
    fm = ui.formatter("version", opts)
    fm.startitem()
    fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
             util.version())
    license = _(
        "(see https://mercurial-scm.org for more information)\n"
        "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    if not ui.quiet:
        fm.plain(license)

    if ui.verbose:
        fm.plain(_("\nEnabled extensions:\n\n"))
        # gather one (name, version, bundled?) row per loaded extension
        loaded = [(extname,
                   extensions.moduleversion(mod) or None,
                   extensions.ismoduleinternal(mod))
                  for extname, mod in extensions.extensions()]
        fn = fm.nested("extensions")
        if loaded:
            # pad names to the widest one so versions line up
            namefmt = " %%-%ds " % max(len(row[0]) for row in loaded)
            places = [_("external"), _("internal")]
            for extname, extver, bundled in loaded:
                fn.startitem()
                fn.condwrite(ui.verbose, "name", namefmt, extname)
                if ui.verbose:
                    # 'bundled' is a bool, used to index into places
                    fn.plain("%s " % places[bundled])
                fn.data(bundled=bundled)
                fn.condwrite(ui.verbose and extver, "ver", "%s", extver)
                if ui.verbose:
                    fn.plain("\n")
        fn.end()
    fm.end()
5436 5436
def loadcmdtable(ui, name, cmdtable):
    """Merge an extension's command table into the global one.

    Warns about any command the extension shadows before updating.
    """
    existing = set(table)
    clashes = [cmd for cmd in cmdtable if cmd in existing]
    if clashes:
        ui.warn(_("extension '%s' overrides commands: %s\n")
                % (name, " ".join(clashes)))
    table.update(cmdtable)
@@ -1,2330 +1,2330 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import os
12 12 import re
13 13 import stat
14 14
15 15 from .i18n import _
16 16 from .node import (
17 17 addednodeid,
18 18 bin,
19 19 hex,
20 20 modifiednodeid,
21 21 nullid,
22 22 nullrev,
23 23 short,
24 24 wdirid,
25 25 wdirnodes,
26 26 wdirrev,
27 27 )
28 28 from . import (
29 29 encoding,
30 30 error,
31 31 fileset,
32 32 match as matchmod,
33 33 mdiff,
34 34 obsolete as obsmod,
35 35 patch,
36 36 pathutil,
37 37 phases,
38 38 pycompat,
39 39 repoview,
40 40 revlog,
41 41 scmutil,
42 42 sparse,
43 43 subrepo,
44 44 util,
45 45 )
46 46
47 47 propertycache = util.propertycache
48 48
49 49 nonascii = re.compile(r'[^\x21-\x7f]').search
50 50
51 51 class basectx(object):
52 52 """A basectx object represents the common logic for its children:
53 53 changectx: read-only context that is already present in the repo,
54 54 workingctx: a context that represents the working directory and can
55 55 be committed,
56 56 memctx: a context that represents changes in-memory and can also
57 57 be committed."""
58 58 def __new__(cls, repo, changeid='', *args, **kwargs):
59 59 if isinstance(changeid, basectx):
60 60 return changeid
61 61
62 62 o = super(basectx, cls).__new__(cls)
63 63
64 64 o._repo = repo
65 65 o._rev = nullrev
66 66 o._node = nullid
67 67
68 68 return o
69 69
70 70 def __bytes__(self):
71 71 return short(self.node())
72 72
73 73 __str__ = encoding.strmethod(__bytes__)
74 74
75 75 def __int__(self):
76 76 return self.rev()
77 77
78 78 def __repr__(self):
79 79 return r"<%s %s>" % (type(self).__name__, str(self))
80 80
81 81 def __eq__(self, other):
82 82 try:
83 83 return type(self) == type(other) and self._rev == other._rev
84 84 except AttributeError:
85 85 return False
86 86
87 87 def __ne__(self, other):
88 88 return not (self == other)
89 89
90 90 def __contains__(self, key):
91 91 return key in self._manifest
92 92
93 93 def __getitem__(self, key):
94 94 return self.filectx(key)
95 95
96 96 def __iter__(self):
97 97 return iter(self._manifest)
98 98
99 99 def _buildstatusmanifest(self, status):
100 100 """Builds a manifest that includes the given status results, if this is
101 101 a working copy context. For non-working copy contexts, it just returns
102 102 the normal manifest."""
103 103 return self.manifest()
104 104
105 105 def _matchstatus(self, other, match):
106 106 """return match.always if match is none
107 107
108 108 This internal method provides a way for child objects to override the
109 109 match operator.
110 110 """
111 111 return match or matchmod.always(self._repo.root, self._repo.getcwd())
112 112
    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        Compares this context's manifest against *other*'s, folding in the
        dirstate-derived status *s*, and returns a scmutil.status tuple.
        """
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        # manifest diff: fn -> ((node1, flag1), (node2, flag2)), or None
        # when listclean reports an unchanged file
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                # deleted files are reported separately, not as modified
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirnodes:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                # working-directory nodeid: fall back to content comparison
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)
170 170
171 171 @propertycache
172 172 def substate(self):
173 173 return subrepo.state(self, self._repo.ui)
174 174
175 175 def subrev(self, subpath):
176 176 return self.substate[subpath][1]
177 177
178 178 def rev(self):
179 179 return self._rev
180 180 def node(self):
181 181 return self._node
182 182 def hex(self):
183 183 return hex(self.node())
184 184 def manifest(self):
185 185 return self._manifest
186 186 def manifestctx(self):
187 187 return self._manifestctx
188 188 def repo(self):
189 189 return self._repo
190 190 def phasestr(self):
191 191 return phases.phasenames[self.phase()]
192 192 def mutable(self):
193 193 return self.phase() > phases.public
194 194
195 195 def getfileset(self, expr):
196 196 return fileset.getfileset(self, expr)
197 197
198 198 def obsolete(self):
199 199 """True if the changeset is obsolete"""
200 200 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
201 201
202 202 def extinct(self):
203 203 """True if the changeset is extinct"""
204 204 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
205 205
206 206 def unstable(self):
207 207 """True if the changeset is not obsolete but it's ancestor are"""
208 208 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
209 209
210 210 def bumped(self):
211 211 """True if the changeset try to be a successor of a public changeset
212 212
213 213 Only non-public and non-obsolete changesets may be bumped.
214 214 """
215 215 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
216 216
217 217 def divergent(self):
218 218 """Is a successors of a changeset with multiple possible successors set
219 219
220 220 Only non-public and non-obsolete changesets may be divergent.
221 221 """
222 222 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
223 223
224 224 def troubled(self):
225 225 """True if the changeset is either unstable, bumped or divergent"""
226 226 return self.unstable() or self.bumped() or self.divergent()
227 227
228 228 def troubles(self):
229 229 """return the list of troubles affecting this changesets.
230 230
231 231 Troubles are returned as strings. possible values are:
232 232 - orphan,
233 233 - bumped,
234 - divergent.
234 - content-divergent.
235 235 """
236 236 troubles = []
237 237 if self.unstable():
238 238 troubles.append('orphan')
239 239 if self.bumped():
240 240 troubles.append('bumped')
241 241 if self.divergent():
242 troubles.append('divergent')
242 troubles.append('content-divergent')
243 243 return troubles
244 244
245 245 def parents(self):
246 246 """return contexts for each parent changeset"""
247 247 return self._parents
248 248
249 249 def p1(self):
250 250 return self._parents[0]
251 251
252 252 def p2(self):
253 253 parents = self._parents
254 254 if len(parents) == 2:
255 255 return parents[1]
256 256 return changectx(self._repo, nullrev)
257 257
    def _fileinfo(self, path):
        # Look up (filenode, flags) for *path*, preferring whatever data is
        # already cached on this context over a full manifest read.
        if r'_manifest' in self.__dict__:
            # full manifest already materialized: use it directly
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            # the delta against p1 is cheaper than the full manifest and
            # covers any file touched by this changeset
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        # slow path: read the manifest revlog entry for this changeset
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag
277 277
278 278 def filenode(self, path):
279 279 return self._fileinfo(path)[0]
280 280
281 281 def flags(self, path):
282 282 try:
283 283 return self._fileinfo(path)[1]
284 284 except error.LookupError:
285 285 return ''
286 286
287 287 def sub(self, path, allowcreate=True):
288 288 '''return a subrepo for the stored revision of path, never wdir()'''
289 289 return subrepo.subrepo(self, path, allowcreate=allowcreate)
290 290
291 291 def nullsub(self, path, pctx):
292 292 return subrepo.nullsubrepo(self, path, pctx)
293 293
294 294 def workingsub(self, path):
295 295 '''return a subrepo for the stored revision, or wdir if this is a wdir
296 296 context.
297 297 '''
298 298 return subrepo.subrepo(self, path, allowwdir=True)
299 299
300 300 def match(self, pats=None, include=None, exclude=None, default='glob',
301 301 listsubrepos=False, badfn=None):
302 302 r = self._repo
303 303 return matchmod.match(r.root, r.getcwd(), pats,
304 304 include, exclude, default,
305 305 auditor=r.nofsauditor, ctx=self,
306 306 listsubrepos=listsubrepos, badfn=badfn)
307 307
308 308 def diff(self, ctx2=None, match=None, **opts):
309 309 """Returns a diff generator for the given contexts and matcher"""
310 310 if ctx2 is None:
311 311 ctx2 = self.p1()
312 312 if ctx2 is not None:
313 313 ctx2 = self._repo[ctx2]
314 314 diffopts = patch.diffopts(self._repo.ui, opts)
315 315 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
316 316
317 317 def dirs(self):
318 318 return self._manifest.dirs()
319 319
320 320 def hasdir(self, dir):
321 321 return self._manifest.hasdir(dir)
322 322
    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        # NOTE: 'reversed' shadows the builtin of the same name within this
        # method; kept as-is to avoid any behavior change.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        # let ctx2 narrow the matcher, then build the status against ctx1
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            # fold each subrepo's status into the result, prefixing file
            # names with the subrepo path
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        # stable, sorted output for every status category
        for l in r:
            l.sort()

        return r
385 385
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    filtername = repo.filtername
    if not filtername.startswith('visible'):
        # generic filter: name the subset the revision is missing from
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, filtername)
        return error.FilteredRepoLookupError(msg)
    # visibility filter: the revision is hidden, hint at --hidden
    msg = _("hidden revision '%s'") % changeid
    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
398 398
class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        # default to the working directory's first parent
        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            # revision number given directly
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if not pycompat.ispy3 and isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if changeid == '.' or changeid == repo.dirstate.p1():
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            # 20 bytes: a binary node id
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            # a string spelling a decimal revision number; negative values
            # count back from the end of the changelog
            try:
                r = int(changeid)
                if '%d' % r != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l and r != wdirrev:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            # 40 characters: a full hexadecimal node id
            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            # last resort: a unique prefix of a node id
            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if changeid in repo.unfiltered().dirstate.parents():
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                # make the error message printable for binary node ids
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            raise _filterederror(repo, changeid)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            # _rev may be unset if __init__ bailed out early
            return id(self)

    def __nonzero__(self):
        # the null changeset is falsy
        return self._rev != nullrev

    __bool__ = __nonzero__

    @propertycache
    def _changeset(self):
        # parsed changelog entry for this revision
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        # manifest delta against the parent (cheaper than a full read)
        return self._manifestctx.readdelta()

    @propertycache
    def _parents(self):
        repo = self._repo
        p1, p2 = repo.changelog.parentrevs(self._rev)
        # a null second parent is omitted from the list
        if p2 == nullrev:
            return [changectx(repo, p1)]
        return [changectx(repo, p1), changectx(repo, p2)]

    def changeset(self):
        # (manifest, user, date, files, description, extra)
        c = self._changeset
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra,
        )
    def manifestnode(self):
        return self._changeset.manifest

    # simple accessors over the parsed changelog entry
    def user(self):
        return self._changeset.user
    def date(self):
        return self._changeset.date
    def files(self):
        return self._changeset.files
    def description(self):
        return self._changeset.description
    def branch(self):
        return encoding.tolocal(self._changeset.extra.get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset.extra
    def extra(self):
        return self._changeset.extra
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        # True when the revision is filtered out of the 'visible' view
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        # generator of ancestor contexts (self excluded)
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        # generator of descendant contexts (self excluded)
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)

    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        # alias of walk(); kept for interface symmetry with other contexts
        return self.walk(match)
659 659
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory,
    overlayfilectx: duplicate another filecontext with some fields overridden.
    """
    @propertycache
    def _filelog(self):
        # filelog (revlog) backing this path
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        # resolve the changeset revision this file revision belongs to,
        # using whichever association the constructor left on the instance
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        # truthy iff the file revision can actually be resolved
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            # unresolvable file revision: fall back to identity
            return id(self)

    def __eq__(self, other):
        # equal when same concrete type, same path and same file node
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    # simple accessors delegating to the filelog / owning changectx
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        return False

    # subclasses set this to True when they implement their own comparison
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        # only read data when sizes make a difference possible
        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True

    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            # no changeset association, or the linkrev already matches
            return self.linkrev()
        return self._adjustlinkrev(self.rev(), inclusive=True)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        # return parent file contexts, substituting rename source when
        # the filelog records a copy
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            #   be replaced with the rename information. This parent is -always-
            #   the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        # second parent, or a null filectx (fileid=-1) when there is none
        p = self.parents()
        if len(p) == 2:
            return p[1]
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, linenumber=False, skiprevs=None,
                 diffopts=None):
        '''returns a list of tuples of ((ctx, number), line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed; if linenumber parameter is true, number is
        the line number at the first appearance in the managed file, otherwise,
        number has a fixed value of False.
        '''

        def lines(text):
            # number of lines, counting a trailing partial line as one
            if text.endswith("\n"):
                return text.count("\n")
            return text.count("\n") + int(bool(text))

        if linenumber:
            def decorate(text, rev):
                return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
        else:
            def decorate(text, rev):
                return ([(rev, False)] * lines(text), text)

        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self
        introrev = self.introrev()
        if self.rev() != introrev:
            base = self.filectx(self.filenode(), changeid=introrev)
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if introrev is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors([p.rev() for p in base.parents()],
                                  inclusive=True)
            else:
                ac = cl.ancestors([introrev], inclusive=True)
            base._ancestrycontext = ac

        # This algorithm would prefer to be recursive, but Python is a
        # bit recursion-hostile. Instead we do an iterative
        # depth-first search.

        # 1st DFS pre-calculates pcache and needed
        visit = [base]
        pcache = {}
        needed = {base: 1}
        while visit:
            f = visit.pop()
            if f in pcache:
                continue
            pl = parents(f)
            pcache[f] = pl
            for p in pl:
                needed[p] = needed.get(p, 0) + 1
                if p not in pcache:
                    visit.append(p)

        # 2nd DFS does the actual annotate
        visit[:] = [base]
        hist = {}
        while visit:
            f = visit[-1]
            if f in hist:
                visit.pop()
                continue

            ready = True
            pl = pcache[f]
            for p in pl:
                if p not in hist:
                    ready = False
                    visit.append(p)
            if ready:
                visit.pop()
                curr = decorate(f.data(), f)
                skipchild = False
                if skiprevs is not None:
                    skipchild = f._changeid in skiprevs
                curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
                                     diffopts)
                for p in pl:
                    # reference counting lets us drop parent results as
                    # soon as their last consumer is annotated
                    if needed[p] == 1:
                        del hist[p]
                        del needed[p]
                    else:
                        needed[p] -= 1

                hist[f] = curr
                del pcache[f]

        return zip(hist[base][0], hist[base][1].splitlines(True))

    def ancestors(self, followfirst=False):
        # walk ancestor file revisions, highest (linkrev, filenode) first;
        # followfirst restricts the walk to each revision's first parent
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            c = visit.pop(max(visit))
            yield c
1058 1058
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    >>> oldfctx = 'old'
    >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
    >>> olddata = 'a\nb\n'
    >>> p1data = 'a\nb\nc\n'
    >>> p2data = 'a\nc\nd\n'
    >>> childdata = 'a\nb2\nc\nc2\nd\n'
    >>> diffopts = mdiff.diffopts()

    >>> def decorate(text, rev):
    ...     return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)

    Basic usage:

    >>> oldann = decorate(olddata, oldfctx)
    >>> p1ann = decorate(p1data, p1fctx)
    >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
    >>> p1ann[0]
    [('old', 1), ('old', 2), ('p1', 3)]
    >>> p2ann = decorate(p2data, p2fctx)
    >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
    >>> p2ann[0]
    [('old', 1), ('p2', 2), ('p2', 3)]

    Test with multiple parents (note the difference caused by ordering):

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]

    Test with skipchild (note the difference caused by ordering):

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
    '''
    # diff each parent's text against the child's text
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        # This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        # diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    for bk in xrange(b1, b2):
                        if child[0][bk][0] == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = parent[0][ak]
                else:
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk][0] == childfctx:
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = parent[0][ak]
    return child
1168 1168
1169 1169 class filectx(basefilectx):
1170 1170 """A filecontext object makes access to data related to a particular
1171 1171 filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node.

        At least one of changeid, fileid or changectx must be given;
        anything not supplied is later derived lazily by the
        corresponding propertycache on basefilectx.
        """
        self._repo = repo
        self._path = path

        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        # only pin the attributes that were explicitly provided
        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid
1194 1194
1195 1195 @propertycache
1196 1196 def _changectx(self):
1197 1197 try:
1198 1198 return changectx(self._repo, self._changeid)
1199 1199 except error.FilteredRepoLookupError:
1200 1200 # Linkrev may point to any revision in the repository. When the
1201 1201 # repository is filtered this may lead to `filectx` trying to build
1202 1202 # `changectx` for filtered revision. In such case we fallback to
1203 1203 # creating `changectx` on the unfiltered version of the reposition.
1204 1204 # This fallback should not be an issue because `changectx` from
1205 1205 # `filectx` are not used in complex operations that care about
1206 1206 # filtering.
1207 1207 #
1208 1208 # This fallback is a cheap and dirty fix that prevent several
1209 1209 # crashes. It does not ensure the behavior is correct. However the
1210 1210 # behavior was not correct before filtering either and "incorrect
1211 1211 # behavior" is seen as better as "crash"
1212 1212 #
1213 1213 # Linkrevs have several serious troubles with filtering that are
1214 1214 # complicated to solve. Proper handling of the issue here should be
1215 1215 # considered when solving linkrev issue are on the table.
1216 1216 return changectx(self._repo.unfiltered(), self._changeid)
1217 1217
1218 1218 def filectx(self, fileid, changeid=None):
1219 1219 '''opens an arbitrary revision of the file without
1220 1220 opening a new filelog'''
1221 1221 return filectx(self._repo, self._path, fileid=fileid,
1222 1222 filelog=self._filelog, changeid=changeid)
1223 1223
1224 1224 def rawdata(self):
1225 1225 return self._filelog.revision(self._filenode, raw=True)
1226 1226
1227 1227 def rawflags(self):
1228 1228 """low-level revlog flags"""
1229 1229 return self._filelog.flags(self._filerev)
1230 1230
1231 1231 def data(self):
1232 1232 try:
1233 1233 return self._filelog.read(self._filenode)
1234 1234 except error.CensoredNodeError:
1235 1235 if self._repo.ui.config("censor", "policy") == "ignore":
1236 1236 return ""
1237 1237 raise error.Abort(_("censored node: %s") % short(self._filenode),
1238 1238 hint=_("set censor.policy to ignore errors"))
1239 1239
1240 1240 def size(self):
1241 1241 return self._filelog.size(self._filerev)
1242 1242
1243 1243 @propertycache
1244 1244 def _copied(self):
1245 1245 """check if file was actually renamed in this changeset revision
1246 1246
1247 1247 If rename logged in file revision, we report copy for changeset only
1248 1248 if file revisions linkrev points back to the changeset in question
1249 1249 or both changeset parents contain different file revisions.
1250 1250 """
1251 1251
1252 1252 renamed = self._filelog.renamed(self._filenode)
1253 1253 if not renamed:
1254 1254 return renamed
1255 1255
1256 1256 if self.rev() == self.linkrev():
1257 1257 return renamed
1258 1258
1259 1259 name = self.path()
1260 1260 fnode = self._filenode
1261 1261 for p in self._changectx.parents():
1262 1262 try:
1263 1263 if fnode == p.filenode(name):
1264 1264 return None
1265 1265 except error.LookupError:
1266 1266 pass
1267 1267 return renamed
1268 1268
1269 1269 def children(self):
1270 1270 # hard for renames
1271 1271 c = self._filelog.children(self._filenode)
1272 1272 return [filectx(self._repo, self._path, fileid=x,
1273 1273 filelog=self._filelog) for x in c]
1274 1274
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # date/user/_status are only assigned when given; otherwise the
        # propertycache definitions below supply lazy defaults.
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
            if self._extra['branch'] == '':
                self._extra['branch'] = 'default'

    def __bytes__(self):
        # rendered as "<p1>+" to mark an uncommitted state
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        # dirstate supplies real flags when the fs supports them, falling
        # back to the manifest-based reconstruction above otherwise
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # default status: working directory vs. its parent
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        # devel.default-date allows tests to pin the commit date
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        # uncommitted contexts have no recorded subrepo revisions
        return None

    def manifestnode(self):
        # no manifest exists until the context is committed
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # all touched files, sorted
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        # uncommitted changesets carry no tags
        return []

    def bookmarks(self):
        # inherit the union of the parents' bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        # prefer the manifest if it was already built (r'' keeps the key a
        # native str under both Python 2 and 3)
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            # file vanished or is unreadable: report no flags
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # yield the parents themselves first, then every ancestor changeset
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)

    def __iter__(self):
        # iterate tracked files, skipping those marked removed ('r')
        d = self._repo.dirstate
        for f in d:
            if d[f] != 'r':
                yield f

    def __contains__(self, key):
        # tracked and not removed; '?' means untracked
        return self._repo.dirstate[key] not in "?r"

    def hex(self):
        # the working directory has a well-known pseudo node id
        return hex(wdirid)

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            # drop the null second parent for non-merge states
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        """start tracking the given files; return the list of rejected paths

        Files that do not exist or are neither regular files nor symlinks
        are rejected. Paths previously marked removed are resurrected via
        normallookup instead of being re-added.
        """
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension. That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # warn but still add: revlog deltas of huge files are
                    # memory-hungry
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    # already added, merged or normal: nothing to do
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

    def forget(self, files, prefix=""):
        """stop tracking the given files; return the list of rejected paths"""
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    self._repo.dirstate.remove(f)
                else:
                    # file was only scheduled for addition: just drop it
                    self._repo.dirstate.drop(f)
            return rejected

    def undelete(self, list):
        """restore removed files from a parent revision's content"""
        pctxs = self.parents()
        with self._repo.wlock():
            ds = self._repo.dirstate
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
                else:
                    # take the content from whichever parent has the file
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)

    def copy(self, source, dest):
        """record in the dirstate that dest is a copy of source"""
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n")
                               % self._repo.dirstate.pathto(dest))
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n")
                               % self._repo.dirstate.pathto(dest))
        else:
            with self._repo.wlock():
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
                              default, auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn,
                              icasefs=icasefs)

    def _filtersuspectsymlink(self, files):
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        """compare possibly-clean files against the first parent

        Returns a (modified, deleted, fixup) triple, where "fixup" lists
        files that are actually clean and can be re-marked normal in the
        dirstate.
        """
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (f not in pctx or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # A file become inaccessible in between? Mark it as deleted,
                # matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup

    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug('skip updating dirstate: '
                                            'identity mismatch\n')
            except error.LockError:
                # couldn't get the wlock: skip the optional update
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        listignored, listclean, listunknown = ignored, clean, unknown
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
                                            listclean, listunknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

            if fixup and listclean:
                s.clean.extend(fixup)

        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuse the file nodeid from parent, but we use special node
        identifiers for added and modified files. This is used by manifests
        merge to see that files are different and by update logic to avoid
        deleting newly added files.
        """
        return self._buildstatusmanifest(self._status)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results."""
        parents = self.parents()

        man = parents[0].manifest().copy()

        ff = self._flagfunc
        # tag added/modified entries with sentinel nodeids so merge and
        # update logic can tell them apart from unchanged files
        for i, l in ((addednodeid, status.added),
                     (modifiednodeid, status.modified)):
            for f in l:
                man[f] = i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in status.deleted + status.removed:
            if f in man:
                del man[f]

        return man

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo['.']:
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        superself = super(workingctx, self)
        match = superself._matchstatus(other, match)
        if other != self._repo['.']:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn('%s: %s\n' %
                                       (self._repo.dirstate.pathto(f), msg))
            match.bad = bad
        return match

    def markcommitted(self, node):
        super(workingctx, self).markcommitted(node)

        # let the sparse extension prune now-unneeded working copy files
        sparse.aftercommit(self._repo, node)
1819 1819
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        # no changeset exists yet for an uncommitted file
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            # nullid means "not present in that context"
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # copy source: renamed is a (path, filenode) pair; filelog is
            # unknown for the source, hence the trailing None
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        # skip parents where the file does not exist (filenode == nullid)
        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        return []
1866 1866
class workingfilectx(committablefilectx):
    """File context backed by a file in the working directory.

    Content, size and timestamps are read from the filesystem (through the
    repo's working vfs) rather than from the filelog.
    """

    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # a working file always belongs to the working directory context
        return workingctx(self._repo)

    def data(self):
        """read the current on-disk content of the file"""
        return self._repo.wread(self._path)

    def renamed(self):
        """return (source, filenode-in-p1) if this file is a copy, else None"""
        copysource = self._repo.dirstate.copied(self._path)
        if not copysource:
            return None
        p1manifest = self._changectx._parents[0]._manifest
        return copysource, p1manifest.get(copysource, nullid)

    def size(self):
        """size of the file on disk"""
        return self._repo.wvfs.lstat(self._path).st_size

    def date(self):
        """on-disk mtime paired with the context's timezone offset;
        falls back to the context date if the file is gone"""
        ctxtime, tzoffset = self._changectx.date()
        try:
            mtime = self._repo.wvfs.lstat(self._path).st_mtime
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (ctxtime, tzoffset)
        return (mtime, tzoffset)

    def exists(self):
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        # path audit via the working vfs (rejects escapes from the repo root)
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)

    def write(self, data, flags, backgroundclose=False):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose)

    def setflags(self, l, x):
        # set symlink (l) / exec (x) flags on the on-disk file
        self._repo.wvfs.setflags(self._path, l, x)
1925 1925
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # Deliberately pass workingctx (not workingcommitctx) to super():
        # this skips workingctx.__init__ and invokes committablectx's
        # constructor directly with the explicit change list.
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        if clean:
            # everything in the manifest that this commit doesn't touch
            cleanfiles = [f for f in self._manifest
                          if f not in self._changedset]
        else:
            cleanfiles = []
        recorded = self._status
        modified = [f for f in recorded.modified if match(f)]
        added = [f for f in recorded.added if match(f)]
        removed = [f for f in recorded.removed if match(f)]
        return scmutil.status(modified, added, removed,
                              [], [], [], cleanfiles)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        recorded = self._status
        return (set(recorded.modified)
                | set(recorded.added)
                | set(recorded.removed))
1963 1963
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    memo = {}

    def getfilectx(repo, memctx, path):
        # EAFP: hit the memo first; compute and store only on a miss
        try:
            return memo[path]
        except KeyError:
            result = func(repo, memctx, path)
            memo[path] = result
            return result

    return getfilectx
1979 1979
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # this is weird but apparently we only keep track of one parent
        # (why not only store that instead of a tuple?)
        renameinfo = fctx.renamed()
        copysource = renameinfo[0] if renameinfo else renameinfo
        return memfilectx(repo, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copysource, memctx=memctx)

    return getfilectx
1998 1998
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        filedata, filemode, copysource = patchstore.getfile(path)
        if filedata is None:
            # the patch removed this file
            return None
        linkflag, execflag = filemode
        return memfilectx(repo, path, filedata, islink=linkflag,
                          isexec=execflag, copied=copysource,
                          memctx=memctx)

    return getfilectx
2014 2014
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # treat a missing (None) parent as the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            # patch stores get the dedicated adapter
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            # hash the new content against the parent filenodes
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                # tracked in neither parent: addition
                added.append(f)
            elif self[f]:
                # filectxfn returned content: modification
                modified.append(f)
            else:
                # filectxfn returned None: removal
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2137 2137
class memfilectx(committablefilectx):
    """An in-memory file to commit, typically as part of a memctx.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        flags = ''
        if islink:
            flags += 'l'
        if isexec:
            flags += 'x'
        self._flags = flags
        if copied:
            self._copied = (copied, nullid)
        else:
            self._copied = None

    def data(self):
        """return the in-memory file content"""
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data
2170 2170
class overlayfilectx(committablefilectx):
    """Like memfilectx but take an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            # no ctx override requested; trivially matches the original
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr in attrs:
                if util.safehasattr(originalfctx, attr):
                    setattr(self, attr, getattr(originalfctx, attr))

    def data(self):
        # lazily evaluated: either the original's data() or the override
        return self._datafunc()
2241 2241
class metadataonlyctx(committablectx):
    """Like memctx but it's reusing the manifest of different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is original revision which manifest we're reusing
    'parents' is a sequence of two parent revisions identifiers (pass None for
    every missing parent), 'text' is the commit.

    user receives the committer name and defaults to current repository
    username, date is the commit date in any format supported by
    util.parsedate() and defaults to current date, extra is a dictionary of
    metadata or is left empty.
    """
    def __new__(cls, repo, originalctx, *args, **kwargs):
        # only forward the repo to the base __new__; the remaining
        # constructor arguments are consumed by __init__
        return super(metadataonlyctx, cls).__new__(cls, repo)

    def __init__(self, repo, originalctx, parents, text, user=None, date=None,
                 extra=None, editor=False):
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        # treat a missing (None) parent as the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = self._parents = [changectx(self._repo, p) for p in parents]

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        #
        # NOTE(review): 'manifestctx' is expected to be provided by the base
        # class; only the private '_manifestctx' property is defined locally
        # -- confirm the accessor name.
        mp1, mp2 = self.manifestctx().parents
        # Fix: compare the parent *node* against nullid. The previous
        # "p1 != nullid" compared a changectx object with a nodeid, which is
        # always true, so the null-parent guard never skipped the check.
        if p1.node() != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2.node() != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        """node of the manifest reused from the original changeset"""
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        """delegate file access to the original changeset"""
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
@@ -1,2012 +1,2013 b''
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import errno
11 11 import hashlib
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 nullid,
17 17 )
18 18 from . import (
19 19 bookmarks as bookmod,
20 20 bundle2,
21 21 changegroup,
22 22 discovery,
23 23 error,
24 24 lock as lockmod,
25 25 obsolete,
26 26 phases,
27 27 pushkey,
28 28 pycompat,
29 29 scmutil,
30 30 sslutil,
31 31 streamclone,
32 32 url as urlmod,
33 33 util,
34 34 )
35 35
# convenience aliases for util's urllib wrappers
urlerr = util.urlerr
urlreq = util.urlreq

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                        }

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # split "<version>;k1=v1;k2=v2" into (version, {key: value});
        # keys and values are URI decoded
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # full "<compression>-<version>" form
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                    _('missing support for repository features: %s') %
                      ', '.join(sorted(missingreqs)))

    if not externalnames:
        # translate human names into internal compression/changegroup ids
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170 170
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte header of a bundle and return a matching unpacker.

    fname may be empty for data arriving on a stream; vfs, when given, is
    used to expand fname to a full path.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # headerless bundle1 data: push the consumed bytes back and
            # treat the stream as uncompressed HG10
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198 198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # map an internal bundle compression id back to its human name
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # bundle1 stream whose 'BZ' marker was already consumed
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251 251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        # nothing shared yet: everything is relative to the null revision
        common = [nullid]
    else:
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
270 270
271 271 def _forcebundle1(op):
272 272 """return true if a pull/push must use bundle1
273 273
274 274 This function is used to allow testing of the older bundle version"""
275 275 ui = op.repo.ui
276 276 forcebundle1 = False
277 277 # The goal is this config is to allow developer to choose the bundle
278 278 # version used during exchanged. This is especially handy during test.
279 279 # Value is a list of bundle version to be picked from, highest version
280 280 # should be used.
281 281 #
282 282 # developer config: devel.legacy.exchange
283 283 exchange = ui.configlist('devel', 'legacy.exchange')
284 284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 285 return forcebundle1 or not op.remote.capable('bundle2')
286 286
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # did a local lock get acquired?
        self.locallocked = None
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # action -> (success message, failure message), both printf-style
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
411 411
412 412
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    if pushop.remote.local():
        # local-to-local push: verify the destination understands all of
        # our repository requirements before touching anything
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    # there are two ways to push to remote repo:
    #
    # addchangegroup assumes local user can lock remote
    # repo (local filesystem, old ssh servers).
    #
    # unbundle assumes local user cannot lock remote repo (new ssh
    # servers, http servers).

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))
    # get local lock as we might write phase data
    localwlock = locallock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            localwlock = pushop.repo.wlock()
        locallock = pushop.repo.lock()
        pushop.locallocked = True
    except IOError as err:
        pushop.locallocked = False
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)
    try:
        if pushop.locallocked:
            # transaction to apply any bundle2 reply parts locally
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        lock = None
        unbundle = pushop.remote.capable('unbundle')
        if not unbundle:
            # old-style push: we must lock the remote repository ourselves
            lock = pushop.remote.lock()
        try:
            _pushdiscovery(pushop)
            if not _forcebundle1(pushop):
                _pushbundle2(pushop)
            _pushchangeset(pushop)
            _pushsyncphase(pushop)
            _pushobsolete(pushop)
            _pushbookmark(pushop)
        finally:
            if lock is not None:
                lock.release()
            if pushop.trmanager:
                pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if locallock is not None:
            locallock.release()
        if localwlock is not None:
            localwlock.release()

    return pushop
497 497
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def register(func):
        # registering the same step twice is a programming error
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
521 521
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # steps run in registration order; each mutates pushop in place
    for stepname in pushdiscoveryorder:
        pushdiscoverymapping[stepname](pushop)
527 527
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
540 540
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    # split the remote phase data into public heads and draft roots
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing server: restrict to changesets already public here
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # heads to turn public on the remote if the changeset push succeeds...
    pushop.outdatedphases = future
    # ...and the list to use if it fails (or nothing was pushed)
    pushop.fallbackoutdatedphases = fallback
589 589
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """select the obsolescence markers relevant to the pushed changesets"""
    repo = pushop.repo
    # marker exchange must be enabled, there must be markers to send and
    # the remote must advertise the 'obsolete' pushkey namespace
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
600 600
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and compute the updates to push

    Fills ``pushop.outbookmarks`` with ``(name, old, new)`` hex-node
    triples and sets ``pushop.bkresult`` to 2 when an explicitly requested
    bookmark exists on neither side."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to ancestors of the pushed revisions
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # missing bookmarks are represented by None; keep them as-is
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        # turn binary node ids into hex for the wire protocol
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    # bookmarks that moved forward locally
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
665 665
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before actually pushing them

    Returns False when there is nothing to push. Without --force, aborts
    if an obsolete or troubled changeset would be pushed, then runs the
    head checks from ``discovery.checkheads``."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are defined here to respect the 80-char limit
            mso = _("push includes obsolete changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "bumped": _("push includes bumped changeset: %s!"),
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.troubled():
                    # report the first trouble only
                    # NOTE(review): assumes ctx.troubles() only yields keys
                    # present in 'mst' — confirm against context.troubles()
                    raise error.Abort(mst[ctx.troubles()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
696 697
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
704 705
def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generator under ``stepname``

    The decorated function is recorded in the step -> function mapping. By
    default its name is appended at the end of the ordered step list;
    passing ``idx`` inserts it at that position instead (order may matter).

    Only use this decorator for brand new steps; to wrap a step defined by
    an extension, modify the b2partsgenmapping dictionary directly."""
    def register(func):
        # each step name may be registered at most once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
723 724
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # old-style check: send the exact list of remote heads we
            # discovered, via a 'check:heads' part
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # newer check: only send the heads this push actually replaces
            # or removes, via a 'check:updated-heads' part
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    # remote heads either discarded or no longer heads
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
747 748
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # keep only the versions both sides can handle, pick the highest
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
788 789
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not 'pushkey' in b2caps:
        # remote does not accept pushkey parts; the legacy path in
        # _pushsyncphase will handle phases instead
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, used to map pushkey failures/replies to nodes
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head to move from draft to public
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """warn about any phase update the server ignored or rejected"""
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
829 830
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the bundle when possible"""
    if 'obsmarkers' in pushop.stepsdone:
        # markers already handled by an earlier mechanism
        return
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        # no marker format in common with the remote, skip this step
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
841 842
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        # remote does not accept pushkey parts; the legacy path in
        # _pushbookmark will handle bookmarks instead
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples to interpret server replies
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        # one pushkey part per bookmark update
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty old means creation, empty new means deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        """report the outcome of each bookmark update to the user"""
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
893 894
894 895
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        # each generator may add parts to the bundle and return a callable
        # that will process the matching part of the server reply
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # a registered callback turns the failure into a nicer abort
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
943 944
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path; stores the server result in
    ``pushop.cgresult``."""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
990 991
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        # split remote phase data into public heads and draft roots
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1046 1047
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if not pushop.trmanager:
        # repo is not locked, so we must not change any phase. Just tell
        # the user that a phase move would have happened, when relevant.
        repo = pushop.repo
        wouldmove = [n for n in nodes if phase < repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if wouldmove:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
        return
    phases.advanceboundary(pushop.repo,
                           pushop.trmanager.transaction(),
                           phase,
                           nodes)
1063 1064
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        # already taken care of (e.g. through bundle2)
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remote = pushop.remote
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        pushop.repo.ui.warn(_('failed to push some obsolete markers!\n'))
1082 1083
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        # changeset push failed, or bookmarks already handled (bundle2)
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # empty old means creation, empty new means deletion
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', book, old, new):
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1104 1105
class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly (names expanded through the local
        # bookmark store)
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless configuration or capabilities force bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1175 1176
class transactionmanager(object):
    """Manage the life cycle of a repository transaction

    The transaction is created lazily, on first request, and the
    appropriate hooks fire when it is closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if self._tr:
            return self._tr
        trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
        tr = self.repo.transaction(trname)
        tr.hookargs['source'] = self.source
        tr.hookargs['url'] = self.url
        self._tr = tr
        return tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1205 1206
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        # for a local peer we can check requirements up front
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # the legacy helpers below skip any step already recorded in
        # pullop.stepsdone by the bundle2 path
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1261 1262
# Ordered list of discovery step names; steps run in registration order
# (see the pulldiscovery() decorator below).
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1269 1270
def pulldiscovery(stepname):
    """decorator registering a pull discovery step under ``stepname``

    The decorated function is recorded in the step -> function mapping and
    its name is appended to the ordered step list, so steps run in the
    order they were decorated (this may matter).

    Only use this decorator to introduce a new step; to wrap an existing
    step from an extension, modify the pulldiscoverymapping dictionary
    directly."""
    def register(func):
        # each step name may be registered at most once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1285 1286
def _pulldiscovery(pullop):
    """Run every registered pull discovery step, in registration order"""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1291 1292
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in the bundle1 case

    Without bundle2, bookmarks must be fetched before changeset discovery
    to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmark data was supplied up front, nothing to fetch
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice
        # with new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1305 1306
1306 1307
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads in filtered locally, lets drop it from the unknown
        # remote heads and put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally (possibly hidden): treat as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1344 1345
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup, listkeys (phases and
    bookmarks) and obsolescence markers. Steps handled here are recorded in
    ``pullop.stepsdone`` so the legacy per-step pull functions skip them."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = False

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phases']
        if pullop.remotebookmarks is None:
            # Make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            kwargs['listkeys'].append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    # Request obsolescence markers only when exchange is enabled and both
    # sides share a marker format version.
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    for namespace, value in op.records['listkeys']:
        if namespace == 'bookmarks':
            pullop.remotebookmarks = value

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1416 1417
1417 1418 def _pullbundle2extraprepare(pullop, kwargs):
1418 1419 """hook function so that extensions can extend the getbundle call"""
1419 1420 pass
1420 1421
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as late as possible so we
    # don't open a transaction for nothing and don't break future useful
    # rollback calls.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # Pick the richest protocol the remote supports: getbundle, then full
    # changegroup, then changegroupsubset for partial pulls.
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1455 1456
1456 1457 def _pullphase(pullop):
1457 1458 # Get remote phases data from remote
1458 1459 if 'phases' in pullop.stepsdone:
1459 1460 return
1460 1461 remotephases = pullop.remote.listkeys('phases')
1461 1462 _pullapplyphases(pullop, remotephases)
1462 1463
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the raw mapping returned by the remote's ``phases``
    listkeys namespace. Opens the pull transaction lazily, only when a
    boundary actually needs to move."""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets
        # should be seen as public.
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    # Bind hot lookups once; phase checks below run per pulled head.
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497 1498
1498 1499 def _pullbookmarks(pullop):
1499 1500 """process the remote bookmark information to update the local one"""
1500 1501 if 'bookmarks' in pullop.stepsdone:
1501 1502 return
1502 1503 pullop.stepsdone.add('bookmarks')
1503 1504 repo = pullop.repo
1504 1505 remotebookmarks = pullop.remotebookmarks
1505 1506 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1506 1507 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1507 1508 pullop.remote.url(),
1508 1509 pullop.gettransaction,
1509 1510 explicit=pullop.explicitbookmarks)
1510 1511
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the calling
    code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    # Only open a transaction once we know there is marker data to store.
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # 'dump0', 'dump1', ... keys each carry a base85-encoded blob of
            # serialized markers; decode and concatenate them all.
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
        pullop.repo.invalidatevolatilesets()
    return tr
1538 1539
def caps20to10(repo):
    """Build the capability set used to request bundle20 via getbundle."""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1545 1546
# Names of the steps to perform when building a bundle2 for getbundle, in the
# order their parts must be generated (order matters on the wire).
getbundle2partsorder = []

# Mapping between step name and the function generating the matching part.
#
# This exists to help extensions wrap individual steps if necessary.
getbundle2partsmapping = {}
1553 1554
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for functions generating bundle2 parts for getbundle

    The decorated function is recorded in the step -> function mapping and
    placed in the ordered list of steps: appended when ``idx`` is None,
    inserted at ``idx`` otherwise. Beware that decorated functions are added
    in definition order (this may matter).

    Only use this decorator for new steps; to wrap a step from an extension,
    modify the getbundle2partsmapping dictionary directly."""
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        position = len(getbundle2partsorder) if idx is None else idx
        getbundle2partsorder.insert(position, stepname)
        return func
    return dec
1572 1573
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities ask for a bundle2 stream."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1577 1578
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case: a bare changegroup stream, no extra parts possible
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case
    # Decode the client's advertised bundle2 capabilities from the
    # 'bundle2=<urlquoted blob>' entry.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # Generate each registered part in order; each generator decides whether
    # it has anything to emit.
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1618 1619
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            # Negotiate the highest changegroup version both sides support.
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                                bundlecaps=bundlecaps,
                                                version=version)

    # cg may be None (part not requested) or empty (nothing outgoing).
    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if cgversions:
            part.addparam('version', version)
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1646 1647
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """Add one part per requested listkeys namespace to the bundle."""
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1657 1658
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """Add an obsolescence markers part to the requested bundle."""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Only ship markers relevant to the changesets being exchanged.
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1669 1670
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Send only when changesets are being exchanged and the client
    # advertises support for the part.
    wantscg = kwargs.get('cg', True)
    if wantscg and 'hgtagsfnodes' in b2caps:
        outgoing = _computeoutgoing(repo, heads, common)
        bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1689 1690
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    Primarily used to generate the `bookmarks` bundle2 part. Kept as a
    standalone function so it is easy to wrap in extensions; ``kwargs``
    exists so extensions can add parameters without changing the signature.
    """
    return {name: node for name, node in bookmod.listbinbookmarks(repo)}
1700 1701
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling."""
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    unchanged = (their_heads == ['force']
                 or their_heads == heads
                 or their_heads == ['hashed', heads_hash])
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1714 1715
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and has
    a mechanism to check that no push race occurred between the creation of
    the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # Lazily acquire wlock, lock and transaction the first time a
                # part actually needs them.
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # Buffer further ui output so it can be attached to
                        # the reply bundle instead of leaking to the client.
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Flag the failure so upper layers know the bundle2 machinery
                # was involved, and salvage any output produced so far.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1787 1788
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible.

    Only attempted for a full clone (empty local repo, no explicit heads)
    against a server advertising the ``clonebundles`` capability."""

    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(repo, entries)
    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1851 1852
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Each non-empty manifest line is ``<URL> [key=value ...]`` with
    URL-quoted keys and values.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    m = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                # A bad spec doesn't invalidate the entry; filtering happens
                # later in filterclonebundleentries().
                except error.InvalidBundleSpecification:
                    pass
                except error.UnsupportedBundleSpecification:
                    pass

        m.append(attrs)

    return m
1887 1888
def filterclonebundleentries(repo, entries):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            # Entries with an unparseable or unsupported bundle spec are
            # dropped (with a debug trace), not fatal.
            try:
                parsebundlespec(repo, spec, strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue

        # Server requires SNI but this client's TLS stack can't do it.
        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        newentries.append(entry)

    return newentries
1920 1921
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preferences in order; the first attribute that
        # discriminates the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            mine = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # An exact match beats a missing attribute on the other side.
            if mine is not None and theirs is None and mine == prefvalue:
                return -1
            if theirs is not None and mine is None and theirs == prefvalue:
                return 1

            # Cannot discriminate unless both entries carry the attribute
            # with differing values.
            if mine is None or theirs is None or mine == theirs:
                continue

            # Exact matches sort first.
            if mine == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # No preference discriminated; preserve original (index) order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
1984 1985
def sortclonebundleentries(ui, entries):
    """Order manifest entries according to ``ui.clonebundleprefers``.

    Without configured preferences the manifest order is kept as-is."""
    raw = ui.configlist('ui', 'clonebundleprefers')
    if not raw:
        return list(entries)

    prefers = [p.split('=', 1) for p in raw]
    decorated = sorted(clonebundleentry(e, prefers) for e in entries)
    return [item.value for item in decorated]
1994 1995
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True on success; on HTTP/URL errors a warning is printed and
    False is returned so the caller can fall back to a regular clone."""
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            if isinstance(cg, streamclone.streamcloneapplier):
                # Stream clone data is applied directly, bypassing bundle2.
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

        return False
@@ -1,1270 +1,1270 b''
1 1 This test file test the various templates related to obsmarkers.
2 2
3 3 Global setup
4 4 ============
5 5
6 6 $ . $TESTDIR/testlib/obsmarker-common.sh
7 7 $ cat >> $HGRCPATH <<EOF
8 8 > [ui]
9 9 > interactive = true
10 10 > [phases]
11 11 > publish=False
12 12 > [experimental]
13 13 > evolution=all
14 14 > [alias]
15 15 > tlog = log -G -T '{node|short}\
16 16 > {if(predecessors, "\n Predecessors: {predecessors}")}\
17 17 > {if(predecessors, "\n semi-colon: {join(predecessors, "; ")}")}\
18 18 > {if(predecessors, "\n json: {predecessors|json}")}\
19 19 > {if(predecessors, "\n map: {join(predecessors % "{rev}:{node}", " ")}")}\
20 20 > {if(successorssets, "\n Successors: {successorssets}")}\
21 21 > {if(successorssets, "\n multi-line: {join(successorssets, "\n multi-line: ")}")}\
22 22 > {if(successorssets, "\n json: {successorssets|json}")}\n'
23 23 > EOF
24 24
25 25 Test templates on amended commit
26 26 ================================
27 27
28 28 Test setup
29 29 ----------
30 30
31 31 $ hg init $TESTTMP/templates-local-amend
32 32 $ cd $TESTTMP/templates-local-amend
33 33 $ mkcommit ROOT
34 34 $ mkcommit A0
35 35 $ echo 42 >> A0
36 36 $ hg commit --amend -m "A1"
37 37 $ hg commit --amend -m "A2"
38 38
39 39 $ hg log --hidden -G
40 40 @ changeset: 4:d004c8f274b9
41 41 | tag: tip
42 42 | parent: 0:ea207398892e
43 43 | user: test
44 44 | date: Thu Jan 01 00:00:00 1970 +0000
45 45 | summary: A2
46 46 |
47 47 | x changeset: 3:a468dc9b3633
48 48 |/ parent: 0:ea207398892e
49 49 | user: test
50 50 | date: Thu Jan 01 00:00:00 1970 +0000
51 51 | summary: A1
52 52 |
53 53 | x changeset: 2:f137d23bb3e1
54 54 | | user: test
55 55 | | date: Thu Jan 01 00:00:00 1970 +0000
56 56 | | summary: temporary amend commit for 471f378eab4c
57 57 | |
58 58 | x changeset: 1:471f378eab4c
59 59 |/ user: test
60 60 | date: Thu Jan 01 00:00:00 1970 +0000
61 61 | summary: A0
62 62 |
63 63 o changeset: 0:ea207398892e
64 64 user: test
65 65 date: Thu Jan 01 00:00:00 1970 +0000
66 66 summary: ROOT
67 67
68 68 Check templates
69 69 ---------------
70 70 $ hg up 'desc(A0)' --hidden
71 71 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
72 72
73 73 Predecessors template should show current revision as it is the working copy
74 74 $ hg tlog
75 75 o d004c8f274b9
76 76 | Predecessors: 1:471f378eab4c
77 77 | semi-colon: 1:471f378eab4c
78 78 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
79 79 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
80 80 | @ 471f378eab4c
81 81 |/ Successors: 4:d004c8f274b9
82 82 | multi-line: 4:d004c8f274b9
83 83 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
84 84 o ea207398892e
85 85
86 86 $ hg up 'desc(A1)' --hidden
87 87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 88
89 89 Predecessors template should show current revision as it is the working copy
90 90 $ hg tlog
91 91 o d004c8f274b9
92 92 | Predecessors: 3:a468dc9b3633
93 93 | semi-colon: 3:a468dc9b3633
94 94 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
95 95 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
96 96 | @ a468dc9b3633
97 97 |/ Successors: 4:d004c8f274b9
98 98 | multi-line: 4:d004c8f274b9
99 99 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
100 100 o ea207398892e
101 101
102 102 Predecessors template should show all the predecessors as we force their display
103 103 with --hidden
104 104 $ hg tlog --hidden
105 105 o d004c8f274b9
106 106 | Predecessors: 3:a468dc9b3633
107 107 | semi-colon: 3:a468dc9b3633
108 108 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
109 109 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
110 110 | @ a468dc9b3633
111 111 |/ Predecessors: 1:471f378eab4c
112 112 | semi-colon: 1:471f378eab4c
113 113 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
114 114 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
115 115 | Successors: 4:d004c8f274b9
116 116 | multi-line: 4:d004c8f274b9
117 117 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
118 118 | x f137d23bb3e1
119 119 | |
120 120 | x 471f378eab4c
121 121 |/ Successors: 3:a468dc9b3633
122 122 | multi-line: 3:a468dc9b3633
123 123 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
124 124 o ea207398892e
125 125
126 126
127 127 Predecessors template shouldn't show anything as all obsolete commit are not
128 128 visible.
129 129 $ hg up 'desc(A2)'
130 130 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
131 131 $ hg tlog
132 132 @ d004c8f274b9
133 133 |
134 134 o ea207398892e
135 135
136 136 $ hg tlog --hidden
137 137 @ d004c8f274b9
138 138 | Predecessors: 3:a468dc9b3633
139 139 | semi-colon: 3:a468dc9b3633
140 140 | json: ["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]
141 141 | map: 3:a468dc9b36338b14fdb7825f55ce3df4e71517ad
142 142 | x a468dc9b3633
143 143 |/ Predecessors: 1:471f378eab4c
144 144 | semi-colon: 1:471f378eab4c
145 145 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
146 146 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
147 147 | Successors: 4:d004c8f274b9
148 148 | multi-line: 4:d004c8f274b9
149 149 | json: [["d004c8f274b9ec480a47a93c10dac5eee63adb78"]]
150 150 | x f137d23bb3e1
151 151 | |
152 152 | x 471f378eab4c
153 153 |/ Successors: 3:a468dc9b3633
154 154 | multi-line: 3:a468dc9b3633
155 155 | json: [["a468dc9b36338b14fdb7825f55ce3df4e71517ad"]]
156 156 o ea207398892e
157 157
158 158
159 159 Test templates with splitted commit
160 160 ===================================
161 161
162 162 $ hg init $TESTTMP/templates-local-split
163 163 $ cd $TESTTMP/templates-local-split
164 164 $ mkcommit ROOT
165 165 $ echo 42 >> a
166 166 $ echo 43 >> b
167 167 $ hg commit -A -m "A0"
168 168 adding a
169 169 adding b
170 170 $ hg log --hidden -G
171 171 @ changeset: 1:471597cad322
172 172 | tag: tip
173 173 | user: test
174 174 | date: Thu Jan 01 00:00:00 1970 +0000
175 175 | summary: A0
176 176 |
177 177 o changeset: 0:ea207398892e
178 178 user: test
179 179 date: Thu Jan 01 00:00:00 1970 +0000
180 180 summary: ROOT
181 181
182 182 # Simulate split
183 183 $ hg up -r "desc(ROOT)"
184 184 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
185 185 $ echo 42 >> a
186 186 $ hg commit -A -m "A0"
187 187 adding a
188 188 created new head
189 189 $ echo 43 >> b
190 190 $ hg commit -A -m "A0"
191 191 adding b
192 192 $ hg debugobsolete `getid "1"` `getid "2"` `getid "3"`
193 193 obsoleted 1 changesets
194 194
195 195 $ hg log --hidden -G
196 196 @ changeset: 3:f257fde29c7a
197 197 | tag: tip
198 198 | user: test
199 199 | date: Thu Jan 01 00:00:00 1970 +0000
200 200 | summary: A0
201 201 |
202 202 o changeset: 2:337fec4d2edc
203 203 | parent: 0:ea207398892e
204 204 | user: test
205 205 | date: Thu Jan 01 00:00:00 1970 +0000
206 206 | summary: A0
207 207 |
208 208 | x changeset: 1:471597cad322
209 209 |/ user: test
210 210 | date: Thu Jan 01 00:00:00 1970 +0000
211 211 | summary: A0
212 212 |
213 213 o changeset: 0:ea207398892e
214 214 user: test
215 215 date: Thu Jan 01 00:00:00 1970 +0000
216 216 summary: ROOT
217 217
218 218 Check templates
219 219 ---------------
220 220
221 221 $ hg up 'obsolete()' --hidden
222 222 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
223 223
224 224 Predecessors template should show current revision as it is the working copy
225 225 $ hg tlog
226 226 o f257fde29c7a
227 227 | Predecessors: 1:471597cad322
228 228 | semi-colon: 1:471597cad322
229 229 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
230 230 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
231 231 o 337fec4d2edc
232 232 | Predecessors: 1:471597cad322
233 233 | semi-colon: 1:471597cad322
234 234 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
235 235 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
236 236 | @ 471597cad322
237 237 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
238 238 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
239 239 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
240 240 o ea207398892e
241 241
242 242 $ hg up f257fde29c7a
243 243 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
244 244
245 245 Predecessors template should not show a predecessor as it's not displayed in
246 246 the log
247 247 $ hg tlog
248 248 @ f257fde29c7a
249 249 |
250 250 o 337fec4d2edc
251 251 |
252 252 o ea207398892e
253 253
254 254 Predecessors template should show both predecessors as we force their display
255 255 with --hidden
256 256 $ hg tlog --hidden
257 257 @ f257fde29c7a
258 258 | Predecessors: 1:471597cad322
259 259 | semi-colon: 1:471597cad322
260 260 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
261 261 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
262 262 o 337fec4d2edc
263 263 | Predecessors: 1:471597cad322
264 264 | semi-colon: 1:471597cad322
265 265 | json: ["471597cad322d1f659bb169751be9133dad92ef3"]
266 266 | map: 1:471597cad322d1f659bb169751be9133dad92ef3
267 267 | x 471597cad322
268 268 |/ Successors: 2:337fec4d2edc 3:f257fde29c7a
269 269 | multi-line: 2:337fec4d2edc 3:f257fde29c7a
270 270 | json: [["337fec4d2edcf0e7a467e35f818234bc620068b5", "f257fde29c7a847c9b607f6e958656d0df0fb15c"]]
271 271 o ea207398892e
272 272
273 273 Test templates with folded commit
274 274 =================================
275 275
276 276 Test setup
277 277 ----------
278 278
279 279 $ hg init $TESTTMP/templates-local-fold
280 280 $ cd $TESTTMP/templates-local-fold
281 281 $ mkcommit ROOT
282 282 $ mkcommit A0
283 283 $ mkcommit B0
284 284 $ hg log --hidden -G
285 285 @ changeset: 2:0dec01379d3b
286 286 | tag: tip
287 287 | user: test
288 288 | date: Thu Jan 01 00:00:00 1970 +0000
289 289 | summary: B0
290 290 |
291 291 o changeset: 1:471f378eab4c
292 292 | user: test
293 293 | date: Thu Jan 01 00:00:00 1970 +0000
294 294 | summary: A0
295 295 |
296 296 o changeset: 0:ea207398892e
297 297 user: test
298 298 date: Thu Jan 01 00:00:00 1970 +0000
299 299 summary: ROOT
300 300
301 301 Simulate a fold
302 302 $ hg up -r "desc(ROOT)"
303 303 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
304 304 $ echo "A0" > A0
305 305 $ echo "B0" > B0
306 306 $ hg commit -A -m "C0"
307 307 adding A0
308 308 adding B0
309 309 created new head
310 310 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
311 311 obsoleted 1 changesets
312 312 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
313 313 obsoleted 1 changesets
314 314
315 315 $ hg log --hidden -G
316 316 @ changeset: 3:eb5a0daa2192
317 317 | tag: tip
318 318 | parent: 0:ea207398892e
319 319 | user: test
320 320 | date: Thu Jan 01 00:00:00 1970 +0000
321 321 | summary: C0
322 322 |
323 323 | x changeset: 2:0dec01379d3b
324 324 | | user: test
325 325 | | date: Thu Jan 01 00:00:00 1970 +0000
326 326 | | summary: B0
327 327 | |
328 328 | x changeset: 1:471f378eab4c
329 329 |/ user: test
330 330 | date: Thu Jan 01 00:00:00 1970 +0000
331 331 | summary: A0
332 332 |
333 333 o changeset: 0:ea207398892e
334 334 user: test
335 335 date: Thu Jan 01 00:00:00 1970 +0000
336 336 summary: ROOT
337 337
338 338 Check templates
339 339 ---------------
340 340
341 341 $ hg up 'desc(A0)' --hidden
342 342 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
343 343
344 344 Predecessors template should show current revision as it is the working copy
345 345 $ hg tlog
346 346 o eb5a0daa2192
347 347 | Predecessors: 1:471f378eab4c
348 348 | semi-colon: 1:471f378eab4c
349 349 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
350 350 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
351 351 | @ 471f378eab4c
352 352 |/ Successors: 3:eb5a0daa2192
353 353 | multi-line: 3:eb5a0daa2192
354 354 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
355 355 o ea207398892e
356 356
357 357 $ hg up 'desc(B0)' --hidden
358 358 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
359 359
360 360 Predecessors template should show both predecessors as they should be both
361 361 displayed
362 362 $ hg tlog
363 363 o eb5a0daa2192
364 364 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
365 365 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
366 366 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
367 367 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
368 368 | @ 0dec01379d3b
369 369 | | Successors: 3:eb5a0daa2192
370 370 | | multi-line: 3:eb5a0daa2192
371 371 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
372 372 | x 471f378eab4c
373 373 |/ Successors: 3:eb5a0daa2192
374 374 | multi-line: 3:eb5a0daa2192
375 375 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
376 376 o ea207398892e
377 377
378 378 $ hg up 'desc(C0)'
379 379 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 380
381 381 Predecessors template should not show predecessors as they are not displayed in
382 382 the log
383 383 $ hg tlog
384 384 @ eb5a0daa2192
385 385 |
386 386 o ea207398892e
387 387
388 388 Predecessors template should show both predecessors as we force their display
389 389 with --hidden
390 390 $ hg tlog --hidden
391 391 @ eb5a0daa2192
392 392 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
393 393 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
394 394 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
395 395 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
396 396 | x 0dec01379d3b
397 397 | | Successors: 3:eb5a0daa2192
398 398 | | multi-line: 3:eb5a0daa2192
399 399 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
400 400 | x 471f378eab4c
401 401 |/ Successors: 3:eb5a0daa2192
402 402 | multi-line: 3:eb5a0daa2192
403 403 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
404 404 o ea207398892e
405 405
406 406
407 407 Test templates with divergence
408 408 ==============================
409 409
410 410 Test setup
411 411 ----------
412 412
413 413 $ hg init $TESTTMP/templates-local-divergence
414 414 $ cd $TESTTMP/templates-local-divergence
415 415 $ mkcommit ROOT
416 416 $ mkcommit A0
417 417 $ hg commit --amend -m "A1"
418 418 $ hg log --hidden -G
419 419 @ changeset: 2:fdf9bde5129a
420 420 | tag: tip
421 421 | parent: 0:ea207398892e
422 422 | user: test
423 423 | date: Thu Jan 01 00:00:00 1970 +0000
424 424 | summary: A1
425 425 |
426 426 | x changeset: 1:471f378eab4c
427 427 |/ user: test
428 428 | date: Thu Jan 01 00:00:00 1970 +0000
429 429 | summary: A0
430 430 |
431 431 o changeset: 0:ea207398892e
432 432 user: test
433 433 date: Thu Jan 01 00:00:00 1970 +0000
434 434 summary: ROOT
435 435
436 436 $ hg update --hidden 'desc(A0)'
437 437 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
438 438 $ hg commit --amend -m "A2"
439 439 $ hg log --hidden -G
440 440 @ changeset: 3:65b757b745b9
441 441 | tag: tip
442 442 | parent: 0:ea207398892e
443 443 | user: test
444 444 | date: Thu Jan 01 00:00:00 1970 +0000
445 | instability: divergent
445 | instability: content-divergent
446 446 | summary: A2
447 447 |
448 448 | o changeset: 2:fdf9bde5129a
449 449 |/ parent: 0:ea207398892e
450 450 | user: test
451 451 | date: Thu Jan 01 00:00:00 1970 +0000
452 | instability: divergent
452 | instability: content-divergent
453 453 | summary: A1
454 454 |
455 455 | x changeset: 1:471f378eab4c
456 456 |/ user: test
457 457 | date: Thu Jan 01 00:00:00 1970 +0000
458 458 | summary: A0
459 459 |
460 460 o changeset: 0:ea207398892e
461 461 user: test
462 462 date: Thu Jan 01 00:00:00 1970 +0000
463 463 summary: ROOT
464 464
465 465 $ hg commit --amend -m 'A3'
466 466 $ hg log --hidden -G
467 467 @ changeset: 4:019fadeab383
468 468 | tag: tip
469 469 | parent: 0:ea207398892e
470 470 | user: test
471 471 | date: Thu Jan 01 00:00:00 1970 +0000
472 | instability: divergent
472 | instability: content-divergent
473 473 | summary: A3
474 474 |
475 475 | x changeset: 3:65b757b745b9
476 476 |/ parent: 0:ea207398892e
477 477 | user: test
478 478 | date: Thu Jan 01 00:00:00 1970 +0000
479 479 | summary: A2
480 480 |
481 481 | o changeset: 2:fdf9bde5129a
482 482 |/ parent: 0:ea207398892e
483 483 | user: test
484 484 | date: Thu Jan 01 00:00:00 1970 +0000
485 | instability: divergent
485 | instability: content-divergent
486 486 | summary: A1
487 487 |
488 488 | x changeset: 1:471f378eab4c
489 489 |/ user: test
490 490 | date: Thu Jan 01 00:00:00 1970 +0000
491 491 | summary: A0
492 492 |
493 493 o changeset: 0:ea207398892e
494 494 user: test
495 495 date: Thu Jan 01 00:00:00 1970 +0000
496 496 summary: ROOT
497 497
498 498
499 499 Check templates
500 500 ---------------
501 501
502 502 $ hg up 'desc(A0)' --hidden
503 503 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
504 504
505 505 Predecessors template should show current revision as it is the working copy
506 506 $ hg tlog
507 507 o 019fadeab383
508 508 | Predecessors: 1:471f378eab4c
509 509 | semi-colon: 1:471f378eab4c
510 510 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
511 511 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
512 512 | o fdf9bde5129a
513 513 |/ Predecessors: 1:471f378eab4c
514 514 | semi-colon: 1:471f378eab4c
515 515 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
516 516 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
517 517 | @ 471f378eab4c
518 518 |/ Successors: 2:fdf9bde5129a; 4:019fadeab383
519 519 | multi-line: 2:fdf9bde5129a
520 520 | multi-line: 4:019fadeab383
521 521 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
522 522 o ea207398892e
523 523
524 524 $ hg up 'desc(A1)'
525 525 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
526 526
527 527 Predecessors template should not show predecessors as they are not displayed in
528 528 the log
529 529 $ hg tlog
530 530 o 019fadeab383
531 531 |
532 532 | @ fdf9bde5129a
533 533 |/
534 534 o ea207398892e
535 535
536 536 Predecessors template should the predecessors as we force their display with
537 537 --hidden
538 538 $ hg tlog --hidden
539 539 o 019fadeab383
540 540 | Predecessors: 3:65b757b745b9
541 541 | semi-colon: 3:65b757b745b9
542 542 | json: ["65b757b745b935093c87a2bccd877521cccffcbd"]
543 543 | map: 3:65b757b745b935093c87a2bccd877521cccffcbd
544 544 | x 65b757b745b9
545 545 |/ Predecessors: 1:471f378eab4c
546 546 | semi-colon: 1:471f378eab4c
547 547 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
548 548 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
549 549 | Successors: 4:019fadeab383
550 550 | multi-line: 4:019fadeab383
551 551 | json: [["019fadeab383f6699fa83ad7bdb4d82ed2c0e5ab"]]
552 552 | @ fdf9bde5129a
553 553 |/ Predecessors: 1:471f378eab4c
554 554 | semi-colon: 1:471f378eab4c
555 555 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
556 556 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
557 557 | x 471f378eab4c
558 558 |/ Successors: 2:fdf9bde5129a; 3:65b757b745b9
559 559 | multi-line: 2:fdf9bde5129a
560 560 | multi-line: 3:65b757b745b9
561 561 | json: [["fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e"], ["65b757b745b935093c87a2bccd877521cccffcbd"]]
562 562 o ea207398892e
563 563
564 564
565 565 Test templates with amended + folded commit
566 566 ===========================================
567 567
568 568 Test setup
569 569 ----------
570 570
571 571 $ hg init $TESTTMP/templates-local-amend-fold
572 572 $ cd $TESTTMP/templates-local-amend-fold
573 573 $ mkcommit ROOT
574 574 $ mkcommit A0
575 575 $ mkcommit B0
576 576 $ hg commit --amend -m "B1"
577 577 $ hg log --hidden -G
578 578 @ changeset: 3:b7ea6d14e664
579 579 | tag: tip
580 580 | parent: 1:471f378eab4c
581 581 | user: test
582 582 | date: Thu Jan 01 00:00:00 1970 +0000
583 583 | summary: B1
584 584 |
585 585 | x changeset: 2:0dec01379d3b
586 586 |/ user: test
587 587 | date: Thu Jan 01 00:00:00 1970 +0000
588 588 | summary: B0
589 589 |
590 590 o changeset: 1:471f378eab4c
591 591 | user: test
592 592 | date: Thu Jan 01 00:00:00 1970 +0000
593 593 | summary: A0
594 594 |
595 595 o changeset: 0:ea207398892e
596 596 user: test
597 597 date: Thu Jan 01 00:00:00 1970 +0000
598 598 summary: ROOT
599 599
600 600 # Simulate a fold
601 601 $ hg up -r "desc(ROOT)"
602 602 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
603 603 $ echo "A0" > A0
604 604 $ echo "B0" > B0
605 605 $ hg commit -A -m "C0"
606 606 adding A0
607 607 adding B0
608 608 created new head
609 609 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(C0)"`
610 610 obsoleted 1 changesets
611 611 $ hg debugobsolete `getid "desc(B1)"` `getid "desc(C0)"`
612 612 obsoleted 1 changesets
613 613
614 614 $ hg log --hidden -G
615 615 @ changeset: 4:eb5a0daa2192
616 616 | tag: tip
617 617 | parent: 0:ea207398892e
618 618 | user: test
619 619 | date: Thu Jan 01 00:00:00 1970 +0000
620 620 | summary: C0
621 621 |
622 622 | x changeset: 3:b7ea6d14e664
623 623 | | parent: 1:471f378eab4c
624 624 | | user: test
625 625 | | date: Thu Jan 01 00:00:00 1970 +0000
626 626 | | summary: B1
627 627 | |
628 628 | | x changeset: 2:0dec01379d3b
629 629 | |/ user: test
630 630 | | date: Thu Jan 01 00:00:00 1970 +0000
631 631 | | summary: B0
632 632 | |
633 633 | x changeset: 1:471f378eab4c
634 634 |/ user: test
635 635 | date: Thu Jan 01 00:00:00 1970 +0000
636 636 | summary: A0
637 637 |
638 638 o changeset: 0:ea207398892e
639 639 user: test
640 640 date: Thu Jan 01 00:00:00 1970 +0000
641 641 summary: ROOT
642 642
643 643 Check templates
644 644 ---------------
645 645
646 646 $ hg up 'desc(A0)' --hidden
647 647 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
648 648
649 649 Predecessors template should show current revision as it is the working copy
650 650 $ hg tlog
651 651 o eb5a0daa2192
652 652 | Predecessors: 1:471f378eab4c
653 653 | semi-colon: 1:471f378eab4c
654 654 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
655 655 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
656 656 | @ 471f378eab4c
657 657 |/ Successors: 4:eb5a0daa2192
658 658 | multi-line: 4:eb5a0daa2192
659 659 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
660 660 o ea207398892e
661 661
662 662 $ hg up 'desc(B0)' --hidden
663 663 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
664 664
665 665 Predecessors template should both predecessors as they are visible
666 666 $ hg tlog
667 667 o eb5a0daa2192
668 668 | Predecessors: 2:0dec01379d3b 1:471f378eab4c
669 669 | semi-colon: 2:0dec01379d3b; 1:471f378eab4c
670 670 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5", "471f378eab4c5e25f6c77f785b27c936efb22874"]
671 671 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5 1:471f378eab4c5e25f6c77f785b27c936efb22874
672 672 | @ 0dec01379d3b
673 673 | | Successors: 4:eb5a0daa2192
674 674 | | multi-line: 4:eb5a0daa2192
675 675 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
676 676 | x 471f378eab4c
677 677 |/ Successors: 4:eb5a0daa2192
678 678 | multi-line: 4:eb5a0daa2192
679 679 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
680 680 o ea207398892e
681 681
682 682 $ hg up 'desc(B1)' --hidden
683 683 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
684 684
685 685 Predecessors template should both predecessors as they are visible
686 686 $ hg tlog
687 687 o eb5a0daa2192
688 688 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
689 689 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
690 690 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
691 691 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
692 692 | @ b7ea6d14e664
693 693 | | Successors: 4:eb5a0daa2192
694 694 | | multi-line: 4:eb5a0daa2192
695 695 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
696 696 | x 471f378eab4c
697 697 |/ Successors: 4:eb5a0daa2192
698 698 | multi-line: 4:eb5a0daa2192
699 699 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
700 700 o ea207398892e
701 701
702 702 $ hg up 'desc(C0)'
703 703 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
704 704
705 705 Predecessors template should show no predecessors as they are both non visible
706 706 $ hg tlog
707 707 @ eb5a0daa2192
708 708 |
709 709 o ea207398892e
710 710
711 711 Predecessors template should show all predecessors as we force their display
712 712 with --hidden
713 713 $ hg tlog --hidden
714 714 @ eb5a0daa2192
715 715 | Predecessors: 1:471f378eab4c 3:b7ea6d14e664
716 716 | semi-colon: 1:471f378eab4c; 3:b7ea6d14e664
717 717 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874", "b7ea6d14e664bdc8922221f7992631b50da3fb07"]
718 718 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874 3:b7ea6d14e664bdc8922221f7992631b50da3fb07
719 719 | x b7ea6d14e664
720 720 | | Predecessors: 2:0dec01379d3b
721 721 | | semi-colon: 2:0dec01379d3b
722 722 | | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
723 723 | | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
724 724 | | Successors: 4:eb5a0daa2192
725 725 | | multi-line: 4:eb5a0daa2192
726 726 | | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
727 727 | | x 0dec01379d3b
728 728 | |/ Successors: 3:b7ea6d14e664
729 729 | | multi-line: 3:b7ea6d14e664
730 730 | | json: [["b7ea6d14e664bdc8922221f7992631b50da3fb07"]]
731 731 | x 471f378eab4c
732 732 |/ Successors: 4:eb5a0daa2192
733 733 | multi-line: 4:eb5a0daa2192
734 734 | json: [["eb5a0daa21923bbf8caeb2c42085b9e463861fd0"]]
735 735 o ea207398892e
736 736
737 737
738 738 Test template with pushed and pulled obs markers
739 739 ================================================
740 740
741 741 Test setup
742 742 ----------
743 743
744 744 $ hg init $TESTTMP/templates-local-remote-markers-1
745 745 $ cd $TESTTMP/templates-local-remote-markers-1
746 746 $ mkcommit ROOT
747 747 $ mkcommit A0
748 748 $ hg clone $TESTTMP/templates-local-remote-markers-1 $TESTTMP/templates-local-remote-markers-2
749 749 updating to branch default
750 750 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
751 751 $ cd $TESTTMP/templates-local-remote-markers-2
752 752 $ hg log --hidden -G
753 753 @ changeset: 1:471f378eab4c
754 754 | tag: tip
755 755 | user: test
756 756 | date: Thu Jan 01 00:00:00 1970 +0000
757 757 | summary: A0
758 758 |
759 759 o changeset: 0:ea207398892e
760 760 user: test
761 761 date: Thu Jan 01 00:00:00 1970 +0000
762 762 summary: ROOT
763 763
764 764 $ cd $TESTTMP/templates-local-remote-markers-1
765 765 $ hg commit --amend -m "A1"
766 766 $ hg commit --amend -m "A2"
767 767 $ hg log --hidden -G
768 768 @ changeset: 3:7a230b46bf61
769 769 | tag: tip
770 770 | parent: 0:ea207398892e
771 771 | user: test
772 772 | date: Thu Jan 01 00:00:00 1970 +0000
773 773 | summary: A2
774 774 |
775 775 | x changeset: 2:fdf9bde5129a
776 776 |/ parent: 0:ea207398892e
777 777 | user: test
778 778 | date: Thu Jan 01 00:00:00 1970 +0000
779 779 | summary: A1
780 780 |
781 781 | x changeset: 1:471f378eab4c
782 782 |/ user: test
783 783 | date: Thu Jan 01 00:00:00 1970 +0000
784 784 | summary: A0
785 785 |
786 786 o changeset: 0:ea207398892e
787 787 user: test
788 788 date: Thu Jan 01 00:00:00 1970 +0000
789 789 summary: ROOT
790 790
791 791 $ cd $TESTTMP/templates-local-remote-markers-2
792 792 $ hg pull
793 793 pulling from $TESTTMP/templates-local-remote-markers-1 (glob)
794 794 searching for changes
795 795 adding changesets
796 796 adding manifests
797 797 adding file changes
798 798 added 1 changesets with 0 changes to 1 files (+1 heads)
799 799 2 new obsolescence markers
800 800 obsoleted 1 changesets
801 801 (run 'hg heads' to see heads, 'hg merge' to merge)
802 802 $ hg log --hidden -G
803 803 o changeset: 2:7a230b46bf61
804 804 | tag: tip
805 805 | parent: 0:ea207398892e
806 806 | user: test
807 807 | date: Thu Jan 01 00:00:00 1970 +0000
808 808 | summary: A2
809 809 |
810 810 | @ changeset: 1:471f378eab4c
811 811 |/ user: test
812 812 | date: Thu Jan 01 00:00:00 1970 +0000
813 813 | summary: A0
814 814 |
815 815 o changeset: 0:ea207398892e
816 816 user: test
817 817 date: Thu Jan 01 00:00:00 1970 +0000
818 818 summary: ROOT
819 819
820 820
821 821 $ hg debugobsolete
822 822 471f378eab4c5e25f6c77f785b27c936efb22874 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
823 823 fdf9bde5129a28d4548fadd3f62b265cdd3b7a2e 7a230b46bf61e50b30308c6cfd7bd1269ef54702 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
824 824
825 825 Check templates
826 826 ---------------
827 827
828 828 Predecessors template should show current revision as it is the working copy
829 829 $ hg tlog
830 830 o 7a230b46bf61
831 831 | Predecessors: 1:471f378eab4c
832 832 | semi-colon: 1:471f378eab4c
833 833 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
834 834 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
835 835 | @ 471f378eab4c
836 836 |/ Successors: 2:7a230b46bf61
837 837 | multi-line: 2:7a230b46bf61
838 838 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
839 839 o ea207398892e
840 840
841 841 $ hg up 'desc(A2)'
842 842 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
843 843
844 844 Predecessors template should show no predecessors as they are non visible
845 845 $ hg tlog
846 846 @ 7a230b46bf61
847 847 |
848 848 o ea207398892e
849 849
850 850 Predecessors template should show all predecessors as we force their display
851 851 with --hidden
852 852 $ hg tlog --hidden
853 853 @ 7a230b46bf61
854 854 | Predecessors: 1:471f378eab4c
855 855 | semi-colon: 1:471f378eab4c
856 856 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
857 857 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
858 858 | x 471f378eab4c
859 859 |/ Successors: 2:7a230b46bf61
860 860 | multi-line: 2:7a230b46bf61
861 861 | json: [["7a230b46bf61e50b30308c6cfd7bd1269ef54702"]]
862 862 o ea207398892e
863 863
864 864
865 865 Test template with obsmarkers cycle
866 866 ===================================
867 867
868 868 Test setup
869 869 ----------
870 870
871 871 $ hg init $TESTTMP/templates-local-cycle
872 872 $ cd $TESTTMP/templates-local-cycle
873 873 $ mkcommit ROOT
874 874 $ mkcommit A0
875 875 $ mkcommit B0
876 876 $ hg up -r 0
877 877 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
878 878 $ mkcommit C0
879 879 created new head
880 880
881 881 Create the cycle
882 882
883 883 $ hg debugobsolete `getid "desc(A0)"` `getid "desc(B0)"`
884 884 obsoleted 1 changesets
885 885 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(C0)"`
886 886 obsoleted 1 changesets
887 887 $ hg debugobsolete `getid "desc(B0)"` `getid "desc(A0)"`
888 888
889 889 Check templates
890 890 ---------------
891 891
892 892 $ hg tlog
893 893 @ f897c6137566
894 894 |
895 895 o ea207398892e
896 896
897 897
898 898 $ hg up -r "desc(B0)" --hidden
899 899 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
900 900 $ hg tlog
901 901 o f897c6137566
902 902 | Predecessors: 2:0dec01379d3b
903 903 | semi-colon: 2:0dec01379d3b
904 904 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
905 905 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
906 906 | @ 0dec01379d3b
907 907 | | Predecessors: 1:471f378eab4c
908 908 | | semi-colon: 1:471f378eab4c
909 909 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
910 910 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
911 911 | | Successors: 3:f897c6137566; 1:471f378eab4c
912 912 | | multi-line: 3:f897c6137566
913 913 | | multi-line: 1:471f378eab4c
914 914 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
915 915 | x 471f378eab4c
916 916 |/ Predecessors: 2:0dec01379d3b
917 917 | semi-colon: 2:0dec01379d3b
918 918 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
919 919 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
920 920 | Successors: 2:0dec01379d3b
921 921 | multi-line: 2:0dec01379d3b
922 922 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
923 923 o ea207398892e
924 924
925 925
926 926 $ hg up -r "desc(A0)" --hidden
927 927 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
928 928 $ hg tlog
929 929 o f897c6137566
930 930 | Predecessors: 1:471f378eab4c
931 931 | semi-colon: 1:471f378eab4c
932 932 | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
933 933 | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
934 934 | @ 471f378eab4c
935 935 |/
936 936 o ea207398892e
937 937
938 938
939 939 $ hg up -r "desc(ROOT)" --hidden
940 940 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
941 941 $ hg tlog
942 942 o f897c6137566
943 943 |
944 944 @ ea207398892e
945 945
946 946
947 947 $ hg tlog --hidden
948 948 o f897c6137566
949 949 | Predecessors: 2:0dec01379d3b
950 950 | semi-colon: 2:0dec01379d3b
951 951 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
952 952 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
953 953 | x 0dec01379d3b
954 954 | | Predecessors: 1:471f378eab4c
955 955 | | semi-colon: 1:471f378eab4c
956 956 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
957 957 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
958 958 | | Successors: 3:f897c6137566; 1:471f378eab4c
959 959 | | multi-line: 3:f897c6137566
960 960 | | multi-line: 1:471f378eab4c
961 961 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
962 962 | x 471f378eab4c
963 963 |/ Predecessors: 2:0dec01379d3b
964 964 | semi-colon: 2:0dec01379d3b
965 965 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
966 966 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
967 967 | Successors: 2:0dec01379d3b
968 968 | multi-line: 2:0dec01379d3b
969 969 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
970 970 @ ea207398892e
971 971
972 972 Test template with split + divergence with cycles
973 973 =================================================
974 974
975 975 $ hg log -G
976 976 o changeset: 3:f897c6137566
977 977 | tag: tip
978 978 | parent: 0:ea207398892e
979 979 | user: test
980 980 | date: Thu Jan 01 00:00:00 1970 +0000
981 981 | summary: C0
982 982 |
983 983 @ changeset: 0:ea207398892e
984 984 user: test
985 985 date: Thu Jan 01 00:00:00 1970 +0000
986 986 summary: ROOT
987 987
988 988 $ hg up
989 989 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
990 990
991 991 Create a commit with three files
992 992 $ touch A B C
993 993 $ hg commit -A -m "Add A,B,C" A B C
994 994
995 995 Split it
996 996 $ hg up 3
997 997 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
998 998 $ touch A
999 999 $ hg commit -A -m "Add A,B,C" A
1000 1000 created new head
1001 1001
1002 1002 $ touch B
1003 1003 $ hg commit -A -m "Add A,B,C" B
1004 1004
1005 1005 $ touch C
1006 1006 $ hg commit -A -m "Add A,B,C" C
1007 1007
1008 1008 $ hg log -G
1009 1009 @ changeset: 7:ba2ed02b0c9a
1010 1010 | tag: tip
1011 1011 | user: test
1012 1012 | date: Thu Jan 01 00:00:00 1970 +0000
1013 1013 | summary: Add A,B,C
1014 1014 |
1015 1015 o changeset: 6:4a004186e638
1016 1016 | user: test
1017 1017 | date: Thu Jan 01 00:00:00 1970 +0000
1018 1018 | summary: Add A,B,C
1019 1019 |
1020 1020 o changeset: 5:dd800401bd8c
1021 1021 | parent: 3:f897c6137566
1022 1022 | user: test
1023 1023 | date: Thu Jan 01 00:00:00 1970 +0000
1024 1024 | summary: Add A,B,C
1025 1025 |
1026 1026 | o changeset: 4:9bd10a0775e4
1027 1027 |/ user: test
1028 1028 | date: Thu Jan 01 00:00:00 1970 +0000
1029 1029 | summary: Add A,B,C
1030 1030 |
1031 1031 o changeset: 3:f897c6137566
1032 1032 | parent: 0:ea207398892e
1033 1033 | user: test
1034 1034 | date: Thu Jan 01 00:00:00 1970 +0000
1035 1035 | summary: C0
1036 1036 |
1037 1037 o changeset: 0:ea207398892e
1038 1038 user: test
1039 1039 date: Thu Jan 01 00:00:00 1970 +0000
1040 1040 summary: ROOT
1041 1041
1042 1042 $ hg debugobsolete `getid "4"` `getid "5"` `getid "6"` `getid "7"`
1043 1043 obsoleted 1 changesets
1044 1044 $ hg log -G
1045 1045 @ changeset: 7:ba2ed02b0c9a
1046 1046 | tag: tip
1047 1047 | user: test
1048 1048 | date: Thu Jan 01 00:00:00 1970 +0000
1049 1049 | summary: Add A,B,C
1050 1050 |
1051 1051 o changeset: 6:4a004186e638
1052 1052 | user: test
1053 1053 | date: Thu Jan 01 00:00:00 1970 +0000
1054 1054 | summary: Add A,B,C
1055 1055 |
1056 1056 o changeset: 5:dd800401bd8c
1057 1057 | parent: 3:f897c6137566
1058 1058 | user: test
1059 1059 | date: Thu Jan 01 00:00:00 1970 +0000
1060 1060 | summary: Add A,B,C
1061 1061 |
1062 1062 o changeset: 3:f897c6137566
1063 1063 | parent: 0:ea207398892e
1064 1064 | user: test
1065 1065 | date: Thu Jan 01 00:00:00 1970 +0000
1066 1066 | summary: C0
1067 1067 |
1068 1068 o changeset: 0:ea207398892e
1069 1069 user: test
1070 1070 date: Thu Jan 01 00:00:00 1970 +0000
1071 1071 summary: ROOT
1072 1072
1073 1073 Diverge one of the splitted commit
1074 1074
1075 1075 $ hg up 6
1076 1076 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1077 1077 $ hg commit --amend -m "Add only B"
1078 1078
1079 1079 $ hg up 6 --hidden
1080 1080 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1081 1081 $ hg commit --amend -m "Add B only"
1082 1082
1083 1083 $ hg log -G
1084 1084 @ changeset: 9:0b997eb7ceee
1085 1085 | tag: tip
1086 1086 | parent: 5:dd800401bd8c
1087 1087 | user: test
1088 1088 | date: Thu Jan 01 00:00:00 1970 +0000
1089 | instability: divergent
1089 | instability: content-divergent
1090 1090 | summary: Add B only
1091 1091 |
1092 1092 | o changeset: 8:b18bc8331526
1093 1093 |/ parent: 5:dd800401bd8c
1094 1094 | user: test
1095 1095 | date: Thu Jan 01 00:00:00 1970 +0000
1096 | instability: divergent
1096 | instability: content-divergent
1097 1097 | summary: Add only B
1098 1098 |
1099 1099 | o changeset: 7:ba2ed02b0c9a
1100 1100 | | user: test
1101 1101 | | date: Thu Jan 01 00:00:00 1970 +0000
1102 | | instability: orphan, divergent
1102 | | instability: orphan, content-divergent
1103 1103 | | summary: Add A,B,C
1104 1104 | |
1105 1105 | x changeset: 6:4a004186e638
1106 1106 |/ user: test
1107 1107 | date: Thu Jan 01 00:00:00 1970 +0000
1108 1108 | summary: Add A,B,C
1109 1109 |
1110 1110 o changeset: 5:dd800401bd8c
1111 1111 | parent: 3:f897c6137566
1112 1112 | user: test
1113 1113 | date: Thu Jan 01 00:00:00 1970 +0000
1114 | instability: divergent
1114 | instability: content-divergent
1115 1115 | summary: Add A,B,C
1116 1116 |
1117 1117 o changeset: 3:f897c6137566
1118 1118 | parent: 0:ea207398892e
1119 1119 | user: test
1120 1120 | date: Thu Jan 01 00:00:00 1970 +0000
1121 1121 | summary: C0
1122 1122 |
1123 1123 o changeset: 0:ea207398892e
1124 1124 user: test
1125 1125 date: Thu Jan 01 00:00:00 1970 +0000
1126 1126 summary: ROOT
1127 1127
1128 1128
1129 1129 Check templates
1130 1130 ---------------
1131 1131
1132 1132 $ hg tlog
1133 1133 @ 0b997eb7ceee
1134 1134 | Predecessors: 6:4a004186e638
1135 1135 | semi-colon: 6:4a004186e638
1136 1136 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1137 1137 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1138 1138 | o b18bc8331526
1139 1139 |/ Predecessors: 6:4a004186e638
1140 1140 | semi-colon: 6:4a004186e638
1141 1141 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1142 1142 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1143 1143 | o ba2ed02b0c9a
1144 1144 | |
1145 1145 | x 4a004186e638
1146 1146 |/ Successors: 8:b18bc8331526; 9:0b997eb7ceee
1147 1147 | multi-line: 8:b18bc8331526
1148 1148 | multi-line: 9:0b997eb7ceee
1149 1149 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1150 1150 o dd800401bd8c
1151 1151 |
1152 1152 o f897c6137566
1153 1153 |
1154 1154 o ea207398892e
1155 1155
1156 1156 $ hg tlog --hidden
1157 1157 @ 0b997eb7ceee
1158 1158 | Predecessors: 6:4a004186e638
1159 1159 | semi-colon: 6:4a004186e638
1160 1160 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1161 1161 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1162 1162 | o b18bc8331526
1163 1163 |/ Predecessors: 6:4a004186e638
1164 1164 | semi-colon: 6:4a004186e638
1165 1165 | json: ["4a004186e63889f20cb16434fcbd72220bd1eace"]
1166 1166 | map: 6:4a004186e63889f20cb16434fcbd72220bd1eace
1167 1167 | o ba2ed02b0c9a
1168 1168 | | Predecessors: 4:9bd10a0775e4
1169 1169 | | semi-colon: 4:9bd10a0775e4
1170 1170 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1171 1171 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1172 1172 | x 4a004186e638
1173 1173 |/ Predecessors: 4:9bd10a0775e4
1174 1174 | semi-colon: 4:9bd10a0775e4
1175 1175 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1176 1176 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1177 1177 | Successors: 8:b18bc8331526; 9:0b997eb7ceee
1178 1178 | multi-line: 8:b18bc8331526
1179 1179 | multi-line: 9:0b997eb7ceee
1180 1180 | json: [["b18bc8331526a22cbb1801022bd1555bf291c48b"], ["0b997eb7ceeee06200a02f8aab185979092d514e"]]
1181 1181 o dd800401bd8c
1182 1182 | Predecessors: 4:9bd10a0775e4
1183 1183 | semi-colon: 4:9bd10a0775e4
1184 1184 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1185 1185 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1186 1186 | x 9bd10a0775e4
1187 1187 |/ Successors: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1188 1188 | multi-line: 5:dd800401bd8c 6:4a004186e638 7:ba2ed02b0c9a
1189 1189 | json: [["dd800401bd8c79d815329277739e433e883f784e", "4a004186e63889f20cb16434fcbd72220bd1eace", "ba2ed02b0c9a56b9fdbc4e79c7e57866984d8a1f"]]
1190 1190 o f897c6137566
1191 1191 | Predecessors: 2:0dec01379d3b
1192 1192 | semi-colon: 2:0dec01379d3b
1193 1193 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1194 1194 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1195 1195 | x 0dec01379d3b
1196 1196 | | Predecessors: 1:471f378eab4c
1197 1197 | | semi-colon: 1:471f378eab4c
1198 1198 | | json: ["471f378eab4c5e25f6c77f785b27c936efb22874"]
1199 1199 | | map: 1:471f378eab4c5e25f6c77f785b27c936efb22874
1200 1200 | | Successors: 3:f897c6137566; 1:471f378eab4c
1201 1201 | | multi-line: 3:f897c6137566
1202 1202 | | multi-line: 1:471f378eab4c
1203 1203 | | json: [["f897c6137566320b081514b4c7227ecc3d384b39"], ["471f378eab4c5e25f6c77f785b27c936efb22874"]]
1204 1204 | x 471f378eab4c
1205 1205 |/ Predecessors: 2:0dec01379d3b
1206 1206 | semi-colon: 2:0dec01379d3b
1207 1207 | json: ["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]
1208 1208 | map: 2:0dec01379d3be6318c470ead31b1fe7ae7cb53d5
1209 1209 | Successors: 2:0dec01379d3b
1210 1210 | multi-line: 2:0dec01379d3b
1211 1211 | json: [["0dec01379d3be6318c470ead31b1fe7ae7cb53d5"]]
1212 1212 o ea207398892e
1213 1213
1214 1214 $ hg up --hidden 4
1215 1215 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1216 1216 $ hg rebase -r 7 -d 8 --config extensions.rebase=
1217 1217 rebasing 7:ba2ed02b0c9a "Add A,B,C"
1218 1218 $ hg tlog
1219 1219 o eceed8f98ffc
1220 1220 | Predecessors: 4:9bd10a0775e4
1221 1221 | semi-colon: 4:9bd10a0775e4
1222 1222 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1223 1223 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1224 1224 | o 0b997eb7ceee
1225 1225 | | Predecessors: 4:9bd10a0775e4
1226 1226 | | semi-colon: 4:9bd10a0775e4
1227 1227 | | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1228 1228 | | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1229 1229 o | b18bc8331526
1230 1230 |/ Predecessors: 4:9bd10a0775e4
1231 1231 | semi-colon: 4:9bd10a0775e4
1232 1232 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1233 1233 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1234 1234 o dd800401bd8c
1235 1235 | Predecessors: 4:9bd10a0775e4
1236 1236 | semi-colon: 4:9bd10a0775e4
1237 1237 | json: ["9bd10a0775e478708cada5f176ec6de654359ce7"]
1238 1238 | map: 4:9bd10a0775e478708cada5f176ec6de654359ce7
1239 1239 | @ 9bd10a0775e4
1240 1240 |/ Successors: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc; 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1241 1241 | multi-line: 5:dd800401bd8c 9:0b997eb7ceee 10:eceed8f98ffc
1242 1242 | multi-line: 5:dd800401bd8c 8:b18bc8331526 10:eceed8f98ffc
1243 1243 | json: [["dd800401bd8c79d815329277739e433e883f784e", "0b997eb7ceeee06200a02f8aab185979092d514e", "eceed8f98ffc4186032e29a6542ab98888ebf68d"], ["dd800401bd8c79d815329277739e433e883f784e", "b18bc8331526a22cbb1801022bd1555bf291c48b", "eceed8f98ffc4186032e29a6542ab98888ebf68d"]]
1244 1244 o f897c6137566
1245 1245 |
1246 1246 o ea207398892e
1247 1247
1248 1248 Test templates with pruned commits
1249 1249 ==================================
1250 1250
1251 1251 Test setup
1252 1252 ----------
1253 1253
1254 1254 $ hg init $TESTTMP/templates-local-prune
1255 1255 $ cd $TESTTMP/templates-local-prune
1256 1256 $ mkcommit ROOT
1257 1257 $ mkcommit A0
1258 1258 $ hg debugobsolete --record-parent `getid "."`
1259 1259 obsoleted 1 changesets
1260 1260
1261 1261 Check output
1262 1262 ------------
1263 1263
1264 1264 $ hg up "desc(A0)" --hidden
1265 1265 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1266 1266 $ hg tlog
1267 1267 @ 471f378eab4c
1268 1268 |
1269 1269 o ea207398892e
1270 1270
@@ -1,685 +1,685 b''
1 1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2 2
3 3 This is the most complex troubles from far so we isolate it in a dedicated
4 4 file.
5 5
6 6 Enable obsolete
7 7
8 8 $ cat >> $HGRCPATH << EOF
9 9 > [ui]
10 10 > logtemplate = {rev}:{node|short} {desc}\n
11 11 > [experimental]
12 12 > evolution=createmarkers
13 13 > [extensions]
14 14 > drawdag=$TESTDIR/drawdag.py
15 15 > [alias]
16 16 > debugobsolete = debugobsolete -d '0 0'
17 17 > [phases]
18 18 > publish=False
19 19 > EOF
20 20
21 21
22 22 $ mkcommit() {
23 23 > echo "$1" > "$1"
24 24 > hg add "$1"
25 25 > hg ci -m "$1"
26 26 > }
27 27 $ getid() {
28 28 > hg log --hidden -r "desc('$1')" -T '{node}\n'
29 29 > }
30 30
31 31 setup repo
32 32
33 33 $ hg init reference
34 34 $ cd reference
35 35 $ mkcommit base
36 36 $ mkcommit A_0
37 37 $ hg up 0
38 38 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
39 39 $ mkcommit A_1
40 40 created new head
41 41 $ hg up 0
42 42 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
43 43 $ mkcommit A_2
44 44 created new head
45 45 $ hg up 0
46 46 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
47 47 $ cd ..
48 48
49 49
50 50 $ newcase() {
51 51 > hg clone -u 0 -q reference $1
52 52 > cd $1
53 53 > }
54 54
55 55 direct divergence
56 56 -----------------
57 57
58 58 A_1 have two direct and divergent successors A_1 and A_1
59 59
60 60 $ newcase direct
61 61 $ hg debugobsolete `getid A_0` `getid A_1`
62 62 obsoleted 1 changesets
63 63 $ hg debugobsolete `getid A_0` `getid A_2`
64 64 $ hg log -G --hidden
65 65 o 3:392fd25390da A_2
66 66 |
67 67 | o 2:82623d38b9ba A_1
68 68 |/
69 69 | x 1:007dc284c1f8 A_0
70 70 |/
71 71 @ 0:d20a80d4def3 base
72 72
73 73 $ hg debugsuccessorssets --hidden 'all()'
74 74 d20a80d4def3
75 75 d20a80d4def3
76 76 007dc284c1f8
77 77 82623d38b9ba
78 78 392fd25390da
79 79 82623d38b9ba
80 80 82623d38b9ba
81 81 392fd25390da
82 82 392fd25390da
83 83 $ hg log -r 'divergent()'
84 84 2:82623d38b9ba A_1
85 85 3:392fd25390da A_2
86 86 $ hg debugsuccessorssets 'all()' --closest
87 87 d20a80d4def3
88 88 d20a80d4def3
89 89 82623d38b9ba
90 90 82623d38b9ba
91 91 392fd25390da
92 92 392fd25390da
93 93 $ hg debugsuccessorssets 'all()' --closest --hidden
94 94 d20a80d4def3
95 95 d20a80d4def3
96 96 007dc284c1f8
97 97 82623d38b9ba
98 98 392fd25390da
99 99 82623d38b9ba
100 100 82623d38b9ba
101 101 392fd25390da
102 102 392fd25390da
103 103
104 104 check that mercurial refuse to push
105 105
106 106 $ hg init ../other
107 107 $ hg push ../other
108 108 pushing to ../other
109 109 searching for changes
110 abort: push includes divergent changeset: 392fd25390da!
110 abort: push includes content-divergent changeset: 392fd25390da!
111 111 [255]
112 112
113 113 $ cd ..
114 114
115 115
116 116 indirect divergence with known changeset
117 117 -------------------------------------------
118 118
119 119 $ newcase indirect_known
120 120 $ hg debugobsolete `getid A_0` `getid A_1`
121 121 obsoleted 1 changesets
122 122 $ hg debugobsolete `getid A_0` `getid A_2`
123 123 $ mkcommit A_3
124 124 created new head
125 125 $ hg debugobsolete `getid A_2` `getid A_3`
126 126 obsoleted 1 changesets
127 127 $ hg log -G --hidden
128 128 @ 4:01f36c5a8fda A_3
129 129 |
130 130 | x 3:392fd25390da A_2
131 131 |/
132 132 | o 2:82623d38b9ba A_1
133 133 |/
134 134 | x 1:007dc284c1f8 A_0
135 135 |/
136 136 o 0:d20a80d4def3 base
137 137
138 138 $ hg debugsuccessorssets --hidden 'all()'
139 139 d20a80d4def3
140 140 d20a80d4def3
141 141 007dc284c1f8
142 142 82623d38b9ba
143 143 01f36c5a8fda
144 144 82623d38b9ba
145 145 82623d38b9ba
146 146 392fd25390da
147 147 01f36c5a8fda
148 148 01f36c5a8fda
149 149 01f36c5a8fda
150 150 $ hg log -r 'divergent()'
151 151 2:82623d38b9ba A_1
152 152 4:01f36c5a8fda A_3
153 153 $ hg debugsuccessorssets 'all()' --closest
154 154 d20a80d4def3
155 155 d20a80d4def3
156 156 82623d38b9ba
157 157 82623d38b9ba
158 158 01f36c5a8fda
159 159 01f36c5a8fda
160 160 $ hg debugsuccessorssets 'all()' --closest --hidden
161 161 d20a80d4def3
162 162 d20a80d4def3
163 163 007dc284c1f8
164 164 82623d38b9ba
165 165 392fd25390da
166 166 82623d38b9ba
167 167 82623d38b9ba
168 168 392fd25390da
169 169 392fd25390da
170 170 01f36c5a8fda
171 171 01f36c5a8fda
172 172 $ cd ..
173 173
174 174
175 175 indirect divergence with known changeset
176 176 -------------------------------------------
177 177
178 178 $ newcase indirect_unknown
179 179 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
180 180 obsoleted 1 changesets
181 181 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
182 182 $ hg debugobsolete `getid A_0` `getid A_2`
183 183 $ hg log -G --hidden
184 184 o 3:392fd25390da A_2
185 185 |
186 186 | o 2:82623d38b9ba A_1
187 187 |/
188 188 | x 1:007dc284c1f8 A_0
189 189 |/
190 190 @ 0:d20a80d4def3 base
191 191
192 192 $ hg debugsuccessorssets --hidden 'all()'
193 193 d20a80d4def3
194 194 d20a80d4def3
195 195 007dc284c1f8
196 196 82623d38b9ba
197 197 392fd25390da
198 198 82623d38b9ba
199 199 82623d38b9ba
200 200 392fd25390da
201 201 392fd25390da
202 202 $ hg log -r 'divergent()'
203 203 2:82623d38b9ba A_1
204 204 3:392fd25390da A_2
205 205 $ hg debugsuccessorssets 'all()' --closest
206 206 d20a80d4def3
207 207 d20a80d4def3
208 208 82623d38b9ba
209 209 82623d38b9ba
210 210 392fd25390da
211 211 392fd25390da
212 212 $ hg debugsuccessorssets 'all()' --closest --hidden
213 213 d20a80d4def3
214 214 d20a80d4def3
215 215 007dc284c1f8
216 216 82623d38b9ba
217 217 392fd25390da
218 218 82623d38b9ba
219 219 82623d38b9ba
220 220 392fd25390da
221 221 392fd25390da
222 222 $ cd ..
223 223
224 224 do not take unknown node in account if they are final
225 225 -----------------------------------------------------
226 226
227 227 $ newcase final-unknown
228 228 $ hg debugobsolete `getid A_0` `getid A_1`
229 229 obsoleted 1 changesets
230 230 $ hg debugobsolete `getid A_1` `getid A_2`
231 231 obsoleted 1 changesets
232 232 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
233 233 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
234 234 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
235 235
236 236 $ hg debugsuccessorssets --hidden 'desc('A_0')'
237 237 007dc284c1f8
238 238 392fd25390da
239 239 $ hg debugsuccessorssets 'desc('A_0')' --closest
240 240 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
241 241 007dc284c1f8
242 242 82623d38b9ba
243 243
244 244 $ cd ..
245 245
246 246 divergence that converge again is not divergence anymore
247 247 -----------------------------------------------------
248 248
249 249 $ newcase converged_divergence
250 250 $ hg debugobsolete `getid A_0` `getid A_1`
251 251 obsoleted 1 changesets
252 252 $ hg debugobsolete `getid A_0` `getid A_2`
253 253 $ mkcommit A_3
254 254 created new head
255 255 $ hg debugobsolete `getid A_1` `getid A_3`
256 256 obsoleted 1 changesets
257 257 $ hg debugobsolete `getid A_2` `getid A_3`
258 258 obsoleted 1 changesets
259 259 $ hg log -G --hidden
260 260 @ 4:01f36c5a8fda A_3
261 261 |
262 262 | x 3:392fd25390da A_2
263 263 |/
264 264 | x 2:82623d38b9ba A_1
265 265 |/
266 266 | x 1:007dc284c1f8 A_0
267 267 |/
268 268 o 0:d20a80d4def3 base
269 269
270 270 $ hg debugsuccessorssets --hidden 'all()'
271 271 d20a80d4def3
272 272 d20a80d4def3
273 273 007dc284c1f8
274 274 01f36c5a8fda
275 275 82623d38b9ba
276 276 01f36c5a8fda
277 277 392fd25390da
278 278 01f36c5a8fda
279 279 01f36c5a8fda
280 280 01f36c5a8fda
281 281 $ hg log -r 'divergent()'
282 282 $ hg debugsuccessorssets 'all()' --closest
283 283 d20a80d4def3
284 284 d20a80d4def3
285 285 01f36c5a8fda
286 286 01f36c5a8fda
287 287 $ hg debugsuccessorssets 'all()' --closest --hidden
288 288 d20a80d4def3
289 289 d20a80d4def3
290 290 007dc284c1f8
291 291 82623d38b9ba
292 292 392fd25390da
293 293 82623d38b9ba
294 294 82623d38b9ba
295 295 392fd25390da
296 296 392fd25390da
297 297 01f36c5a8fda
298 298 01f36c5a8fda
299 299 $ cd ..
300 300
301 301 split is not divergences
302 302 -----------------------------
303 303
304 304 $ newcase split
305 305 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
306 306 obsoleted 1 changesets
307 307 $ hg log -G --hidden
308 308 o 3:392fd25390da A_2
309 309 |
310 310 | o 2:82623d38b9ba A_1
311 311 |/
312 312 | x 1:007dc284c1f8 A_0
313 313 |/
314 314 @ 0:d20a80d4def3 base
315 315
316 316 $ hg debugsuccessorssets --hidden 'all()'
317 317 d20a80d4def3
318 318 d20a80d4def3
319 319 007dc284c1f8
320 320 82623d38b9ba 392fd25390da
321 321 82623d38b9ba
322 322 82623d38b9ba
323 323 392fd25390da
324 324 392fd25390da
325 325 $ hg log -r 'divergent()'
326 326 $ hg debugsuccessorssets 'all()' --closest
327 327 d20a80d4def3
328 328 d20a80d4def3
329 329 82623d38b9ba
330 330 82623d38b9ba
331 331 392fd25390da
332 332 392fd25390da
333 333 $ hg debugsuccessorssets 'all()' --closest --hidden
334 334 d20a80d4def3
335 335 d20a80d4def3
336 336 007dc284c1f8
337 337 82623d38b9ba 392fd25390da
338 338 82623d38b9ba
339 339 82623d38b9ba
340 340 392fd25390da
341 341 392fd25390da
342 342
343 343 Even when subsequent rewriting happen
344 344
345 345 $ mkcommit A_3
346 346 created new head
347 347 $ hg debugobsolete `getid A_1` `getid A_3`
348 348 obsoleted 1 changesets
349 349 $ hg up 0
350 350 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
351 351 $ mkcommit A_4
352 352 created new head
353 353 $ hg debugobsolete `getid A_2` `getid A_4`
354 354 obsoleted 1 changesets
355 355 $ hg up 0
356 356 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
357 357 $ mkcommit A_5
358 358 created new head
359 359 $ hg debugobsolete `getid A_4` `getid A_5`
360 360 obsoleted 1 changesets
361 361 $ hg log -G --hidden
362 362 @ 6:e442cfc57690 A_5
363 363 |
364 364 | x 5:6a411f0d7a0a A_4
365 365 |/
366 366 | o 4:01f36c5a8fda A_3
367 367 |/
368 368 | x 3:392fd25390da A_2
369 369 |/
370 370 | x 2:82623d38b9ba A_1
371 371 |/
372 372 | x 1:007dc284c1f8 A_0
373 373 |/
374 374 o 0:d20a80d4def3 base
375 375
376 376 $ hg debugsuccessorssets --hidden 'all()'
377 377 d20a80d4def3
378 378 d20a80d4def3
379 379 007dc284c1f8
380 380 01f36c5a8fda e442cfc57690
381 381 82623d38b9ba
382 382 01f36c5a8fda
383 383 392fd25390da
384 384 e442cfc57690
385 385 01f36c5a8fda
386 386 01f36c5a8fda
387 387 6a411f0d7a0a
388 388 e442cfc57690
389 389 e442cfc57690
390 390 e442cfc57690
391 391 $ hg debugsuccessorssets 'all()' --closest
392 392 d20a80d4def3
393 393 d20a80d4def3
394 394 01f36c5a8fda
395 395 01f36c5a8fda
396 396 e442cfc57690
397 397 e442cfc57690
398 398 $ hg debugsuccessorssets 'all()' --closest --hidden
399 399 d20a80d4def3
400 400 d20a80d4def3
401 401 007dc284c1f8
402 402 82623d38b9ba 392fd25390da
403 403 82623d38b9ba
404 404 82623d38b9ba
405 405 392fd25390da
406 406 392fd25390da
407 407 01f36c5a8fda
408 408 01f36c5a8fda
409 409 6a411f0d7a0a
410 410 e442cfc57690
411 411 e442cfc57690
412 412 e442cfc57690
413 413 $ hg log -r 'divergent()'
414 414
415 415 Check more complex obsolescence graft (with divergence)
416 416
417 417 $ mkcommit B_0; hg up 0
418 418 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
419 419 $ hg debugobsolete `getid B_0` `getid A_2`
420 420 obsoleted 1 changesets
421 421 $ mkcommit A_7; hg up 0
422 422 created new head
423 423 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
424 424 $ mkcommit A_8; hg up 0
425 425 created new head
426 426 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
427 427 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
428 428 obsoleted 1 changesets
429 429 $ mkcommit A_9; hg up 0
430 430 created new head
431 431 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
432 432 $ hg debugobsolete `getid A_5` `getid A_9`
433 433 $ hg log -G --hidden
434 434 o 10:bed64f5d2f5a A_9
435 435 |
436 436 | o 9:14608b260df8 A_8
437 437 |/
438 438 | o 8:7ae126973a96 A_7
439 439 |/
440 440 | x 7:3750ebee865d B_0
441 441 | |
442 442 | x 6:e442cfc57690 A_5
443 443 |/
444 444 | x 5:6a411f0d7a0a A_4
445 445 |/
446 446 | o 4:01f36c5a8fda A_3
447 447 |/
448 448 | x 3:392fd25390da A_2
449 449 |/
450 450 | x 2:82623d38b9ba A_1
451 451 |/
452 452 | x 1:007dc284c1f8 A_0
453 453 |/
454 454 @ 0:d20a80d4def3 base
455 455
456 456 $ hg debugsuccessorssets --hidden 'all()'
457 457 d20a80d4def3
458 458 d20a80d4def3
459 459 007dc284c1f8
460 460 01f36c5a8fda bed64f5d2f5a
461 461 01f36c5a8fda 7ae126973a96 14608b260df8
462 462 82623d38b9ba
463 463 01f36c5a8fda
464 464 392fd25390da
465 465 bed64f5d2f5a
466 466 7ae126973a96 14608b260df8
467 467 01f36c5a8fda
468 468 01f36c5a8fda
469 469 6a411f0d7a0a
470 470 bed64f5d2f5a
471 471 7ae126973a96 14608b260df8
472 472 e442cfc57690
473 473 bed64f5d2f5a
474 474 7ae126973a96 14608b260df8
475 475 3750ebee865d
476 476 bed64f5d2f5a
477 477 7ae126973a96 14608b260df8
478 478 7ae126973a96
479 479 7ae126973a96
480 480 14608b260df8
481 481 14608b260df8
482 482 bed64f5d2f5a
483 483 bed64f5d2f5a
484 484 $ hg debugsuccessorssets 'all()' --closest
485 485 d20a80d4def3
486 486 d20a80d4def3
487 487 01f36c5a8fda
488 488 01f36c5a8fda
489 489 7ae126973a96
490 490 7ae126973a96
491 491 14608b260df8
492 492 14608b260df8
493 493 bed64f5d2f5a
494 494 bed64f5d2f5a
495 495 $ hg debugsuccessorssets 'all()' --closest --hidden
496 496 d20a80d4def3
497 497 d20a80d4def3
498 498 007dc284c1f8
499 499 82623d38b9ba 392fd25390da
500 500 82623d38b9ba
501 501 82623d38b9ba
502 502 392fd25390da
503 503 392fd25390da
504 504 01f36c5a8fda
505 505 01f36c5a8fda
506 506 6a411f0d7a0a
507 507 e442cfc57690
508 508 e442cfc57690
509 509 e442cfc57690
510 510 3750ebee865d
511 511 392fd25390da
512 512 7ae126973a96
513 513 7ae126973a96
514 514 14608b260df8
515 515 14608b260df8
516 516 bed64f5d2f5a
517 517 bed64f5d2f5a
518 518 $ hg log -r 'divergent()'
519 519 4:01f36c5a8fda A_3
520 520 8:7ae126973a96 A_7
521 521 9:14608b260df8 A_8
522 522 10:bed64f5d2f5a A_9
523 523
524 524 fix the divergence
525 525
526 526 $ mkcommit A_A; hg up 0
527 527 created new head
528 528 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
529 529 $ hg debugobsolete `getid A_9` `getid A_A`
530 530 obsoleted 1 changesets
531 531 $ hg debugobsolete `getid A_7` `getid A_A`
532 532 obsoleted 1 changesets
533 533 $ hg debugobsolete `getid A_8` `getid A_A`
534 534 obsoleted 1 changesets
535 535 $ hg log -G --hidden
536 536 o 11:a139f71be9da A_A
537 537 |
538 538 | x 10:bed64f5d2f5a A_9
539 539 |/
540 540 | x 9:14608b260df8 A_8
541 541 |/
542 542 | x 8:7ae126973a96 A_7
543 543 |/
544 544 | x 7:3750ebee865d B_0
545 545 | |
546 546 | x 6:e442cfc57690 A_5
547 547 |/
548 548 | x 5:6a411f0d7a0a A_4
549 549 |/
550 550 | o 4:01f36c5a8fda A_3
551 551 |/
552 552 | x 3:392fd25390da A_2
553 553 |/
554 554 | x 2:82623d38b9ba A_1
555 555 |/
556 556 | x 1:007dc284c1f8 A_0
557 557 |/
558 558 @ 0:d20a80d4def3 base
559 559
560 560 $ hg debugsuccessorssets --hidden 'all()'
561 561 d20a80d4def3
562 562 d20a80d4def3
563 563 007dc284c1f8
564 564 01f36c5a8fda a139f71be9da
565 565 82623d38b9ba
566 566 01f36c5a8fda
567 567 392fd25390da
568 568 a139f71be9da
569 569 01f36c5a8fda
570 570 01f36c5a8fda
571 571 6a411f0d7a0a
572 572 a139f71be9da
573 573 e442cfc57690
574 574 a139f71be9da
575 575 3750ebee865d
576 576 a139f71be9da
577 577 7ae126973a96
578 578 a139f71be9da
579 579 14608b260df8
580 580 a139f71be9da
581 581 bed64f5d2f5a
582 582 a139f71be9da
583 583 a139f71be9da
584 584 a139f71be9da
585 585 $ hg debugsuccessorssets 'all()' --closest
586 586 d20a80d4def3
587 587 d20a80d4def3
588 588 01f36c5a8fda
589 589 01f36c5a8fda
590 590 a139f71be9da
591 591 a139f71be9da
592 592 $ hg debugsuccessorssets 'all()' --closest --hidden
593 593 d20a80d4def3
594 594 d20a80d4def3
595 595 007dc284c1f8
596 596 82623d38b9ba 392fd25390da
597 597 82623d38b9ba
598 598 82623d38b9ba
599 599 392fd25390da
600 600 392fd25390da
601 601 01f36c5a8fda
602 602 01f36c5a8fda
603 603 6a411f0d7a0a
604 604 e442cfc57690
605 605 e442cfc57690
606 606 e442cfc57690
607 607 3750ebee865d
608 608 392fd25390da
609 609 7ae126973a96
610 610 a139f71be9da
611 611 14608b260df8
612 612 a139f71be9da
613 613 bed64f5d2f5a
614 614 a139f71be9da
615 615 a139f71be9da
616 616 a139f71be9da
617 617 $ hg log -r 'divergent()'
618 618
619 619 $ cd ..
620 620
621 621
622 622 Subset does not diverge
623 623 ------------------------------
624 624
625 625 Do not report divergent successors-set if it is a subset of another
626 626 successors-set. (report [A,B] not [A] + [A,B])
627 627
628 628 $ newcase subset
629 629 $ hg debugobsolete `getid A_0` `getid A_2`
630 630 obsoleted 1 changesets
631 631 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
632 632 $ hg debugsuccessorssets --hidden 'desc('A_0')'
633 633 007dc284c1f8
634 634 82623d38b9ba 392fd25390da
635 635 $ hg debugsuccessorssets 'desc('A_0')' --closest
636 636 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
637 637 007dc284c1f8
638 638 82623d38b9ba 392fd25390da
639 639
640 640 $ cd ..
641 641
642 642 Use scmutil.cleanupnodes API to create divergence
643 643
644 644 $ hg init cleanupnodes
645 645 $ cd cleanupnodes
646 646 $ hg debugdrawdag <<'EOS'
647 647 > B1 B3 B4
648 648 > | \|
649 649 > A Z
650 650 > EOS
651 651
652 652 $ hg update -q B1
653 653 $ echo 3 >> B
654 654 $ hg commit --amend -m B2
655 655 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
656 656 > from mercurial import registrar, scmutil
657 657 > cmdtable = {}
658 658 > command = registrar.command(cmdtable)
659 659 > @command('cleanup')
660 660 > def cleanup(ui, repo):
661 661 > def node(expr):
662 662 > unfi = repo.unfiltered()
663 663 > rev = unfi.revs(expr).first()
664 664 > return unfi.changelog.node(rev)
665 665 > with repo.wlock(), repo.lock(), repo.transaction('delayedstrip'):
666 666 > mapping = {node('desc(B1)'): [node('desc(B3)')],
667 667 > node('desc(B3)'): [node('desc(B4)')]}
668 668 > scmutil.cleanupnodes(repo, mapping, 'test')
669 669 > EOF
670 670
671 671 $ rm .hg/localtags
672 672 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
673 673 $ hg log -G -T '{rev}:{node|short} {desc} {troubles}' -r 'sort(all(), topo)'
674 @ 5:1a2a9b5b0030 B2 divergent
674 @ 5:1a2a9b5b0030 B2 content-divergent
675 675 |
676 | o 4:70d5a63ca112 B4 divergent
676 | o 4:70d5a63ca112 B4 content-divergent
677 677 | |
678 678 | o 1:48b9aae0607f Z
679 679 |
680 680 o 0:426bada5c675 A
681 681
682 682 $ hg debugobsolete
683 683 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
684 684 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
685 685 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
@@ -1,1158 +1,1158 b''
1 1 ==========================
2 2 Test rebase with obsolete
3 3 ==========================
4 4
5 5 Enable obsolete
6 6
7 7 $ cat >> $HGRCPATH << EOF
8 8 > [ui]
9 9 > logtemplate= {rev}:{node|short} {desc|firstline}
10 10 > [experimental]
11 11 > evolution=createmarkers,allowunstable
12 12 > [phases]
13 13 > publish=False
14 14 > [extensions]
15 15 > rebase=
16 16 > drawdag=$TESTDIR/drawdag.py
17 17 > EOF
18 18
19 19 Setup rebase canonical repo
20 20
21 21 $ hg init base
22 22 $ cd base
23 23 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
24 24 adding changesets
25 25 adding manifests
26 26 adding file changes
27 27 added 8 changesets with 7 changes to 7 files (+2 heads)
28 28 (run 'hg heads' to see heads, 'hg merge' to merge)
29 29 $ hg up tip
30 30 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 31 $ hg log -G
32 32 @ 7:02de42196ebe H
33 33 |
34 34 | o 6:eea13746799a G
35 35 |/|
36 36 o | 5:24b6387c8c8c F
37 37 | |
38 38 | o 4:9520eea781bc E
39 39 |/
40 40 | o 3:32af7686d403 D
41 41 | |
42 42 | o 2:5fddd98957c8 C
43 43 | |
44 44 | o 1:42ccdea3bb16 B
45 45 |/
46 46 o 0:cd010b8cd998 A
47 47
48 48 $ cd ..
49 49
50 50 simple rebase
51 51 ---------------------------------
52 52
53 53 $ hg clone base simple
54 54 updating to branch default
55 55 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 56 $ cd simple
57 57 $ hg up 32af7686d403
58 58 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
59 59 $ hg rebase -d eea13746799a
60 60 rebasing 1:42ccdea3bb16 "B"
61 61 rebasing 2:5fddd98957c8 "C"
62 62 rebasing 3:32af7686d403 "D"
63 63 $ hg log -G
64 64 @ 10:8eeb3c33ad33 D
65 65 |
66 66 o 9:2327fea05063 C
67 67 |
68 68 o 8:e4e5be0395b2 B
69 69 |
70 70 | o 7:02de42196ebe H
71 71 | |
72 72 o | 6:eea13746799a G
73 73 |\|
74 74 | o 5:24b6387c8c8c F
75 75 | |
76 76 o | 4:9520eea781bc E
77 77 |/
78 78 o 0:cd010b8cd998 A
79 79
80 80 $ hg log --hidden -G
81 81 @ 10:8eeb3c33ad33 D
82 82 |
83 83 o 9:2327fea05063 C
84 84 |
85 85 o 8:e4e5be0395b2 B
86 86 |
87 87 | o 7:02de42196ebe H
88 88 | |
89 89 o | 6:eea13746799a G
90 90 |\|
91 91 | o 5:24b6387c8c8c F
92 92 | |
93 93 o | 4:9520eea781bc E
94 94 |/
95 95 | x 3:32af7686d403 D
96 96 | |
97 97 | x 2:5fddd98957c8 C
98 98 | |
99 99 | x 1:42ccdea3bb16 B
100 100 |/
101 101 o 0:cd010b8cd998 A
102 102
103 103 $ hg debugobsolete
104 104 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (*) {'user': 'test'} (glob)
105 105 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 2327fea05063f39961b14cb69435a9898dc9a245 0 (*) {'user': 'test'} (glob)
106 106 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (*) {'user': 'test'} (glob)
107 107
108 108
109 109 $ cd ..
110 110
111 111 empty changeset
112 112 ---------------------------------
113 113
114 114 $ hg clone base empty
115 115 updating to branch default
116 116 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
117 117 $ cd empty
118 118 $ hg up eea13746799a
119 119 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
120 120
121 121 We make a copy of both the first changeset in the rebased and some other in the
122 122 set.
123 123
124 124 $ hg graft 42ccdea3bb16 32af7686d403
125 125 grafting 1:42ccdea3bb16 "B"
126 126 grafting 3:32af7686d403 "D"
127 127 $ hg rebase -s 42ccdea3bb16 -d .
128 128 rebasing 1:42ccdea3bb16 "B"
129 129 note: rebase of 1:42ccdea3bb16 created no changes to commit
130 130 rebasing 2:5fddd98957c8 "C"
131 131 rebasing 3:32af7686d403 "D"
132 132 note: rebase of 3:32af7686d403 created no changes to commit
133 133 $ hg log -G
134 134 o 10:5ae4c968c6ac C
135 135 |
136 136 @ 9:08483444fef9 D
137 137 |
138 138 o 8:8877864f1edb B
139 139 |
140 140 | o 7:02de42196ebe H
141 141 | |
142 142 o | 6:eea13746799a G
143 143 |\|
144 144 | o 5:24b6387c8c8c F
145 145 | |
146 146 o | 4:9520eea781bc E
147 147 |/
148 148 o 0:cd010b8cd998 A
149 149
150 150 $ hg log --hidden -G
151 151 o 10:5ae4c968c6ac C
152 152 |
153 153 @ 9:08483444fef9 D
154 154 |
155 155 o 8:8877864f1edb B
156 156 |
157 157 | o 7:02de42196ebe H
158 158 | |
159 159 o | 6:eea13746799a G
160 160 |\|
161 161 | o 5:24b6387c8c8c F
162 162 | |
163 163 o | 4:9520eea781bc E
164 164 |/
165 165 | x 3:32af7686d403 D
166 166 | |
167 167 | x 2:5fddd98957c8 C
168 168 | |
169 169 | x 1:42ccdea3bb16 B
170 170 |/
171 171 o 0:cd010b8cd998 A
172 172
173 173 $ hg debugobsolete
174 174 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
175 175 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
176 176 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
177 177
178 178
179 179 More complex case where part of the rebase set were already rebased
180 180
181 181 $ hg rebase --rev 'desc(D)' --dest 'desc(H)'
182 182 rebasing 9:08483444fef9 "D"
183 183 $ hg debugobsolete
184 184 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
185 185 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
186 186 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
187 187 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
188 188 $ hg log -G
189 189 @ 11:4596109a6a43 D
190 190 |
191 191 | o 10:5ae4c968c6ac C
192 192 | |
193 193 | x 9:08483444fef9 D
194 194 | |
195 195 | o 8:8877864f1edb B
196 196 | |
197 197 o | 7:02de42196ebe H
198 198 | |
199 199 | o 6:eea13746799a G
200 200 |/|
201 201 o | 5:24b6387c8c8c F
202 202 | |
203 203 | o 4:9520eea781bc E
204 204 |/
205 205 o 0:cd010b8cd998 A
206 206
207 207 $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
208 208 rebasing 8:8877864f1edb "B"
209 209 note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D"
210 210 rebasing 10:5ae4c968c6ac "C"
211 211 $ hg debugobsolete
212 212 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob)
213 213 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob)
214 214 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob)
215 215 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob)
216 216 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob)
217 217 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob)
218 218 $ hg log --rev 'divergent()'
219 219 $ hg log -G
220 220 o 13:98f6af4ee953 C
221 221 |
222 222 o 12:462a34d07e59 B
223 223 |
224 224 @ 11:4596109a6a43 D
225 225 |
226 226 o 7:02de42196ebe H
227 227 |
228 228 | o 6:eea13746799a G
229 229 |/|
230 230 o | 5:24b6387c8c8c F
231 231 | |
232 232 | o 4:9520eea781bc E
233 233 |/
234 234 o 0:cd010b8cd998 A
235 235
236 236 $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003
237 237 changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003
238 238 phase: draft
239 239 parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6
240 240 parent: -1:0000000000000000000000000000000000000000
241 241 manifest: 11:a91006e3a02f1edf631f7018e6e5684cf27dd905
242 242 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
243 243 date: Sat Apr 30 15:24:48 2011 +0200
244 244 files+: D
245 245 extra: branch=default
246 246 extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c
247 247 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
248 248 description:
249 249 D
250 250
251 251
252 252 $ hg up -qr 'desc(G)'
253 253 $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003
254 254 grafting 11:4596109a6a43 "D"
255 255 $ hg up -qr 'desc(E)'
256 256 $ hg rebase -s tip -d .
257 257 rebasing 14:9e36056a46e3 "D" (tip)
258 258 $ hg log --style default --debug -r tip
259 259 changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab
260 260 tag: tip
261 261 phase: draft
262 262 parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba
263 263 parent: -1:0000000000000000000000000000000000000000
264 264 manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42
265 265 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
266 266 date: Sat Apr 30 15:24:48 2011 +0200
267 267 files+: D
268 268 extra: branch=default
269 269 extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003
270 270 extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f
271 271 extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a
272 272 description:
273 273 D
274 274
275 275
276 276 Start rebase from a commit that is obsolete but not hidden only because it's
277 277 a working copy parent. We should be moved back to the starting commit as usual
278 278 even though it is hidden (until we're moved there).
279 279
280 280 $ hg --hidden up -qr 'first(hidden())'
281 281 $ hg rebase --rev 13 --dest 15
282 282 rebasing 13:98f6af4ee953 "C"
283 283 $ hg log -G
284 284 o 16:294a2b93eb4d C
285 285 |
286 286 o 15:627d46148090 D
287 287 |
288 288 | o 12:462a34d07e59 B
289 289 | |
290 290 | o 11:4596109a6a43 D
291 291 | |
292 292 | o 7:02de42196ebe H
293 293 | |
294 294 +---o 6:eea13746799a G
295 295 | |/
296 296 | o 5:24b6387c8c8c F
297 297 | |
298 298 o | 4:9520eea781bc E
299 299 |/
300 300 | @ 1:42ccdea3bb16 B
301 301 |/
302 302 o 0:cd010b8cd998 A
303 303
304 304
305 305 $ cd ..
306 306
307 307 collapse rebase
308 308 ---------------------------------
309 309
310 310 $ hg clone base collapse
311 311 updating to branch default
312 312 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
313 313 $ cd collapse
314 314 $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse
315 315 rebasing 1:42ccdea3bb16 "B"
316 316 rebasing 2:5fddd98957c8 "C"
317 317 rebasing 3:32af7686d403 "D"
318 318 $ hg log -G
319 319 o 8:4dc2197e807b Collapsed revision
320 320 |
321 321 | @ 7:02de42196ebe H
322 322 | |
323 323 o | 6:eea13746799a G
324 324 |\|
325 325 | o 5:24b6387c8c8c F
326 326 | |
327 327 o | 4:9520eea781bc E
328 328 |/
329 329 o 0:cd010b8cd998 A
330 330
331 331 $ hg log --hidden -G
332 332 o 8:4dc2197e807b Collapsed revision
333 333 |
334 334 | @ 7:02de42196ebe H
335 335 | |
336 336 o | 6:eea13746799a G
337 337 |\|
338 338 | o 5:24b6387c8c8c F
339 339 | |
340 340 o | 4:9520eea781bc E
341 341 |/
342 342 | x 3:32af7686d403 D
343 343 | |
344 344 | x 2:5fddd98957c8 C
345 345 | |
346 346 | x 1:42ccdea3bb16 B
347 347 |/
348 348 o 0:cd010b8cd998 A
349 349
350 350 $ hg id --debug -r tip
351 351 4dc2197e807bae9817f09905b50ab288be2dbbcf tip
352 352 $ hg debugobsolete
353 353 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
354 354 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
355 355 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob)
356 356
357 357 $ cd ..
358 358
359 359 Rebase set has hidden descendants
360 360 ---------------------------------
361 361
362 362 We rebase a changeset which has a hidden changeset. The hidden changeset must
363 363 not be rebased.
364 364
365 365 $ hg clone base hidden
366 366 updating to branch default
367 367 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
368 368 $ cd hidden
369 369 $ hg rebase -s 5fddd98957c8 -d eea13746799a
370 370 rebasing 2:5fddd98957c8 "C"
371 371 rebasing 3:32af7686d403 "D"
372 372 $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe
373 373 rebasing 1:42ccdea3bb16 "B"
374 374 $ hg log -G
375 375 o 10:7c6027df6a99 B
376 376 |
377 377 | o 9:cf44d2f5a9f4 D
378 378 | |
379 379 | o 8:e273c5e7d2d2 C
380 380 | |
381 381 @ | 7:02de42196ebe H
382 382 | |
383 383 | o 6:eea13746799a G
384 384 |/|
385 385 o | 5:24b6387c8c8c F
386 386 | |
387 387 | o 4:9520eea781bc E
388 388 |/
389 389 o 0:cd010b8cd998 A
390 390
391 391 $ hg log --hidden -G
392 392 o 10:7c6027df6a99 B
393 393 |
394 394 | o 9:cf44d2f5a9f4 D
395 395 | |
396 396 | o 8:e273c5e7d2d2 C
397 397 | |
398 398 @ | 7:02de42196ebe H
399 399 | |
400 400 | o 6:eea13746799a G
401 401 |/|
402 402 o | 5:24b6387c8c8c F
403 403 | |
404 404 | o 4:9520eea781bc E
405 405 |/
406 406 | x 3:32af7686d403 D
407 407 | |
408 408 | x 2:5fddd98957c8 C
409 409 | |
410 410 | x 1:42ccdea3bb16 B
411 411 |/
412 412 o 0:cd010b8cd998 A
413 413
414 414 $ hg debugobsolete
415 415 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (*) {'user': 'test'} (glob)
416 416 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (*) {'user': 'test'} (glob)
417 417 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (*) {'user': 'test'} (glob)
418 418
419 419 Test that rewriting leaving instability behind is allowed
420 420 ---------------------------------------------------------------------
421 421
422 422 $ hg log -r 'children(8)'
423 423 9:cf44d2f5a9f4 D (no-eol)
424 424 $ hg rebase -r 8
425 425 rebasing 8:e273c5e7d2d2 "C"
426 426 $ hg log -G
427 427 o 11:0d8f238b634c C
428 428 |
429 429 o 10:7c6027df6a99 B
430 430 |
431 431 | o 9:cf44d2f5a9f4 D
432 432 | |
433 433 | x 8:e273c5e7d2d2 C
434 434 | |
435 435 @ | 7:02de42196ebe H
436 436 | |
437 437 | o 6:eea13746799a G
438 438 |/|
439 439 o | 5:24b6387c8c8c F
440 440 | |
441 441 | o 4:9520eea781bc E
442 442 |/
443 443 o 0:cd010b8cd998 A
444 444
445 445
446 446
447 447 Test multiple root handling
448 448 ------------------------------------
449 449
450 450 $ hg rebase --dest 4 --rev '7+11+9'
451 451 rebasing 9:cf44d2f5a9f4 "D"
452 452 rebasing 7:02de42196ebe "H"
453 453 not rebasing ignored 10:7c6027df6a99 "B"
454 454 rebasing 11:0d8f238b634c "C" (tip)
455 455 $ hg log -G
456 456 o 14:1e8370e38cca C
457 457 |
458 458 @ 13:bfe264faf697 H
459 459 |
460 460 | o 12:102b4c1d889b D
461 461 |/
462 462 | o 10:7c6027df6a99 B
463 463 | |
464 464 | x 7:02de42196ebe H
465 465 | |
466 466 +---o 6:eea13746799a G
467 467 | |/
468 468 | o 5:24b6387c8c8c F
469 469 | |
470 470 o | 4:9520eea781bc E
471 471 |/
472 472 o 0:cd010b8cd998 A
473 473
474 474 $ cd ..
475 475
476 476 Detach both parents
477 477
478 478 $ hg init double-detach
479 479 $ cd double-detach
480 480
481 481 $ hg debugdrawdag <<EOF
482 482 > F
483 483 > /|
484 484 > C E
485 485 > | |
486 486 > B D G
487 487 > \|/
488 488 > A
489 489 > EOF
490 490
491 491 BROKEN: This raises an exception
492 492 $ hg rebase -d G -r 'B + D + F' 2>&1 | grep '^AssertionError'
493 493 AssertionError: no base found to rebase on (defineparents called wrong)
494 494
495 495 $ cd ..
496 496
497 497 test on rebase dropping a merge
498 498
499 499 (setup)
500 500
501 501 $ hg init dropmerge
502 502 $ cd dropmerge
503 503 $ hg unbundle "$TESTDIR/bundles/rebase.hg"
504 504 adding changesets
505 505 adding manifests
506 506 adding file changes
507 507 added 8 changesets with 7 changes to 7 files (+2 heads)
508 508 (run 'hg heads' to see heads, 'hg merge' to merge)
509 509 $ hg up 3
510 510 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
511 511 $ hg merge 7
512 512 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
513 513 (branch merge, don't forget to commit)
514 514 $ hg ci -m 'M'
515 515 $ echo I > I
516 516 $ hg add I
517 517 $ hg ci -m I
518 518 $ hg log -G
519 519 @ 9:4bde274eefcf I
520 520 |
521 521 o 8:53a6a128b2b7 M
522 522 |\
523 523 | o 7:02de42196ebe H
524 524 | |
525 525 | | o 6:eea13746799a G
526 526 | |/|
527 527 | o | 5:24b6387c8c8c F
528 528 | | |
529 529 | | o 4:9520eea781bc E
530 530 | |/
531 531 o | 3:32af7686d403 D
532 532 | |
533 533 o | 2:5fddd98957c8 C
534 534 | |
535 535 o | 1:42ccdea3bb16 B
536 536 |/
537 537 o 0:cd010b8cd998 A
538 538
539 539 (actual test)
540 540
541 541 $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
542 542 rebasing 3:32af7686d403 "D"
543 543 rebasing 7:02de42196ebe "H"
544 544 not rebasing ignored 8:53a6a128b2b7 "M"
545 545 rebasing 9:4bde274eefcf "I" (tip)
546 546 $ hg log -G
547 547 @ 12:acd174b7ab39 I
548 548 |
549 549 o 11:6c11a6218c97 H
550 550 |
551 551 | o 10:b5313c85b22e D
552 552 |/
553 553 | o 8:53a6a128b2b7 M
554 554 | |\
555 555 | | x 7:02de42196ebe H
556 556 | | |
557 557 o---+ 6:eea13746799a G
558 558 | | |
559 559 | | o 5:24b6387c8c8c F
560 560 | | |
561 561 o---+ 4:9520eea781bc E
562 562 / /
563 563 x | 3:32af7686d403 D
564 564 | |
565 565 o | 2:5fddd98957c8 C
566 566 | |
567 567 o | 1:42ccdea3bb16 B
568 568 |/
569 569 o 0:cd010b8cd998 A
570 570
571 571
572 572 Test hidden changesets in the rebase set (issue4504)
573 573
574 574 $ hg up --hidden 9
575 575 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
576 576 $ echo J > J
577 577 $ hg add J
578 578 $ hg commit -m J
579 579 $ hg debugobsolete `hg log --rev . -T '{node}'`
580 580 obsoleted 1 changesets
581 581
582 582 $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
583 583 rebasing 9:4bde274eefcf "I"
584 584 rebasing 13:06edfc82198f "J" (tip)
585 585 $ hg log -G
586 586 @ 15:5ae8a643467b J
587 587 |
588 588 o 14:9ad579b4a5de I
589 589 |
590 590 | o 12:acd174b7ab39 I
591 591 | |
592 592 | o 11:6c11a6218c97 H
593 593 | |
594 594 o | 10:b5313c85b22e D
595 595 |/
596 596 | o 8:53a6a128b2b7 M
597 597 | |\
598 598 | | x 7:02de42196ebe H
599 599 | | |
600 600 o---+ 6:eea13746799a G
601 601 | | |
602 602 | | o 5:24b6387c8c8c F
603 603 | | |
604 604 o---+ 4:9520eea781bc E
605 605 / /
606 606 x | 3:32af7686d403 D
607 607 | |
608 608 o | 2:5fddd98957c8 C
609 609 | |
610 610 o | 1:42ccdea3bb16 B
611 611 |/
612 612 o 0:cd010b8cd998 A
613 613
614 614 $ hg up 14 -C
615 615 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
616 616 $ echo "K" > K
617 617 $ hg add K
618 618 $ hg commit --amend -m "K"
619 619 $ echo "L" > L
620 620 $ hg add L
621 621 $ hg commit -m "L"
622 622 $ hg up '.^'
623 623 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
624 624 $ echo "M" > M
625 625 $ hg add M
626 626 $ hg commit --amend -m "M"
627 627 $ hg log -G
628 628 @ 20:bfaedf8eb73b M
629 629 |
630 630 | o 18:97219452e4bd L
631 631 | |
632 632 | x 17:fc37a630c901 K
633 633 |/
634 634 | o 15:5ae8a643467b J
635 635 | |
636 636 | x 14:9ad579b4a5de I
637 637 |/
638 638 | o 12:acd174b7ab39 I
639 639 | |
640 640 | o 11:6c11a6218c97 H
641 641 | |
642 642 o | 10:b5313c85b22e D
643 643 |/
644 644 | o 8:53a6a128b2b7 M
645 645 | |\
646 646 | | x 7:02de42196ebe H
647 647 | | |
648 648 o---+ 6:eea13746799a G
649 649 | | |
650 650 | | o 5:24b6387c8c8c F
651 651 | | |
652 652 o---+ 4:9520eea781bc E
653 653 / /
654 654 x | 3:32af7686d403 D
655 655 | |
656 656 o | 2:5fddd98957c8 C
657 657 | |
658 658 o | 1:42ccdea3bb16 B
659 659 |/
660 660 o 0:cd010b8cd998 A
661 661
662 662 $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True
663 663 note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K"
664 664 rebasing 15:5ae8a643467b "J"
665 665
666 666 $ cd ..
667 667
668 668 Skip obsolete changeset even with multiple hops
669 669 -----------------------------------------------
670 670
671 671 setup
672 672
673 673 $ hg init obsskip
674 674 $ cd obsskip
675 675 $ cat << EOF >> .hg/hgrc
676 676 > [experimental]
677 677 > rebaseskipobsolete = True
678 678 > [extensions]
679 679 > strip =
680 680 > EOF
681 681 $ echo A > A
682 682 $ hg add A
683 683 $ hg commit -m A
684 684 $ echo B > B
685 685 $ hg add B
686 686 $ hg commit -m B0
687 687 $ hg commit --amend -m B1
688 688 $ hg commit --amend -m B2
689 689 $ hg up --hidden 'desc(B0)'
690 690 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
691 691 $ echo C > C
692 692 $ hg add C
693 693 $ hg commit -m C
694 694
695 695 Rebase finds its way in a chain of marker
696 696
697 697 $ hg rebase -d 'desc(B2)'
698 698 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2"
699 699 rebasing 4:212cb178bcbb "C" (tip)
700 700
701 701 Even when the chain include missing node
702 702
703 703 $ hg up --hidden 'desc(B0)'
704 704 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
705 705 $ echo D > D
706 706 $ hg add D
707 707 $ hg commit -m D
708 708 $ hg --hidden strip -r 'desc(B1)'
709 709 saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob)
710 710
711 711 $ hg rebase -d 'desc(B2)'
712 712 note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
713 713 rebasing 5:1a79b7535141 "D" (tip)
714 714 $ hg up 4
715 715 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
716 716 $ echo "O" > O
717 717 $ hg add O
718 718 $ hg commit -m O
719 719 $ echo "P" > P
720 720 $ hg add P
721 721 $ hg commit -m P
722 722 $ hg log -G
723 723 @ 8:8d47583e023f P
724 724 |
725 725 o 7:360bbaa7d3ce O
726 726 |
727 727 | o 6:9c48361117de D
728 728 | |
729 729 o | 4:ff2c4d47b71d C
730 730 |/
731 731 o 2:261e70097290 B2
732 732 |
733 733 o 0:4a2df7238c3b A
734 734
735 735 $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all
736 736 obsoleted 1 changesets
737 737 $ hg rebase -d 6 -r "4::"
738 738 rebasing 4:ff2c4d47b71d "C"
739 739 note: not rebasing 7:360bbaa7d3ce "O", it has no successor
740 740 rebasing 8:8d47583e023f "P" (tip)
741 741
742 742 If all the changeset to be rebased are obsolete and present in the destination, we
743 743 should display a friendly error message
744 744
745 745 $ hg log -G
746 746 @ 10:121d9e3bc4c6 P
747 747 |
748 748 o 9:4be60e099a77 C
749 749 |
750 750 o 6:9c48361117de D
751 751 |
752 752 o 2:261e70097290 B2
753 753 |
754 754 o 0:4a2df7238c3b A
755 755
756 756
757 757 $ hg up 9
758 758 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
759 759 $ echo "non-relevant change" > nonrelevant
760 760 $ hg add nonrelevant
761 761 $ hg commit -m nonrelevant
762 762 created new head
763 763 $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all
764 764 obsoleted 1 changesets
765 765 $ hg rebase -r . -d 10
766 766 note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
767 767
768 768 If a rebase is going to create divergence, it should abort
769 769
770 770 $ hg log -G
771 771 @ 11:f44da1f4954c nonrelevant
772 772 |
773 773 | o 10:121d9e3bc4c6 P
774 774 |/
775 775 o 9:4be60e099a77 C
776 776 |
777 777 o 6:9c48361117de D
778 778 |
779 779 o 2:261e70097290 B2
780 780 |
781 781 o 0:4a2df7238c3b A
782 782
783 783
784 784 $ hg up 9
785 785 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
786 786 $ echo "john" > doe
787 787 $ hg add doe
788 788 $ hg commit -m "john doe"
789 789 created new head
790 790 $ hg up 10
791 791 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
792 792 $ echo "foo" > bar
793 793 $ hg add bar
794 794 $ hg commit --amend -m "10'"
795 795 $ hg up 10 --hidden
796 796 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
797 797 $ echo "bar" > foo
798 798 $ hg add foo
799 799 $ hg commit -m "bar foo"
800 800 $ hg log -G
801 801 @ 15:73568ab6879d bar foo
802 802 |
803 803 | o 14:77d874d096a2 10'
804 804 | |
805 805 | | o 12:3eb461388009 john doe
806 806 | |/
807 807 x | 10:121d9e3bc4c6 P
808 808 |/
809 809 o 9:4be60e099a77 C
810 810 |
811 811 o 6:9c48361117de D
812 812 |
813 813 o 2:261e70097290 B2
814 814 |
815 815 o 0:4a2df7238c3b A
816 816
817 817 $ hg summary
818 818 parent: 15:73568ab6879d tip (orphan)
819 819 bar foo
820 820 branch: default
821 821 commit: (clean)
822 822 update: 2 new changesets, 3 branch heads (merge)
823 823 phases: 8 draft
824 824 orphan: 1 changesets
825 825 $ hg rebase -s 10 -d 12
826 826 abort: this rebase will cause divergences from: 121d9e3bc4c6
827 827 (to force the rebase please set experimental.allowdivergence=True)
828 828 [255]
829 829 $ hg log -G
830 830 @ 15:73568ab6879d bar foo
831 831 |
832 832 | o 14:77d874d096a2 10'
833 833 | |
834 834 | | o 12:3eb461388009 john doe
835 835 | |/
836 836 x | 10:121d9e3bc4c6 P
837 837 |/
838 838 o 9:4be60e099a77 C
839 839 |
840 840 o 6:9c48361117de D
841 841 |
842 842 o 2:261e70097290 B2
843 843 |
844 844 o 0:4a2df7238c3b A
845 845
846 846 With experimental.allowdivergence=True, rebase can create divergence
847 847
848 848 $ hg rebase -s 10 -d 12 --config experimental.allowdivergence=True
849 849 rebasing 10:121d9e3bc4c6 "P"
850 850 rebasing 15:73568ab6879d "bar foo" (tip)
851 851 $ hg summary
852 852 parent: 17:61bd55f69bc4 tip
853 853 bar foo
854 854 branch: default
855 855 commit: (clean)
856 856 update: 1 new changesets, 2 branch heads (merge)
857 857 phases: 8 draft
858 divergent: 2 changesets
858 content-divergent: 2 changesets
859 859
860 860 rebase --continue + skipped rev because their successors are in destination
861 861 we make a change in trunk and work on conflicting changes to make rebase abort.
862 862
863 863 $ hg log -G -r 17::
864 864 @ 17:61bd55f69bc4 bar foo
865 865 |
866 866 ~
867 867
868 868 Create the two changes in trunk
869 869 $ printf "a" > willconflict
870 870 $ hg add willconflict
871 871 $ hg commit -m "willconflict first version"
872 872
873 873 $ printf "dummy" > C
874 874 $ hg commit -m "dummy change successor"
875 875
876 876 Create the changes that we will rebase
877 877 $ hg update -C 17 -q
878 878 $ printf "b" > willconflict
879 879 $ hg add willconflict
880 880 $ hg commit -m "willconflict second version"
881 881 created new head
882 882 $ printf "dummy" > K
883 883 $ hg add K
884 884 $ hg commit -m "dummy change"
885 885 $ printf "dummy" > L
886 886 $ hg add L
887 887 $ hg commit -m "dummy change"
888 888 $ hg debugobsolete `hg log -r ".^" -T '{node}'` `hg log -r 19 -T '{node}'` --config experimental.evolution=all
889 889 obsoleted 1 changesets
890 890
891 891 $ hg log -G -r 17::
892 892 @ 22:7bdc8a87673d dummy change
893 893 |
894 894 x 21:8b31da3c4919 dummy change
895 895 |
896 896 o 20:b82fb57ea638 willconflict second version
897 897 |
898 898 | o 19:601db7a18f51 dummy change successor
899 899 | |
900 900 | o 18:357ddf1602d5 willconflict first version
901 901 |/
902 902 o 17:61bd55f69bc4 bar foo
903 903 |
904 904 ~
905 905 $ hg rebase -r ".^^ + .^ + ." -d 19
906 906 rebasing 20:b82fb57ea638 "willconflict second version"
907 907 merging willconflict
908 908 warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark')
909 909 unresolved conflicts (see hg resolve, then hg rebase --continue)
910 910 [1]
911 911
912 912 $ hg resolve --mark willconflict
913 913 (no more unresolved files)
914 914 continue: hg rebase --continue
915 915 $ hg rebase --continue
916 916 rebasing 20:b82fb57ea638 "willconflict second version"
917 917 note: not rebasing 21:8b31da3c4919 "dummy change", already in destination as 19:601db7a18f51 "dummy change successor"
918 918 rebasing 22:7bdc8a87673d "dummy change" (tip)
919 919 $ cd ..
920 920
921 921 Rebase merge where successor of one parent is equal to destination (issue5198)
922 922
923 923 $ hg init p1-succ-is-dest
924 924 $ cd p1-succ-is-dest
925 925
926 926 $ hg debugdrawdag <<EOF
927 927 > F
928 928 > /|
929 929 > E D B # replace: D -> B
930 930 > \|/
931 931 > A
932 932 > EOF
933 933
934 934 $ hg rebase -d B -s D
935 935 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
936 936 rebasing 4:66f1a38021c9 "F" (F tip)
937 937 $ hg log -G
938 938 o 5:50e9d60b99c6 F
939 939 |\
940 940 | | x 4:66f1a38021c9 F
941 941 | |/|
942 942 | o | 3:7fb047a69f22 E
943 943 | | |
944 944 | | x 2:b18e25de2cf5 D
945 945 | |/
946 946 o | 1:112478962961 B
947 947 |/
948 948 o 0:426bada5c675 A
949 949
950 950 $ cd ..
951 951
952 952 Rebase merge where successor of other parent is equal to destination
953 953
954 954 $ hg init p2-succ-is-dest
955 955 $ cd p2-succ-is-dest
956 956
957 957 $ hg debugdrawdag <<EOF
958 958 > F
959 959 > /|
960 960 > E D B # replace: E -> B
961 961 > \|/
962 962 > A
963 963 > EOF
964 964
965 965 BROKEN: Raises an exception
966 966 $ hg rebase -d B -s E 2>&1 | grep AssertionError:
967 967 AssertionError: no base found to rebase on (defineparents called wrong)
968 968 $ hg log -G
969 969 o 4:66f1a38021c9 F
970 970 |\
971 971 | x 3:7fb047a69f22 E
972 972 | |
973 973 o | 2:b18e25de2cf5 D
974 974 |/
975 975 | o 1:112478962961 B
976 976 |/
977 977 o 0:426bada5c675 A
978 978
979 979 $ cd ..
980 980
981 981 Rebase merge where successor of one parent is ancestor of destination
982 982
983 983 $ hg init p1-succ-in-dest
984 984 $ cd p1-succ-in-dest
985 985
986 986 $ hg debugdrawdag <<EOF
987 987 > F C
988 988 > /| |
989 989 > E D B # replace: D -> B
990 990 > \|/
991 991 > A
992 992 > EOF
993 993
994 994 $ hg rebase -d C -s D
995 995 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
996 996 rebasing 5:66f1a38021c9 "F" (F tip)
997 997 BROKEN: not rebased on top of requested destination (C)
998 998 $ hg log -G
999 999 o 6:50e9d60b99c6 F
1000 1000 |\
1001 1001 | | x 5:66f1a38021c9 F
1002 1002 | |/|
1003 1003 +-----o 4:26805aba1e60 C
1004 1004 | | |
1005 1005 | o | 3:7fb047a69f22 E
1006 1006 | | |
1007 1007 | | x 2:b18e25de2cf5 D
1008 1008 | |/
1009 1009 o | 1:112478962961 B
1010 1010 |/
1011 1011 o 0:426bada5c675 A
1012 1012
1013 1013 $ cd ..
1014 1014
1015 1015 Rebase merge where successor of other parent is ancestor of destination
1016 1016
1017 1017 $ hg init p2-succ-in-dest
1018 1018 $ cd p2-succ-in-dest
1019 1019
1020 1020 $ hg debugdrawdag <<EOF
1021 1021 > F C
1022 1022 > /| |
1023 1023 > E D B # replace: E -> B
1024 1024 > \|/
1025 1025 > A
1026 1026 > EOF
1027 1027
1028 1028 BROKEN: Raises an exception
1029 1029 $ hg rebase -d C -s E 2>&1 | grep AssertionError:
1030 1030 AssertionError: no base found to rebase on (defineparents called wrong)
1031 1031 $ hg log -G
1032 1032 o 5:66f1a38021c9 F
1033 1033 |\
1034 1034 | | o 4:26805aba1e60 C
1035 1035 | | |
1036 1036 | x | 3:7fb047a69f22 E
1037 1037 | | |
1038 1038 o | | 2:b18e25de2cf5 D
1039 1039 |/ /
1040 1040 | o 1:112478962961 B
1041 1041 |/
1042 1042 o 0:426bada5c675 A
1043 1043
1044 1044 $ cd ..
1045 1045
1046 1046 Rebase merge where successor of one parent is ancestor of destination
1047 1047
1048 1048 $ hg init p1-succ-in-dest-b
1049 1049 $ cd p1-succ-in-dest-b
1050 1050
1051 1051 $ hg debugdrawdag <<EOF
1052 1052 > F C
1053 1053 > /| |
1054 1054 > E D B # replace: E -> B
1055 1055 > \|/
1056 1056 > A
1057 1057 > EOF
1058 1058
1059 1059 $ hg rebase -d C -b F
1060 1060 rebasing 2:b18e25de2cf5 "D" (D)
1061 1061 note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B"
1062 1062 rebasing 5:66f1a38021c9 "F" (F tip)
1063 1063 $ hg log -G
1064 1064 o 7:9ed45af61fa0 F
1065 1065 |
1066 1066 o 6:8f47515dda15 D
1067 1067 |
1068 1068 | x 5:66f1a38021c9 F
1069 1069 | |\
1070 1070 o | | 4:26805aba1e60 C
1071 1071 | | |
1072 1072 | | x 3:7fb047a69f22 E
1073 1073 | | |
1074 1074 | x | 2:b18e25de2cf5 D
1075 1075 | |/
1076 1076 o / 1:112478962961 B
1077 1077 |/
1078 1078 o 0:426bada5c675 A
1079 1079
1080 1080 $ cd ..
1081 1081
1082 1082 Rebase merge where successor of other parent is ancestor of destination
1083 1083
1084 1084 $ hg init p2-succ-in-dest-b
1085 1085 $ cd p2-succ-in-dest-b
1086 1086
1087 1087 $ hg debugdrawdag <<EOF
1088 1088 > F C
1089 1089 > /| |
1090 1090 > E D B # replace: D -> B
1091 1091 > \|/
1092 1092 > A
1093 1093 > EOF
1094 1094
1095 1095 $ hg rebase -d C -b F
1096 1096 note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B"
1097 1097 rebasing 3:7fb047a69f22 "E" (E)
1098 1098 rebasing 5:66f1a38021c9 "F" (F tip)
1099 1099 BROKEN: This should have resulted in a rebased F with one parent, just like in
1100 1100 the test case above
1101 1101 $ hg log -G
1102 1102 o 7:c1e6f26e339d F
1103 1103 |\
1104 1104 | o 6:533690786a86 E
1105 1105 |/
1106 1106 | x 5:66f1a38021c9 F
1107 1107 | |\
1108 1108 o | | 4:26805aba1e60 C
1109 1109 | | |
1110 1110 | | x 3:7fb047a69f22 E
1111 1111 | | |
1112 1112 | x | 2:b18e25de2cf5 D
1113 1113 | |/
1114 1114 o / 1:112478962961 B
1115 1115 |/
1116 1116 o 0:426bada5c675 A
1117 1117
1118 1118 $ cd ..
1119 1119
1120 1120 Test that bookmark is moved and working dir is updated when all changesets have
1121 1121 equivalents in destination
1122 1122 $ hg init rbsrepo && cd rbsrepo
1123 1123 $ echo "[experimental]" > .hg/hgrc
1124 1124 $ echo "evolution=all" >> .hg/hgrc
1125 1125 $ echo "rebaseskipobsolete=on" >> .hg/hgrc
1126 1126 $ echo root > root && hg ci -Am root
1127 1127 adding root
1128 1128 $ echo a > a && hg ci -Am a
1129 1129 adding a
1130 1130 $ hg up 0
1131 1131 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1132 1132 $ echo b > b && hg ci -Am b
1133 1133 adding b
1134 1134 created new head
1135 1135 $ hg rebase -r 2 -d 1
1136 1136 rebasing 2:1e9a3c00cbe9 "b" (tip)
1137 1137 $ hg log -r . # working dir is at rev 3 (successor of 2)
1138 1138 3:be1832deae9a b (no-eol)
1139 1139 $ hg book -r 2 mybook --hidden # rev 2 has a bookmark on it now
1140 1140 $ hg up 2 && hg log -r . # working dir is at rev 2 again
1141 1141 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1142 1142 2:1e9a3c00cbe9 b (no-eol)
1143 1143 $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
1144 1144 note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b"
Check that the working directory was updated to rev 3, even though rev 2 was
skipped during the rebase operation
1147 1147 $ hg log -r .
1148 1148 3:be1832deae9a b (no-eol)
1149 1149
Check that the bookmark was not moved to rev 3, since rev 2 was skipped during
the rebase operation. This makes sense because if rev 2 has a successor, the
operation generating that successor (e.g. rebase) should be responsible for
moving bookmarks. If the bookmark is on a precursor, like rev 2, that means the
user manually moved it back. In that case we should not move it again.
1155 1155 $ hg bookmarks
1156 1156 mybook 2:1e9a3c00cbe9
1157 1157 $ hg debugobsolete --rev tip
1158 1158 1e9a3c00cbe90d236ac05ef61efcc5e40b7412bc be1832deae9ac531caa7438b8dcf6055a122cd8e 0 (*) {'user': 'test'} (glob)
General Comments 0
You need to be logged in to leave comments. Login now