bundlespec: introduce an attr-based class for bundlespec...
Boris Feld
r37181:b229fd9a default
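The hunk in bundle() below stops unpacking a (compression, version, params) tuple from exchange.parsebundlespec() and instead reads named attributes from the object it now returns. As a rough sketch of the attr-based class this changeset introduces — field names inferred from the old tuple and from the attributes used in the hunk; the actual definition lives in mercurial/exchange.py and may differ:

    import attr  # Mercurial vendors this library under mercurial.thirdparty.attr

    @attr.s
    class bundlespec(object):
        compression = attr.ib()  # e.g. 'bzip2', 'gzip', or None
        version = attr.ib()      # changegroup version: '01', '02', '03', or 's1'
        params = attr.ib()       # remaining key/value parameters from the spec string

Callers then read fields by name instead of by position, which is what the changed lines in bundle() do:

    spec = exchange.parsebundlespec(repo, bundletype, strict=False)
    cgversion = spec.version
    bcompression = spec.compression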
@@ -1,5637 +1,5638 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import os
13 13 import re
14 14 import sys
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23 from . import (
24 24 archival,
25 25 bookmarks,
26 26 bundle2,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 dirstateguard,
33 33 discovery,
34 34 encoding,
35 35 error,
36 36 exchange,
37 37 extensions,
38 38 formatter,
39 39 graphmod,
40 40 hbisect,
41 41 help,
42 42 hg,
43 43 lock as lockmod,
44 44 logcmdutil,
45 45 merge as mergemod,
46 46 obsolete,
47 47 obsutil,
48 48 patch,
49 49 phases,
50 50 pycompat,
51 51 rcutil,
52 52 registrar,
53 53 revsetlang,
54 54 rewriteutil,
55 55 scmutil,
56 56 server,
57 57 streamclone,
58 58 tags as tagsmod,
59 59 templatekw,
60 60 ui as uimod,
61 61 util,
62 62 wireprotoserver,
63 63 )
64 64 from .utils import (
65 65 dateutil,
66 66 procutil,
67 67 stringutil,
68 68 )
69 69
70 70 release = lockmod.release
71 71
72 72 table = {}
73 73 table.update(debugcommandsmod.command._table)
74 74
75 75 command = registrar.command(table)
76 76 readonly = registrar.command.readonly
77 77
78 78 # common command options
79 79
80 80 globalopts = [
81 81 ('R', 'repository', '',
82 82 _('repository root directory or name of overlay bundle file'),
83 83 _('REPO')),
84 84 ('', 'cwd', '',
85 85 _('change working directory'), _('DIR')),
86 86 ('y', 'noninteractive', None,
87 87 _('do not prompt, automatically pick the first choice for all prompts')),
88 88 ('q', 'quiet', None, _('suppress output')),
89 89 ('v', 'verbose', None, _('enable additional output')),
90 90 ('', 'color', '',
91 91 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
92 92 # and should not be translated
93 93 _("when to colorize (boolean, always, auto, never, or debug)"),
94 94 _('TYPE')),
95 95 ('', 'config', [],
96 96 _('set/override config option (use \'section.name=value\')'),
97 97 _('CONFIG')),
98 98 ('', 'debug', None, _('enable debugging output')),
99 99 ('', 'debugger', None, _('start debugger')),
100 100 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
101 101 _('ENCODE')),
102 102 ('', 'encodingmode', encoding.encodingmode,
103 103 _('set the charset encoding mode'), _('MODE')),
104 104 ('', 'traceback', None, _('always print a traceback on exception')),
105 105 ('', 'time', None, _('time how long the command takes')),
106 106 ('', 'profile', None, _('print command execution profile')),
107 107 ('', 'version', None, _('output version information and exit')),
108 108 ('h', 'help', None, _('display help and exit')),
109 109 ('', 'hidden', False, _('consider hidden changesets')),
110 110 ('', 'pager', 'auto',
111 111 _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
112 112 ]
113 113
114 114 dryrunopts = cmdutil.dryrunopts
115 115 remoteopts = cmdutil.remoteopts
116 116 walkopts = cmdutil.walkopts
117 117 commitopts = cmdutil.commitopts
118 118 commitopts2 = cmdutil.commitopts2
119 119 formatteropts = cmdutil.formatteropts
120 120 templateopts = cmdutil.templateopts
121 121 logopts = cmdutil.logopts
122 122 diffopts = cmdutil.diffopts
123 123 diffwsopts = cmdutil.diffwsopts
124 124 diffopts2 = cmdutil.diffopts2
125 125 mergetoolopts = cmdutil.mergetoolopts
126 126 similarityopts = cmdutil.similarityopts
127 127 subrepoopts = cmdutil.subrepoopts
128 128 debugrevlogopts = cmdutil.debugrevlogopts
129 129
130 130 # Commands start here, listed alphabetically
131 131
132 132 @command('^add',
133 133 walkopts + subrepoopts + dryrunopts,
134 134 _('[OPTION]... [FILE]...'),
135 135 inferrepo=True)
136 136 def add(ui, repo, *pats, **opts):
137 137 """add the specified files on the next commit
138 138
139 139 Schedule files to be version controlled and added to the
140 140 repository.
141 141
142 142 The files will be added to the repository at the next commit. To
143 143 undo an add before that, see :hg:`forget`.
144 144
145 145 If no names are given, add all files to the repository (except
146 146 files matching ``.hgignore``).
147 147
148 148 .. container:: verbose
149 149
150 150 Examples:
151 151
152 152 - New (unknown) files are added
153 153 automatically by :hg:`add`::
154 154
155 155 $ ls
156 156 foo.c
157 157 $ hg status
158 158 ? foo.c
159 159 $ hg add
160 160 adding foo.c
161 161 $ hg status
162 162 A foo.c
163 163
164 164 - Specific files to be added can be specified::
165 165
166 166 $ ls
167 167 bar.c foo.c
168 168 $ hg status
169 169 ? bar.c
170 170 ? foo.c
171 171 $ hg add bar.c
172 172 $ hg status
173 173 A bar.c
174 174 ? foo.c
175 175
176 176 Returns 0 if all files are successfully added.
177 177 """
178 178
179 179 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
180 180 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
181 181 return rejected and 1 or 0
182 182
183 183 @command('addremove',
184 184 similarityopts + subrepoopts + walkopts + dryrunopts,
185 185 _('[OPTION]... [FILE]...'),
186 186 inferrepo=True)
187 187 def addremove(ui, repo, *pats, **opts):
188 188 """add all new files, delete all missing files
189 189
190 190 Add all new files and remove all missing files from the
191 191 repository.
192 192
193 193 Unless names are given, new files are ignored if they match any of
194 194 the patterns in ``.hgignore``. As with add, these changes take
195 195 effect at the next commit.
196 196
197 197 Use the -s/--similarity option to detect renamed files. This
198 198 option takes a percentage between 0 (disabled) and 100 (files must
199 199 be identical) as its parameter. With a parameter greater than 0,
200 200 this compares every removed file with every added file and records
201 201 those similar enough as renames. Detecting renamed files this way
202 202 can be expensive. After using this option, :hg:`status -C` can be
203 203 used to check which files were identified as moved or renamed. If
204 204 not specified, -s/--similarity defaults to 100 and only renames of
205 205 identical files are detected.
206 206
207 207 .. container:: verbose
208 208
209 209 Examples:
210 210
211 211 - A number of files (bar.c and foo.c) are new,
212 212 while foobar.c has been removed (without using :hg:`remove`)
213 213 from the repository::
214 214
215 215 $ ls
216 216 bar.c foo.c
217 217 $ hg status
218 218 ! foobar.c
219 219 ? bar.c
220 220 ? foo.c
221 221 $ hg addremove
222 222 adding bar.c
223 223 adding foo.c
224 224 removing foobar.c
225 225 $ hg status
226 226 A bar.c
227 227 A foo.c
228 228 R foobar.c
229 229
230 230 - A file foobar.c was moved to foo.c without using :hg:`rename`.
231 231 Afterwards, it was edited slightly::
232 232
233 233 $ ls
234 234 foo.c
235 235 $ hg status
236 236 ! foobar.c
237 237 ? foo.c
238 238 $ hg addremove --similarity 90
239 239 removing foobar.c
240 240 adding foo.c
241 241 recording removal of foobar.c as rename to foo.c (94% similar)
242 242 $ hg status -C
243 243 A foo.c
244 244 foobar.c
245 245 R foobar.c
246 246
247 247 Returns 0 if all files are successfully added.
248 248 """
249 249 opts = pycompat.byteskwargs(opts)
250 250 try:
251 251 sim = float(opts.get('similarity') or 100)
252 252 except ValueError:
253 253 raise error.Abort(_('similarity must be a number'))
254 254 if sim < 0 or sim > 100:
255 255 raise error.Abort(_('similarity must be between 0 and 100'))
256 256 matcher = scmutil.match(repo[None], pats, opts)
257 257 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
258 258
259 259 @command('^annotate|blame',
260 260 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
261 261 ('', 'follow', None,
262 262 _('follow copies/renames and list the filename (DEPRECATED)')),
263 263 ('', 'no-follow', None, _("don't follow copies and renames")),
264 264 ('a', 'text', None, _('treat all files as text')),
265 265 ('u', 'user', None, _('list the author (long with -v)')),
266 266 ('f', 'file', None, _('list the filename')),
267 267 ('d', 'date', None, _('list the date (short with -q)')),
268 268 ('n', 'number', None, _('list the revision number (default)')),
269 269 ('c', 'changeset', None, _('list the changeset')),
270 270 ('l', 'line-number', None, _('show line number at the first appearance')),
271 271 ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
272 272 ] + diffwsopts + walkopts + formatteropts,
273 273 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
274 274 inferrepo=True)
275 275 def annotate(ui, repo, *pats, **opts):
276 276 """show changeset information by line for each file
277 277
278 278 List changes in files, showing the revision id responsible for
279 279 each line.
280 280
281 281 This command is useful for discovering when a change was made and
282 282 by whom.
283 283
284 284 If you include --file, --user, or --date, the revision number is
285 285 suppressed unless you also include --number.
286 286
287 287 Without the -a/--text option, annotate will avoid processing files
288 288 it detects as binary. With -a, annotate will annotate the file
289 289 anyway, although the results will probably be neither useful
290 290 nor desirable.
291 291
292 292 Returns 0 on success.
293 293 """
294 294 opts = pycompat.byteskwargs(opts)
295 295 if not pats:
296 296 raise error.Abort(_('at least one filename or pattern is required'))
297 297
298 298 if opts.get('follow'):
299 299 # --follow is deprecated and now just an alias for -f/--file
300 300 # to mimic the behavior of Mercurial before version 1.5
301 301 opts['file'] = True
302 302
303 303 rev = opts.get('rev')
304 304 if rev:
305 305 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
306 306 ctx = scmutil.revsingle(repo, rev)
307 307
308 308 rootfm = ui.formatter('annotate', opts)
309 309 if ui.quiet:
310 310 datefunc = dateutil.shortdate
311 311 else:
312 312 datefunc = dateutil.datestr
313 313 if ctx.rev() is None:
314 314 def hexfn(node):
315 315 if node is None:
316 316 return None
317 317 else:
318 318 return rootfm.hexfunc(node)
319 319 if opts.get('changeset'):
320 320 # omit "+" suffix which is appended to node hex
321 321 def formatrev(rev):
322 322 if rev is None:
323 323 return '%d' % ctx.p1().rev()
324 324 else:
325 325 return '%d' % rev
326 326 else:
327 327 def formatrev(rev):
328 328 if rev is None:
329 329 return '%d+' % ctx.p1().rev()
330 330 else:
331 331 return '%d ' % rev
332 332 def formathex(hex):
333 333 if hex is None:
334 334 return '%s+' % rootfm.hexfunc(ctx.p1().node())
335 335 else:
336 336 return '%s ' % hex
337 337 else:
338 338 hexfn = rootfm.hexfunc
339 339 formatrev = formathex = pycompat.bytestr
340 340
341 341 opmap = [('user', ' ', lambda x: x.fctx.user(), ui.shortuser),
342 342 ('number', ' ', lambda x: x.fctx.rev(), formatrev),
343 343 ('changeset', ' ', lambda x: hexfn(x.fctx.node()), formathex),
344 344 ('date', ' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
345 345 ('file', ' ', lambda x: x.fctx.path(), pycompat.bytestr),
346 346 ('line_number', ':', lambda x: x.lineno, pycompat.bytestr),
347 347 ]
348 348 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
349 349
350 350 if (not opts.get('user') and not opts.get('changeset')
351 351 and not opts.get('date') and not opts.get('file')):
352 352 opts['number'] = True
353 353
354 354 linenumber = opts.get('line_number') is not None
355 355 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
356 356 raise error.Abort(_('at least one of -n/-c is required for -l'))
357 357
358 358 ui.pager('annotate')
359 359
360 360 if rootfm.isplain():
361 361 def makefunc(get, fmt):
362 362 return lambda x: fmt(get(x))
363 363 else:
364 364 def makefunc(get, fmt):
365 365 return get
366 366 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
367 367 if opts.get(op)]
368 368 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
369 369 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
370 370 if opts.get(op))
371 371
372 372 def bad(x, y):
373 373 raise error.Abort("%s: %s" % (x, y))
374 374
375 375 m = scmutil.match(ctx, pats, opts, badfn=bad)
376 376
377 377 follow = not opts.get('no_follow')
378 378 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
379 379 whitespace=True)
380 380 skiprevs = opts.get('skip')
381 381 if skiprevs:
382 382 skiprevs = scmutil.revrange(repo, skiprevs)
383 383
384 384 for abs in ctx.walk(m):
385 385 fctx = ctx[abs]
386 386 rootfm.startitem()
387 387 rootfm.data(abspath=abs, path=m.rel(abs))
388 388 if not opts.get('text') and fctx.isbinary():
389 389 rootfm.plain(_("%s: binary file\n")
390 390 % ((pats and m.rel(abs)) or abs))
391 391 continue
392 392
393 393 fm = rootfm.nested('lines')
394 394 lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
395 395 diffopts=diffopts)
396 396 if not lines:
397 397 fm.end()
398 398 continue
399 399 formats = []
400 400 pieces = []
401 401
402 402 for f, sep in funcmap:
403 403 l = [f(n) for n in lines]
404 404 if fm.isplain():
405 405 sizes = [encoding.colwidth(x) for x in l]
406 406 ml = max(sizes)
407 407 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
408 408 else:
409 409 formats.append(['%s' for x in l])
410 410 pieces.append(l)
411 411
412 412 for f, p, n in zip(zip(*formats), zip(*pieces), lines):
413 413 fm.startitem()
414 414 fm.context(fctx=n.fctx)
415 415 fm.write(fields, "".join(f), *p)
416 416 if n.skip:
417 417 fmt = "* %s"
418 418 else:
419 419 fmt = ": %s"
420 420 fm.write('line', fmt, n.text)
421 421
422 422 if not lines[-1].text.endswith('\n'):
423 423 fm.plain('\n')
424 424 fm.end()
425 425
426 426 rootfm.end()
427 427
428 428 @command('archive',
429 429 [('', 'no-decode', None, _('do not pass files through decoders')),
430 430 ('p', 'prefix', '', _('directory prefix for files in archive'),
431 431 _('PREFIX')),
432 432 ('r', 'rev', '', _('revision to distribute'), _('REV')),
433 433 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
434 434 ] + subrepoopts + walkopts,
435 435 _('[OPTION]... DEST'))
436 436 def archive(ui, repo, dest, **opts):
437 437 '''create an unversioned archive of a repository revision
438 438
439 439 By default, the revision used is the parent of the working
440 440 directory; use -r/--rev to specify a different revision.
441 441
442 442 The archive type is automatically detected based on file
443 443 extension (to override, use -t/--type).
444 444
445 445 .. container:: verbose
446 446
447 447 Examples:
448 448
449 449 - create a zip file containing the 1.0 release::
450 450
451 451 hg archive -r 1.0 project-1.0.zip
452 452
453 453 - create a tarball excluding .hg files::
454 454
455 455 hg archive project.tar.gz -X ".hg*"
456 456
457 457 Valid types are:
458 458
459 459 :``files``: a directory full of files (default)
460 460 :``tar``: tar archive, uncompressed
461 461 :``tbz2``: tar archive, compressed using bzip2
462 462 :``tgz``: tar archive, compressed using gzip
463 463 :``uzip``: zip archive, uncompressed
464 464 :``zip``: zip archive, compressed using deflate
465 465
466 466 The exact name of the destination archive or directory is given
467 467 using a format string; see :hg:`help export` for details.
468 468
469 469 Each member added to an archive file has a directory prefix
470 470 prepended. Use -p/--prefix to specify a format string for the
471 471 prefix. The default is the basename of the archive, with suffixes
472 472 removed.
473 473
474 474 Returns 0 on success.
475 475 '''
476 476
477 477 opts = pycompat.byteskwargs(opts)
478 478 rev = opts.get('rev')
479 479 if rev:
480 480 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
481 481 ctx = scmutil.revsingle(repo, rev)
482 482 if not ctx:
483 483 raise error.Abort(_('no working directory: please specify a revision'))
484 484 node = ctx.node()
485 485 dest = cmdutil.makefilename(ctx, dest)
486 486 if os.path.realpath(dest) == repo.root:
487 487 raise error.Abort(_('repository root cannot be destination'))
488 488
489 489 kind = opts.get('type') or archival.guesskind(dest) or 'files'
490 490 prefix = opts.get('prefix')
491 491
492 492 if dest == '-':
493 493 if kind == 'files':
494 494 raise error.Abort(_('cannot archive plain files to stdout'))
495 495 dest = cmdutil.makefileobj(ctx, dest)
496 496 if not prefix:
497 497 prefix = os.path.basename(repo.root) + '-%h'
498 498
499 499 prefix = cmdutil.makefilename(ctx, prefix)
500 500 match = scmutil.match(ctx, [], opts)
501 501 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
502 502 match, prefix, subrepos=opts.get('subrepos'))
503 503
504 504 @command('backout',
505 505 [('', 'merge', None, _('merge with old dirstate parent after backout')),
506 506 ('', 'commit', None,
507 507 _('commit if no conflicts were encountered (DEPRECATED)')),
508 508 ('', 'no-commit', None, _('do not commit')),
509 509 ('', 'parent', '',
510 510 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
511 511 ('r', 'rev', '', _('revision to backout'), _('REV')),
512 512 ('e', 'edit', False, _('invoke editor on commit messages')),
513 513 ] + mergetoolopts + walkopts + commitopts + commitopts2,
514 514 _('[OPTION]... [-r] REV'))
515 515 def backout(ui, repo, node=None, rev=None, **opts):
516 516 '''reverse effect of earlier changeset
517 517
518 518 Prepare a new changeset with the effect of REV undone in the
519 519 current working directory. If no conflicts were encountered,
520 520 it will be committed immediately.
521 521
522 522 If REV is the parent of the working directory, then this new changeset
523 523 is committed automatically (unless --no-commit is specified).
524 524
525 525 .. note::
526 526
527 527 :hg:`backout` cannot be used to fix either an unwanted or
528 528 incorrect merge.
529 529
530 530 .. container:: verbose
531 531
532 532 Examples:
533 533
534 534 - Reverse the effect of the parent of the working directory.
535 535 This backout will be committed immediately::
536 536
537 537 hg backout -r .
538 538
539 539 - Reverse the effect of previous bad revision 23::
540 540
541 541 hg backout -r 23
542 542
543 543 - Reverse the effect of previous bad revision 23 and
544 544 leave changes uncommitted::
545 545
546 546 hg backout -r 23 --no-commit
547 547 hg commit -m "Backout revision 23"
548 548
549 549 By default, the pending changeset will have one parent,
550 550 maintaining a linear history. With --merge, the pending
551 551 changeset will instead have two parents: the old parent of the
552 552 working directory and a new child of REV that simply undoes REV.
553 553
554 554 Before version 1.7, the behavior without --merge was equivalent
555 555 to specifying --merge followed by :hg:`update --clean .` to
556 556 cancel the merge and leave the child of REV as a head to be
557 557 merged separately.
558 558
559 559 See :hg:`help dates` for a list of formats valid for -d/--date.
560 560
561 561 See :hg:`help revert` for a way to restore files to the state
562 562 of another revision.
563 563
564 564 Returns 0 on success, 1 if nothing to backout or there are unresolved
565 565 files.
566 566 '''
567 567 wlock = lock = None
568 568 try:
569 569 wlock = repo.wlock()
570 570 lock = repo.lock()
571 571 return _dobackout(ui, repo, node, rev, **opts)
572 572 finally:
573 573 release(lock, wlock)
574 574
575 575 def _dobackout(ui, repo, node=None, rev=None, **opts):
576 576 opts = pycompat.byteskwargs(opts)
577 577 if opts.get('commit') and opts.get('no_commit'):
578 578 raise error.Abort(_("cannot use --commit with --no-commit"))
579 579 if opts.get('merge') and opts.get('no_commit'):
580 580 raise error.Abort(_("cannot use --merge with --no-commit"))
581 581
582 582 if rev and node:
583 583 raise error.Abort(_("please specify just one revision"))
584 584
585 585 if not rev:
586 586 rev = node
587 587
588 588 if not rev:
589 589 raise error.Abort(_("please specify a revision to backout"))
590 590
591 591 date = opts.get('date')
592 592 if date:
593 593 opts['date'] = dateutil.parsedate(date)
594 594
595 595 cmdutil.checkunfinished(repo)
596 596 cmdutil.bailifchanged(repo)
597 597 node = scmutil.revsingle(repo, rev).node()
598 598
599 599 op1, op2 = repo.dirstate.parents()
600 600 if not repo.changelog.isancestor(node, op1):
601 601 raise error.Abort(_('cannot backout change that is not an ancestor'))
602 602
603 603 p1, p2 = repo.changelog.parents(node)
604 604 if p1 == nullid:
605 605 raise error.Abort(_('cannot backout a change with no parents'))
606 606 if p2 != nullid:
607 607 if not opts.get('parent'):
608 608 raise error.Abort(_('cannot backout a merge changeset'))
609 609 p = repo.lookup(opts['parent'])
610 610 if p not in (p1, p2):
611 611 raise error.Abort(_('%s is not a parent of %s') %
612 612 (short(p), short(node)))
613 613 parent = p
614 614 else:
615 615 if opts.get('parent'):
616 616 raise error.Abort(_('cannot use --parent on non-merge changeset'))
617 617 parent = p1
618 618
619 619 # the backout should appear on the same branch
620 620 branch = repo.dirstate.branch()
621 621 bheads = repo.branchheads(branch)
622 622 rctx = scmutil.revsingle(repo, hex(parent))
623 623 if not opts.get('merge') and op1 != node:
624 624 dsguard = dirstateguard.dirstateguard(repo, 'backout')
625 625 try:
626 626 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
627 627 'backout')
628 628 stats = mergemod.update(repo, parent, True, True, node, False)
629 629 repo.setparents(op1, op2)
630 630 dsguard.close()
631 631 hg._showstats(repo, stats)
632 632 if stats.unresolvedcount:
633 633 repo.ui.status(_("use 'hg resolve' to retry unresolved "
634 634 "file merges\n"))
635 635 return 1
636 636 finally:
637 637 ui.setconfig('ui', 'forcemerge', '', '')
638 638 lockmod.release(dsguard)
639 639 else:
640 640 hg.clean(repo, node, show_stats=False)
641 641 repo.dirstate.setbranch(branch)
642 642 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
643 643
644 644 if opts.get('no_commit'):
645 645 msg = _("changeset %s backed out, "
646 646 "don't forget to commit.\n")
647 647 ui.status(msg % short(node))
648 648 return 0
649 649
650 650 def commitfunc(ui, repo, message, match, opts):
651 651 editform = 'backout'
652 652 e = cmdutil.getcommiteditor(editform=editform,
653 653 **pycompat.strkwargs(opts))
654 654 if not message:
655 655 # we don't translate commit messages
656 656 message = "Backed out changeset %s" % short(node)
657 657 e = cmdutil.getcommiteditor(edit=True, editform=editform)
658 658 return repo.commit(message, opts.get('user'), opts.get('date'),
659 659 match, editor=e)
660 660 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
661 661 if not newnode:
662 662 ui.status(_("nothing changed\n"))
663 663 return 1
664 664 cmdutil.commitstatus(repo, newnode, branch, bheads)
665 665
666 666 def nice(node):
667 667 return '%d:%s' % (repo.changelog.rev(node), short(node))
668 668 ui.status(_('changeset %s backs out changeset %s\n') %
669 669 (nice(repo.changelog.tip()), nice(node)))
670 670 if opts.get('merge') and op1 != node:
671 671 hg.clean(repo, op1, show_stats=False)
672 672 ui.status(_('merging with changeset %s\n')
673 673 % nice(repo.changelog.tip()))
674 674 try:
675 675 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
676 676 'backout')
677 677 return hg.merge(repo, hex(repo.changelog.tip()))
678 678 finally:
679 679 ui.setconfig('ui', 'forcemerge', '', '')
680 680 return 0
681 681
682 682 @command('bisect',
683 683 [('r', 'reset', False, _('reset bisect state')),
684 684 ('g', 'good', False, _('mark changeset good')),
685 685 ('b', 'bad', False, _('mark changeset bad')),
686 686 ('s', 'skip', False, _('skip testing changeset')),
687 687 ('e', 'extend', False, _('extend the bisect range')),
688 688 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
689 689 ('U', 'noupdate', False, _('do not update to target'))],
690 690 _("[-gbsr] [-U] [-c CMD] [REV]"))
691 691 def bisect(ui, repo, rev=None, extra=None, command=None,
692 692 reset=None, good=None, bad=None, skip=None, extend=None,
693 693 noupdate=None):
694 694 """subdivision search of changesets
695 695
696 696 This command helps to find changesets which introduce problems. To
697 697 use, mark the earliest changeset you know exhibits the problem as
698 698 bad, then mark the latest changeset which is free from the problem
699 699 as good. Bisect will update your working directory to a revision
700 700 for testing (unless the -U/--noupdate option is specified). Once
701 701 you have performed tests, mark the working directory as good or
702 702 bad, and bisect will either update to another candidate changeset
703 703 or announce that it has found the bad revision.
704 704
705 705 As a shortcut, you can also use the revision argument to mark a
706 706 revision as good or bad without checking it out first.
707 707
708 708 If you supply a command, it will be used for automatic bisection.
709 709 The environment variable HG_NODE will contain the ID of the
710 710 changeset being tested. The exit status of the command will be
711 711 used to mark revisions as good or bad: status 0 means good, 125
712 712 means to skip the revision, 127 (command not found) will abort the
713 713 bisection, and any other non-zero exit status means the revision
714 714 is bad.
715 715
716 716 .. container:: verbose
717 717
718 718 Some examples:
719 719
720 720 - start a bisection with known bad revision 34, and good revision 12::
721 721
722 722 hg bisect --bad 34
723 723 hg bisect --good 12
724 724
725 725 - advance the current bisection by marking current revision as good or
726 726 bad::
727 727
728 728 hg bisect --good
729 729 hg bisect --bad
730 730
731 731 - mark the current revision, or a known revision, to be skipped (e.g. if
732 732 that revision is not usable because of another issue)::
733 733
734 734 hg bisect --skip
735 735 hg bisect --skip 23
736 736
737 737 - skip all revisions that do not touch directories ``foo`` or ``bar``::
738 738
739 739 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
740 740
741 741 - forget the current bisection::
742 742
743 743 hg bisect --reset
744 744
745 745 - use 'make && make tests' to automatically find the first broken
746 746 revision::
747 747
748 748 hg bisect --reset
749 749 hg bisect --bad 34
750 750 hg bisect --good 12
751 751 hg bisect --command "make && make tests"
752 752
753 753 - see all changesets whose states are already known in the current
754 754 bisection::
755 755
756 756 hg log -r "bisect(pruned)"
757 757
758 758 - see the changeset currently being bisected (especially useful
759 759 if running with -U/--noupdate)::
760 760
761 761 hg log -r "bisect(current)"
762 762
763 763 - see all changesets that took part in the current bisection::
764 764
765 765 hg log -r "bisect(range)"
766 766
767 767 - you can even get a nice graph::
768 768
769 769 hg log --graph -r "bisect(range)"
770 770
771 771 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
772 772
773 773 Returns 0 on success.
774 774 """
775 775 # backward compatibility
776 776 if rev in "good bad reset init".split():
777 777 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
778 778 cmd, rev, extra = rev, extra, None
779 779 if cmd == "good":
780 780 good = True
781 781 elif cmd == "bad":
782 782 bad = True
783 783 else:
784 784 reset = True
785 785 elif extra:
786 786 raise error.Abort(_('incompatible arguments'))
787 787
788 788 incompatibles = {
789 789 '--bad': bad,
790 790 '--command': bool(command),
791 791 '--extend': extend,
792 792 '--good': good,
793 793 '--reset': reset,
794 794 '--skip': skip,
795 795 }
796 796
797 797 enabled = [x for x in incompatibles if incompatibles[x]]
798 798
799 799 if len(enabled) > 1:
800 800 raise error.Abort(_('%s and %s are incompatible') %
801 801 tuple(sorted(enabled)[0:2]))
802 802
803 803 if reset:
804 804 hbisect.resetstate(repo)
805 805 return
806 806
807 807 state = hbisect.load_state(repo)
808 808
809 809 # update state
810 810 if good or bad or skip:
811 811 if rev:
812 812 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
813 813 else:
814 814 nodes = [repo.lookup('.')]
815 815 if good:
816 816 state['good'] += nodes
817 817 elif bad:
818 818 state['bad'] += nodes
819 819 elif skip:
820 820 state['skip'] += nodes
821 821 hbisect.save_state(repo, state)
822 822 if not (state['good'] and state['bad']):
823 823 return
824 824
825 825 def mayupdate(repo, node, show_stats=True):
826 826 """common used update sequence"""
827 827 if noupdate:
828 828 return
829 829 cmdutil.checkunfinished(repo)
830 830 cmdutil.bailifchanged(repo)
831 831 return hg.clean(repo, node, show_stats=show_stats)
832 832
833 833 displayer = logcmdutil.changesetdisplayer(ui, repo, {})
834 834
835 835 if command:
836 836 changesets = 1
837 837 if noupdate:
838 838 try:
839 839 node = state['current'][0]
840 840 except LookupError:
841 841 raise error.Abort(_('current bisect revision is unknown - '
842 842 'start a new bisect to fix'))
843 843 else:
844 844 node, p2 = repo.dirstate.parents()
845 845 if p2 != nullid:
846 846 raise error.Abort(_('current bisect revision is a merge'))
847 847 if rev:
848 848 node = repo[scmutil.revsingle(repo, rev, node)].node()
849 849 try:
850 850 while changesets:
851 851 # update state
852 852 state['current'] = [node]
853 853 hbisect.save_state(repo, state)
854 854 status = ui.system(command, environ={'HG_NODE': hex(node)},
855 855 blockedtag='bisect_check')
856 856 if status == 125:
857 857 transition = "skip"
858 858 elif status == 0:
859 859 transition = "good"
860 860 # status < 0 means process was killed
861 861 elif status == 127:
862 862 raise error.Abort(_("failed to execute %s") % command)
863 863 elif status < 0:
864 864 raise error.Abort(_("%s killed") % command)
865 865 else:
866 866 transition = "bad"
867 867 state[transition].append(node)
868 868 ctx = repo[node]
869 869 ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
870 870 transition))
871 871 hbisect.checkstate(state)
872 872 # bisect
873 873 nodes, changesets, bgood = hbisect.bisect(repo, state)
874 874 # update to next check
875 875 node = nodes[0]
876 876 mayupdate(repo, node, show_stats=False)
877 877 finally:
878 878 state['current'] = [node]
879 879 hbisect.save_state(repo, state)
880 880 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
881 881 return
882 882
883 883 hbisect.checkstate(state)
884 884
885 885 # actually bisect
886 886 nodes, changesets, good = hbisect.bisect(repo, state)
887 887 if extend:
888 888 if not changesets:
889 889 extendnode = hbisect.extendrange(repo, state, nodes, good)
890 890 if extendnode is not None:
891 891 ui.write(_("Extending search to changeset %d:%s\n")
892 892 % (extendnode.rev(), extendnode))
893 893 state['current'] = [extendnode.node()]
894 894 hbisect.save_state(repo, state)
895 895 return mayupdate(repo, extendnode.node())
896 896 raise error.Abort(_("nothing to extend"))
897 897
898 898 if changesets == 0:
899 899 hbisect.printresult(ui, repo, state, displayer, nodes, good)
900 900 else:
901 901 assert len(nodes) == 1 # only a single node can be tested next
902 902 node = nodes[0]
903 903 # compute the approximate number of remaining tests
904 904 tests, size = 0, 2
905 905 while size <= changesets:
906 906 tests, size = tests + 1, size * 2
907 907 rev = repo.changelog.rev(node)
908 908 ui.write(_("Testing changeset %d:%s "
909 909 "(%d changesets remaining, ~%d tests)\n")
910 910 % (rev, short(node), changesets, tests))
911 911 state['current'] = [node]
912 912 hbisect.save_state(repo, state)
913 913 return mayupdate(repo, node)
914 914
915 915 @command('bookmarks|bookmark',
916 916 [('f', 'force', False, _('force')),
917 917 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
918 918 ('d', 'delete', False, _('delete a given bookmark')),
919 919 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
920 920 ('i', 'inactive', False, _('mark a bookmark inactive')),
921 921 ] + formatteropts,
922 922 _('hg bookmarks [OPTIONS]... [NAME]...'))
923 923 def bookmark(ui, repo, *names, **opts):
924 924 '''create a new bookmark or list existing bookmarks
925 925
926 926 Bookmarks are labels on changesets to help track lines of development.
927 927 Bookmarks are unversioned and can be moved, renamed and deleted.
928 928 Deleting or moving a bookmark has no effect on the associated changesets.
929 929
930 930 Creating or updating to a bookmark causes it to be marked as 'active'.
931 931 The active bookmark is indicated with a '*'.
932 932 When a commit is made, the active bookmark will advance to the new commit.
933 933 A plain :hg:`update` will also advance an active bookmark, if possible.
934 934 Updating away from a bookmark will cause it to be deactivated.
935 935
936 936 Bookmarks can be pushed and pulled between repositories (see
937 937 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
938 938 diverged, a new 'divergent bookmark' of the form 'name@path' will
939 939 be created. Using :hg:`merge` will resolve the divergence.
940 940
941 941 Specifying bookmark as '.' to -m or -d options is equivalent to specifying
942 942 the active bookmark's name.
943 943
944 944 A bookmark named '@' has the special property that :hg:`clone` will
945 945 check it out by default if it exists.
946 946
947 947 .. container:: verbose
948 948
949 949 Examples:
950 950
951 951 - create an active bookmark for a new line of development::
952 952
953 953 hg book new-feature
954 954
955 955 - create an inactive bookmark as a place marker::
956 956
957 957 hg book -i reviewed
958 958
959 959 - create an inactive bookmark on another changeset::
960 960
961 961 hg book -r .^ tested
962 962
963 963 - rename bookmark turkey to dinner::
964 964
965 965 hg book -m turkey dinner
966 966
967 967 - move the '@' bookmark from another branch::
968 968
969 969 hg book -f @
970 970 '''
971 971 force = opts.get(r'force')
972 972 rev = opts.get(r'rev')
973 973 delete = opts.get(r'delete')
974 974 rename = opts.get(r'rename')
975 975 inactive = opts.get(r'inactive')
976 976
977 977 if delete and rename:
978 978 raise error.Abort(_("--delete and --rename are incompatible"))
979 979 if delete and rev:
980 980 raise error.Abort(_("--rev is incompatible with --delete"))
981 981 if rename and rev:
982 982 raise error.Abort(_("--rev is incompatible with --rename"))
983 983 if not names and (delete or rev):
984 984 raise error.Abort(_("bookmark name required"))
985 985
986 986 if delete or rename or names or inactive:
987 987 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
988 988 if delete:
989 989 names = pycompat.maplist(repo._bookmarks.expandname, names)
990 990 bookmarks.delete(repo, tr, names)
991 991 elif rename:
992 992 if not names:
993 993 raise error.Abort(_("new bookmark name required"))
994 994 elif len(names) > 1:
995 995 raise error.Abort(_("only one new bookmark name allowed"))
996 996 rename = repo._bookmarks.expandname(rename)
997 997 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
998 998 elif names:
999 999 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
1000 1000 elif inactive:
1001 1001 if len(repo._bookmarks) == 0:
1002 1002 ui.status(_("no bookmarks set\n"))
1003 1003 elif not repo._activebookmark:
1004 1004 ui.status(_("no active bookmark\n"))
1005 1005 else:
1006 1006 bookmarks.deactivate(repo)
1007 1007 else: # show bookmarks
1008 1008 bookmarks.printbookmarks(ui, repo, **opts)
1009 1009
1010 1010 @command('branch',
1011 1011 [('f', 'force', None,
1012 1012 _('set branch name even if it shadows an existing branch')),
1013 1013 ('C', 'clean', None, _('reset branch name to parent branch name')),
1014 1014 ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
1015 1015 ],
1016 1016 _('[-fC] [NAME]'))
1017 1017 def branch(ui, repo, label=None, **opts):
1018 1018 """set or show the current branch name
1019 1019
1020 1020 .. note::
1021 1021
1022 1022 Branch names are permanent and global. Use :hg:`bookmark` to create a
1023 1023 light-weight bookmark instead. See :hg:`help glossary` for more
1024 1024 information about named branches and bookmarks.
1025 1025
1026 1026 With no argument, show the current branch name. With one argument,
1027 1027 set the working directory branch name (the branch will not exist
1028 1028 in the repository until the next commit). Standard practice
1029 1029 recommends that primary development take place on the 'default'
1030 1030 branch.
1031 1031
1032 1032 Unless -f/--force is specified, branch will not let you set a
1033 1033 branch name that already exists.
1034 1034
1035 1035 Use -C/--clean to reset the working directory branch to that of
1036 1036 the parent of the working directory, negating a previous branch
1037 1037 change.
1038 1038
1039 1039 Use the command :hg:`update` to switch to an existing branch. Use
1040 1040 :hg:`commit --close-branch` to mark this branch head as closed.
1041 1041 When all heads of a branch are closed, the branch will be
1042 1042 considered closed.
1043 1043
1044 1044 Returns 0 on success.
1045 1045 """
1046 1046 opts = pycompat.byteskwargs(opts)
1047 1047 revs = opts.get('rev')
1048 1048 if label:
1049 1049 label = label.strip()
1050 1050
1051 1051 if not opts.get('clean') and not label:
1052 1052 if revs:
1053 1053 raise error.Abort(_("no branch name specified for the revisions"))
1054 1054 ui.write("%s\n" % repo.dirstate.branch())
1055 1055 return
1056 1056
1057 1057 with repo.wlock():
1058 1058 if opts.get('clean'):
1059 1059 label = repo[None].p1().branch()
1060 1060 repo.dirstate.setbranch(label)
1061 1061 ui.status(_('reset working directory to branch %s\n') % label)
1062 1062 elif label:
1063 1063
1064 1064 scmutil.checknewlabel(repo, label, 'branch')
1065 1065 if revs:
1066 1066 return cmdutil.changebranch(ui, repo, revs, label)
1067 1067
1068 1068 if not opts.get('force') and label in repo.branchmap():
1069 1069 if label not in [p.branch() for p in repo[None].parents()]:
1070 1070 raise error.Abort(_('a branch of the same name already'
1071 1071 ' exists'),
1072 1072 # i18n: "it" refers to an existing branch
1073 1073 hint=_("use 'hg update' to switch to it"))
1074 1074
1075 1075 repo.dirstate.setbranch(label)
1076 1076 ui.status(_('marked working directory as branch %s\n') % label)
1077 1077
1078 1078 # find any open named branches aside from default
1079 1079 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1080 1080 if n != "default" and not c]
1081 1081 if not others:
1082 1082 ui.status(_('(branches are permanent and global, '
1083 1083 'did you want a bookmark?)\n'))
1084 1084
1085 1085 @command('branches',
1086 1086 [('a', 'active', False,
1087 1087 _('show only branches that have unmerged heads (DEPRECATED)')),
1088 1088 ('c', 'closed', False, _('show normal and closed branches')),
1089 1089 ] + formatteropts,
1090 1090 _('[-c]'), cmdtype=readonly)
1091 1091 def branches(ui, repo, active=False, closed=False, **opts):
1092 1092 """list repository named branches
1093 1093
1094 1094 List the repository's named branches, indicating which ones are
1095 1095 inactive. If -c/--closed is specified, also list branches which have
1096 1096 been marked closed (see :hg:`commit --close-branch`).
1097 1097
1098 1098 Use the command :hg:`update` to switch to an existing branch.
1099 1099
1100 1100 Returns 0.
1101 1101 """
1102 1102
1103 1103 opts = pycompat.byteskwargs(opts)
1104 1104 ui.pager('branches')
1105 1105 fm = ui.formatter('branches', opts)
1106 1106 hexfunc = fm.hexfunc
1107 1107
1108 1108 allheads = set(repo.heads())
1109 1109 branches = []
1110 1110 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1111 1111 isactive = False
1112 1112 if not isclosed:
1113 1113 openheads = set(repo.branchmap().iteropen(heads))
1114 1114 isactive = bool(openheads & allheads)
1115 1115 branches.append((tag, repo[tip], isactive, not isclosed))
1116 1116 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1117 1117 reverse=True)
1118 1118
1119 1119 for tag, ctx, isactive, isopen in branches:
1120 1120 if active and not isactive:
1121 1121 continue
1122 1122 if isactive:
1123 1123 label = 'branches.active'
1124 1124 notice = ''
1125 1125 elif not isopen:
1126 1126 if not closed:
1127 1127 continue
1128 1128 label = 'branches.closed'
1129 1129 notice = _(' (closed)')
1130 1130 else:
1131 1131 label = 'branches.inactive'
1132 1132 notice = _(' (inactive)')
1133 1133 current = (tag == repo.dirstate.branch())
1134 1134 if current:
1135 1135 label = 'branches.current'
1136 1136
1137 1137 fm.startitem()
1138 1138 fm.write('branch', '%s', tag, label=label)
1139 1139 rev = ctx.rev()
1140 1140 padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
1141 1141 fmt = ' ' * padsize + ' %d:%s'
1142 1142 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1143 1143 label='log.changeset changeset.%s' % ctx.phasestr())
1144 1144 fm.context(ctx=ctx)
1145 1145 fm.data(active=isactive, closed=not isopen, current=current)
1146 1146 if not ui.quiet:
1147 1147 fm.plain(notice)
1148 1148 fm.plain('\n')
1149 1149 fm.end()
1150 1150
1151 1151 @command('bundle',
1152 1152 [('f', 'force', None, _('run even when the destination is unrelated')),
1153 1153 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1154 1154 _('REV')),
1155 1155 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1156 1156 _('BRANCH')),
1157 1157 ('', 'base', [],
1158 1158 _('a base changeset assumed to be available at the destination'),
1159 1159 _('REV')),
1160 1160 ('a', 'all', None, _('bundle all changesets in the repository')),
1161 1161 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1162 1162 ] + remoteopts,
1163 1163 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1164 1164 def bundle(ui, repo, fname, dest=None, **opts):
1165 1165 """create a bundle file
1166 1166
1167 1167 Generate a bundle file containing data to be transferred to another
1168 1168 repository.
1169 1169
1170 1170 To create a bundle containing all changesets, use -a/--all
1171 1171 (or --base null). Otherwise, hg assumes the destination will have
1172 1172 all the nodes you specify with --base parameters. Otherwise, hg
1173 1173 will assume the repository has all the nodes in destination, or
1174 1174 default-push/default if no destination is specified, where destination
1175 1175 is the repository you provide through DEST option.
1176 1176
1177 1177 You can change bundle format with the -t/--type option. See
1178 1178 :hg:`help bundlespec` for documentation on this format. By default,
1179 1179 the most appropriate format is used and compression defaults to
1180 1180 bzip2.
1181 1181
1182 1182 The bundle file can then be transferred using conventional means
1183 1183 and applied to another repository with the unbundle or pull
1184 1184 command. This is useful when direct push and pull are not
1185 1185 available or when exporting an entire repository is undesirable.
1186 1186
1187 1187 Applying bundles preserves all changeset contents including
1188 1188 permissions, copy/rename information, and revision history.
1189 1189
1190 1190 Returns 0 on success, 1 if no changes found.
1191 1191 """
1192 1192 opts = pycompat.byteskwargs(opts)
1193 1193 revs = None
1194 1194 if 'rev' in opts:
1195 1195 revstrings = opts['rev']
1196 1196 revs = scmutil.revrange(repo, revstrings)
1197 1197 if revstrings and not revs:
1198 1198 raise error.Abort(_('no commits to bundle'))
1199 1199
1200 1200 bundletype = opts.get('type', 'bzip2').lower()
1201 1201 try:
1202 bcompression, cgversion, params = exchange.parsebundlespec(
1203 repo, bundletype, strict=False)
1202 bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
1204 1203 except error.UnsupportedBundleSpecification as e:
1205 1204 raise error.Abort(pycompat.bytestr(e),
1206 1205 hint=_("see 'hg help bundlespec' for supported "
1207 1206 "values for --type"))
1207 cgversion = bundlespec.version
1208 1208
1209 1209 # Packed bundles are a pseudo bundle format for now.
1210 1210 if cgversion == 's1':
1211 1211 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1212 1212 hint=_("use 'hg debugcreatestreamclonebundle'"))
1213 1213
1214 1214 if opts.get('all'):
1215 1215 if dest:
1216 1216 raise error.Abort(_("--all is incompatible with specifying "
1217 1217 "a destination"))
1218 1218 if opts.get('base'):
1219 1219 ui.warn(_("ignoring --base because --all was specified\n"))
1220 1220 base = ['null']
1221 1221 else:
1222 1222 base = scmutil.revrange(repo, opts.get('base'))
1223 1223 if cgversion not in changegroup.supportedoutgoingversions(repo):
1224 1224 raise error.Abort(_("repository does not support bundle version %s") %
1225 1225 cgversion)
1226 1226
1227 1227 if base:
1228 1228 if dest:
1229 1229 raise error.Abort(_("--base is incompatible with specifying "
1230 1230 "a destination"))
1231 1231 common = [repo.lookup(rev) for rev in base]
1232 1232 heads = [repo.lookup(r) for r in revs] if revs else None
1233 1233 outgoing = discovery.outgoing(repo, common, heads)
1234 1234 else:
1235 1235 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1236 1236 dest, branches = hg.parseurl(dest, opts.get('branch'))
1237 1237 other = hg.peer(repo, opts, dest)
1238 1238 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1239 1239 heads = revs and map(repo.lookup, revs) or revs
1240 1240 outgoing = discovery.findcommonoutgoing(repo, other,
1241 1241 onlyheads=heads,
1242 1242 force=opts.get('force'),
1243 1243 portable=True)
1244 1244
1245 1245 if not outgoing.missing:
1246 1246 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1247 1247 return 1
1248 1248
1249 bcompression = bundlespec.compression
1249 1250 if cgversion == '01': #bundle1
1250 1251 if bcompression is None:
1251 1252 bcompression = 'UN'
1252 1253 bversion = 'HG10' + bcompression
1253 1254 bcompression = None
1254 1255 elif cgversion in ('02', '03'):
1255 1256 bversion = 'HG20'
1256 1257 else:
1257 1258 raise error.ProgrammingError(
1258 1259 'bundle: unexpected changegroup version %s' % cgversion)
1259 1260
1260 1261 # TODO compression options should be derived from bundlespec parsing.
1261 1262 # This is a temporary hack to allow adjusting bundle compression
1262 1263 # level without a) formalizing the bundlespec changes to declare it
1263 1264 # b) introducing a command flag.
1264 1265 compopts = {}
1265 1266 complevel = ui.configint('experimental', 'bundlecomplevel')
1266 1267 if complevel is not None:
1267 1268 compopts['level'] = complevel
1268 1269
1269 1270
1270 1271 contentopts = {'cg.version': cgversion, 'changegroup': True}
1271 1272 if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker'):
1272 1273 contentopts['obsolescence'] = True
1273 1274 if repo.ui.configbool('experimental', 'bundle-phases'):
1274 1275 contentopts['phases'] = True
1275 1276 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1276 1277 contentopts, compression=bcompression,
1277 1278 compopts=compopts)
1278 1279
1279 1280 @command('cat',
1280 1281 [('o', 'output', '',
1281 1282 _('print output to file with formatted name'), _('FORMAT')),
1282 1283 ('r', 'rev', '', _('print the given revision'), _('REV')),
1283 1284 ('', 'decode', None, _('apply any matching decode filter')),
1284 1285 ] + walkopts + formatteropts,
1285 1286 _('[OPTION]... FILE...'),
1286 1287 inferrepo=True, cmdtype=readonly)
1287 1288 def cat(ui, repo, file1, *pats, **opts):
1288 1289 """output the current or given revision of files
1289 1290
1290 1291 Print the specified files as they were at the given revision. If
1291 1292 no revision is given, the parent of the working directory is used.
1292 1293
1293 1294 Output may be to a file, in which case the name of the file is
1294 1295 given using a template string. See :hg:`help templates`. In addition
1295 1296 to the common template keywords, the following formatting rules are
1296 1297 supported:
1297 1298
1298 1299 :``%%``: literal "%" character
1299 1300 :``%s``: basename of file being printed
1300 1301 :``%d``: dirname of file being printed, or '.' if in repository root
1301 1302 :``%p``: root-relative path name of file being printed
1302 1303 :``%H``: changeset hash (40 hexadecimal digits)
1303 1304 :``%R``: changeset revision number
1304 1305 :``%h``: short-form changeset hash (12 hexadecimal digits)
1305 1306 :``%r``: zero-padded changeset revision number
1306 1307 :``%b``: basename of the exporting repository
1307 1308 :``\\``: literal "\\" character
1308 1309
1309 1310 Returns 0 on success.
1310 1311 """
1311 1312 opts = pycompat.byteskwargs(opts)
1312 1313 rev = opts.get('rev')
1313 1314 if rev:
1314 1315 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
1315 1316 ctx = scmutil.revsingle(repo, rev)
1316 1317 m = scmutil.match(ctx, (file1,) + pats, opts)
1317 1318 fntemplate = opts.pop('output', '')
1318 1319 if cmdutil.isstdiofilename(fntemplate):
1319 1320 fntemplate = ''
1320 1321
1321 1322 if fntemplate:
1322 1323 fm = formatter.nullformatter(ui, 'cat')
1323 1324 else:
1324 1325 ui.pager('cat')
1325 1326 fm = ui.formatter('cat', opts)
1326 1327 with fm:
1327 1328 return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
1328 1329 **pycompat.strkwargs(opts))
1329 1330
1330 1331 @command('^clone',
1331 1332 [('U', 'noupdate', None, _('the clone will include an empty working '
1332 1333 'directory (only a repository)')),
1333 1334 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1334 1335 _('REV')),
1335 1336 ('r', 'rev', [], _('do not clone everything, but include this changeset'
1336 1337 ' and its ancestors'), _('REV')),
1337 1338 ('b', 'branch', [], _('do not clone everything, but include this branch\'s'
1338 1339 ' changesets and their ancestors'), _('BRANCH')),
1339 1340 ('', 'pull', None, _('use pull protocol to copy metadata')),
1340 1341 ('', 'uncompressed', None,
1341 1342 _('an alias to --stream (DEPRECATED)')),
1342 1343 ('', 'stream', None,
1343 1344 _('clone with minimal data processing')),
1344 1345 ] + remoteopts,
1345 1346 _('[OPTION]... SOURCE [DEST]'),
1346 1347 norepo=True)
1347 1348 def clone(ui, source, dest=None, **opts):
1348 1349 """make a copy of an existing repository
1349 1350
1350 1351 Create a copy of an existing repository in a new directory.
1351 1352
1352 1353 If no destination directory name is specified, it defaults to the
1353 1354 basename of the source.
1354 1355
1355 1356 The location of the source is added to the new repository's
1356 1357 ``.hg/hgrc`` file, as the default to be used for future pulls.
1357 1358
1358 1359 Only local paths and ``ssh://`` URLs are supported as
1359 1360 destinations. For ``ssh://`` destinations, no working directory or
1360 1361 ``.hg/hgrc`` will be created on the remote side.
1361 1362
1362 1363 If the source repository has a bookmark called '@' set, that
1363 1364 revision will be checked out in the new repository by default.
1364 1365
1365 1366 To check out a particular version, use -u/--update, or
1366 1367 -U/--noupdate to create a clone with no working directory.
1367 1368
1368 1369 To pull only a subset of changesets, specify one or more revisions
1369 1370 identifiers with -r/--rev or branches with -b/--branch. The
1370 1371 resulting clone will contain only the specified changesets and
1371 1372 their ancestors. These options (or 'clone src#rev dest') imply
1372 1373 --pull, even for local source repositories.
1373 1374
1374 1375 In normal clone mode, the remote normalizes repository data into a common
1375 1376 exchange format and the receiving end translates this data into its local
1376 1377 storage format. --stream activates a different clone mode that essentially
1377 1378 copies repository files from the remote with minimal data processing. This
1378 1379 significantly reduces the CPU cost of a clone both remotely and locally.
1379 1380 However, it often increases the transferred data size by 30-40%. This can
1380 1381 result in substantially faster clones where I/O throughput is plentiful,
1381 1382 especially for larger repositories. A side-effect of --stream clones is
1382 1383 that storage settings and requirements on the remote are applied locally:
1383 1384 a modern client may inherit legacy or inefficient storage used by the
1384 1385 remote or a legacy Mercurial client may not be able to clone from a
1385 1386 modern Mercurial remote.
1386 1387
1387 1388 .. note::
1388 1389
1389 1390 Specifying a tag will include the tagged changeset but not the
1390 1391 changeset containing the tag.
1391 1392
1392 1393 .. container:: verbose
1393 1394
1394 1395 For efficiency, hardlinks are used for cloning whenever the
1395 1396 source and destination are on the same filesystem (note this
1396 1397 applies only to the repository data, not to the working
1397 1398 directory). Some filesystems, such as AFS, implement hardlinking
1398 1399 incorrectly, but do not report errors. In these cases, use the
1399 1400 --pull option to avoid hardlinking.
1400 1401
1401 1402 Mercurial will update the working directory to the first applicable
1402 1403 revision from this list:
1403 1404
1404 1405 a) null if -U or the source repository has no changesets
1405 1406 b) if -u . and the source repository is local, the first parent of
1406 1407 the source repository's working directory
1407 1408 c) the changeset specified with -u (if a branch name, this means the
1408 1409 latest head of that branch)
1409 1410 d) the changeset specified with -r
1410 1411 e) the tipmost head specified with -b
1411 1412 f) the tipmost head specified with the url#branch source syntax
1412 1413 g) the revision marked with the '@' bookmark, if present
1413 1414 h) the tipmost head of the default branch
1414 1415 i) tip
1415 1416
1416 1417 When cloning from servers that support it, Mercurial may fetch
1417 1418 pre-generated data from a server-advertised URL. When this is done,
1418 1419 hooks operating on incoming changesets and changegroups may fire twice,
1419 1420 once for the bundle fetched from the URL and another for any additional
1420 1421 data not fetched from this URL. In addition, if an error occurs, the
1421 1422 repository may be rolled back to a partial clone. This behavior may
1422 1423 change in future releases. See :hg:`help -e clonebundles` for more.
1423 1424
1424 1425 Examples:
1425 1426
1426 1427 - clone a remote repository to a new directory named hg/::
1427 1428
1428 1429 hg clone https://www.mercurial-scm.org/repo/hg/
1429 1430
1430 1431 - create a lightweight local clone::
1431 1432
1432 1433 hg clone project/ project-feature/
1433 1434
1434 1435 - clone from an absolute path on an ssh server (note double-slash)::
1435 1436
1436 1437 hg clone ssh://user@server//home/projects/alpha/
1437 1438
1438 1439 - do a streaming clone while checking out a specified version::
1439 1440
1440 1441 hg clone --stream http://server/repo -u 1.5
1441 1442
1442 1443 - create a repository without changesets after a particular revision::
1443 1444
1444 1445 hg clone -r 04e544 experimental/ good/
1445 1446
1446 1447 - clone (and track) a particular named branch::
1447 1448
1448 1449 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1449 1450
1450 1451 See :hg:`help urls` for details on specifying URLs.
1451 1452
1452 1453 Returns 0 on success.
1453 1454 """
1454 1455 opts = pycompat.byteskwargs(opts)
1455 1456 if opts.get('noupdate') and opts.get('updaterev'):
1456 1457 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1457 1458
1458 1459 r = hg.clone(ui, opts, source, dest,
1459 1460 pull=opts.get('pull'),
1460 1461 stream=opts.get('stream') or opts.get('uncompressed'),
1461 1462 rev=opts.get('rev'),
1462 1463 update=opts.get('updaterev') or not opts.get('noupdate'),
1463 1464 branch=opts.get('branch'),
1464 1465 shareopts=opts.get('shareopts'))
1465 1466
1466 1467 return r is None
1467 1468
1468 1469 @command('^commit|ci',
1469 1470 [('A', 'addremove', None,
1470 1471 _('mark new/missing files as added/removed before committing')),
1471 1472 ('', 'close-branch', None,
1472 1473 _('mark a branch head as closed')),
1473 1474 ('', 'amend', None, _('amend the parent of the working directory')),
1474 1475 ('s', 'secret', None, _('use the secret phase for committing')),
1475 1476 ('e', 'edit', None, _('invoke editor on commit messages')),
1476 1477 ('i', 'interactive', None, _('use interactive mode')),
1477 1478 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1478 1479 _('[OPTION]... [FILE]...'),
1479 1480 inferrepo=True)
1480 1481 def commit(ui, repo, *pats, **opts):
1481 1482 """commit the specified files or all outstanding changes
1482 1483
1483 1484 Commit changes to the given files into the repository. Unlike a
1484 1485 centralized SCM, this operation is a local operation. See
1485 1486 :hg:`push` for a way to actively distribute your changes.
1486 1487
1487 1488 If a list of files is omitted, all changes reported by :hg:`status`
1488 1489 will be committed.
1489 1490
1490 1491 If you are committing the result of a merge, do not provide any
1491 1492 filenames or -I/-X filters.
1492 1493
1493 1494 If no commit message is specified, Mercurial starts your
1494 1495 configured editor where you can enter a message. In case your
1495 1496 commit fails, you will find a backup of your message in
1496 1497 ``.hg/last-message.txt``.
1497 1498
1498 1499 The --close-branch flag can be used to mark the current branch
1499 1500 head closed. When all heads of a branch are closed, the branch
1500 1501 will be considered closed and no longer listed.
1501 1502
1502 1503 The --amend flag can be used to amend the parent of the
1503 1504 working directory with a new commit that contains the changes
1504 1505 in the parent in addition to those currently reported by :hg:`status`,
1505 1506 if there are any. The old commit is stored in a backup bundle in
1506 1507 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1507 1508 on how to restore it).
1508 1509
1509 1510 Message, user and date are taken from the amended commit unless
1510 1511 specified. When a message isn't specified on the command line,
1511 1512 the editor will open with the message of the amended commit.
1512 1513
1513 1514 It is not possible to amend public changesets (see :hg:`help phases`)
1514 1515 or changesets that have children.
1515 1516
1516 1517 See :hg:`help dates` for a list of formats valid for -d/--date.
1517 1518
1518 1519 Returns 0 on success, 1 if nothing changed.
1519 1520
1520 1521 .. container:: verbose
1521 1522
1522 1523 Examples:
1523 1524
1524 1525 - commit all files ending in .py::
1525 1526
1526 1527 hg commit --include "set:**.py"
1527 1528
1528 1529 - commit all non-binary files::
1529 1530
1530 1531 hg commit --exclude "set:binary()"
1531 1532
1532 1533 - amend the current commit and set the date to now::
1533 1534
1534 1535 hg commit --amend --date now
1535 1536 """
1536 1537 wlock = lock = None
1537 1538 try:
1538 1539 wlock = repo.wlock()
1539 1540 lock = repo.lock()
1540 1541 return _docommit(ui, repo, *pats, **opts)
1541 1542 finally:
1542 1543 release(lock, wlock)
1543 1544
1544 1545 def _docommit(ui, repo, *pats, **opts):
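# Assumption (comment added for clarity): interactive mode delegates hunk
# selection to the record machinery, which re-enters commit() with only the
# selected changes.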
1545 1546 if opts.get(r'interactive'):
1546 1547 opts.pop(r'interactive')
1547 1548 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1548 1549 cmdutil.recordfilter, *pats,
1549 1550 **opts)
1550 1551 # ret can be 0 (no changes to record) or the value returned by
1551 1552 # commit(), 1 if nothing changed or None on success.
1552 1553 return 1 if ret == 0 else ret
1553 1554
1554 1555 opts = pycompat.byteskwargs(opts)
1555 1556 if opts.get('subrepos'):
1556 1557 if opts.get('amend'):
1557 1558 raise error.Abort(_('cannot amend with --subrepos'))
1558 1559 # Let --subrepos on the command line override config setting.
1559 1560 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1560 1561
1561 1562 cmdutil.checkunfinished(repo, commit=True)
1562 1563
1563 1564 branch = repo[None].branch()
1564 1565 bheads = repo.branchheads(branch)
1565 1566
1566 1567 extra = {}
1567 1568 if opts.get('close_branch'):
1568 1569 extra['close'] = '1'
1569 1570
1570 1571 if not bheads:
1571 1572 raise error.Abort(_('can only close branch heads'))
1572 1573 elif opts.get('amend'):
1573 1574 if repo[None].parents()[0].p1().branch() != branch and \
1574 1575 repo[None].parents()[0].p2().branch() != branch:
1575 1576 raise error.Abort(_('can only close branch heads'))
1576 1577
1577 1578 if opts.get('amend'):
1578 1579 if ui.configbool('ui', 'commitsubrepos'):
1579 1580 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1580 1581
1581 1582 old = repo['.']
1582 1583 rewriteutil.precheck(repo, [old.rev()], 'amend')
1583 1584
1584 1585 # Currently histedit gets confused if an amend happens while histedit
1585 1586 # is in progress. Since we have a checkunfinished command, we are
1586 1587 # temporarily honoring it.
1587 1588 #
1588 1589 # Note: eventually this guard will be removed. Please do not expect
1589 1590 # this behavior to remain.
1590 1591 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1591 1592 cmdutil.checkunfinished(repo)
1592 1593
1593 1594 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
1594 1595 if node == old.node():
1595 1596 ui.status(_("nothing changed\n"))
1596 1597 return 1
1597 1598 else:
1598 1599 def commitfunc(ui, repo, message, match, opts):
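# Comment added for clarity: commit callback handed to cmdutil.commit();
# it honors --secret by temporarily overriding phases.new-commit on both
# the repo's base ui and the command ui.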
1599 1600 overrides = {}
1600 1601 if opts.get('secret'):
1601 1602 overrides[('phases', 'new-commit')] = 'secret'
1602 1603
1603 1604 baseui = repo.baseui
1604 1605 with baseui.configoverride(overrides, 'commit'):
1605 1606 with ui.configoverride(overrides, 'commit'):
1606 1607 editform = cmdutil.mergeeditform(repo[None],
1607 1608 'commit.normal')
1608 1609 editor = cmdutil.getcommiteditor(
1609 1610 editform=editform, **pycompat.strkwargs(opts))
1610 1611 return repo.commit(message,
1611 1612 opts.get('user'),
1612 1613 opts.get('date'),
1613 1614 match,
1614 1615 editor=editor,
1615 1616 extra=extra)
1616 1617
1617 1618 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1618 1619
1619 1620 if not node:
1620 1621 stat = cmdutil.postcommitstatus(repo, pats, opts)
1621 1622 if stat[3]:
1622 1623 ui.status(_("nothing changed (%d missing files, see "
1623 1624 "'hg status')\n") % len(stat[3]))
1624 1625 else:
1625 1626 ui.status(_("nothing changed\n"))
1626 1627 return 1
1627 1628
1628 1629 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1629 1630
1630 1631 @command('config|showconfig|debugconfig',
1631 1632 [('u', 'untrusted', None, _('show untrusted configuration options')),
1632 1633 ('e', 'edit', None, _('edit user config')),
1633 1634 ('l', 'local', None, _('edit repository config')),
1634 1635 ('g', 'global', None, _('edit global config'))] + formatteropts,
1635 1636 _('[-u] [NAME]...'),
1636 1637 optionalrepo=True, cmdtype=readonly)
1637 1638 def config(ui, repo, *values, **opts):
1638 1639 """show combined config settings from all hgrc files
1639 1640
1640 1641 With no arguments, print names and values of all config items.
1641 1642
1642 1643 With one argument of the form section.name, print just the value
1643 1644 of that config item.
1644 1645
1645 1646 With multiple arguments, print names and values of all config
1646 1647 items with matching section names or section.names.
1647 1648
1648 1649 With --edit, start an editor on the user-level config file. With
1649 1650 --global, edit the system-wide config file. With --local, edit the
1650 1651 repository-level config file.
1651 1652
1652 1653 With --debug, the source (filename and line number) is printed
1653 1654 for each config item.
1654 1655
1655 1656 See :hg:`help config` for more information about config files.
1656 1657
1657 1658 Returns 0 on success, 1 if NAME does not exist.
1658 1659
1659 1660 """
1660 1661
1661 1662 opts = pycompat.byteskwargs(opts)
1662 1663 if opts.get('edit') or opts.get('local') or opts.get('global'):
1663 1664 if opts.get('local') and opts.get('global'):
1664 1665 raise error.Abort(_("can't use --local and --global together"))
1665 1666
1666 1667 if opts.get('local'):
1667 1668 if not repo:
1668 1669 raise error.Abort(_("can't use --local outside a repository"))
1669 1670 paths = [repo.vfs.join('hgrc')]
1670 1671 elif opts.get('global'):
1671 1672 paths = rcutil.systemrcpath()
1672 1673 else:
1673 1674 paths = rcutil.userrcpath()
1674 1675
1675 1676 for f in paths:
1676 1677 if os.path.exists(f):
1677 1678 break
1678 1679 else:
1679 1680 if opts.get('global'):
1680 1681 samplehgrc = uimod.samplehgrcs['global']
1681 1682 elif opts.get('local'):
1682 1683 samplehgrc = uimod.samplehgrcs['local']
1683 1684 else:
1684 1685 samplehgrc = uimod.samplehgrcs['user']
1685 1686
1686 1687 f = paths[0]
1687 1688 fp = open(f, "wb")
1688 1689 fp.write(util.tonativeeol(samplehgrc))
1689 1690 fp.close()
1690 1691
1691 1692 editor = ui.geteditor()
1692 1693 ui.system("%s \"%s\"" % (editor, f),
1693 1694 onerr=error.Abort, errprefix=_("edit failed"),
1694 1695 blockedtag='config_edit')
1695 1696 return
1696 1697 ui.pager('config')
1697 1698 fm = ui.formatter('config', opts)
1698 1699 for t, f in rcutil.rccomponents():
1699 1700 if t == 'path':
1700 1701 ui.debug('read config from: %s\n' % f)
1701 1702 elif t == 'items':
1702 1703 for section, name, value, source in f:
1703 1704 ui.debug('set config by: %s\n' % source)
1704 1705 else:
1705 1706 raise error.ProgrammingError('unknown rctype: %s' % t)
1706 1707 untrusted = bool(opts.get('untrusted'))
1707 1708
1708 1709 selsections = selentries = []
1709 1710 if values:
1710 1711 selsections = [v for v in values if '.' not in v]
1711 1712 selentries = [v for v in values if '.' in v]
1712 1713 uniquesel = (len(selentries) == 1 and not selsections)
1713 1714 selsections = set(selsections)
1714 1715 selentries = set(selentries)
1715 1716
1716 1717 matched = False
1717 1718 for section, name, value in ui.walkconfig(untrusted=untrusted):
1718 1719 source = ui.configsource(section, name, untrusted)
1719 1720 value = pycompat.bytestr(value)
1720 1721 if fm.isplain():
1721 1722 source = source or 'none'
1722 1723 value = value.replace('\n', '\\n')
1723 1724 entryname = section + '.' + name
1724 1725 if values and not (section in selsections or entryname in selentries):
1725 1726 continue
1726 1727 fm.startitem()
1727 1728 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1728 1729 if uniquesel:
1729 1730 fm.data(name=entryname)
1730 1731 fm.write('value', '%s\n', value)
1731 1732 else:
1732 1733 fm.write('name value', '%s=%s\n', entryname, value)
1733 1734 matched = True
1734 1735 fm.end()
1735 1736 if matched:
1736 1737 return 0
1737 1738 return 1
1738 1739
1739 1740 @command('copy|cp',
1740 1741 [('A', 'after', None, _('record a copy that has already occurred')),
1741 1742 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1742 1743 ] + walkopts + dryrunopts,
1743 1744 _('[OPTION]... [SOURCE]... DEST'))
1744 1745 def copy(ui, repo, *pats, **opts):
1745 1746 """mark files as copied for the next commit
1746 1747
1747 1748 Mark dest as having copies of source files. If dest is a
1748 1749 directory, copies are put in that directory. If dest is a file,
1749 1750 the source must be a single file.
1750 1751
1751 1752 By default, this command copies the contents of files as they
1752 1753 exist in the working directory. If invoked with -A/--after, the
1753 1754 operation is recorded, but no copying is performed.
1754 1755
1755 1756 This command takes effect with the next commit. To undo a copy
1756 1757 before that, see :hg:`revert`.
1757 1758
1758 1759 Returns 0 on success, 1 if errors are encountered.
1759 1760 """
1760 1761 opts = pycompat.byteskwargs(opts)
1761 1762 with repo.wlock(False):
1762 1763 return cmdutil.copy(ui, repo, pats, opts)
1763 1764
1764 1765 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
1765 1766 def debugcommands(ui, cmd='', *args):
1766 1767 """list all available commands and options"""
1767 1768 for cmd, vals in sorted(table.iteritems()):
1768 1769 cmd = cmd.split('|')[0].strip('^')
1769 1770 opts = ', '.join([i[1] for i in vals[1]])
1770 1771 ui.write('%s: %s\n' % (cmd, opts))
1771 1772
1772 1773 @command('debugcomplete',
1773 1774 [('o', 'options', None, _('show the command options'))],
1774 1775 _('[-o] CMD'),
1775 1776 norepo=True)
1776 1777 def debugcomplete(ui, cmd='', **opts):
1777 1778 """returns the completion list associated with the given command"""
1778 1779
1779 1780 if opts.get(r'options'):
1780 1781 options = []
1781 1782 otables = [globalopts]
1782 1783 if cmd:
1783 1784 aliases, entry = cmdutil.findcmd(cmd, table, False)
1784 1785 otables.append(entry[1])
1785 1786 for t in otables:
1786 1787 for o in t:
1787 1788 if "(DEPRECATED)" in o[3]:
1788 1789 continue
1789 1790 if o[0]:
1790 1791 options.append('-%s' % o[0])
1791 1792 options.append('--%s' % o[1])
1792 1793 ui.write("%s\n" % "\n".join(options))
1793 1794 return
1794 1795
1795 1796 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
1796 1797 if ui.verbose:
1797 1798 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1798 1799 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1799 1800
1800 1801 @command('^diff',
1801 1802 [('r', 'rev', [], _('revision'), _('REV')),
1802 1803 ('c', 'change', '', _('change made by revision'), _('REV'))
1803 1804 ] + diffopts + diffopts2 + walkopts + subrepoopts,
1804 1805 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
1805 1806 inferrepo=True, cmdtype=readonly)
1806 1807 def diff(ui, repo, *pats, **opts):
1807 1808 """diff repository (or selected files)
1808 1809
1809 1810 Show differences between revisions for the specified files.
1810 1811
1811 1812 Differences between files are shown using the unified diff format.
1812 1813
1813 1814 .. note::
1814 1815
1815 1816 :hg:`diff` may generate unexpected results for merges, as it will
1816 1817 default to comparing against the working directory's first
1817 1818 parent changeset if no revisions are specified.
1818 1819
1819 1820 When two revision arguments are given, then changes are shown
1820 1821 between those revisions. If only one revision is specified then
1821 1822 that revision is compared to the working directory, and, when no
1822 1823 revisions are specified, the working directory files are compared
1823 1824 to its first parent.
1824 1825
1825 1826 Alternatively you can specify -c/--change with a revision to see
1826 1827 the changes in that changeset relative to its first parent.
1827 1828
1828 1829 Without the -a/--text option, diff will avoid generating diffs of
1829 1830 files it detects as binary. With -a, diff will generate a diff
1830 1831 anyway, probably with undesirable results.
1831 1832
1832 1833 Use the -g/--git option to generate diffs in the git extended diff
1833 1834 format. For more information, read :hg:`help diffs`.
1834 1835
1835 1836 .. container:: verbose
1836 1837
1837 1838 Examples:
1838 1839
1839 1840 - compare a file in the current working directory to its parent::
1840 1841
1841 1842 hg diff foo.c
1842 1843
1843 1844 - compare two historical versions of a directory, with rename info::
1844 1845
1845 1846 hg diff --git -r 1.0:1.2 lib/
1846 1847
1847 1848 - get change stats relative to the last change on some date::
1848 1849
1849 1850 hg diff --stat -r "date('may 2')"
1850 1851
1851 1852 - diff all newly-added files that contain a keyword::
1852 1853
1853 1854 hg diff "set:added() and grep(GNU)"
1854 1855
1855 1856 - compare a revision and its parents::
1856 1857
1857 1858 hg diff -c 9353 # compare against first parent
1858 1859 hg diff -r 9353^:9353 # same using revset syntax
1859 1860 hg diff -r 9353^2:9353 # compare against the second parent
1860 1861
1861 1862 Returns 0 on success.
1862 1863 """
1863 1864
1864 1865 opts = pycompat.byteskwargs(opts)
1865 1866 revs = opts.get('rev')
1866 1867 change = opts.get('change')
1867 1868 stat = opts.get('stat')
1868 1869 reverse = opts.get('reverse')
1869 1870
1870 1871 if revs and change:
1871 1872 msg = _('cannot specify --rev and --change at the same time')
1872 1873 raise error.Abort(msg)
1873 1874 elif change:
1874 1875 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
1875 1876 node2 = scmutil.revsingle(repo, change, None).node()
1876 1877 node1 = repo[node2].p1().node()
1877 1878 else:
1878 1879 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
1879 1880 node1, node2 = scmutil.revpair(repo, revs)
1880 1881
1881 1882 if reverse:
1882 1883 node1, node2 = node2, node1
1883 1884
1884 1885 diffopts = patch.diffallopts(ui, opts)
1885 1886 m = scmutil.match(repo[node2], pats, opts)
1886 1887 ui.pager('diff')
1887 1888 logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1888 1889 listsubrepos=opts.get('subrepos'),
1889 1890 root=opts.get('root'))
1890 1891
1891 1892 @command('^export',
1892 1893 [('o', 'output', '',
1893 1894 _('print output to file with formatted name'), _('FORMAT')),
1894 1895 ('', 'switch-parent', None, _('diff against the second parent')),
1895 1896 ('r', 'rev', [], _('revisions to export'), _('REV')),
1896 1897 ] + diffopts,
1897 1898 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'), cmdtype=readonly)
1898 1899 def export(ui, repo, *changesets, **opts):
1899 1900 """dump the header and diffs for one or more changesets
1900 1901
1901 1902 Print the changeset header and diffs for one or more revisions.
1902 1903 If no revision is given, the parent of the working directory is used.
1903 1904
1904 1905 The information shown in the changeset header is: author, date,
1905 1906 branch name (if non-default), changeset hash, parent(s) and commit
1906 1907 comment.
1907 1908
1908 1909 .. note::
1909 1910
1910 1911 :hg:`export` may generate unexpected diff output for merge
1911 1912 changesets, as it will compare the merge changeset against its
1912 1913 first parent only.
1913 1914
1914 1915 Output may be to a file, in which case the name of the file is
1915 1916 given using a template string. See :hg:`help templates`. In addition
1916 1917 to the common template keywords, the following formatting rules are
1917 1918 supported:
1918 1919
1919 1920 :``%%``: literal "%" character
1920 1921 :``%H``: changeset hash (40 hexadecimal digits)
1921 1922 :``%N``: number of patches being generated
1922 1923 :``%R``: changeset revision number
1923 1924 :``%b``: basename of the exporting repository
1924 1925 :``%h``: short-form changeset hash (12 hexadecimal digits)
1925 1926 :``%m``: first line of the commit message (only alphanumeric characters)
1926 1927 :``%n``: zero-padded sequence number, starting at 1
1927 1928 :``%r``: zero-padded changeset revision number
1928 1929 :``\\``: literal "\\" character
1929 1930
1930 1931 Without the -a/--text option, export will avoid generating diffs
1931 1932 of files it detects as binary. With -a, export will generate a
1932 1933 diff anyway, probably with undesirable results.
1933 1934
1934 1935 Use the -g/--git option to generate diffs in the git extended diff
1935 1936 format. See :hg:`help diffs` for more information.
1936 1937
1937 1938 With the --switch-parent option, the diff will be against the
1938 1939 second parent. This can be useful for reviewing a merge.
1939 1940
1940 1941 .. container:: verbose
1941 1942
1942 1943 Examples:
1943 1944
1944 1945 - use export and import to transplant a bugfix to the current
1945 1946 branch::
1946 1947
1947 1948 hg export -r 9353 | hg import -
1948 1949
1949 1950 - export all the changesets between two revisions to a file with
1950 1951 rename information::
1951 1952
1952 1953 hg export --git -r 123:150 > changes.txt
1953 1954
1954 1955 - split outgoing changes into a series of patches with
1955 1956 descriptive names::
1956 1957
1957 1958 hg export -r "outgoing()" -o "%n-%m.patch"
1958 1959
1959 1960 Returns 0 on success.
1960 1961 """
1961 1962 opts = pycompat.byteskwargs(opts)
1962 1963 changesets += tuple(opts.get('rev', []))
1963 1964 if not changesets:
1964 1965 changesets = ['.']
1965 1966 repo = scmutil.unhidehashlikerevs(repo, changesets, 'nowarn')
1966 1967 revs = scmutil.revrange(repo, changesets)
1967 1968 if not revs:
1968 1969 raise error.Abort(_("export requires at least one changeset"))
1969 1970 if len(revs) > 1:
1970 1971 ui.note(_('exporting patches:\n'))
1971 1972 else:
1972 1973 ui.note(_('exporting patch:\n'))
1973 1974 ui.pager('export')
1974 1975 cmdutil.export(repo, revs, fntemplate=opts.get('output'),
1975 1976 switch_parent=opts.get('switch_parent'),
1976 1977 opts=patch.diffallopts(ui, opts))
1977 1978
1978 1979 @command('files',
1979 1980 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
1980 1981 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
1981 1982 ] + walkopts + formatteropts + subrepoopts,
1982 1983 _('[OPTION]... [FILE]...'), cmdtype=readonly)
1983 1984 def files(ui, repo, *pats, **opts):
1984 1985 """list tracked files
1985 1986
1986 1987 Print files under Mercurial control in the working directory or
1987 1988 specified revision for given files (excluding removed files).
1988 1989 Files can be specified as filenames or filesets.
1989 1990
1990 1991 If no files are given to match, this command prints the names
1991 1992 of all files under Mercurial control.
1992 1993
1993 1994 .. container:: verbose
1994 1995
1995 1996 Examples:
1996 1997
1997 1998 - list all files under the current directory::
1998 1999
1999 2000 hg files .
2000 2001
2001 2002 - show sizes and flags for the current revision::
2002 2003
2003 2004 hg files -vr .
2004 2005
2005 2006 - list all files named README::
2006 2007
2007 2008 hg files -I "**/README"
2008 2009
2009 2010 - list all binary files::
2010 2011
2011 2012 hg files "set:binary()"
2012 2013
2013 2014 - find files containing a regular expression::
2014 2015
2015 2016 hg files "set:grep('bob')"
2016 2017
2017 2018 - search tracked file contents with xargs and grep::
2018 2019
2019 2020 hg files -0 | xargs -0 grep foo
2020 2021
2021 2022 See :hg:`help patterns` and :hg:`help filesets` for more information
2022 2023 on specifying file patterns.
2023 2024
2024 2025 Returns 0 if a match is found, 1 otherwise.
2025 2026
2026 2027 """
2027 2028
2028 2029 opts = pycompat.byteskwargs(opts)
2029 2030 rev = opts.get('rev')
2030 2031 if rev:
2031 2032 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2032 2033 ctx = scmutil.revsingle(repo, rev, None)
2033 2034
2034 2035 end = '\n'
2035 2036 if opts.get('print0'):
2036 2037 end = '\0'
2037 2038 fmt = '%s' + end
2038 2039
2039 2040 m = scmutil.match(ctx, pats, opts)
2040 2041 ui.pager('files')
2041 2042 with ui.formatter('files', opts) as fm:
2042 2043 return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
2043 2044
2044 2045 @command(
2045 2046 '^forget',
2046 2047 walkopts + dryrunopts,
2047 2048 _('[OPTION]... FILE...'), inferrepo=True)
2048 2049 def forget(ui, repo, *pats, **opts):
2049 2050 """forget the specified files on the next commit
2050 2051
2051 2052 Mark the specified files so they will no longer be tracked
2052 2053 after the next commit.
2053 2054
2054 2055 This only removes files from the current branch, not from the
2055 2056 entire project history, and it does not delete them from the
2056 2057 working directory.
2057 2058
2058 2059 To delete the file from the working directory, see :hg:`remove`.
2059 2060
2060 2061 To undo a forget before the next commit, see :hg:`add`.
2061 2062
2062 2063 .. container:: verbose
2063 2064
2064 2065 Examples:
2065 2066
2066 2067 - forget newly-added binary files::
2067 2068
2068 2069 hg forget "set:added() and binary()"
2069 2070
2070 2071 - forget files that would be excluded by .hgignore::
2071 2072
2072 2073 hg forget "set:hgignore()"
2073 2074
2074 2075 Returns 0 on success.
2075 2076 """
2076 2077
2077 2078 opts = pycompat.byteskwargs(opts)
2078 2079 if not pats:
2079 2080 raise error.Abort(_('no files specified'))
2080 2081
2081 2082 m = scmutil.match(repo[None], pats, opts)
2082 2083 dryrun = opts.get(r'dry_run')
2083 2084 rejected = cmdutil.forget(ui, repo, m, prefix="",
2084 2085 explicitonly=False, dryrun=dryrun)[0]
2085 2086 return rejected and 1 or 0
2086 2087
2087 2088 @command(
2088 2089 'graft',
2089 2090 [('r', 'rev', [], _('revisions to graft'), _('REV')),
2090 2091 ('c', 'continue', False, _('resume interrupted graft')),
2091 2092 ('e', 'edit', False, _('invoke editor on commit messages')),
2092 2093 ('', 'log', None, _('append graft info to log message')),
2093 2094 ('f', 'force', False, _('force graft')),
2094 2095 ('D', 'currentdate', False,
2095 2096 _('record the current date as commit date')),
2096 2097 ('U', 'currentuser', False,
2097 2098 _('record the current user as committer'))]
2098 2099 + commitopts2 + mergetoolopts + dryrunopts,
2099 2100 _('[OPTION]... [-r REV]... REV...'))
2100 2101 def graft(ui, repo, *revs, **opts):
2101 2102 '''copy changes from other branches onto the current branch
2102 2103
2103 2104 This command uses Mercurial's merge logic to copy individual
2104 2105 changes from other branches without merging branches in the
2105 2106 history graph. This is sometimes known as 'backporting' or
2106 2107 'cherry-picking'. By default, graft will copy user, date, and
2107 2108 description from the source changesets.
2108 2109
2109 2110 Changesets that are ancestors of the current revision, that have
2110 2111 already been grafted, or that are merges will be skipped.
2111 2112
2112 2113 If --log is specified, log messages will have a comment appended
2113 2114 of the form::
2114 2115
2115 2116 (grafted from CHANGESETHASH)
2116 2117
2117 2118 If --force is specified, revisions will be grafted even if they
2118 2119 are already ancestors of, or have been grafted to, the destination.
2119 2120 This is useful when the revisions have since been backed out.
2120 2121
2121 2122 If a graft merge results in conflicts, the graft process is
2122 2123 interrupted so that the current merge can be manually resolved.
2123 2124 Once all conflicts are addressed, the graft process can be
2124 2125 continued with the -c/--continue option.
2125 2126
2126 2127 .. note::
2127 2128
2128 2129 The -c/--continue option does not reapply earlier options, except
2129 2130 for --force.
2130 2131
2131 2132 .. container:: verbose
2132 2133
2133 2134 Examples:
2134 2135
2135 2136 - copy a single change to the stable branch and edit its description::
2136 2137
2137 2138 hg update stable
2138 2139 hg graft --edit 9393
2139 2140
2140 2141 - graft a range of changesets with one exception, updating dates::
2141 2142
2142 2143 hg graft -D "2085::2093 and not 2091"
2143 2144
2144 2145 - continue a graft after resolving conflicts::
2145 2146
2146 2147 hg graft -c
2147 2148
2148 2149 - show the source of a grafted changeset::
2149 2150
2150 2151 hg log --debug -r .
2151 2152
2152 2153 - show revisions sorted by date::
2153 2154
2154 2155 hg log -r "sort(all(), date)"
2155 2156
2156 2157 See :hg:`help revisions` for more about specifying revisions.
2157 2158
2158 2159 Returns 0 on successful completion.
2159 2160 '''
2160 2161 with repo.wlock():
2161 2162 return _dograft(ui, repo, *revs, **opts)
2162 2163
2163 2164 def _dograft(ui, repo, *revs, **opts):
2164 2165 opts = pycompat.byteskwargs(opts)
2165 2166 if revs and opts.get('rev'):
2166 2167 ui.warn(_('warning: inconsistent use of --rev might give unexpected '
2167 2168 'revision ordering!\n'))
2168 2169
2169 2170 revs = list(revs)
2170 2171 revs.extend(opts.get('rev'))
2171 2172
2172 2173 if not opts.get('user') and opts.get('currentuser'):
2173 2174 opts['user'] = ui.username()
2174 2175 if not opts.get('date') and opts.get('currentdate'):
2175 2176 opts['date'] = "%d %d" % dateutil.makedate()
2176 2177
2177 2178 editor = cmdutil.getcommiteditor(editform='graft',
2178 2179 **pycompat.strkwargs(opts))
2179 2180
2180 2181 cont = False
2181 2182 if opts.get('continue'):
2182 2183 cont = True
2183 2184 if revs:
2184 2185 raise error.Abort(_("can't specify --continue and revisions"))
2185 2186 # read in unfinished revisions
2186 2187 try:
2187 2188 nodes = repo.vfs.read('graftstate').splitlines()
2188 2189 revs = [repo[node].rev() for node in nodes]
2189 2190 except IOError as inst:
2190 2191 if inst.errno != errno.ENOENT:
2191 2192 raise
2192 2193 cmdutil.wrongtooltocontinue(repo, _('graft'))
2193 2194 else:
2194 2195 if not revs:
2195 2196 raise error.Abort(_('no revisions specified'))
2196 2197 cmdutil.checkunfinished(repo)
2197 2198 cmdutil.bailifchanged(repo)
2198 2199 revs = scmutil.revrange(repo, revs)
2199 2200
2200 2201 skipped = set()
2201 2202 # check for merges
2202 2203 for rev in repo.revs('%ld and merge()', revs):
2203 2204 ui.warn(_('skipping ungraftable merge revision %d\n') % rev)
2204 2205 skipped.add(rev)
2205 2206 revs = [r for r in revs if r not in skipped]
2206 2207 if not revs:
2207 2208 return -1
2208 2209
2209 2210 # Don't check in the --continue case, in effect retaining --force across
2210 2211 # --continues. That's because without --force, any revisions we decided to
2211 2212 # skip would have been filtered out here, so they wouldn't have made their
2212 2213 # way to the graftstate. With --force, any revisions we would have otherwise
2213 2214 # skipped would not have been filtered out, and if they hadn't been applied
2214 2215 # already, they'd have been in the graftstate.
2215 2216 if not (cont or opts.get('force')):
2216 2217 # check for ancestors of dest branch
2217 2218 crev = repo['.'].rev()
2218 2219 ancestors = repo.changelog.ancestors([crev], inclusive=True)
2219 2220 # XXX make this lazy in the future
2220 2221 # don't mutate while iterating, create a copy
2221 2222 for rev in list(revs):
2222 2223 if rev in ancestors:
2223 2224 ui.warn(_('skipping ancestor revision %d:%s\n') %
2224 2225 (rev, repo[rev]))
2225 2226 # XXX remove on list is slow
2226 2227 revs.remove(rev)
2227 2228 if not revs:
2228 2229 return -1
2229 2230
2230 2231 # analyze revs for earlier grafts
2231 2232 ids = {}
2232 2233 for ctx in repo.set("%ld", revs):
2233 2234 ids[ctx.hex()] = ctx.rev()
2234 2235 n = ctx.extra().get('source')
2235 2236 if n:
2236 2237 ids[n] = ctx.rev()
2237 2238
2238 2239 # check ancestors for earlier grafts
2239 2240 ui.debug('scanning for duplicate grafts\n')
2240 2241
2241 2242 # The only changesets we can be sure don't contain grafts of any
2242 2243 # revs are the ones that are common ancestors of *all* revs:
2243 2244 for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
2244 2245 ctx = repo[rev]
2245 2246 n = ctx.extra().get('source')
2246 2247 if n in ids:
2247 2248 try:
2248 2249 r = repo[n].rev()
2249 2250 except error.RepoLookupError:
2250 2251 r = None
2251 2252 if r in revs:
2252 2253 ui.warn(_('skipping revision %d:%s '
2253 2254 '(already grafted to %d:%s)\n')
2254 2255 % (r, repo[r], rev, ctx))
2255 2256 revs.remove(r)
2256 2257 elif ids[n] in revs:
2257 2258 if r is None:
2258 2259 ui.warn(_('skipping already grafted revision %d:%s '
2259 2260 '(%d:%s also has unknown origin %s)\n')
2260 2261 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
2261 2262 else:
2262 2263 ui.warn(_('skipping already grafted revision %d:%s '
2263 2264 '(%d:%s also has origin %d:%s)\n')
2264 2265 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
2265 2266 revs.remove(ids[n])
2266 2267 elif ctx.hex() in ids:
2267 2268 r = ids[ctx.hex()]
2268 2269 ui.warn(_('skipping already grafted revision %d:%s '
2269 2270 '(was grafted from %d:%s)\n') %
2270 2271 (r, repo[r], rev, ctx))
2271 2272 revs.remove(r)
2272 2273 if not revs:
2273 2274 return -1
2274 2275
2275 2276 for pos, ctx in enumerate(repo.set("%ld", revs)):
2276 2277 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
2277 2278 ctx.description().split('\n', 1)[0])
2278 2279 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
2279 2280 if names:
2280 2281 desc += ' (%s)' % ' '.join(names)
2281 2282 ui.status(_('grafting %s\n') % desc)
2282 2283 if opts.get('dry_run'):
2283 2284 continue
2284 2285
2285 2286 source = ctx.extra().get('source')
2286 2287 extra = {}
2287 2288 if source:
2288 2289 extra['source'] = source
2289 2290 extra['intermediate-source'] = ctx.hex()
2290 2291 else:
2291 2292 extra['source'] = ctx.hex()
2292 2293 user = ctx.user()
2293 2294 if opts.get('user'):
2294 2295 user = opts['user']
2295 2296 date = ctx.date()
2296 2297 if opts.get('date'):
2297 2298 date = opts['date']
2298 2299 message = ctx.description()
2299 2300 if opts.get('log'):
2300 2301 message += '\n(grafted from %s)' % ctx.hex()
2301 2302
2302 2303 # we don't merge the first commit when continuing
2303 2304 if not cont:
2304 2305 # perform the graft merge with p1(rev) as 'ancestor'
2305 2306 try:
2306 2307 # ui.forcemerge is an internal variable, do not document
2307 2308 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
2308 2309 'graft')
2309 2310 stats = mergemod.graft(repo, ctx, ctx.p1(),
2310 2311 ['local', 'graft'])
2311 2312 finally:
2312 2313 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
2313 2314 # report any conflicts
2314 2315 if stats.unresolvedcount > 0:
2315 2316 # write out state for --continue
2316 2317 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2317 2318 repo.vfs.write('graftstate', ''.join(nodelines))
2318 2319 extra = ''
2319 2320 if opts.get('user'):
2320 2321 extra += ' --user %s' % procutil.shellquote(opts['user'])
2321 2322 if opts.get('date'):
2322 2323 extra += ' --date %s' % procutil.shellquote(opts['date'])
2323 2324 if opts.get('log'):
2324 2325 extra += ' --log'
2325 2326 hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
2326 2327 raise error.Abort(
2327 2328 _("unresolved conflicts, can't continue"),
2328 2329 hint=hint)
2329 2330 else:
2330 2331 cont = False
2331 2332
2332 2333 # commit
2333 2334 node = repo.commit(text=message, user=user,
2334 2335 date=date, extra=extra, editor=editor)
2335 2336 if node is None:
2336 2337 ui.warn(
2337 2338 _('note: graft of %d:%s created no changes to commit\n') %
2338 2339 (ctx.rev(), ctx))
2339 2340
2340 2341 # remove state when we complete successfully
2341 2342 if not opts.get('dry_run'):
2342 2343 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
2343 2344
2344 2345 return 0
2345 2346
2346 2347 @command('grep',
2347 2348 [('0', 'print0', None, _('end fields with NUL')),
2348 2349 ('', 'all', None, _('print all revisions that match')),
2349 2350 ('a', 'text', None, _('treat all files as text')),
2350 2351 ('f', 'follow', None,
2351 2352 _('follow changeset history,'
2352 2353 ' or file history across copies and renames')),
2353 2354 ('i', 'ignore-case', None, _('ignore case when matching')),
2354 2355 ('l', 'files-with-matches', None,
2355 2356 _('print only filenames and revisions that match')),
2356 2357 ('n', 'line-number', None, _('print matching line numbers')),
2357 2358 ('r', 'rev', [],
2358 2359 _('only search files changed within revision range'), _('REV')),
2359 2360 ('u', 'user', None, _('list the author (long with -v)')),
2360 2361 ('d', 'date', None, _('list the date (short with -q)')),
2361 2362 ] + formatteropts + walkopts,
2362 2363 _('[OPTION]... PATTERN [FILE]...'),
2363 2364 inferrepo=True, cmdtype=readonly)
2364 2365 def grep(ui, repo, pattern, *pats, **opts):
2365 2366 """search revision history for a pattern in specified files
2366 2367
2367 2368 Search revision history for a regular expression in the specified
2368 2369 files or the entire project.
2369 2370
2370 2371 By default, grep prints the most recent revision number for each
2371 2372 file in which it finds a match. To get it to print every revision
2372 2373 that contains a change in match status ("-" for a match that becomes
2373 2374 a non-match, or "+" for a non-match that becomes a match), use the
2374 2375 --all flag.
2375 2376
2376 2377 PATTERN can be any Python (roughly Perl-compatible) regular
2377 2378 expression.
2378 2379
2379 2380 If no FILEs are specified (and -f/--follow isn't set), all files in
2380 2381 the repository are searched, including those that don't exist in the
2381 2382 current branch or have been deleted in a prior changeset.
2382 2383
2383 2384 Returns 0 if a match is found, 1 otherwise.
2384 2385 """
2385 2386 opts = pycompat.byteskwargs(opts)
2386 2387 reflags = re.M
2387 2388 if opts.get('ignore_case'):
2388 2389 reflags |= re.I
2389 2390 try:
2390 2391 regexp = util.re.compile(pattern, reflags)
2391 2392 except re.error as inst:
2392 2393 ui.warn(_("grep: invalid match pattern: %s\n") % pycompat.bytestr(inst))
2393 2394 return 1
2394 2395 sep, eol = ':', '\n'
2395 2396 if opts.get('print0'):
2396 2397 sep = eol = '\0'
2397 2398
2398 2399 getfile = util.lrucachefunc(repo.file)
2399 2400
2400 2401 def matchlines(body):
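# Comment added for clarity: yield (line number, match start column,
# match end column, line text) for every regexp match found in body.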
2401 2402 begin = 0
2402 2403 linenum = 0
2403 2404 while begin < len(body):
2404 2405 match = regexp.search(body, begin)
2405 2406 if not match:
2406 2407 break
2407 2408 mstart, mend = match.span()
2408 2409 linenum += body.count('\n', begin, mstart) + 1
2409 2410 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2410 2411 begin = body.find('\n', mend) + 1 or len(body) + 1
2411 2412 lend = begin - 1
2412 2413 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2413 2414
2414 2415 class linestate(object):
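# Comment added for clarity: one matching line, carrying its text, line
# number, and the span of the first match; findpos() recovers any further
# matches on the same line.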
2415 2416 def __init__(self, line, linenum, colstart, colend):
2416 2417 self.line = line
2417 2418 self.linenum = linenum
2418 2419 self.colstart = colstart
2419 2420 self.colend = colend
2420 2421
2421 2422 def __hash__(self):
2422 2423 return hash((self.linenum, self.line))
2423 2424
2424 2425 def __eq__(self, other):
2425 2426 return self.line == other.line
2426 2427
2427 2428 def findpos(self):
2428 2429 """Iterate all (start, end) indices of matches"""
2429 2430 yield self.colstart, self.colend
2430 2431 p = self.colend
2431 2432 while p < len(self.line):
2432 2433 m = regexp.search(self.line, p)
2433 2434 if not m:
2434 2435 break
2435 2436 yield m.span()
2436 2437 p = m.end()
2437 2438
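# Comments added for clarity:
# matches: rev -> filename -> [linestate, ...]
# copies: rev -> filename -> source it was copied/renamed from (with --follow)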
2438 2439 matches = {}
2439 2440 copies = {}
2440 2441 def grepbody(fn, rev, body):
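# Comment added for clarity: record every matching line of fn at rev as a
# linestate object.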
2441 2442 matches[rev].setdefault(fn, [])
2442 2443 m = matches[rev][fn]
2443 2444 for lnum, cstart, cend, line in matchlines(body):
2444 2445 s = linestate(line, lnum, cstart, cend)
2445 2446 m.append(s)
2446 2447
2447 2448 def difflinestates(a, b):
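# Comment added for clarity: compare two lists of linestates and yield
# ('+'/'-', linestate) pairs for lines whose match status changed.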
2448 2449 sm = difflib.SequenceMatcher(None, a, b)
2449 2450 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2450 2451 if tag == 'insert':
2451 2452 for i in xrange(blo, bhi):
2452 2453 yield ('+', b[i])
2453 2454 elif tag == 'delete':
2454 2455 for i in xrange(alo, ahi):
2455 2456 yield ('-', a[i])
2456 2457 elif tag == 'replace':
2457 2458 for i in xrange(alo, ahi):
2458 2459 yield ('-', a[i])
2459 2460 for i in xrange(blo, bhi):
2460 2461 yield ('+', b[i])
2461 2462
2462 2463 def display(fm, fn, ctx, pstates, states):
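# Comment added for clarity: write the matches for one file at one revision
# through the formatter; returns True if anything was printed.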
2463 2464 rev = ctx.rev()
2464 2465 if fm.isplain():
2465 2466 formatuser = ui.shortuser
2466 2467 else:
2467 2468 formatuser = str
2468 2469 if ui.quiet:
2469 2470 datefmt = '%Y-%m-%d'
2470 2471 else:
2471 2472 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
2472 2473 found = False
2473 2474 @util.cachefunc
2474 2475 def binary():
2475 2476 flog = getfile(fn)
2476 2477 return stringutil.binary(flog.read(ctx.filenode(fn)))
2477 2478
2478 2479 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
2479 2480 if opts.get('all'):
2480 2481 iter = difflinestates(pstates, states)
2481 2482 else:
2482 2483 iter = [('', l) for l in states]
2483 2484 for change, l in iter:
2484 2485 fm.startitem()
2485 2486 fm.data(node=fm.hexfunc(ctx.node()))
2486 2487 cols = [
2487 2488 ('filename', fn, True),
2488 2489 ('rev', rev, True),
2489 2490 ('linenumber', l.linenum, opts.get('line_number')),
2490 2491 ]
2491 2492 if opts.get('all'):
2492 2493 cols.append(('change', change, True))
2493 2494 cols.extend([
2494 2495 ('user', formatuser(ctx.user()), opts.get('user')),
2495 2496 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
2496 2497 ])
2497 2498 lastcol = next(name for name, data, cond in reversed(cols) if cond)
2498 2499 for name, data, cond in cols:
2499 2500 field = fieldnamemap.get(name, name)
2500 2501 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
2501 2502 if cond and name != lastcol:
2502 2503 fm.plain(sep, label='grep.sep')
2503 2504 if not opts.get('files_with_matches'):
2504 2505 fm.plain(sep, label='grep.sep')
2505 2506 if not opts.get('text') and binary():
2506 2507 fm.plain(_(" Binary file matches"))
2507 2508 else:
2508 2509 displaymatches(fm.nested('texts'), l)
2509 2510 fm.plain(eol)
2510 2511 found = True
2511 2512 if opts.get('files_with_matches'):
2512 2513 break
2513 2514 return found
2514 2515
2515 2516 def displaymatches(fm, l):
2516 2517 p = 0
2517 2518 for s, e in l.findpos():
2518 2519 if p < s:
2519 2520 fm.startitem()
2520 2521 fm.write('text', '%s', l.line[p:s])
2521 2522 fm.data(matched=False)
2522 2523 fm.startitem()
2523 2524 fm.write('text', '%s', l.line[s:e], label='grep.match')
2524 2525 fm.data(matched=True)
2525 2526 p = e
2526 2527 if p < len(l.line):
2527 2528 fm.startitem()
2528 2529 fm.write('text', '%s', l.line[p:])
2529 2530 fm.data(matched=False)
2530 2531 fm.end()
2531 2532
2532 2533 skip = {}
2533 2534 revfiles = {}
2534 2535 match = scmutil.match(repo[None], pats, opts)
2535 2536 found = False
2536 2537 follow = opts.get('follow')
2537 2538
2538 2539 def prep(ctx, fns):
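# Comment added for clarity: callback for cmdutil.walkchangerevs(); collect
# matches for the files touched by ctx and for their counterparts in the
# first parent, so the main loop below can report match-status changes.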
2539 2540 rev = ctx.rev()
2540 2541 pctx = ctx.p1()
2541 2542 parent = pctx.rev()
2542 2543 matches.setdefault(rev, {})
2543 2544 matches.setdefault(parent, {})
2544 2545 files = revfiles.setdefault(rev, [])
2545 2546 for fn in fns:
2546 2547 flog = getfile(fn)
2547 2548 try:
2548 2549 fnode = ctx.filenode(fn)
2549 2550 except error.LookupError:
2550 2551 continue
2551 2552
2552 2553 copied = flog.renamed(fnode)
2553 2554 copy = follow and copied and copied[0]
2554 2555 if copy:
2555 2556 copies.setdefault(rev, {})[fn] = copy
2556 2557 if fn in skip:
2557 2558 if copy:
2558 2559 skip[copy] = True
2559 2560 continue
2560 2561 files.append(fn)
2561 2562
2562 2563 if fn not in matches[rev]:
2563 2564 grepbody(fn, rev, flog.read(fnode))
2564 2565
2565 2566 pfn = copy or fn
2566 2567 if pfn not in matches[parent]:
2567 2568 try:
2568 2569 fnode = pctx.filenode(pfn)
2569 2570 grepbody(pfn, parent, flog.read(fnode))
2570 2571 except error.LookupError:
2571 2572 pass
2572 2573
2573 2574 ui.pager('grep')
2574 2575 fm = ui.formatter('grep', opts)
2575 2576 for ctx in cmdutil.walkchangerevs(repo, match, opts, prep):
2576 2577 rev = ctx.rev()
2577 2578 parent = ctx.p1().rev()
2578 2579 for fn in sorted(revfiles.get(rev, [])):
2579 2580 states = matches[rev][fn]
2580 2581 copy = copies.get(rev, {}).get(fn)
2581 2582 if fn in skip:
2582 2583 if copy:
2583 2584 skip[copy] = True
2584 2585 continue
2585 2586 pstates = matches.get(parent, {}).get(copy or fn, [])
2586 2587 if pstates or states:
2587 2588 r = display(fm, fn, ctx, pstates, states)
2588 2589 found = found or r
2589 2590 if r and not opts.get('all'):
2590 2591 skip[fn] = True
2591 2592 if copy:
2592 2593 skip[copy] = True
2593 2594 del revfiles[rev]
2594 2595 # We will keep the matches dict for the duration of the window
2595 2596 # clear the matches dict once the window is over
2596 2597 if not revfiles:
2597 2598 matches.clear()
2598 2599 fm.end()
2599 2600
2600 2601 return not found
2601 2602
2602 2603 @command('heads',
2603 2604 [('r', 'rev', '',
2604 2605 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2605 2606 ('t', 'topo', False, _('show topological heads only')),
2606 2607 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2607 2608 ('c', 'closed', False, _('show normal and closed branch heads')),
2608 2609 ] + templateopts,
2609 2610 _('[-ct] [-r STARTREV] [REV]...'), cmdtype=readonly)
2610 2611 def heads(ui, repo, *branchrevs, **opts):
2611 2612 """show branch heads
2612 2613
2613 2614 With no arguments, show all open branch heads in the repository.
2614 2615 Branch heads are changesets that have no descendants on the
2615 2616 same branch. They are where development generally takes place and
2616 2617 are the usual targets for update and merge operations.
2617 2618
2618 2619 If one or more REVs are given, only open branch heads on the
2619 2620 branches associated with the specified changesets are shown. This
2620 2621 means that you can use :hg:`heads .` to see the heads on the
2621 2622 currently checked-out branch.
2622 2623
2623 2624 If -c/--closed is specified, also show branch heads marked closed
2624 2625 (see :hg:`commit --close-branch`).
2625 2626
2626 2627 If STARTREV is specified, only those heads that are descendants of
2627 2628 STARTREV will be displayed.
2628 2629
2629 2630 If -t/--topo is specified, named branch mechanics will be ignored and only
2630 2631 topological heads (changesets with no children) will be shown.
2631 2632
2632 2633 Returns 0 if matching heads are found, 1 if not.
2633 2634 """
2634 2635
2635 2636 opts = pycompat.byteskwargs(opts)
2636 2637 start = None
2637 2638 rev = opts.get('rev')
2638 2639 if rev:
2639 2640 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2640 2641 start = scmutil.revsingle(repo, rev, None).node()
2641 2642
2642 2643 if opts.get('topo'):
2643 2644 heads = [repo[h] for h in repo.heads(start)]
2644 2645 else:
2645 2646 heads = []
2646 2647 for branch in repo.branchmap():
2647 2648 heads += repo.branchheads(branch, start, opts.get('closed'))
2648 2649 heads = [repo[h] for h in heads]
2649 2650
2650 2651 if branchrevs:
2651 2652 branches = set(repo[br].branch() for br in branchrevs)
2652 2653 heads = [h for h in heads if h.branch() in branches]
2653 2654
2654 2655 if opts.get('active') and branchrevs:
2655 2656 dagheads = repo.heads(start)
2656 2657 heads = [h for h in heads if h.node() in dagheads]
2657 2658
2658 2659 if branchrevs:
2659 2660 haveheads = set(h.branch() for h in heads)
2660 2661 if branches - haveheads:
2661 2662 headless = ', '.join(b for b in branches - haveheads)
2662 2663 msg = _('no open branch heads found on branches %s')
2663 2664 if opts.get('rev'):
2664 2665 msg += _(' (started at %s)') % opts['rev']
2665 2666 ui.warn((msg + '\n') % headless)
2666 2667
2667 2668 if not heads:
2668 2669 return 1
2669 2670
2670 2671 ui.pager('heads')
2671 2672 heads = sorted(heads, key=lambda x: -x.rev())
2672 2673 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
2673 2674 for ctx in heads:
2674 2675 displayer.show(ctx)
2675 2676 displayer.close()
2676 2677
2677 2678 @command('help',
2678 2679 [('e', 'extension', None, _('show only help for extensions')),
2679 2680 ('c', 'command', None, _('show only help for commands')),
2680 2681 ('k', 'keyword', None, _('show topics matching keyword')),
2681 2682 ('s', 'system', [], _('show help for specific platform(s)')),
2682 2683 ],
2683 2684 _('[-ecks] [TOPIC]'),
2684 2685 norepo=True, cmdtype=readonly)
2685 2686 def help_(ui, name=None, **opts):
2686 2687 """show help for a given topic or a help overview
2687 2688
2688 2689 With no arguments, print a list of commands with short help messages.
2689 2690
2690 2691 Given a topic, extension, or command name, print help for that
2691 2692 topic.
2692 2693
2693 2694 Returns 0 if successful.
2694 2695 """
2695 2696
2696 2697 keep = opts.get(r'system') or []
2697 2698 if len(keep) == 0:
2698 2699 if pycompat.sysplatform.startswith('win'):
2699 2700 keep.append('windows')
2700 2701 elif pycompat.sysplatform == 'OpenVMS':
2701 2702 keep.append('vms')
2702 2703 elif pycompat.sysplatform == 'plan9':
2703 2704 keep.append('plan9')
2704 2705 else:
2705 2706 keep.append('unix')
2706 2707 keep.append(pycompat.sysplatform.lower())
2707 2708 if ui.verbose:
2708 2709 keep.append('verbose')
2709 2710
2710 2711 commands = sys.modules[__name__]
2711 2712 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
2712 2713 ui.pager('help')
2713 2714 ui.write(formatted)
2714 2715
2715 2716
2716 2717 @command('identify|id',
2717 2718 [('r', 'rev', '',
2718 2719 _('identify the specified revision'), _('REV')),
2719 2720 ('n', 'num', None, _('show local revision number')),
2720 2721 ('i', 'id', None, _('show global revision id')),
2721 2722 ('b', 'branch', None, _('show branch')),
2722 2723 ('t', 'tags', None, _('show tags')),
2723 2724 ('B', 'bookmarks', None, _('show bookmarks')),
2724 2725 ] + remoteopts + formatteropts,
2725 2726 _('[-nibtB] [-r REV] [SOURCE]'),
2726 2727 optionalrepo=True, cmdtype=readonly)
2727 2728 def identify(ui, repo, source=None, rev=None,
2728 2729 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
2729 2730 """identify the working directory or specified revision
2730 2731
2731 2732 Print a summary identifying the repository state at REV using one or
2732 2733 two parent hash identifiers, followed by a "+" if the working
2733 2734 directory has uncommitted changes, the branch name (if not default),
2734 2735 a list of tags, and a list of bookmarks.
2735 2736
2736 2737 When REV is not given, print a summary of the current state of the
2737 2738 repository, including the working directory. Specify -r. to get information
2738 2739 about the working directory parent without scanning uncommitted changes.
2739 2740
2740 2741 Specifying a path to a repository root or Mercurial bundle will
2741 2742 cause lookup to operate on that repository/bundle.
2742 2743
2743 2744 .. container:: verbose
2744 2745
2745 2746 Examples:
2746 2747
2747 2748 - generate a build identifier for the working directory::
2748 2749
2749 2750 hg id --id > build-id.dat
2750 2751
2751 2752 - find the revision corresponding to a tag::
2752 2753
2753 2754 hg id -n -r 1.3
2754 2755
2755 2756 - check the most recent revision of a remote repository::
2756 2757
2757 2758 hg id -r tip https://www.mercurial-scm.org/repo/hg/
2758 2759
2759 2760 See :hg:`log` for generating more information about specific revisions,
2760 2761 including full hash identifiers.
2761 2762
2762 2763 Returns 0 if successful.
2763 2764 """
2764 2765
2765 2766 opts = pycompat.byteskwargs(opts)
2766 2767 if not repo and not source:
2767 2768 raise error.Abort(_("there is no Mercurial repository here "
2768 2769 "(.hg not found)"))
2769 2770
2770 2771 if ui.debugflag:
2771 2772 hexfunc = hex
2772 2773 else:
2773 2774 hexfunc = short
2774 2775 default = not (num or id or branch or tags or bookmarks)
2775 2776 output = []
2776 2777 revs = []
2777 2778
2778 2779 if source:
2779 2780 source, branches = hg.parseurl(ui.expandpath(source))
2780 2781 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
2781 2782 repo = peer.local()
2782 2783 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
2783 2784
2784 2785 fm = ui.formatter('identify', opts)
2785 2786 fm.startitem()
2786 2787
2787 2788 if not repo:
2788 2789 if num or branch or tags:
2789 2790 raise error.Abort(
2790 2791 _("can't query remote revision number, branch, or tags"))
2791 2792 if not rev and revs:
2792 2793 rev = revs[0]
2793 2794 if not rev:
2794 2795 rev = "tip"
2795 2796
2796 2797 remoterev = peer.lookup(rev)
2797 2798 hexrev = hexfunc(remoterev)
2798 2799 if default or id:
2799 2800 output = [hexrev]
2800 2801 fm.data(id=hexrev)
2801 2802
2802 2803 def getbms():
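# Comment added for clarity: list the remote bookmarks that point at the
# resolved remote revision, if the peer exposes the bookmarks namespace.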
2803 2804 bms = []
2804 2805
2805 2806 if 'bookmarks' in peer.listkeys('namespaces'):
2806 2807 hexremoterev = hex(remoterev)
2807 2808 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
2808 2809 if bmr == hexremoterev]
2809 2810
2810 2811 return sorted(bms)
2811 2812
2812 2813 bms = getbms()
2813 2814 if bookmarks:
2814 2815 output.extend(bms)
2815 2816 elif default and not ui.quiet:
2816 2817 # multiple bookmarks for a single parent separated by '/'
2817 2818 bm = '/'.join(bms)
2818 2819 if bm:
2819 2820 output.append(bm)
2820 2821
2821 2822 fm.data(node=hex(remoterev))
2822 2823 fm.data(bookmarks=fm.formatlist(bms, name='bookmark'))
2823 2824 else:
2824 2825 if rev:
2825 2826 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
2826 2827 ctx = scmutil.revsingle(repo, rev, None)
2827 2828
2828 2829 if ctx.rev() is None:
2829 2830 ctx = repo[None]
2830 2831 parents = ctx.parents()
2831 2832 taglist = []
2832 2833 for p in parents:
2833 2834 taglist.extend(p.tags())
2834 2835
2835 2836 dirty = ""
2836 2837 if ctx.dirty(missing=True, merge=False, branch=False):
2837 2838 dirty = '+'
2838 2839 fm.data(dirty=dirty)
2839 2840
2840 2841 hexoutput = [hexfunc(p.node()) for p in parents]
2841 2842 if default or id:
2842 2843 output = ["%s%s" % ('+'.join(hexoutput), dirty)]
2843 2844 fm.data(id="%s%s" % ('+'.join(hexoutput), dirty))
2844 2845
2845 2846 if num:
2846 2847 numoutput = ["%d" % p.rev() for p in parents]
2847 2848 output.append("%s%s" % ('+'.join(numoutput), dirty))
2848 2849
2849 2850 fn = fm.nested('parents')
2850 2851 for p in parents:
2851 2852 fn.startitem()
2852 2853 fn.data(rev=p.rev())
2853 2854 fn.data(node=p.hex())
2854 2855 fn.context(ctx=p)
2855 2856 fn.end()
2856 2857 else:
2857 2858 hexoutput = hexfunc(ctx.node())
2858 2859 if default or id:
2859 2860 output = [hexoutput]
2860 2861 fm.data(id=hexoutput)
2861 2862
2862 2863 if num:
2863 2864 output.append(pycompat.bytestr(ctx.rev()))
2864 2865 taglist = ctx.tags()
2865 2866
2866 2867 if default and not ui.quiet:
2867 2868 b = ctx.branch()
2868 2869 if b != 'default':
2869 2870 output.append("(%s)" % b)
2870 2871
2871 2872 # multiple tags for a single parent separated by '/'
2872 2873 t = '/'.join(taglist)
2873 2874 if t:
2874 2875 output.append(t)
2875 2876
2876 2877 # multiple bookmarks for a single parent separated by '/'
2877 2878 bm = '/'.join(ctx.bookmarks())
2878 2879 if bm:
2879 2880 output.append(bm)
2880 2881 else:
2881 2882 if branch:
2882 2883 output.append(ctx.branch())
2883 2884
2884 2885 if tags:
2885 2886 output.extend(taglist)
2886 2887
2887 2888 if bookmarks:
2888 2889 output.extend(ctx.bookmarks())
2889 2890
2890 2891 fm.data(node=ctx.hex())
2891 2892 fm.data(branch=ctx.branch())
2892 2893 fm.data(tags=fm.formatlist(taglist, name='tag', sep=':'))
2893 2894 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'))
2894 2895 fm.context(ctx=ctx)
2895 2896
2896 2897 fm.plain("%s\n" % ' '.join(output))
2897 2898 fm.end()
2898 2899
2899 2900 @command('import|patch',
2900 2901 [('p', 'strip', 1,
2901 2902 _('directory strip option for patch. This has the same '
2902 2903 'meaning as the corresponding patch option'), _('NUM')),
2903 2904 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
2904 2905 ('e', 'edit', False, _('invoke editor on commit messages')),
2905 2906 ('f', 'force', None,
2906 2907 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
2907 2908 ('', 'no-commit', None,
2908 2909 _("don't commit, just update the working directory")),
2909 2910 ('', 'bypass', None,
2910 2911 _("apply patch without touching the working directory")),
2911 2912 ('', 'partial', None,
2912 2913 _('commit even if some hunks fail')),
2913 2914 ('', 'exact', None,
2914 2915 _('abort if patch would apply lossily')),
2915 2916 ('', 'prefix', '',
2916 2917 _('apply patch to subdirectory'), _('DIR')),
2917 2918 ('', 'import-branch', None,
2918 2919 _('use any branch information in patch (implied by --exact)'))] +
2919 2920 commitopts + commitopts2 + similarityopts,
2920 2921 _('[OPTION]... PATCH...'))
2921 2922 def import_(ui, repo, patch1=None, *patches, **opts):
2922 2923 """import an ordered set of patches
2923 2924
2924 2925 Import a list of patches and commit them individually (unless
2925 2926 --no-commit is specified).
2926 2927
2927 2928 To read a patch from standard input (stdin), use "-" as the patch
2928 2929 name. If a URL is specified, the patch will be downloaded from
2929 2930 there.
2930 2931
2931 2932 Import first applies changes to the working directory (unless
2932 2933 --bypass is specified); import will abort if there are outstanding
2933 2934 changes.
2934 2935
2935 2936 Use --bypass to apply and commit patches directly to the
2936 2937 repository, without affecting the working directory. Without
2937 2938 --exact, patches will be applied on top of the working directory
2938 2939 parent revision.
2939 2940
2940 2941 You can import a patch straight from a mail message. Even patches
2941 2942 as attachments work (to use the body part, it must have type
2942 2943 text/plain or text/x-patch). The From and Subject headers of the email
2943 2944 message are used as the default committer and commit message. All
2944 2945 text/plain body parts before the first diff are added to the commit
2945 2946 message.
2946 2947
2947 2948 If the imported patch was generated by :hg:`export`, user and
2948 2949 description from patch override values from message headers and
2949 2950 body. Values given on command line with -m/--message and -u/--user
2950 2951 override these.
2951 2952
2952 2953 If --exact is specified, import will set the working directory to
2953 2954 the parent of each patch before applying it, and will abort if the
2954 2955 resulting changeset has a different ID than the one recorded in
2955 2956 the patch. This will guard against various ways that portable
2956 2957 patch formats and mail systems might fail to transfer Mercurial
2957 2958 data or metadata. See :hg:`bundle` for lossless transmission.
2958 2959
2959 2960 Use --partial to ensure a changeset will be created from the patch
2960 2961 even if some hunks fail to apply. Hunks that fail to apply will be
2961 2962 written to a <target-file>.rej file. Conflicts can then be resolved
2962 2963 by hand before :hg:`commit --amend` is run to update the created
2963 2964 changeset. This flag exists to let people import patches that
2964 2965 partially apply without losing the associated metadata (author,
2965 2966 date, description, ...).
2966 2967
2967 2968 .. note::
2968 2969
2969 2970 When no hunks apply cleanly, :hg:`import --partial` will create
2970 2971 an empty changeset, importing only the patch metadata.
2971 2972
2972 2973 With -s/--similarity, hg will attempt to discover renames and
2973 2974 copies in the patch in the same way as :hg:`addremove`.
2974 2975
2975 2976 It is possible to use external patch programs to perform the patch
2976 2977 by setting the ``ui.patch`` configuration option. For the default
2977 2978 internal tool, the fuzz can also be configured via ``patch.fuzz``.
2978 2979 See :hg:`help config` for more information about configuration
2979 2980 files and how to use these options.
2980 2981
2981 2982 See :hg:`help dates` for a list of formats valid for -d/--date.
2982 2983
2983 2984 .. container:: verbose
2984 2985
2985 2986 Examples:
2986 2987
2987 2988 - import a traditional patch from a website and detect renames::
2988 2989
2989 2990 hg import -s 80 http://example.com/bugfix.patch
2990 2991
2991 2992 - import a changeset from an hgweb server::
2992 2993
2993 2994 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
2994 2995
2995 2996 - import all the patches in a Unix-style mbox::
2996 2997
2997 2998 hg import incoming-patches.mbox
2998 2999
2999 3000 - import patches from stdin::
3000 3001
3001 3002 hg import -
3002 3003
3003 3004 - attempt to exactly restore an exported changeset (not always
3004 3005 possible)::
3005 3006
3006 3007 hg import --exact proposed-fix.patch
3007 3008
3008 3009 - use an external tool to apply a patch which is too fuzzy for
3009 3010 the default internal tool::
3010 3011
3011 3012 hg import --config ui.patch="patch --merge" fuzzy.patch
3012 3013
3013 3014 - change the default fuzz from 2 to a less strict 7::
3014 3015
3015 3016 hg import --config patch.fuzz=7 fuzz.patch
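
- create a changeset even when some hunks are rejected, then repair the
rejects by hand (a sketch of the --partial workflow described above;
the patch name is illustrative)::

hg import --partial partial-fix.patch
(fix up any *.rej files by hand)
hg commit --amend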
3016 3017
3017 3018 Returns 0 on success, 1 on partial success (see --partial).
3018 3019 """
3019 3020
3020 3021 opts = pycompat.byteskwargs(opts)
3021 3022 if not patch1:
3022 3023 raise error.Abort(_('need at least one patch to import'))
3023 3024
3024 3025 patches = (patch1,) + patches
3025 3026
3026 3027 date = opts.get('date')
3027 3028 if date:
3028 3029 opts['date'] = dateutil.parsedate(date)
3029 3030
3030 3031 exact = opts.get('exact')
3031 3032 update = not opts.get('bypass')
3032 3033 if not update and opts.get('no_commit'):
3033 3034 raise error.Abort(_('cannot use --no-commit with --bypass'))
3034 3035 try:
3035 3036 sim = float(opts.get('similarity') or 0)
3036 3037 except ValueError:
3037 3038 raise error.Abort(_('similarity must be a number'))
3038 3039 if sim < 0 or sim > 100:
3039 3040 raise error.Abort(_('similarity must be between 0 and 100'))
3040 3041 if sim and not update:
3041 3042 raise error.Abort(_('cannot use --similarity with --bypass'))
3042 3043 if exact:
3043 3044 if opts.get('edit'):
3044 3045 raise error.Abort(_('cannot use --exact with --edit'))
3045 3046 if opts.get('prefix'):
3046 3047 raise error.Abort(_('cannot use --exact with --prefix'))
3047 3048
3048 3049 base = opts["base"]
3049 3050 wlock = dsguard = lock = tr = None
3050 3051 msgs = []
3051 3052 ret = 0
3052 3053
3053 3054
3054 3055 try:
3055 3056 wlock = repo.wlock()
3056 3057
3057 3058 if update:
3058 3059 cmdutil.checkunfinished(repo)
3059 3060 if (exact or not opts.get('force')):
3060 3061 cmdutil.bailifchanged(repo)
3061 3062
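# Without --no-commit, changesets are created inside a single
# 'import' transaction; with --no-commit only the dirstate is
# touched, so a dirstateguard is enough to roll back on failure.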
3062 3063 if not opts.get('no_commit'):
3063 3064 lock = repo.lock()
3064 3065 tr = repo.transaction('import')
3065 3066 else:
3066 3067 dsguard = dirstateguard.dirstateguard(repo, 'import')
3067 3068 parents = repo[None].parents()
3068 3069 for patchurl in patches:
3069 3070 if patchurl == '-':
3070 3071 ui.status(_('applying patch from stdin\n'))
3071 3072 patchfile = ui.fin
3072 3073 patchurl = 'stdin' # for error message
3073 3074 else:
3074 3075 patchurl = os.path.join(base, patchurl)
3075 3076 ui.status(_('applying %s\n') % patchurl)
3076 3077 patchfile = hg.openpath(ui, patchurl)
3077 3078
3078 3079 haspatch = False
3079 3080 for hunk in patch.split(patchfile):
3080 3081 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
3081 3082 parents, opts,
3082 3083 msgs, hg.clean)
3083 3084 if msg:
3084 3085 haspatch = True
3085 3086 ui.note(msg + '\n')
3086 3087 if update or exact:
3087 3088 parents = repo[None].parents()
3088 3089 else:
3089 3090 parents = [repo[node]]
3090 3091 if rej:
3091 3092 ui.write_err(_("patch applied partially\n"))
3092 3093 ui.write_err(_("(fix the .rej files and run "
3093 3094 "`hg commit --amend`)\n"))
3094 3095 ret = 1
3095 3096 break
3096 3097
3097 3098 if not haspatch:
3098 3099 raise error.Abort(_('%s: no diffs found') % patchurl)
3099 3100
3100 3101 if tr:
3101 3102 tr.close()
3102 3103 if msgs:
3103 3104 repo.savecommitmessage('\n* * *\n'.join(msgs))
3104 3105 if dsguard:
3105 3106 dsguard.close()
3106 3107 return ret
3107 3108 finally:
3108 3109 if tr:
3109 3110 tr.release()
3110 3111 release(lock, dsguard, wlock)
3111 3112
3112 3113 @command('incoming|in',
3113 3114 [('f', 'force', None,
3114 3115 _('run even if remote repository is unrelated')),
3115 3116 ('n', 'newest-first', None, _('show newest record first')),
3116 3117 ('', 'bundle', '',
3117 3118 _('file to store the bundles into'), _('FILE')),
3118 3119 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3119 3120 ('B', 'bookmarks', False, _("compare bookmarks")),
3120 3121 ('b', 'branch', [],
3121 3122 _('a specific branch you would like to pull'), _('BRANCH')),
3122 3123 ] + logopts + remoteopts + subrepoopts,
3123 3124 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3124 3125 def incoming(ui, repo, source="default", **opts):
3125 3126 """show new changesets found in source
3126 3127
3127 3128 Show new changesets found in the specified path/URL or the default
3128 3129 pull location. These are the changesets that would have been pulled
3129 3130 by :hg:`pull` at the time you issued this command.
3130 3131
3131 3132 See pull for valid source format details.
3132 3133
3133 3134 .. container:: verbose
3134 3135
3135 3136 With -B/--bookmarks, the result of bookmark comparison between
3136 3137 local and remote repositories is displayed. With -v/--verbose,
3137 3138 status is also displayed for each bookmark like below::
3138 3139
3139 3140 BM1 01234567890a added
3140 3141 BM2 1234567890ab advanced
3141 3142 BM3 234567890abc diverged
3142 3143 BM4 34567890abcd changed
3143 3144
3144 3145 The action taken locally when pulling depends on the
3145 3146 status of each bookmark:
3146 3147
3147 3148 :``added``: pull will create it
3148 3149 :``advanced``: pull will update it
3149 3150 :``diverged``: pull will create a divergent bookmark
3150 3151 :``changed``: result depends on remote changesets
3151 3152
3152 3153 From the point of view of pulling behavior, bookmarks
3153 3154 existing only in the remote repository are treated as ``added``,
3154 3155 even if they are in fact locally deleted.
3155 3156
3156 3157 .. container:: verbose
3157 3158
3158 3159 For a remote repository, using --bundle avoids downloading the
3159 3160 changesets twice if the incoming is followed by a pull.
3160 3161
3161 3162 Examples:
3162 3163
3163 3164 - show incoming changes with patches and full description::
3164 3165
3165 3166 hg incoming -vp
3166 3167
3167 3168 - show incoming changes excluding merges, store a bundle::
3168 3169
3169 3170 hg in -vpM --bundle incoming.hg
3170 3171 hg pull incoming.hg
3171 3172
3172 3173 - briefly list changes inside a bundle::
3173 3174
3174 3175 hg in changes.hg -T "{desc|firstline}\\n"
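
- compare bookmarks with the default pull source, as described in the
bookmark status table above::

hg incoming -B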
3175 3176
3176 3177 Returns 0 if there are incoming changes, 1 otherwise.
3177 3178 """
3178 3179 opts = pycompat.byteskwargs(opts)
3179 3180 if opts.get('graph'):
3180 3181 logcmdutil.checkunsupportedgraphflags([], opts)
3181 3182 def display(other, chlist, displayer):
3182 3183 revdag = logcmdutil.graphrevs(other, chlist, opts)
3183 3184 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3184 3185 graphmod.asciiedges)
3185 3186
3186 3187 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
3187 3188 return 0
3188 3189
3189 3190 if opts.get('bundle') and opts.get('subrepos'):
3190 3191 raise error.Abort(_('cannot combine --bundle and --subrepos'))
3191 3192
3192 3193 if opts.get('bookmarks'):
3193 3194 source, branches = hg.parseurl(ui.expandpath(source),
3194 3195 opts.get('branch'))
3195 3196 other = hg.peer(repo, opts, source)
3196 3197 if 'bookmarks' not in other.listkeys('namespaces'):
3197 3198 ui.warn(_("remote doesn't support bookmarks\n"))
3198 3199 return 0
3199 3200 ui.pager('incoming')
3200 3201 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3201 3202 return bookmarks.incoming(ui, repo, other)
3202 3203
3203 3204 repo._subtoppath = ui.expandpath(source)
3204 3205 try:
3205 3206 return hg.incoming(ui, repo, source, opts)
3206 3207 finally:
3207 3208 del repo._subtoppath
3208 3209
3209 3210
3210 3211 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
3211 3212 norepo=True)
3212 3213 def init(ui, dest=".", **opts):
3213 3214 """create a new repository in the given directory
3214 3215
3215 3216 Initialize a new repository in the given directory. If the given
3216 3217 directory does not exist, it will be created.
3217 3218
3218 3219 If no directory is given, the current directory is used.
3219 3220
3220 3221 It is possible to specify an ``ssh://`` URL as the destination.
3221 3222 See :hg:`help urls` for more information.
3222 3223
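For example (the directory name and URL are illustrative)::

hg init my-project
hg init ssh://user@example.com/my-project
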
3223 3224 Returns 0 on success.
3224 3225 """
3225 3226 opts = pycompat.byteskwargs(opts)
3226 3227 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3227 3228
3228 3229 @command('locate',
3229 3230 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3230 3231 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3231 3232 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3232 3233 ] + walkopts,
3233 3234 _('[OPTION]... [PATTERN]...'))
3234 3235 def locate(ui, repo, *pats, **opts):
3235 3236 """locate files matching specific patterns (DEPRECATED)
3236 3237
3237 3238 Print files under Mercurial control in the working directory whose
3238 3239 names match the given patterns.
3239 3240
3240 3241 By default, this command searches all directories in the working
3241 3242 directory. To search just the current directory and its
3242 3243 subdirectories, use "--include .".
3243 3244
3244 3245 If no patterns are given to match, this command prints the names
3245 3246 of all files under Mercurial control in the working directory.
3246 3247
3247 3248 If you want to feed the output of this command into the "xargs"
3248 3249 command, use the -0 option to both this command and "xargs". This
3249 3250 will avoid the problem of "xargs" treating single filenames that
3250 3251 contain whitespace as multiple filenames.
3251 3252
3252 3253 See :hg:`help files` for a more versatile command.
3253 3254
3254 3255 Returns 0 if a match is found, 1 otherwise.
3255 3256 """
3256 3257 opts = pycompat.byteskwargs(opts)
3257 3258 if opts.get('print0'):
3258 3259 end = '\0'
3259 3260 else:
3260 3261 end = '\n'
3261 3262 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3262 3263
3263 3264 ret = 1
3264 3265 m = scmutil.match(ctx, pats, opts, default='relglob',
3265 3266 badfn=lambda x, y: False)
3266 3267
3267 3268 ui.pager('locate')
3268 3269 for abs in ctx.matches(m):
3269 3270 if opts.get('fullpath'):
3270 3271 ui.write(repo.wjoin(abs), end)
3271 3272 else:
3272 3273 ui.write(((pats and m.rel(abs)) or abs), end)
3273 3274 ret = 0
3274 3275
3275 3276 return ret
3276 3277
3277 3278 @command('^log|history',
3278 3279 [('f', 'follow', None,
3279 3280 _('follow changeset history, or file history across copies and renames')),
3280 3281 ('', 'follow-first', None,
3281 3282 _('only follow the first parent of merge changesets (DEPRECATED)')),
3282 3283 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3283 3284 ('C', 'copies', None, _('show copied files')),
3284 3285 ('k', 'keyword', [],
3285 3286 _('do case-insensitive search for a given text'), _('TEXT')),
3286 3287 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
3287 3288 ('L', 'line-range', [],
3288 3289 _('follow line range of specified file (EXPERIMENTAL)'),
3289 3290 _('FILE,RANGE')),
3290 3291 ('', 'removed', None, _('include revisions where files were removed')),
3291 3292 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3292 3293 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3293 3294 ('', 'only-branch', [],
3294 3295 _('show only changesets within the given named branch (DEPRECATED)'),
3295 3296 _('BRANCH')),
3296 3297 ('b', 'branch', [],
3297 3298 _('show changesets within the given named branch'), _('BRANCH')),
3298 3299 ('P', 'prune', [],
3299 3300 _('do not display revision or any of its ancestors'), _('REV')),
3300 3301 ] + logopts + walkopts,
3301 3302 _('[OPTION]... [FILE]'),
3302 3303 inferrepo=True, cmdtype=readonly)
3303 3304 def log(ui, repo, *pats, **opts):
3304 3305 """show revision history of entire repository or files
3305 3306
3306 3307 Print the revision history of the specified files or the entire
3307 3308 project.
3308 3309
3309 3310 If no revision range is specified, the default is ``tip:0`` unless
3310 3311 --follow is set, in which case the working directory parent is
3311 3312 used as the starting revision.
3312 3313
3313 3314 File history is shown without following rename or copy history of
3314 3315 files. Use -f/--follow with a filename to follow history across
3315 3316 renames and copies. --follow without a filename will only show
3316 3317 ancestors of the starting revision.
3317 3318
3318 3319 By default this command prints revision number and changeset id,
3319 3320 tags, non-trivial parents, user, date and time, and a summary for
3320 3321 each commit. When the -v/--verbose switch is used, the list of
3321 3322 changed files and full commit message are shown.
3322 3323
3323 3324 With --graph the revisions are shown as an ASCII art DAG with the most
3324 3325 recent changeset at the top.
3325 3326 'o' is a changeset, '@' is a working directory parent, '_' closes a branch,
3326 3327 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
3327 3328 changeset from the lines below is a parent of the 'o' merge on the same
3328 3329 line.
3329 3330 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
3330 3331 of a '|' indicates one or more revisions in a path are omitted.
3331 3332
3332 3333 .. container:: verbose
3333 3334
3334 3335 Use -L/--line-range FILE,M:N options to follow the history of lines
3335 3336 from M to N in FILE. With -p/--patch only diff hunks affecting
3336 3337 specified line range will be shown. This option requires --follow;
3337 3338 it can be specified multiple times. Currently, this option is not
3338 3339 compatible with --graph. This option is experimental.
3339 3340
3340 3341 .. note::
3341 3342
3342 3343 :hg:`log --patch` may generate unexpected diff output for merge
3343 3344 changesets, as it will only compare the merge changeset against
3344 3345 its first parent. Also, only files different from BOTH parents
3345 3346 will appear in the files: field.
3346 3347
3347 3348 .. note::
3348 3349
3349 3350 For performance reasons, :hg:`log FILE` may omit duplicate changes
3350 3351 made on branches and will not show removals or mode changes. To
3351 3352 see all such changes, use the --removed switch.
3352 3353
3353 3354 .. container:: verbose
3354 3355
3355 3356 .. note::
3356 3357
3357 3358 The history resulting from -L/--line-range options depends on diff
3358 3359 options; for instance, if whitespace is ignored, changes that only
3359 3360 touch whitespace in the specified line range will not be listed.
3360 3361
3361 3362 .. container:: verbose
3362 3363
3363 3364 Some examples:
3364 3365
3365 3366 - changesets with full descriptions and file lists::
3366 3367
3367 3368 hg log -v
3368 3369
3369 3370 - changesets ancestral to the working directory::
3370 3371
3371 3372 hg log -f
3372 3373
3373 3374 - last 10 commits on the current branch::
3374 3375
3375 3376 hg log -l 10 -b .
3376 3377
3377 3378 - changesets showing all modifications of a file, including removals::
3378 3379
3379 3380 hg log --removed file.c
3380 3381
3381 3382 - all changesets that touch a directory, with diffs, excluding merges::
3382 3383
3383 3384 hg log -Mp lib/
3384 3385
3385 3386 - all revision numbers that match a keyword::
3386 3387
3387 3388 hg log -k bug --template "{rev}\\n"
3388 3389
3389 3390 - the full hash identifier of the working directory parent::
3390 3391
3391 3392 hg log -r . --template "{node}\\n"
3392 3393
3393 3394 - list available log templates::
3394 3395
3395 3396 hg log -T list
3396 3397
3397 3398 - check if a given changeset is included in a tagged release::
3398 3399
3399 3400 hg log -r "a21ccf and ancestor(1.9)"
3400 3401
3401 3402 - find all changesets by some user in a date range::
3402 3403
3403 3404 hg log -k alice -d "may 2008 to jul 2008"
3404 3405
3405 3406 - summary of all changesets after the last tag::
3406 3407
3407 3408 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3408 3409
3409 3410 - changesets touching lines 13 to 23 for file.c::
3410 3411
3411 3412 hg log -L file.c,13:23
3412 3413
3413 3414 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
3414 3415 main.c with patch::
3415 3416
3416 3417 hg log -L file.c,13:23 -L main.c,2:6 -p
3417 3418
3418 3419 See :hg:`help dates` for a list of formats valid for -d/--date.
3419 3420
3420 3421 See :hg:`help revisions` for more about specifying and ordering
3421 3422 revisions.
3422 3423
3423 3424 See :hg:`help templates` for more about pre-packaged styles and
3424 3425 specifying custom templates. The default template used by the log
3425 3426 command can be customized via the ``ui.logtemplate`` configuration
3426 3427 setting.
3427 3428
3428 3429 Returns 0 on success.
3429 3430
3430 3431 """
3431 3432 opts = pycompat.byteskwargs(opts)
3432 3433 linerange = opts.get('line_range')
3433 3434
3434 3435 if linerange and not opts.get('follow'):
3435 3436 raise error.Abort(_('--line-range requires --follow'))
3436 3437
3437 3438 if linerange and pats:
3438 3439 # TODO: take pats as patterns with no line-range filter
3439 3440 raise error.Abort(
3440 3441 _('FILE arguments are not compatible with --line-range option')
3441 3442 )
3442 3443
3443 3444 repo = scmutil.unhidehashlikerevs(repo, opts.get('rev'), 'nowarn')
3444 3445 revs, differ = logcmdutil.getrevs(repo, pats, opts)
3445 3446 if linerange:
3446 3447 # TODO: should follow file history from logcmdutil._initialrevs(),
3447 3448 # then filter the result by logcmdutil._makerevset() and --limit
3448 3449 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
3449 3450
3450 3451 getrenamed = None
3451 3452 if opts.get('copies'):
3452 3453 endrev = None
3453 3454 if opts.get('rev'):
3454 3455 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
3455 3456 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3456 3457
3457 3458 ui.pager('log')
3458 3459 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
3459 3460 buffered=True)
3460 3461 if opts.get('graph'):
3461 3462 displayfn = logcmdutil.displaygraphrevs
3462 3463 else:
3463 3464 displayfn = logcmdutil.displayrevs
3464 3465 displayfn(ui, repo, revs, displayer, getrenamed)
3465 3466
3466 3467 @command('manifest',
3467 3468 [('r', 'rev', '', _('revision to display'), _('REV')),
3468 3469 ('', 'all', False, _("list files from all revisions"))]
3469 3470 + formatteropts,
3470 3471 _('[-r REV]'), cmdtype=readonly)
3471 3472 def manifest(ui, repo, node=None, rev=None, **opts):
3472 3473 """output the current or given revision of the project manifest
3473 3474
3474 3475 Print a list of version controlled files for the given revision.
3475 3476 If no revision is given, the first parent of the working directory
3476 3477 is used, or the null revision if no revision is checked out.
3477 3478
3478 3479 With -v, print file permissions, symlink and executable bits.
3479 3480 With --debug, print file revision hashes.
3480 3481
3481 3482 If option --all is specified, the list of all files from all revisions
3482 3483 is printed. This includes deleted and renamed files.
3483 3484
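For example, to list the files in a tagged revision or every file ever
tracked (the tag name is illustrative)::

hg manifest -r 1.0
hg manifest --all
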
3484 3485 Returns 0 on success.
3485 3486 """
3486 3487 opts = pycompat.byteskwargs(opts)
3487 3488 fm = ui.formatter('manifest', opts)
3488 3489
3489 3490 if opts.get('all'):
3490 3491 if rev or node:
3491 3492 raise error.Abort(_("can't specify a revision with --all"))
3492 3493
3493 3494 res = []
3494 3495 prefix = "data/"
3495 3496 suffix = ".i"
3496 3497 plen = len(prefix)
3497 3498 slen = len(suffix)
3498 3499 with repo.lock():
3499 3500 for fn, b, size in repo.store.datafiles():
3500 3501 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3501 3502 res.append(fn[plen:-slen])
3502 3503 ui.pager('manifest')
3503 3504 for f in res:
3504 3505 fm.startitem()
3505 3506 fm.write("path", '%s\n', f)
3506 3507 fm.end()
3507 3508 return
3508 3509
3509 3510 if rev and node:
3510 3511 raise error.Abort(_("please specify just one revision"))
3511 3512
3512 3513 if not node:
3513 3514 node = rev
3514 3515
3515 3516 char = {'l': '@', 'x': '*', '': '', 't': 'd'}
3516 3517 mode = {'l': '644', 'x': '755', '': '644', 't': '755'}
3517 3518 if node:
3518 3519 repo = scmutil.unhidehashlikerevs(repo, [node], 'nowarn')
3519 3520 ctx = scmutil.revsingle(repo, node)
3520 3521 mf = ctx.manifest()
3521 3522 ui.pager('manifest')
3522 3523 for f in ctx:
3523 3524 fm.startitem()
3524 3525 fl = ctx[f].flags()
3525 3526 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
3526 3527 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
3527 3528 fm.write('path', '%s\n', f)
3528 3529 fm.end()
3529 3530
3530 3531 @command('^merge',
3531 3532 [('f', 'force', None,
3532 3533 _('force a merge including outstanding changes (DEPRECATED)')),
3533 3534 ('r', 'rev', '', _('revision to merge'), _('REV')),
3534 3535 ('P', 'preview', None,
3535 3536 _('review revisions to merge (no merge is performed)')),
3536 3537 ('', 'abort', None, _('abort the ongoing merge')),
3537 3538 ] + mergetoolopts,
3538 3539 _('[-P] [[-r] REV]'))
3539 3540 def merge(ui, repo, node=None, **opts):
3540 3541 """merge another revision into working directory
3541 3542
3542 3543 The current working directory is updated with all changes made in
3543 3544 the requested revision since the last common predecessor revision.
3544 3545
3545 3546 Files that changed between either parent are marked as changed for
3546 3547 the next commit and a commit must be performed before any further
3547 3548 updates to the repository are allowed. The next commit will have
3548 3549 two parents.
3549 3550
3550 3551 ``--tool`` can be used to specify the merge tool used for file
3551 3552 merges. It overrides the HGMERGE environment variable and your
3552 3553 configuration files. See :hg:`help merge-tools` for options.
3553 3554
3554 3555 If no revision is specified, the working directory's parent is a
3555 3556 head revision, and the current branch contains exactly one other
3556 3557 head, the other head is merged with by default. Otherwise, an
3557 3558 explicit revision with which to merge must be provided.
3558 3559
3559 3560 See :hg:`help resolve` for information on handling file conflicts.
3560 3561
3561 3562 To undo an uncommitted merge, use :hg:`merge --abort` which
3562 3563 will check out a clean copy of the original merge parent, losing
3563 3564 all changes.
3564 3565
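For example, to review the changesets that would be merged before
actually merging::

hg merge --preview
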
3565 3566 Returns 0 on success, 1 if there are unresolved files.
3566 3567 """
3567 3568
3568 3569 opts = pycompat.byteskwargs(opts)
3569 3570 abort = opts.get('abort')
3570 3571 if abort and repo.dirstate.p2() == nullid:
3571 3572 cmdutil.wrongtooltocontinue(repo, _('merge'))
3572 3573 if abort:
3573 3574 if node:
3574 3575 raise error.Abort(_("cannot specify a node with --abort"))
3575 3576 if opts.get('rev'):
3576 3577 raise error.Abort(_("cannot specify both --rev and --abort"))
3577 3578 if opts.get('preview'):
3578 3579 raise error.Abort(_("cannot specify --preview with --abort"))
3579 3580 if opts.get('rev') and node:
3580 3581 raise error.Abort(_("please specify just one revision"))
3581 3582 if not node:
3582 3583 node = opts.get('rev')
3583 3584
3584 3585 if node:
3585 3586 node = scmutil.revsingle(repo, node).node()
3586 3587
3587 3588 if not node and not abort:
3588 3589 node = repo[destutil.destmerge(repo)].node()
3589 3590
3590 3591 if opts.get('preview'):
3591 3592 # find nodes that are ancestors of p2 but not of p1
3592 3593 p1 = repo.lookup('.')
3593 3594 p2 = repo.lookup(node)
3594 3595 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
3595 3596
3596 3597 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3597 3598 for node in nodes:
3598 3599 displayer.show(repo[node])
3599 3600 displayer.close()
3600 3601 return 0
3601 3602
3602 3603 try:
3603 3604 # ui.forcemerge is an internal variable, do not document
3604 3605 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
3605 3606 force = opts.get('force')
3606 3607 labels = ['working copy', 'merge rev']
3607 3608 return hg.merge(repo, node, force=force, mergeforce=force,
3608 3609 labels=labels, abort=abort)
3609 3610 finally:
3610 3611 ui.setconfig('ui', 'forcemerge', '', 'merge')
3611 3612
3612 3613 @command('outgoing|out',
3613 3614 [('f', 'force', None, _('run even when the destination is unrelated')),
3614 3615 ('r', 'rev', [],
3615 3616 _('a changeset intended to be included in the destination'), _('REV')),
3616 3617 ('n', 'newest-first', None, _('show newest record first')),
3617 3618 ('B', 'bookmarks', False, _('compare bookmarks')),
3618 3619 ('b', 'branch', [], _('a specific branch you would like to push'),
3619 3620 _('BRANCH')),
3620 3621 ] + logopts + remoteopts + subrepoopts,
3621 3622 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
3622 3623 def outgoing(ui, repo, dest=None, **opts):
3623 3624 """show changesets not found in the destination
3624 3625
3625 3626 Show changesets not found in the specified destination repository
3626 3627 or the default push location. These are the changesets that would
3627 3628 be pushed if a push was requested.
3628 3629
3629 3630 See pull for details of valid destination formats.
3630 3631
3631 3632 .. container:: verbose
3632 3633
3633 3634 With -B/--bookmarks, the result of bookmark comparison between
3634 3635 local and remote repositories is displayed. With -v/--verbose,
3635 3636 status is also displayed for each bookmark like below::
3636 3637
3637 3638 BM1 01234567890a added
3638 3639 BM2 deleted
3639 3640 BM3 234567890abc advanced
3640 3641 BM4 34567890abcd diverged
3641 3642 BM5 4567890abcde changed
3642 3643
3643 3644 The action taken when pushing depends on the
3644 3645 status of each bookmark:
3645 3646
3646 3647 :``added``: push with ``-B`` will create it
3647 3648 :``deleted``: push with ``-B`` will delete it
3648 3649 :``advanced``: push will update it
3649 3650 :``diverged``: push with ``-B`` will update it
3650 3651 :``changed``: push with ``-B`` will update it
3651 3652
3652 3653 From the point of view of pushing behavior, bookmarks
3653 3654 existing only in the remote repository are treated as
3654 3655 ``deleted``, even if they are in fact added remotely.
3655 3656
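For example, to preview which bookmarks ``hg push -B`` would affect at
the default destination::

hg outgoing -B
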
3656 3657 Returns 0 if there are outgoing changes, 1 otherwise.
3657 3658 """
3658 3659 opts = pycompat.byteskwargs(opts)
3659 3660 if opts.get('graph'):
3660 3661 logcmdutil.checkunsupportedgraphflags([], opts)
3661 3662 o, other = hg._outgoing(ui, repo, dest, opts)
3662 3663 if not o:
3663 3664 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3664 3665 return
3665 3666
3666 3667 revdag = logcmdutil.graphrevs(repo, o, opts)
3667 3668 ui.pager('outgoing')
3668 3669 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
3669 3670 logcmdutil.displaygraph(ui, repo, revdag, displayer,
3670 3671 graphmod.asciiedges)
3671 3672 cmdutil.outgoinghooks(ui, repo, other, opts, o)
3672 3673 return 0
3673 3674
3674 3675 if opts.get('bookmarks'):
3675 3676 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3676 3677 dest, branches = hg.parseurl(dest, opts.get('branch'))
3677 3678 other = hg.peer(repo, opts, dest)
3678 3679 if 'bookmarks' not in other.listkeys('namespaces'):
3679 3680 ui.warn(_("remote doesn't support bookmarks\n"))
3680 3681 return 0
3681 3682 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
3682 3683 ui.pager('outgoing')
3683 3684 return bookmarks.outgoing(ui, repo, other)
3684 3685
3685 3686 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
3686 3687 try:
3687 3688 return hg.outgoing(ui, repo, dest, opts)
3688 3689 finally:
3689 3690 del repo._subtoppath
3690 3691
3691 3692 @command('parents',
3692 3693 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
3693 3694 ] + templateopts,
3694 3695 _('[-r REV] [FILE]'),
3695 3696 inferrepo=True)
3696 3697 def parents(ui, repo, file_=None, **opts):
3697 3698 """show the parents of the working directory or revision (DEPRECATED)
3698 3699
3699 3700 Print the working directory's parent revisions. If a revision is
3700 3701 given via -r/--rev, the parent of that revision will be printed.
3701 3702 If a file argument is given, the revision in which the file was
3702 3703 last changed (before the working directory revision or the
3703 3704 argument to --rev if given) is printed.
3704 3705
3705 3706 This command is equivalent to::
3706 3707
3707 3708 hg log -r "p1()+p2()" or
3708 3709 hg log -r "p1(REV)+p2(REV)" or
3709 3710 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
3710 3711 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
3711 3712
3712 3713 See :hg:`summary` and :hg:`help revsets` for related information.
3713 3714
3714 3715 Returns 0 on success.
3715 3716 """
3716 3717
3717 3718 opts = pycompat.byteskwargs(opts)
3718 3719 rev = opts.get('rev')
3719 3720 if rev:
3720 3721 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
3721 3722 ctx = scmutil.revsingle(repo, rev, None)
3722 3723
3723 3724 if file_:
3724 3725 m = scmutil.match(ctx, (file_,), opts)
3725 3726 if m.anypats() or len(m.files()) != 1:
3726 3727 raise error.Abort(_('can only specify an explicit filename'))
3727 3728 file_ = m.files()[0]
3728 3729 filenodes = []
3729 3730 for cp in ctx.parents():
3730 3731 if not cp:
3731 3732 continue
3732 3733 try:
3733 3734 filenodes.append(cp.filenode(file_))
3734 3735 except error.LookupError:
3735 3736 pass
3736 3737 if not filenodes:
3737 3738 raise error.Abort(_("'%s' not found in manifest!") % file_)
3738 3739 p = []
3739 3740 for fn in filenodes:
3740 3741 fctx = repo.filectx(file_, fileid=fn)
3741 3742 p.append(fctx.node())
3742 3743 else:
3743 3744 p = [cp.node() for cp in ctx.parents()]
3744 3745
3745 3746 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3746 3747 for n in p:
3747 3748 if n != nullid:
3748 3749 displayer.show(repo[n])
3749 3750 displayer.close()
3750 3751
3751 3752 @command('paths', formatteropts, _('[NAME]'), optionalrepo=True,
3752 3753 cmdtype=readonly)
3753 3754 def paths(ui, repo, search=None, **opts):
3754 3755 """show aliases for remote repositories
3755 3756
3756 3757 Show definition of symbolic path name NAME. If no name is given,
3757 3758 show definition of all available names.
3758 3759
3759 3760 Option -q/--quiet suppresses all output when searching for NAME
3760 3761 and shows only the path names when listing all definitions.
3761 3762
3762 3763 Path names are defined in the [paths] section of your
3763 3764 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
3764 3765 repository, ``.hg/hgrc`` is used, too.
3765 3766
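For example, a ``[paths]`` section such as the following (names and
URLs are illustrative) defines two symbolic paths::

[paths]
default = https://hg.example.com/main
stable = https://hg.example.com/stable
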
3766 3767 The path names ``default`` and ``default-push`` have a special
3767 3768 meaning. When performing a push or pull operation, they are used
3768 3769 as fallbacks if no location is specified on the command-line.
3769 3770 When ``default-push`` is set, it will be used for push and
3770 3771 ``default`` will be used for pull; otherwise ``default`` is used
3771 3772 as the fallback for both. When cloning a repository, the clone
3772 3773 source is written as ``default`` in ``.hg/hgrc``.
3773 3774
3774 3775 .. note::
3775 3776
3776 3777 ``default`` and ``default-push`` apply to all inbound (e.g.
3777 3778 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
3778 3779 and :hg:`bundle`) operations.
3779 3780
3780 3781 See :hg:`help urls` for more information.
3781 3782
3782 3783 Returns 0 on success.
3783 3784 """
3784 3785
3785 3786 opts = pycompat.byteskwargs(opts)
3786 3787 ui.pager('paths')
3787 3788 if search:
3788 3789 pathitems = [(name, path) for name, path in ui.paths.iteritems()
3789 3790 if name == search]
3790 3791 else:
3791 3792 pathitems = sorted(ui.paths.iteritems())
3792 3793
3793 3794 fm = ui.formatter('paths', opts)
3794 3795 if fm.isplain():
3795 3796 hidepassword = util.hidepassword
3796 3797 else:
3797 3798 hidepassword = bytes
3798 3799 if ui.quiet:
3799 3800 namefmt = '%s\n'
3800 3801 else:
3801 3802 namefmt = '%s = '
3802 3803 showsubopts = not search and not ui.quiet
3803 3804
3804 3805 for name, path in pathitems:
3805 3806 fm.startitem()
3806 3807 fm.condwrite(not search, 'name', namefmt, name)
3807 3808 fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
3808 3809 for subopt, value in sorted(path.suboptions.items()):
3809 3810 assert subopt not in ('name', 'url')
3810 3811 if showsubopts:
3811 3812 fm.plain('%s:%s = ' % (name, subopt))
3812 3813 fm.condwrite(showsubopts, subopt, '%s\n', value)
3813 3814
3814 3815 fm.end()
3815 3816
3816 3817 if search and not pathitems:
3817 3818 if not ui.quiet:
3818 3819 ui.warn(_("not found!\n"))
3819 3820 return 1
3820 3821 else:
3821 3822 return 0
3822 3823
3823 3824 @command('phase',
3824 3825 [('p', 'public', False, _('set changeset phase to public')),
3825 3826 ('d', 'draft', False, _('set changeset phase to draft')),
3826 3827 ('s', 'secret', False, _('set changeset phase to secret')),
3827 3828 ('f', 'force', False, _('allow to move boundary backward')),
3828 3829 ('r', 'rev', [], _('target revision'), _('REV')),
3829 3830 ],
3830 3831 _('[-p|-d|-s] [-f] [-r] [REV...]'))
3831 3832 def phase(ui, repo, *revs, **opts):
3832 3833 """set or show the current phase name
3833 3834
3834 3835 With no argument, show the phase name of the current revision(s).
3835 3836
3836 3837 With one of -p/--public, -d/--draft or -s/--secret, change the
3837 3838 phase value of the specified revisions.
3838 3839
3839 3840 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
3840 3841 lower phase to a higher phase. Phases are ordered as follows::
3841 3842
3842 3843 public < draft < secret
3843 3844
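For example, to display the phase of the working directory parent, or
to force a public changeset back to draft (the revision number is
illustrative)::

hg phase -r .
hg phase --force --draft -r 1234
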
3844 3845 Returns 0 on success, 1 if some phases could not be changed.
3845 3846
3846 3847 (For more information about the phases concept, see :hg:`help phases`.)
3847 3848 """
3848 3849 opts = pycompat.byteskwargs(opts)
3849 3850 # search for a unique phase argument
3850 3851 targetphase = None
3851 3852 for idx, name in enumerate(phases.phasenames):
3852 3853 if opts[name]:
3853 3854 if targetphase is not None:
3854 3855 raise error.Abort(_('only one phase can be specified'))
3855 3856 targetphase = idx
3856 3857
3857 3858 # look for specified revision
3858 3859 revs = list(revs)
3859 3860 revs.extend(opts['rev'])
3860 3861 if not revs:
3861 3862 # display both parents as the second parent phase can influence
3862 3863 # the phase of a merge commit
3863 3864 revs = [c.rev() for c in repo[None].parents()]
3864 3865
3865 3866 revs = scmutil.revrange(repo, revs)
3866 3867
3867 3868 ret = 0
3868 3869 if targetphase is None:
3869 3870 # display
3870 3871 for r in revs:
3871 3872 ctx = repo[r]
3872 3873 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
3873 3874 else:
3874 3875 with repo.lock(), repo.transaction("phase") as tr:
3875 3876 # set phase
3876 3877 if not revs:
3877 3878 raise error.Abort(_('empty revision set'))
3878 3879 nodes = [repo[r].node() for r in revs]
3879 3880 # moving revision from public to draft may hide them
3880 3881 # We have to check result on an unfiltered repository
3881 3882 unfi = repo.unfiltered()
3882 3883 getphase = unfi._phasecache.phase
3883 3884 olddata = [getphase(unfi, r) for r in unfi]
3884 3885 phases.advanceboundary(repo, tr, targetphase, nodes)
3885 3886 if opts['force']:
3886 3887 phases.retractboundary(repo, tr, targetphase, nodes)
3887 3888 getphase = unfi._phasecache.phase
3888 3889 newdata = [getphase(unfi, r) for r in unfi]
3889 3890 changes = sum(newdata[r] != olddata[r] for r in unfi)
3890 3891 cl = unfi.changelog
3891 3892 rejected = [n for n in nodes
3892 3893 if newdata[cl.rev(n)] < targetphase]
3893 3894 if rejected:
3894 3895 ui.warn(_('cannot move %i changesets to a higher '
3895 3896 'phase, use --force\n') % len(rejected))
3896 3897 ret = 1
3897 3898 if changes:
3898 3899 msg = _('phase changed for %i changesets\n') % changes
3899 3900 if ret:
3900 3901 ui.status(msg)
3901 3902 else:
3902 3903 ui.note(msg)
3903 3904 else:
3904 3905 ui.warn(_('no phases changed\n'))
3905 3906 return ret
3906 3907
3907 3908 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
3908 3909 """Run after a changegroup has been added via pull/unbundle
3909 3910
3910 3911 It takes the following arguments:
3911 3912
3912 3913 :modheads: change of heads by pull/unbundle
3913 3914 :optupdate: whether updating the working directory is needed
3914 3915 :checkout: update destination revision (or None to default destination)
3915 3916 :brev: a name, which might be a bookmark to be activated after updating
3916 3917 """
3917 3918 if modheads == 0:
3918 3919 return
3919 3920 if optupdate:
3920 3921 try:
3921 3922 return hg.updatetotally(ui, repo, checkout, brev)
3922 3923 except error.UpdateAbort as inst:
3923 3924 msg = _("not updating: %s") % stringutil.forcebytestr(inst)
3924 3925 hint = inst.hint
3925 3926 raise error.UpdateAbort(msg, hint=hint)
3926 3927 if modheads > 1:
3927 3928 currentbranchheads = len(repo.branchheads())
3928 3929 if currentbranchheads == modheads:
3929 3930 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3930 3931 elif currentbranchheads > 1:
3931 3932 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
3932 3933 "merge)\n"))
3933 3934 else:
3934 3935 ui.status(_("(run 'hg heads' to see heads)\n"))
3935 3936 elif not ui.configbool('commands', 'update.requiredest'):
3936 3937 ui.status(_("(run 'hg update' to get a working copy)\n"))
3937 3938
3938 3939 @command('^pull',
3939 3940 [('u', 'update', None,
3940 3941 _('update to new branch head if new descendants were pulled')),
3941 3942 ('f', 'force', None, _('run even when remote repository is unrelated')),
3942 3943 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3943 3944 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
3944 3945 ('b', 'branch', [], _('a specific branch you would like to pull'),
3945 3946 _('BRANCH')),
3946 3947 ] + remoteopts,
3947 3948 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
3948 3949 def pull(ui, repo, source="default", **opts):
3949 3950 """pull changes from the specified source
3950 3951
3951 3952 Pull changes from a remote repository to a local one.
3952 3953
3953 3954 This finds all changes from the repository at the specified path
3954 3955 or URL and adds them to a local repository (the current one unless
3955 3956 -R is specified). By default, this does not update the copy of the
3956 3957 project in the working directory.
3957 3958
3958 3959 Use :hg:`incoming` if you want to see what would have been added
3959 3960 by a pull at the time you issued this command. If you then decide
3960 3961 to add those changes to the repository, you should use :hg:`pull
3961 3962 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3962 3963
3963 3964 If SOURCE is omitted, the 'default' path will be used.
3964 3965 See :hg:`help urls` for more information.
3965 3966
3966 3967 Specifying a bookmark as ``.`` is equivalent to specifying the active
3967 3968 bookmark's name.
3968 3969
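For example, to pull a single bookmark together with the changesets
needed for it (``feature-x`` is an illustrative name)::

hg pull -B feature-x
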
3969 3970 Returns 0 on success, 1 if an update had unresolved files.
3970 3971 """
3971 3972
3972 3973 opts = pycompat.byteskwargs(opts)
3973 3974 if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
3974 3975 msg = _('update destination required by configuration')
3975 3976 hint = _('use hg pull followed by hg update DEST')
3976 3977 raise error.Abort(msg, hint=hint)
3977 3978
3978 3979 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3979 3980 ui.status(_('pulling from %s\n') % util.hidepassword(source))
3980 3981 other = hg.peer(repo, opts, source)
3981 3982 try:
3982 3983 revs, checkout = hg.addbranchrevs(repo, other, branches,
3983 3984 opts.get('rev'))
3984 3985
3985 3986
3986 3987 pullopargs = {}
3987 3988 if opts.get('bookmark'):
3988 3989 if not revs:
3989 3990 revs = []
3990 3991 # The list of bookmark used here is not the one used to actually
3991 3992 # update the bookmark name. This can result in the revision pulled
3992 3993 # not ending up with the name of the bookmark because of a race
3993 3994 # condition on the server. (See issue 4689 for details)
3994 3995 remotebookmarks = other.listkeys('bookmarks')
3995 3996 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
3996 3997 pullopargs['remotebookmarks'] = remotebookmarks
3997 3998 for b in opts['bookmark']:
3998 3999 b = repo._bookmarks.expandname(b)
3999 4000 if b not in remotebookmarks:
4000 4001 raise error.Abort(_('remote bookmark %s not found!') % b)
4001 4002 revs.append(hex(remotebookmarks[b]))
4002 4003
4003 4004 if revs:
4004 4005 try:
4005 4006 # When 'rev' is a bookmark name, we cannot guarantee that it
4006 4007 # will be updated with that name because of a race condition
4007 4008 # server side. (See issue 4689 for details)
4008 4009 oldrevs = revs
4009 4010 revs = [] # actually, nodes
4010 4011 for r in oldrevs:
4011 4012 node = other.lookup(r)
4012 4013 revs.append(node)
4013 4014 if r == checkout:
4014 4015 checkout = node
4015 4016 except error.CapabilityError:
4016 4017 err = _("other repository doesn't support revision lookup, "
4017 4018 "so a rev cannot be specified.")
4018 4019 raise error.Abort(err)
4019 4020
4020 4021 wlock = util.nullcontextmanager()
4021 4022 if opts.get('update'):
4022 4023 wlock = repo.wlock()
4023 4024 with wlock:
4024 4025 pullopargs.update(opts.get('opargs', {}))
4025 4026 modheads = exchange.pull(repo, other, heads=revs,
4026 4027 force=opts.get('force'),
4027 4028 bookmarks=opts.get('bookmark', ()),
4028 4029 opargs=pullopargs).cgresult
4029 4030
4030 4031 # brev is a name, which might be a bookmark to be activated at
4031 4032 # the end of the update. In other words, it is an explicit
4032 4033 # destination of the update
4033 4034 brev = None
4034 4035
4035 4036 if checkout:
4036 4037 checkout = "%d" % repo.changelog.rev(checkout)
4037 4038
4038 4039 # order below depends on implementation of
4039 4040 # hg.addbranchrevs(). opts['bookmark'] is ignored,
4040 4041 # because 'checkout' is determined without it.
4041 4042 if opts.get('rev'):
4042 4043 brev = opts['rev'][0]
4043 4044 elif opts.get('branch'):
4044 4045 brev = opts['branch'][0]
4045 4046 else:
4046 4047 brev = branches[0]
4047 4048 repo._subtoppath = source
4048 4049 try:
4049 4050 ret = postincoming(ui, repo, modheads, opts.get('update'),
4050 4051 checkout, brev)
4051 4052
4052 4053 finally:
4053 4054 del repo._subtoppath
4054 4055
4055 4056 finally:
4056 4057 other.close()
4057 4058 return ret
4058 4059
4059 4060 @command('^push',
4060 4061 [('f', 'force', None, _('force push')),
4061 4062 ('r', 'rev', [],
4062 4063 _('a changeset intended to be included in the destination'),
4063 4064 _('REV')),
4064 4065 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4065 4066 ('b', 'branch', [],
4066 4067 _('a specific branch you would like to push'), _('BRANCH')),
4067 4068 ('', 'new-branch', False, _('allow pushing a new branch')),
4068 4069 ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
4069 4070 ] + remoteopts,
4070 4071 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4071 4072 def push(ui, repo, dest=None, **opts):
4072 4073 """push changes to the specified destination
4073 4074
4074 4075 Push changesets from the local repository to the specified
4075 4076 destination.
4076 4077
4077 4078 This operation is symmetrical to pull: it is identical to a pull
4078 4079 in the destination repository from the current one.
4079 4080
4080 4081 By default, push will not allow creation of new heads at the
4081 4082 destination, since multiple heads would make it unclear which head
4082 4083 to use. In this situation, it is recommended to pull and merge
4083 4084 before pushing.
4084 4085
4085 4086 Use --new-branch if you want to allow push to create a new named
4086 4087 branch that is not present at the destination. This allows you to
4087 4088 only create a new branch without forcing other changes.
4088 4089
4089 4090 .. note::
4090 4091
4091 4092 Extra care should be taken with the -f/--force option,
4092 4093 which will push all new heads on all branches, an action which will
4093 4094 almost always cause confusion for collaborators.
4094 4095
4095 4096 If -r/--rev is used, the specified revision and all its ancestors
4096 4097 will be pushed to the remote repository.
4097 4098
4098 4099 If -B/--bookmark is used, the specified bookmarked revision, its
4099 4100 ancestors, and the bookmark will be pushed to the remote
4100 4101 repository. Specifying ``.`` is equivalent to specifying the active
4101 4102 bookmark's name.
4102 4103
4103 4104 Please see :hg:`help urls` for important details about ``ssh://``
4104 4105 URLs. If DESTINATION is omitted, a default path will be used.
4105 4106
4106 4107 .. container:: verbose
4107 4108
4108 4109 The --pushvars option sends strings to the server that become
4109 4110 environment variables prepended with ``HG_USERVAR_``. For example,
4110 4111 ``--pushvars ENABLE_FEATURE=true``, provides the server side hooks with
4111 4112 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
4112 4113
4113 4114 pushvars can be used for user-overridable hooks as well as for setting
4114 4115 debug levels. One example is a hook that blocks commits containing
4115 4116 conflict markers but lets the user override it when a file uses
4116 4117 conflict markers for testing purposes or its format contains strings
4117 4118 that look like conflict markers.
4118 4119
4119 4120 By default, servers will ignore `--pushvars`. To enable it add the
4120 4121 following to your configuration file::
4121 4122
4122 4123 [push]
4123 4124 pushvars.server = true
4124 4125
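A client might then pass variables like this (the variable name and
value are illustrative)::

hg push --pushvars "DEBUG_HOOKS=true"
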
4125 4126 Returns 0 if push was successful, 1 if nothing to push.
4126 4127 """
4127 4128
4128 4129 opts = pycompat.byteskwargs(opts)
4129 4130 if opts.get('bookmark'):
4130 4131 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
4131 4132 for b in opts['bookmark']:
4132 4133 # translate -B options to -r so changesets get pushed
4133 4134 b = repo._bookmarks.expandname(b)
4134 4135 if b in repo._bookmarks:
4135 4136 opts.setdefault('rev', []).append(b)
4136 4137 else:
4137 4138 # if we try to push a deleted bookmark, translate it to null
4138 4139 # this lets simultaneous -r, -b options continue working
4139 4140 opts.setdefault('rev', []).append("null")
4140 4141
4141 4142 path = ui.paths.getpath(dest, default=('default-push', 'default'))
4142 4143 if not path:
4143 4144 raise error.Abort(_('default repository not configured!'),
4144 4145 hint=_("see 'hg help config.paths'"))
4145 4146 dest = path.pushloc or path.loc
4146 4147 branches = (path.branch, opts.get('branch') or [])
4147 4148 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4148 4149 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4149 4150 other = hg.peer(repo, opts, dest)
4150 4151
4151 4152 if revs:
4152 4153 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
4153 4154 if not revs:
4154 4155 raise error.Abort(_("specified revisions evaluate to an empty set"),
4155 4156 hint=_("use different revision arguments"))
4156 4157 elif path.pushrev:
4157 4158 # It doesn't make any sense to specify ancestor revisions. So limit
4158 4159 # to DAG heads to make discovery simpler.
4159 4160 expr = revsetlang.formatspec('heads(%r)', path.pushrev)
4160 4161 revs = scmutil.revrange(repo, [expr])
4161 4162 revs = [repo[rev].node() for rev in revs]
4162 4163 if not revs:
4163 4164 raise error.Abort(_('default push revset for path evaluates to an '
4164 4165 'empty set'))
4165 4166
4166 4167 repo._subtoppath = dest
4167 4168 try:
4168 4169 # push subrepos depth-first for coherent ordering
4169 4170 c = repo['.']
4170 4171 subs = c.substate # only repos that are committed
4171 4172 for s in sorted(subs):
4172 4173 result = c.sub(s).push(opts)
4173 4174 if result == 0:
4174 4175 return not result
4175 4176 finally:
4176 4177 del repo._subtoppath
4177 4178
4178 4179 opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
4179 4180 opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
4180 4181
4181 4182 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
4182 4183 newbranch=opts.get('new_branch'),
4183 4184 bookmarks=opts.get('bookmark', ()),
4184 4185 opargs=opargs)
4185 4186
4186 4187 result = not pushop.cgresult
4187 4188
4188 4189 if pushop.bkresult is not None:
4189 4190 if pushop.bkresult == 2:
4190 4191 result = 2
4191 4192 elif not result and pushop.bkresult:
4192 4193 result = 2
4193 4194
4194 4195 return result
4195 4196
4196 4197 @command('recover', [])
4197 4198 def recover(ui, repo):
4198 4199 """roll back an interrupted transaction
4199 4200
4200 4201 Recover from an interrupted commit or pull.
4201 4202
4202 4203 This command tries to fix the repository status after an
4203 4204 interrupted operation. It should only be necessary when Mercurial
4204 4205 suggests it.
4205 4206
4206 4207 Returns 0 if successful, 1 if nothing to recover or verify fails.
4207 4208 """
4208 4209 if repo.recover():
4209 4210 return hg.verify(repo)
4210 4211 return 1
4211 4212
4212 4213 @command('^remove|rm',
4213 4214 [('A', 'after', None, _('record delete for missing files')),
4214 4215 ('f', 'force', None,
4215 4216 _('forget added files, delete modified files')),
4216 4217 ] + subrepoopts + walkopts + dryrunopts,
4217 4218 _('[OPTION]... FILE...'),
4218 4219 inferrepo=True)
4219 4220 def remove(ui, repo, *pats, **opts):
4220 4221 """remove the specified files on the next commit
4221 4222
4222 4223 Schedule the indicated files for removal from the current branch.
4223 4224
4224 4225 This command schedules the files to be removed at the next commit.
4225 4226 To undo a remove before that, see :hg:`revert`. To undo added
4226 4227 files, see :hg:`forget`.
4227 4228
4228 4229 .. container:: verbose
4229 4230
4230 4231 -A/--after can be used to remove only files that have already
4231 4232 been deleted, -f/--force can be used to force deletion, and -Af
4232 4233 can be used to remove files from the next revision without
4233 4234 deleting them from the working directory.
4234 4235
4235 4236 The following table details the behavior of remove for different
4236 4237 file states (columns) and option combinations (rows). The file
4237 4238 states are Added [A], Clean [C], Modified [M] and Missing [!]
4238 4239 (as reported by :hg:`status`). The actions are Warn, Remove
4239 4240 (from branch) and Delete (from disk):
4240 4241
4241 4242 ========= == == == ==
4242 4243 opt/state A C M !
4243 4244 ========= == == == ==
4244 4245 none W RD W R
4245 4246 -f R RD RD R
4246 4247 -A W W W R
4247 4248 -Af R R R R
4248 4249 ========= == == == ==
4249 4250
4250 4251 .. note::
4251 4252
4252 4253 :hg:`remove` never deletes files in Added [A] state from the
4253 4254 working directory, not even if ``--force`` is specified.
4254 4255
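For example, to stop tracking a file while keeping it in the working
directory (the -Af combination from the table above; the file name is
illustrative)::

hg remove -Af keep-on-disk.txt
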
4255 4256 Returns 0 on success, 1 if any warnings encountered.
4256 4257 """
4257 4258
4258 4259 opts = pycompat.byteskwargs(opts)
4259 4260 after, force = opts.get('after'), opts.get('force')
4260 4261 dryrun = opts.get('dry_run')
4261 4262 if not pats and not after:
4262 4263 raise error.Abort(_('no files specified'))
4263 4264
4264 4265 m = scmutil.match(repo[None], pats, opts)
4265 4266 subrepos = opts.get('subrepos')
4266 4267 return cmdutil.remove(ui, repo, m, "", after, force, subrepos,
4267 4268 dryrun=dryrun)
4268 4269
4269 4270 @command('rename|move|mv',
4270 4271 [('A', 'after', None, _('record a rename that has already occurred')),
4271 4272 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4272 4273 ] + walkopts + dryrunopts,
4273 4274 _('[OPTION]... SOURCE... DEST'))
4274 4275 def rename(ui, repo, *pats, **opts):
4275 4276 """rename files; equivalent of copy + remove
4276 4277
4277 4278 Mark dest as copies of sources; mark sources for deletion. If dest
4278 4279 is a directory, copies are put in that directory. If dest is a
4279 4280 file, there can only be one source.
4280 4281
4281 4282 By default, this command copies the contents of files as they
4282 4283 exist in the working directory. If invoked with -A/--after, the
4283 4284 operation is recorded, but no copying is performed.
4284 4285
4285 4286 This command takes effect at the next commit. To undo a rename
4286 4287 before that, see :hg:`revert`.
4287 4288
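For example (file names are illustrative)::

hg rename old-name.c new-name.c
hg rename --after moved-by-hand.c new-location.c
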
4288 4289 Returns 0 on success, 1 if errors are encountered.
4289 4290 """
4290 4291 opts = pycompat.byteskwargs(opts)
4291 4292 with repo.wlock(False):
4292 4293 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4293 4294
4294 4295 @command('resolve',
4295 4296 [('a', 'all', None, _('select all unresolved files')),
4296 4297 ('l', 'list', None, _('list state of files needing merge')),
4297 4298 ('m', 'mark', None, _('mark files as resolved')),
4298 4299 ('u', 'unmark', None, _('mark files as unresolved')),
4299 4300 ('n', 'no-status', None, _('hide status prefix'))]
4300 4301 + mergetoolopts + walkopts + formatteropts,
4301 4302 _('[OPTION]... [FILE]...'),
4302 4303 inferrepo=True)
4303 4304 def resolve(ui, repo, *pats, **opts):
4304 4305 """redo merges or set/view the merge status of files
4305 4306
4306 4307 Merges with unresolved conflicts are often the result of
4307 4308 non-interactive merging using the ``internal:merge`` configuration
4308 4309 setting, or a command-line merge tool like ``diff3``. The resolve
4309 4310 command is used to manage the files involved in a merge, after
4310 4311 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4311 4312 working directory must have two parents). See :hg:`help
4312 4313 merge-tools` for information on configuring merge tools.
4313 4314
4314 4315 The resolve command can be used in the following ways:
4315 4316
4316 4317 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4317 4318 files, discarding any previous merge attempts. Re-merging is not
4318 4319 performed for files already marked as resolved. Use ``--all/-a``
4319 4320 to select all unresolved files. ``--tool`` can be used to specify
4320 4321 the merge tool used for the given files. It overrides the HGMERGE
4321 4322 environment variable and your configuration files. Previous file
4322 4323 contents are saved with a ``.orig`` suffix.
4323 4324
4324 4325 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4325 4326 (e.g. after having manually fixed-up the files). The default is
4326 4327 to mark all unresolved files.
4327 4328
4328 4329 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4329 4330 default is to mark all resolved files.
4330 4331
4331 4332 - :hg:`resolve -l`: list files which had or still have conflicts.
4332 4333 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4333 4334 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
4334 4335 the list. See :hg:`help filesets` for details.
4335 4336
4336 4337 .. note::
4337 4338
4338 4339 Mercurial will not let you commit files with unresolved merge
4339 4340 conflicts. You must use :hg:`resolve -m ...` before you can
4340 4341 commit after a conflicting merge.
4341 4342
4342 4343 Returns 0 on success, 1 if any files fail a resolve attempt.
4343 4344 """
4344 4345
4345 4346 opts = pycompat.byteskwargs(opts)
4346 4347 flaglist = 'all mark unmark list no_status'.split()
4347 4348 all, mark, unmark, show, nostatus = \
4348 4349 [opts.get(o) for o in flaglist]
4349 4350
4350 4351 if (show and (mark or unmark)) or (mark and unmark):
4351 4352 raise error.Abort(_("too many options specified"))
4352 4353 if pats and all:
4353 4354 raise error.Abort(_("can't specify --all and patterns"))
4354 4355 if not (all or pats or show or mark or unmark):
4355 4356 raise error.Abort(_('no files or directories specified'),
4356 4357 hint=('use --all to re-merge all unresolved files'))
4357 4358
4358 4359 if show:
4359 4360 ui.pager('resolve')
4360 4361 fm = ui.formatter('resolve', opts)
4361 4362 ms = mergemod.mergestate.read(repo)
4362 4363 m = scmutil.match(repo[None], pats, opts)
4363 4364
4364 4365 # Labels and keys based on merge state. Unresolved path conflicts show
4365 4366 # as 'P'. Resolved path conflicts show as 'R', the same as normal
4366 4367 # resolved conflicts.
4367 4368 mergestateinfo = {
4368 4369 mergemod.MERGE_RECORD_UNRESOLVED: ('resolve.unresolved', 'U'),
4369 4370 mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
4370 4371 mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
4371 4372 mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
4372 4373 mergemod.MERGE_RECORD_DRIVER_RESOLVED: ('resolve.driverresolved',
4373 4374 'D'),
4374 4375 }
4375 4376
4376 4377 for f in ms:
4377 4378 if not m(f):
4378 4379 continue
4379 4380
4380 4381 label, key = mergestateinfo[ms[f]]
4381 4382 fm.startitem()
4382 4383 fm.condwrite(not nostatus, 'status', '%s ', key, label=label)
4383 4384 fm.write('path', '%s\n', f, label=label)
4384 4385 fm.end()
4385 4386 return 0
4386 4387
4387 4388 with repo.wlock():
4388 4389 ms = mergemod.mergestate.read(repo)
4389 4390
4390 4391 if not (ms.active() or repo.dirstate.p2() != nullid):
4391 4392 raise error.Abort(
4392 4393 _('resolve command not applicable when not merging'))
4393 4394
4394 4395 wctx = repo[None]
4395 4396
4396 4397 if (ms.mergedriver
4397 4398 and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED):
4398 4399 proceed = mergemod.driverpreprocess(repo, ms, wctx)
4399 4400 ms.commit()
4400 4401 # allow mark and unmark to go through
4401 4402 if not mark and not unmark and not proceed:
4402 4403 return 1
4403 4404
4404 4405 m = scmutil.match(wctx, pats, opts)
4405 4406 ret = 0
4406 4407 didwork = False
4407 4408 runconclude = False
4408 4409
4409 4410 tocomplete = []
4410 4411 for f in ms:
4411 4412 if not m(f):
4412 4413 continue
4413 4414
4414 4415 didwork = True
4415 4416
4416 4417 # don't let driver-resolved files be marked, and run the conclude
4417 4418 # step if asked to resolve
4418 4419 if ms[f] == mergemod.MERGE_RECORD_DRIVER_RESOLVED:
4419 4420 exact = m.exact(f)
4420 4421 if mark:
4421 4422 if exact:
4422 4423 ui.warn(_('not marking %s as it is driver-resolved\n')
4423 4424 % f)
4424 4425 elif unmark:
4425 4426 if exact:
4426 4427 ui.warn(_('not unmarking %s as it is driver-resolved\n')
4427 4428 % f)
4428 4429 else:
4429 4430 runconclude = True
4430 4431 continue
4431 4432
4432 4433 # path conflicts must be resolved manually
4433 4434 if ms[f] in (mergemod.MERGE_RECORD_UNRESOLVED_PATH,
4434 4435 mergemod.MERGE_RECORD_RESOLVED_PATH):
4435 4436 if mark:
4436 4437 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED_PATH)
4437 4438 elif unmark:
4438 4439 ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
4439 4440 elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
4440 4441 ui.warn(_('%s: path conflict must be resolved manually\n')
4441 4442 % f)
4442 4443 continue
4443 4444
4444 4445 if mark:
4445 4446 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
4446 4447 elif unmark:
4447 4448 ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED)
4448 4449 else:
4449 4450 # backup pre-resolve (merge uses .orig for its own purposes)
4450 4451 a = repo.wjoin(f)
4451 4452 try:
4452 4453 util.copyfile(a, a + ".resolve")
4453 4454 except (IOError, OSError) as inst:
4454 4455 if inst.errno != errno.ENOENT:
4455 4456 raise
4456 4457
4457 4458 try:
4458 4459 # preresolve file
4459 4460 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4460 4461 'resolve')
4461 4462 complete, r = ms.preresolve(f, wctx)
4462 4463 if not complete:
4463 4464 tocomplete.append(f)
4464 4465 elif r:
4465 4466 ret = 1
4466 4467 finally:
4467 4468 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4468 4469 ms.commit()
4469 4470
4470 4471 # replace filemerge's .orig file with our resolve file, but only
4471 4472 # for merges that are complete
4472 4473 if complete:
4473 4474 try:
4474 4475 util.rename(a + ".resolve",
4475 4476 scmutil.origpath(ui, repo, a))
4476 4477 except OSError as inst:
4477 4478 if inst.errno != errno.ENOENT:
4478 4479 raise
4479 4480
4480 4481 for f in tocomplete:
4481 4482 try:
4482 4483 # resolve file
4483 4484 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
4484 4485 'resolve')
4485 4486 r = ms.resolve(f, wctx)
4486 4487 if r:
4487 4488 ret = 1
4488 4489 finally:
4489 4490 ui.setconfig('ui', 'forcemerge', '', 'resolve')
4490 4491 ms.commit()
4491 4492
4492 4493 # replace filemerge's .orig file with our resolve file
4493 4494 a = repo.wjoin(f)
4494 4495 try:
4495 4496 util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
4496 4497 except OSError as inst:
4497 4498 if inst.errno != errno.ENOENT:
4498 4499 raise
4499 4500
4500 4501 ms.commit()
4501 4502 ms.recordactions()
4502 4503
4503 4504 if not didwork and pats:
4504 4505 hint = None
4505 4506 if not any([p for p in pats if p.find(':') >= 0]):
4506 4507 pats = ['path:%s' % p for p in pats]
4507 4508 m = scmutil.match(wctx, pats, opts)
4508 4509 for f in ms:
4509 4510 if not m(f):
4510 4511 continue
4511 4512 flags = ''.join(['-%s ' % o[0:1] for o in flaglist
4512 4513 if opts.get(o)])
4513 4514 hint = _("(try: hg resolve %s%s)\n") % (
4514 4515 flags,
4515 4516 ' '.join(pats))
4516 4517 break
4517 4518 ui.warn(_("arguments do not match paths that need resolving\n"))
4518 4519 if hint:
4519 4520 ui.warn(hint)
4520 4521 elif ms.mergedriver and ms.mdstate() != 's':
4521 4522 # run conclude step when either a driver-resolved file is requested
4522 4523 # or there are no driver-resolved files
4523 4524 # we can't use 'ret' to determine whether any files are unresolved
4524 4525 # because we might not have tried to resolve some
4525 4526 if ((runconclude or not list(ms.driverresolved()))
4526 4527 and not list(ms.unresolved())):
4527 4528 proceed = mergemod.driverconclude(repo, ms, wctx)
4528 4529 ms.commit()
4529 4530 if not proceed:
4530 4531 return 1
4531 4532
4532 4533 # Nudge users into finishing an unfinished operation
4533 4534 unresolvedf = list(ms.unresolved())
4534 4535 driverresolvedf = list(ms.driverresolved())
4535 4536 if not unresolvedf and not driverresolvedf:
4536 4537 ui.status(_('(no more unresolved files)\n'))
4537 4538 cmdutil.checkafterresolved(repo)
4538 4539 elif not unresolvedf:
4539 4540 ui.status(_('(no more unresolved files -- '
4540 4541 'run "hg resolve --all" to conclude)\n'))
4541 4542
4542 4543 return ret
4543 4544
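The merge-state bookkeeping that resolve manipulates is also reachable from extension code. A minimal sketch, assuming it runs inside a repository with an interrupted merge and using only the mergemod.mergestate API exercised above (the helper name is hypothetical):

    from mercurial import merge as mergemod

    def unresolvedfiles(repo):
        # read the persisted merge state, as the resolve command does
        ms = mergemod.mergestate.read(repo)
        # iterating ms yields the files taking part in the merge; ms[f]
        # returns the per-file record, e.g. MERGE_RECORD_UNRESOLVED
        return [f for f in ms if ms[f] == mergemod.MERGE_RECORD_UNRESOLVED]

The same list can be obtained from list(ms.unresolved()), which the command itself uses when printing the "no more unresolved files" hint.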
4544 4545 @command('revert',
4545 4546 [('a', 'all', None, _('revert all changes when no arguments given')),
4546 4547 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4547 4548 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4548 4549 ('C', 'no-backup', None, _('do not save backup copies of files')),
4549 4550 ('i', 'interactive', None, _('interactively select the changes')),
4550 4551 ] + walkopts + dryrunopts,
4551 4552 _('[OPTION]... [-r REV] [NAME]...'))
4552 4553 def revert(ui, repo, *pats, **opts):
4553 4554 """restore files to their checkout state
4554 4555
4555 4556 .. note::
4556 4557
4557 4558 To check out earlier revisions, you should use :hg:`update REV`.
4558 4559 To cancel an uncommitted merge (and lose your changes),
4559 4560 use :hg:`merge --abort`.
4560 4561
4561 4562 With no revision specified, revert the specified files or directories
4562 4563 to the contents they had in the parent of the working directory.
4563 4564 This restores the contents of files to an unmodified
4564 4565 state and unschedules adds, removes, copies, and renames. If the
4565 4566 working directory has two parents, you must explicitly specify a
4566 4567 revision.
4567 4568
4568 4569 Using the -r/--rev or -d/--date options, revert the given files or
4569 4570 directories to their states as of a specific revision. Because
4570 4571 revert does not change the working directory parents, this will
4571 4572 cause these files to appear modified. This can be helpful to "back
4572 4573 out" some or all of an earlier change. See :hg:`backout` for a
4573 4574 related method.
4574 4575
4575 4576 Modified files are saved with a .orig suffix before reverting.
4576 4577 To disable these backups, use --no-backup. It is possible to store
4577 4578 the backup files in a custom directory relative to the root of the
4578 4579 repository by setting the ``ui.origbackuppath`` configuration
4579 4580 option.
4580 4581
4581 4582 See :hg:`help dates` for a list of formats valid for -d/--date.
4582 4583
4583 4584 See :hg:`help backout` for a way to reverse the effect of an
4584 4585 earlier changeset.
4585 4586
4586 4587 Returns 0 on success.
4587 4588 """
4588 4589
4589 4590 opts = pycompat.byteskwargs(opts)
4590 4591 if opts.get("date"):
4591 4592 if opts.get("rev"):
4592 4593 raise error.Abort(_("you can't specify a revision and a date"))
4593 4594 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4594 4595
4595 4596 parent, p2 = repo.dirstate.parents()
4596 4597 if not opts.get('rev') and p2 != nullid:
4597 4598 # revert after merge is a trap for new users (issue2915)
4598 4599 raise error.Abort(_('uncommitted merge with no revision specified'),
4599 4600 hint=_("use 'hg update' or see 'hg help revert'"))
4600 4601
4601 4602 rev = opts.get('rev')
4602 4603 if rev:
4603 4604 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
4604 4605 ctx = scmutil.revsingle(repo, rev)
4605 4606
4606 4607 if (not (pats or opts.get('include') or opts.get('exclude') or
4607 4608 opts.get('all') or opts.get('interactive'))):
4608 4609 msg = _("no files or directories specified")
4609 4610 if p2 != nullid:
4610 4611 hint = _("uncommitted merge, use --all to discard all changes,"
4611 4612 " or 'hg update -C .' to abort the merge")
4612 4613 raise error.Abort(msg, hint=hint)
4613 4614 dirty = any(repo.status())
4614 4615 node = ctx.node()
4615 4616 if node != parent:
4616 4617 if dirty:
4617 4618 hint = _("uncommitted changes, use --all to discard all"
4618 4619 " changes, or 'hg update %s' to update") % ctx.rev()
4619 4620 else:
4620 4621 hint = _("use --all to revert all files,"
4621 4622 " or 'hg update %s' to update") % ctx.rev()
4622 4623 elif dirty:
4623 4624 hint = _("uncommitted changes, use --all to discard all changes")
4624 4625 else:
4625 4626 hint = _("use --all to revert all files")
4626 4627 raise error.Abort(msg, hint=hint)
4627 4628
4628 4629 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
4629 4630 **pycompat.strkwargs(opts))
4630 4631
4631 4632 @command('rollback', dryrunopts +
4632 4633 [('f', 'force', False, _('ignore safety measures'))])
4633 4634 def rollback(ui, repo, **opts):
4634 4635 """roll back the last transaction (DANGEROUS) (DEPRECATED)
4635 4636
4636 4637 Please use :hg:`commit --amend` instead of rollback to correct
4637 4638 mistakes in the last commit.
4638 4639
4639 4640 This command should be used with care. There is only one level of
4640 4641 rollback, and there is no way to undo a rollback. It will also
4641 4642 restore the dirstate at the time of the last transaction, losing
4642 4643 any dirstate changes since that time. This command does not alter
4643 4644 the working directory.
4644 4645
4645 4646 Transactions are used to encapsulate the effects of all commands
4646 4647 that create new changesets or propagate existing changesets into a
4647 4648 repository.
4648 4649
4649 4650 .. container:: verbose
4650 4651
4651 4652 For example, the following commands are transactional, and their
4652 4653 effects can be rolled back:
4653 4654
4654 4655 - commit
4655 4656 - import
4656 4657 - pull
4657 4658 - push (with this repository as the destination)
4658 4659 - unbundle
4659 4660
4660 4661 To avoid permanent data loss, rollback will refuse to roll back a
4661 4662 commit transaction if it isn't checked out. Use --force to
4662 4663 override this protection.
4663 4664
4664 4665 The rollback command can be entirely disabled by setting the
4665 4666 ``ui.rollback`` configuration setting to false. If you're here
4666 4667 because you want to use rollback and it's disabled, you can
4667 4668 re-enable the command by setting ``ui.rollback`` to true.
4668 4669
4669 4670 This command is not intended for use on public repositories. Once
4670 4671 changes are visible for pull by other users, rolling a transaction
4671 4672 back locally is ineffective (someone else may already have pulled
4672 4673 the changes). Furthermore, a race is possible with readers of the
4673 4674 repository; for example an in-progress pull from the repository
4674 4675 may fail if a rollback is performed.
4675 4676
4676 4677 Returns 0 on success, 1 if no rollback data is available.
4677 4678 """
4678 4679 if not ui.configbool('ui', 'rollback'):
4679 4680 raise error.Abort(_('rollback is disabled because it is unsafe'),
4680 4681 hint=('see `hg help -v rollback` for information'))
4681 4682 return repo.rollback(dryrun=opts.get(r'dry_run'),
4682 4683 force=opts.get(r'force'))
4683 4684
4684 4685 @command('root', [], cmdtype=readonly)
4685 4686 def root(ui, repo):
4686 4687 """print the root (top) of the current working directory
4687 4688
4688 4689 Print the root directory of the current repository.
4689 4690
4690 4691 Returns 0 on success.
4691 4692 """
4692 4693 ui.write(repo.root + "\n")
4693 4694
4694 4695 @command('^serve',
4695 4696 [('A', 'accesslog', '', _('name of access log file to write to'),
4696 4697 _('FILE')),
4697 4698 ('d', 'daemon', None, _('run server in background')),
4698 4699 ('', 'daemon-postexec', [], _('used internally by daemon mode')),
4699 4700 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4700 4701 # use string type, then we can check if something was passed
4701 4702 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4702 4703 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4703 4704 _('ADDR')),
4704 4705 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4705 4706 _('PREFIX')),
4706 4707 ('n', 'name', '',
4707 4708 _('name to show in web pages (default: working directory)'), _('NAME')),
4708 4709 ('', 'web-conf', '',
4709 4710 _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
4710 4711 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4711 4712 _('FILE')),
4712 4713 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4713 4714 ('', 'stdio', None, _('for remote clients (ADVANCED)')),
4714 4715 ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
4715 4716 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4716 4717 ('', 'style', '', _('template style to use'), _('STYLE')),
4717 4718 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4718 4719 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
4719 4720 + subrepoopts,
4720 4721 _('[OPTION]...'),
4721 4722 optionalrepo=True)
4722 4723 def serve(ui, repo, **opts):
4723 4724 """start stand-alone webserver
4724 4725
4725 4726 Start a local HTTP repository browser and pull server. You can use
4726 4727 this for ad-hoc sharing and browsing of repositories. It is
4727 4728 recommended to use a real web server to serve a repository for
4728 4729 longer periods of time.
4729 4730
4730 4731 Please note that the server does not implement access control.
4731 4732 This means that, by default, anybody can read from the server and
4732 4733 nobody can write to it. Set the ``web.allow-push``
4733 4734 option to ``*`` to allow everybody to push to the server. You
4734 4735 should use a real web server if you need to authenticate users.
4735 4736
4736 4737 By default, the server logs accesses to stdout and errors to
4737 4738 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4738 4739 files.
4739 4740
4740 4741 To have the server choose a free port number to listen on, specify
4741 4742 a port number of 0; in this case, the server will print the port
4742 4743 number it uses.
4743 4744
4744 4745 Returns 0 on success.
4745 4746 """
4746 4747
4747 4748 opts = pycompat.byteskwargs(opts)
4748 4749 if opts["stdio"] and opts["cmdserver"]:
4749 4750 raise error.Abort(_("cannot use --stdio with --cmdserver"))
4750 4751
4751 4752 if opts["stdio"]:
4752 4753 if repo is None:
4753 4754 raise error.RepoError(_("there is no Mercurial repository here"
4754 4755 " (.hg not found)"))
4755 4756 s = wireprotoserver.sshserver(ui, repo)
4756 4757 s.serve_forever()
4757 4758
4758 4759 service = server.createservice(ui, repo, opts)
4759 4760 return server.runservice(opts, initfn=service.init, runfn=service.run)
4760 4761
4761 4762 @command('^status|st',
4762 4763 [('A', 'all', None, _('show status of all files')),
4763 4764 ('m', 'modified', None, _('show only modified files')),
4764 4765 ('a', 'added', None, _('show only added files')),
4765 4766 ('r', 'removed', None, _('show only removed files')),
4766 4767 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4767 4768 ('c', 'clean', None, _('show only files without changes')),
4768 4769 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4769 4770 ('i', 'ignored', None, _('show only ignored files')),
4770 4771 ('n', 'no-status', None, _('hide status prefix')),
4771 4772 ('t', 'terse', '', _('show the terse output (EXPERIMENTAL)')),
4772 4773 ('C', 'copies', None, _('show source of copied files')),
4773 4774 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4774 4775 ('', 'rev', [], _('show difference from revision'), _('REV')),
4775 4776 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
4776 4777 ] + walkopts + subrepoopts + formatteropts,
4777 4778 _('[OPTION]... [FILE]...'),
4778 4779 inferrepo=True, cmdtype=readonly)
4779 4780 def status(ui, repo, *pats, **opts):
4780 4781 """show changed files in the working directory
4781 4782
4782 4783 Show status of files in the repository. If names are given, only
4783 4784 files that match are shown. Files that are clean or ignored or
4784 4785 the source of a copy/move operation are not listed unless
4785 4786 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
4786 4787 Unless options described with "show only ..." are given, the
4787 4788 options -mardu are used.
4788 4789
4789 4790 Option -q/--quiet hides untracked (unknown and ignored) files
4790 4791 unless explicitly requested with -u/--unknown or -i/--ignored.
4791 4792
4792 4793 .. note::
4793 4794
4794 4795 :hg:`status` may appear to disagree with diff if permissions have
4795 4796 changed or a merge has occurred. The standard diff format does
4796 4797 not report permission changes and diff only reports changes
4797 4798 relative to one merge parent.
4798 4799
4799 4800 If one revision is given, it is used as the base revision.
4800 4801 If two revisions are given, the differences between them are
4801 4802 shown. The --change option can also be used as a shortcut to list
4802 4803 the changed files of a revision from its first parent.
4803 4804
4804 4805 The codes used to show the status of files are::
4805 4806
4806 4807 M = modified
4807 4808 A = added
4808 4809 R = removed
4809 4810 C = clean
4810 4811 ! = missing (deleted by non-hg command, but still tracked)
4811 4812 ? = not tracked
4812 4813 I = ignored
4813 4814 = origin of the previous file (with --copies)
4814 4815
4815 4816 .. container:: verbose
4816 4817
4817 4818 The -t/--terse option abbreviates the output by showing only the directory
4818 4819 name if all the files in it share the same status. The option takes an
4819 4820 argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
4820 4821 for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
4821 4822 for 'ignored', and 'c' for 'clean'.
4822 4823
4823 4824 It abbreviates only those statuses which are passed. Note that clean and
4824 4825 ignored files are not displayed with '--terse ic' unless the -c/--clean
4825 4826 and -i/--ignored options are also used.
4826 4827
4827 4828 The -v/--verbose option shows information when the repository is in an
4828 4829 unfinished merge, shelve, or rebase state, etc. You can turn this behavior
4829 4830 on by default by enabling the ``commands.status.verbose`` option.
4830 4831
4831 4832 You can skip displaying some of these states by setting
4832 4833 ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
4833 4834 'histedit', 'merge', 'rebase', or 'unshelve'.
4834 4835
4835 4836 Examples:
4836 4837
4837 4838 - show changes in the working directory relative to a
4838 4839 changeset::
4839 4840
4840 4841 hg status --rev 9353
4841 4842
4842 4843 - show changes in the working directory relative to the
4843 4844 current directory (see :hg:`help patterns` for more information)::
4844 4845
4845 4846 hg status re:
4846 4847
4847 4848 - show all changes including copies in an existing changeset::
4848 4849
4849 4850 hg status --copies --change 9353
4850 4851
4851 4852 - get a NUL separated list of added files, suitable for xargs::
4852 4853
4853 4854 hg status -an0
4854 4855
4855 4856 - show more information about the repository status, abbreviating
4856 4857 added, removed, modified, deleted, and untracked paths::
4857 4858
4858 4859 hg status -v -t mardu
4859 4860
4860 4861 Returns 0 on success.
4861 4862
4862 4863 """
4863 4864
4864 4865 opts = pycompat.byteskwargs(opts)
4865 4866 revs = opts.get('rev')
4866 4867 change = opts.get('change')
4867 4868 terse = opts.get('terse')
4868 4869
4869 4870 if revs and change:
4870 4871 msg = _('cannot specify --rev and --change at the same time')
4871 4872 raise error.Abort(msg)
4872 4873 elif revs and terse:
4873 4874 msg = _('cannot use --terse with --rev')
4874 4875 raise error.Abort(msg)
4875 4876 elif change:
4876 4877 repo = scmutil.unhidehashlikerevs(repo, [change], 'nowarn')
4877 4878 node2 = scmutil.revsingle(repo, change, None).node()
4878 4879 node1 = repo[node2].p1().node()
4879 4880 else:
4880 4881 repo = scmutil.unhidehashlikerevs(repo, revs, 'nowarn')
4881 4882 node1, node2 = scmutil.revpair(repo, revs)
4882 4883
4883 4884 if pats or ui.configbool('commands', 'status.relative'):
4884 4885 cwd = repo.getcwd()
4885 4886 else:
4886 4887 cwd = ''
4887 4888
4888 4889 if opts.get('print0'):
4889 4890 end = '\0'
4890 4891 else:
4891 4892 end = '\n'
4892 4893 copy = {}
4893 4894 states = 'modified added removed deleted unknown ignored clean'.split()
4894 4895 show = [k for k in states if opts.get(k)]
4895 4896 if opts.get('all'):
4896 4897 show += ui.quiet and (states[:4] + ['clean']) or states
4897 4898
4898 4899 if not show:
4899 4900 if ui.quiet:
4900 4901 show = states[:4]
4901 4902 else:
4902 4903 show = states[:5]
4903 4904
4904 4905 m = scmutil.match(repo[node2], pats, opts)
4905 4906 if terse:
4906 4907 # we need to compute clean and unknown to apply the terse filtering
4907 4908 stat = repo.status(node1, node2, m,
4908 4909 'ignored' in show or 'i' in terse,
4909 4910 True, True, opts.get('subrepos'))
4910 4911
4911 4912 stat = cmdutil.tersedir(stat, terse)
4912 4913 else:
4913 4914 stat = repo.status(node1, node2, m,
4914 4915 'ignored' in show, 'clean' in show,
4915 4916 'unknown' in show, opts.get('subrepos'))
4916 4917
4917 4918 changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
4918 4919
4919 4920 if (opts.get('all') or opts.get('copies')
4920 4921 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
4921 4922 copy = copies.pathcopies(repo[node1], repo[node2], m)
4922 4923
4923 4924 ui.pager('status')
4924 4925 fm = ui.formatter('status', opts)
4925 4926 fmt = '%s' + end
4926 4927 showchar = not opts.get('no_status')
4927 4928
4928 4929 for state, char, files in changestates:
4929 4930 if state in show:
4930 4931 label = 'status.' + state
4931 4932 for f in files:
4932 4933 fm.startitem()
4933 4934 fm.condwrite(showchar, 'status', '%s ', char, label=label)
4934 4935 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
4935 4936 if f in copy:
4936 4937 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
4937 4938 label='status.copied')
4938 4939
4939 4940 if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
4940 4941 and not ui.plain()):
4941 4942 cmdutil.morestatus(repo, fm)
4942 4943 fm.end()
4943 4944
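For scripting against the same information that hg status prints, the repo.status() API used in the body above returns an object with one attribute per state. A small sketch, assuming an in-process repo object (the helper name is hypothetical):

    def changedfiles(repo):
        # working directory vs. its first parent, the command's default
        stat = repo.status()
        # stat exposes modified/added/removed/deleted (and, when requested,
        # unknown/ignored/clean) as lists of file names
        return stat.modified + stat.added + stat.removed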
4944 4945 @command('^summary|sum',
4945 4946 [('', 'remote', None, _('check for push and pull'))],
4946 4947 '[--remote]', cmdtype=readonly)
4947 4948 def summary(ui, repo, **opts):
4948 4949 """summarize working directory state
4949 4950
4950 4951 This generates a brief summary of the working directory state,
4951 4952 including parents, branch, commit status, phase and available updates.
4952 4953
4953 4954 With the --remote option, this will check the default paths for
4954 4955 incoming and outgoing changes. This can be time-consuming.
4955 4956
4956 4957 Returns 0 on success.
4957 4958 """
4958 4959
4959 4960 opts = pycompat.byteskwargs(opts)
4960 4961 ui.pager('summary')
4961 4962 ctx = repo[None]
4962 4963 parents = ctx.parents()
4963 4964 pnode = parents[0].node()
4964 4965 marks = []
4965 4966
4966 4967 ms = None
4967 4968 try:
4968 4969 ms = mergemod.mergestate.read(repo)
4969 4970 except error.UnsupportedMergeRecords as e:
4970 4971 s = ' '.join(e.recordtypes)
4971 4972 ui.warn(
4972 4973 _('warning: merge state has unsupported record types: %s\n') % s)
4973 4974 unresolved = []
4974 4975 else:
4975 4976 unresolved = list(ms.unresolved())
4976 4977
4977 4978 for p in parents:
4978 4979 # label with log.changeset (instead of log.parent) since this
4979 4980 # shows a working directory parent *changeset*:
4980 4981 # i18n: column positioning for "hg summary"
4981 4982 ui.write(_('parent: %d:%s ') % (p.rev(), p),
4982 4983 label=logcmdutil.changesetlabels(p))
4983 4984 ui.write(' '.join(p.tags()), label='log.tag')
4984 4985 if p.bookmarks():
4985 4986 marks.extend(p.bookmarks())
4986 4987 if p.rev() == -1:
4987 4988 if not len(repo):
4988 4989 ui.write(_(' (empty repository)'))
4989 4990 else:
4990 4991 ui.write(_(' (no revision checked out)'))
4991 4992 if p.obsolete():
4992 4993 ui.write(_(' (obsolete)'))
4993 4994 if p.isunstable():
4994 4995 instabilities = (ui.label(instability, 'trouble.%s' % instability)
4995 4996 for instability in p.instabilities())
4996 4997 ui.write(' ('
4997 4998 + ', '.join(instabilities)
4998 4999 + ')')
4999 5000 ui.write('\n')
5000 5001 if p.description():
5001 5002 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5002 5003 label='log.summary')
5003 5004
5004 5005 branch = ctx.branch()
5005 5006 bheads = repo.branchheads(branch)
5006 5007 # i18n: column positioning for "hg summary"
5007 5008 m = _('branch: %s\n') % branch
5008 5009 if branch != 'default':
5009 5010 ui.write(m, label='log.branch')
5010 5011 else:
5011 5012 ui.status(m, label='log.branch')
5012 5013
5013 5014 if marks:
5014 5015 active = repo._activebookmark
5015 5016 # i18n: column positioning for "hg summary"
5016 5017 ui.write(_('bookmarks:'), label='log.bookmark')
5017 5018 if active is not None:
5018 5019 if active in marks:
5019 5020 ui.write(' *' + active, label=bookmarks.activebookmarklabel)
5020 5021 marks.remove(active)
5021 5022 else:
5022 5023 ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
5023 5024 for m in marks:
5024 5025 ui.write(' ' + m, label='log.bookmark')
5025 5026 ui.write('\n', label='log.bookmark')
5026 5027
5027 5028 status = repo.status(unknown=True)
5028 5029
5029 5030 c = repo.dirstate.copies()
5030 5031 copied, renamed = [], []
5031 5032 for d, s in c.iteritems():
5032 5033 if s in status.removed:
5033 5034 status.removed.remove(s)
5034 5035 renamed.append(d)
5035 5036 else:
5036 5037 copied.append(d)
5037 5038 if d in status.added:
5038 5039 status.added.remove(d)
5039 5040
5040 5041 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5041 5042
5042 5043 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
5043 5044 (ui.label(_('%d added'), 'status.added'), status.added),
5044 5045 (ui.label(_('%d removed'), 'status.removed'), status.removed),
5045 5046 (ui.label(_('%d renamed'), 'status.copied'), renamed),
5046 5047 (ui.label(_('%d copied'), 'status.copied'), copied),
5047 5048 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
5048 5049 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
5049 5050 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
5050 5051 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
5051 5052 t = []
5052 5053 for l, s in labels:
5053 5054 if s:
5054 5055 t.append(l % len(s))
5055 5056
5056 5057 t = ', '.join(t)
5057 5058 cleanworkdir = False
5058 5059
5059 5060 if repo.vfs.exists('graftstate'):
5060 5061 t += _(' (graft in progress)')
5061 5062 if repo.vfs.exists('updatestate'):
5062 5063 t += _(' (interrupted update)')
5063 5064 elif len(parents) > 1:
5064 5065 t += _(' (merge)')
5065 5066 elif branch != parents[0].branch():
5066 5067 t += _(' (new branch)')
5067 5068 elif (parents[0].closesbranch() and
5068 5069 pnode in repo.branchheads(branch, closed=True)):
5069 5070 t += _(' (head closed)')
5070 5071 elif not (status.modified or status.added or status.removed or renamed or
5071 5072 copied or subs):
5072 5073 t += _(' (clean)')
5073 5074 cleanworkdir = True
5074 5075 elif pnode not in bheads:
5075 5076 t += _(' (new branch head)')
5076 5077
5077 5078 if parents:
5078 5079 pendingphase = max(p.phase() for p in parents)
5079 5080 else:
5080 5081 pendingphase = phases.public
5081 5082
5082 5083 if pendingphase > phases.newcommitphase(ui):
5083 5084 t += ' (%s)' % phases.phasenames[pendingphase]
5084 5085
5085 5086 if cleanworkdir:
5086 5087 # i18n: column positioning for "hg summary"
5087 5088 ui.status(_('commit: %s\n') % t.strip())
5088 5089 else:
5089 5090 # i18n: column positioning for "hg summary"
5090 5091 ui.write(_('commit: %s\n') % t.strip())
5091 5092
5092 5093 # all ancestors of branch heads - all ancestors of parent = new csets
5093 5094 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
5094 5095 bheads))
5095 5096
5096 5097 if new == 0:
5097 5098 # i18n: column positioning for "hg summary"
5098 5099 ui.status(_('update: (current)\n'))
5099 5100 elif pnode not in bheads:
5100 5101 # i18n: column positioning for "hg summary"
5101 5102 ui.write(_('update: %d new changesets (update)\n') % new)
5102 5103 else:
5103 5104 # i18n: column positioning for "hg summary"
5104 5105 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5105 5106 (new, len(bheads)))
5106 5107
5107 5108 t = []
5108 5109 draft = len(repo.revs('draft()'))
5109 5110 if draft:
5110 5111 t.append(_('%d draft') % draft)
5111 5112 secret = len(repo.revs('secret()'))
5112 5113 if secret:
5113 5114 t.append(_('%d secret') % secret)
5114 5115
5115 5116 if draft or secret:
5116 5117 ui.status(_('phases: %s\n') % ', '.join(t))
5117 5118
5118 5119 if obsolete.isenabled(repo, obsolete.createmarkersopt):
5119 5120 for trouble in ("orphan", "contentdivergent", "phasedivergent"):
5120 5121 numtrouble = len(repo.revs(trouble + "()"))
5121 5122 # We write all the possibilities to ease translation
5122 5123 troublemsg = {
5123 5124 "orphan": _("orphan: %d changesets"),
5124 5125 "contentdivergent": _("content-divergent: %d changesets"),
5125 5126 "phasedivergent": _("phase-divergent: %d changesets"),
5126 5127 }
5127 5128 if numtrouble > 0:
5128 5129 ui.status(troublemsg[trouble] % numtrouble + "\n")
5129 5130
5130 5131 cmdutil.summaryhooks(ui, repo)
5131 5132
5132 5133 if opts.get('remote'):
5133 5134 needsincoming, needsoutgoing = True, True
5134 5135 else:
5135 5136 needsincoming, needsoutgoing = False, False
5136 5137 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
5137 5138 if i:
5138 5139 needsincoming = True
5139 5140 if o:
5140 5141 needsoutgoing = True
5141 5142 if not needsincoming and not needsoutgoing:
5142 5143 return
5143 5144
5144 5145 def getincoming():
5145 5146 source, branches = hg.parseurl(ui.expandpath('default'))
5146 5147 sbranch = branches[0]
5147 5148 try:
5148 5149 other = hg.peer(repo, {}, source)
5149 5150 except error.RepoError:
5150 5151 if opts.get('remote'):
5151 5152 raise
5152 5153 return source, sbranch, None, None, None
5153 5154 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
5154 5155 if revs:
5155 5156 revs = [other.lookup(rev) for rev in revs]
5156 5157 ui.debug('comparing with %s\n' % util.hidepassword(source))
5157 5158 repo.ui.pushbuffer()
5158 5159 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
5159 5160 repo.ui.popbuffer()
5160 5161 return source, sbranch, other, commoninc, commoninc[1]
5161 5162
5162 5163 if needsincoming:
5163 5164 source, sbranch, sother, commoninc, incoming = getincoming()
5164 5165 else:
5165 5166 source = sbranch = sother = commoninc = incoming = None
5166 5167
5167 5168 def getoutgoing():
5168 5169 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5169 5170 dbranch = branches[0]
5170 5171 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5171 5172 if source != dest:
5172 5173 try:
5173 5174 dother = hg.peer(repo, {}, dest)
5174 5175 except error.RepoError:
5175 5176 if opts.get('remote'):
5176 5177 raise
5177 5178 return dest, dbranch, None, None
5178 5179 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5179 5180 elif sother is None:
5180 5181 # there is no explicit destination peer, but the source one is invalid
5181 5182 return dest, dbranch, None, None
5182 5183 else:
5183 5184 dother = sother
5184 5185 if (source != dest or (sbranch is not None and sbranch != dbranch)):
5185 5186 common = None
5186 5187 else:
5187 5188 common = commoninc
5188 5189 if revs:
5189 5190 revs = [repo.lookup(rev) for rev in revs]
5190 5191 repo.ui.pushbuffer()
5191 5192 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
5192 5193 commoninc=common)
5193 5194 repo.ui.popbuffer()
5194 5195 return dest, dbranch, dother, outgoing
5195 5196
5196 5197 if needsoutgoing:
5197 5198 dest, dbranch, dother, outgoing = getoutgoing()
5198 5199 else:
5199 5200 dest = dbranch = dother = outgoing = None
5200 5201
5201 5202 if opts.get('remote'):
5202 5203 t = []
5203 5204 if incoming:
5204 5205 t.append(_('1 or more incoming'))
5205 5206 o = outgoing.missing
5206 5207 if o:
5207 5208 t.append(_('%d outgoing') % len(o))
5208 5209 other = dother or sother
5209 5210 if 'bookmarks' in other.listkeys('namespaces'):
5210 5211 counts = bookmarks.summary(repo, other)
5211 5212 if counts[0] > 0:
5212 5213 t.append(_('%d incoming bookmarks') % counts[0])
5213 5214 if counts[1] > 0:
5214 5215 t.append(_('%d outgoing bookmarks') % counts[1])
5215 5216
5216 5217 if t:
5217 5218 # i18n: column positioning for "hg summary"
5218 5219 ui.write(_('remote: %s\n') % (', '.join(t)))
5219 5220 else:
5220 5221 # i18n: column positioning for "hg summary"
5221 5222 ui.status(_('remote: (synced)\n'))
5222 5223
5223 5224 cmdutil.summaryremotehooks(ui, repo, opts,
5224 5225 ((source, sbranch, sother, commoninc),
5225 5226 (dest, dbranch, dother, outgoing)))
5226 5227
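The --remote check above boils down to a peer lookup plus common-changeset discovery. A hedged sketch of the same steps, reusing only the calls visible in getincoming() above; the helper name is hypothetical and error handling is omitted:

    from mercurial import discovery, hg

    def hasincoming(ui, repo):
        source, branches = hg.parseurl(ui.expandpath('default'))
        other = hg.peer(repo, {}, source)
        commoninc = discovery.findcommonincoming(repo, other)
        # the second element of the returned tuple signals whether anything
        # is incoming from that peer
        return bool(commoninc[1])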
5227 5228 @command('tag',
5228 5229 [('f', 'force', None, _('force tag')),
5229 5230 ('l', 'local', None, _('make the tag local')),
5230 5231 ('r', 'rev', '', _('revision to tag'), _('REV')),
5231 5232 ('', 'remove', None, _('remove a tag')),
5232 5233 # -l/--local is already there, commitopts cannot be used
5233 5234 ('e', 'edit', None, _('invoke editor on commit messages')),
5234 5235 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
5235 5236 ] + commitopts2,
5236 5237 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5237 5238 def tag(ui, repo, name1, *names, **opts):
5238 5239 """add one or more tags for the current or given revision
5239 5240
5240 5241 Name a particular revision using <name>.
5241 5242
5242 5243 Tags are used to name particular revisions of the repository and are
5243 5244 very useful to compare different revisions, to go back to significant
5244 5245 earlier versions or to mark branch points as releases, etc. Changing
5245 5246 an existing tag is normally disallowed; use -f/--force to override.
5246 5247
5247 5248 If no revision is given, the parent of the working directory is
5248 5249 used.
5249 5250
5250 5251 To facilitate version control, distribution, and merging of tags,
5251 5252 they are stored as a file named ".hgtags" which is managed similarly
5252 5253 to other project files and can be hand-edited if necessary. This
5253 5254 also means that tagging creates a new commit. The file
5254 5255 ".hg/localtags" is used for local tags (not shared among
5255 5256 repositories).
5256 5257
5257 5258 Tag commits are usually made at the head of a branch. If the parent
5258 5259 of the working directory is not a branch head, :hg:`tag` aborts; use
5259 5260 -f/--force to force the tag commit to be based on a non-head
5260 5261 changeset.
5261 5262
5262 5263 See :hg:`help dates` for a list of formats valid for -d/--date.
5263 5264
5264 5265 Since tag names have priority over branch names during revision
5265 5266 lookup, using an existing branch name as a tag name is discouraged.
5266 5267
5267 5268 Returns 0 on success.
5268 5269 """
5269 5270 opts = pycompat.byteskwargs(opts)
5270 5271 wlock = lock = None
5271 5272 try:
5272 5273 wlock = repo.wlock()
5273 5274 lock = repo.lock()
5274 5275 rev_ = "."
5275 5276 names = [t.strip() for t in (name1,) + names]
5276 5277 if len(names) != len(set(names)):
5277 5278 raise error.Abort(_('tag names must be unique'))
5278 5279 for n in names:
5279 5280 scmutil.checknewlabel(repo, n, 'tag')
5280 5281 if not n:
5281 5282 raise error.Abort(_('tag names cannot consist entirely of '
5282 5283 'whitespace'))
5283 5284 if opts.get('rev') and opts.get('remove'):
5284 5285 raise error.Abort(_("--rev and --remove are incompatible"))
5285 5286 if opts.get('rev'):
5286 5287 rev_ = opts['rev']
5287 5288 message = opts.get('message')
5288 5289 if opts.get('remove'):
5289 5290 if opts.get('local'):
5290 5291 expectedtype = 'local'
5291 5292 else:
5292 5293 expectedtype = 'global'
5293 5294
5294 5295 for n in names:
5295 5296 if not repo.tagtype(n):
5296 5297 raise error.Abort(_("tag '%s' does not exist") % n)
5297 5298 if repo.tagtype(n) != expectedtype:
5298 5299 if expectedtype == 'global':
5299 5300 raise error.Abort(_("tag '%s' is not a global tag") % n)
5300 5301 else:
5301 5302 raise error.Abort(_("tag '%s' is not a local tag") % n)
5302 5303 rev_ = 'null'
5303 5304 if not message:
5304 5305 # we don't translate commit messages
5305 5306 message = 'Removed tag %s' % ', '.join(names)
5306 5307 elif not opts.get('force'):
5307 5308 for n in names:
5308 5309 if n in repo.tags():
5309 5310 raise error.Abort(_("tag '%s' already exists "
5310 5311 "(use -f to force)") % n)
5311 5312 if not opts.get('local'):
5312 5313 p1, p2 = repo.dirstate.parents()
5313 5314 if p2 != nullid:
5314 5315 raise error.Abort(_('uncommitted merge'))
5315 5316 bheads = repo.branchheads()
5316 5317 if not opts.get('force') and bheads and p1 not in bheads:
5317 5318 raise error.Abort(_('working directory is not at a branch head '
5318 5319 '(use -f to force)'))
5319 5320 node = scmutil.revsingle(repo, rev_).node()
5320 5321
5321 5322 if not message:
5322 5323 # we don't translate commit messages
5323 5324 message = ('Added tag %s for changeset %s' %
5324 5325 (', '.join(names), short(node)))
5325 5326
5326 5327 date = opts.get('date')
5327 5328 if date:
5328 5329 date = dateutil.parsedate(date)
5329 5330
5330 5331 if opts.get('remove'):
5331 5332 editform = 'tag.remove'
5332 5333 else:
5333 5334 editform = 'tag.add'
5334 5335 editor = cmdutil.getcommiteditor(editform=editform,
5335 5336 **pycompat.strkwargs(opts))
5336 5337
5337 5338 # don't allow tagging the null rev
5338 5339 if (not opts.get('remove') and
5339 5340 scmutil.revsingle(repo, rev_).rev() == nullrev):
5340 5341 raise error.Abort(_("cannot tag null revision"))
5341 5342
5342 5343 tagsmod.tag(repo, names, node, message, opts.get('local'),
5343 5344 opts.get('user'), date, editor=editor)
5344 5345 finally:
5345 5346 release(lock, wlock)
5346 5347
5347 5348 @command('tags', formatteropts, '', cmdtype=readonly)
5348 5349 def tags(ui, repo, **opts):
5349 5350 """list repository tags
5350 5351
5351 5352 This lists both regular and local tags. When the -v/--verbose
5352 5353 switch is used, a third column "local" is printed for local tags.
5353 5354 When the -q/--quiet switch is used, only the tag name is printed.
5354 5355
5355 5356 Returns 0 on success.
5356 5357 """
5357 5358
5358 5359 opts = pycompat.byteskwargs(opts)
5359 5360 ui.pager('tags')
5360 5361 fm = ui.formatter('tags', opts)
5361 5362 hexfunc = fm.hexfunc
5362 5363 tagtype = ""
5363 5364
5364 5365 for t, n in reversed(repo.tagslist()):
5365 5366 hn = hexfunc(n)
5366 5367 label = 'tags.normal'
5367 5368 tagtype = ''
5368 5369 if repo.tagtype(t) == 'local':
5369 5370 label = 'tags.local'
5370 5371 tagtype = 'local'
5371 5372
5372 5373 fm.startitem()
5373 5374 fm.write('tag', '%s', t, label=label)
5374 5375 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
5375 5376 fm.condwrite(not ui.quiet, 'rev node', fmt,
5376 5377 repo.changelog.rev(n), hn, label=label)
5377 5378 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
5378 5379 tagtype, label=label)
5379 5380 fm.plain('\n')
5380 5381 fm.end()
5381 5382
5382 5383 @command('tip',
5383 5384 [('p', 'patch', None, _('show patch')),
5384 5385 ('g', 'git', None, _('use git extended diff format')),
5385 5386 ] + templateopts,
5386 5387 _('[-p] [-g]'))
5387 5388 def tip(ui, repo, **opts):
5388 5389 """show the tip revision (DEPRECATED)
5389 5390
5390 5391 The tip revision (usually just called the tip) is the changeset
5391 5392 most recently added to the repository (and therefore the most
5392 5393 recently changed head).
5393 5394
5394 5395 If you have just made a commit, that commit will be the tip. If
5395 5396 you have just pulled changes from another repository, the tip of
5396 5397 that repository becomes the current tip. The "tip" tag is special
5397 5398 and cannot be renamed or assigned to a different changeset.
5398 5399
5399 5400 This command is deprecated, please use :hg:`heads` instead.
5400 5401
5401 5402 Returns 0 on success.
5402 5403 """
5403 5404 opts = pycompat.byteskwargs(opts)
5404 5405 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
5405 5406 displayer.show(repo['tip'])
5406 5407 displayer.close()
5407 5408
5408 5409 @command('unbundle',
5409 5410 [('u', 'update', None,
5410 5411 _('update to new branch head if changesets were unbundled'))],
5411 5412 _('[-u] FILE...'))
5412 5413 def unbundle(ui, repo, fname1, *fnames, **opts):
5413 5414 """apply one or more bundle files
5414 5415
5415 5416 Apply one or more bundle files generated by :hg:`bundle`.
5416 5417
5417 5418 Returns 0 on success, 1 if an update has unresolved files.
5418 5419 """
5419 5420 fnames = (fname1,) + fnames
5420 5421
5421 5422 with repo.lock():
5422 5423 for fname in fnames:
5423 5424 f = hg.openpath(ui, fname)
5424 5425 gen = exchange.readbundle(ui, f, fname)
5425 5426 if isinstance(gen, streamclone.streamcloneapplier):
5426 5427 raise error.Abort(
5427 5428 _('packed bundles cannot be applied with '
5428 5429 '"hg unbundle"'),
5429 5430 hint=_('use "hg debugapplystreamclonebundle"'))
5430 5431 url = 'bundle:' + fname
5431 5432 try:
5432 5433 txnname = 'unbundle'
5433 5434 if not isinstance(gen, bundle2.unbundle20):
5434 5435 txnname = 'unbundle\n%s' % util.hidepassword(url)
5435 5436 with repo.transaction(txnname) as tr:
5436 5437 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
5437 5438 url=url)
5438 5439 except error.BundleUnknownFeatureError as exc:
5439 5440 raise error.Abort(
5440 5441 _('%s: unknown bundle feature, %s') % (fname, exc),
5441 5442 hint=_("see https://mercurial-scm.org/"
5442 5443 "wiki/BundleFeature for more "
5443 5444 "information"))
5444 5445 modheads = bundle2.combinechangegroupresults(op)
5445 5446
5446 5447 return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5447 5448
5448 5449 @command('^update|up|checkout|co',
5449 5450 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5450 5451 ('c', 'check', None, _('require clean working directory')),
5451 5452 ('m', 'merge', None, _('merge uncommitted changes')),
5452 5453 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5453 5454 ('r', 'rev', '', _('revision'), _('REV'))
5454 5455 ] + mergetoolopts,
5455 5456 _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
5456 5457 def update(ui, repo, node=None, **opts):
5457 5458 """update working directory (or switch revisions)
5458 5459
5459 5460 Update the repository's working directory to the specified
5460 5461 changeset. If no changeset is specified, update to the tip of the
5461 5462 current named branch and move the active bookmark (see :hg:`help
5462 5463 bookmarks`).
5463 5464
5464 5465 Update sets the working directory's parent revision to the specified
5465 5466 changeset (see :hg:`help parents`).
5466 5467
5467 5468 If the changeset is not a descendant or ancestor of the working
5468 5469 directory's parent and there are uncommitted changes, the update is
5469 5470 aborted. With the -c/--check option, the working directory is checked
5470 5471 for uncommitted changes; if none are found, the working directory is
5471 5472 updated to the specified changeset.
5472 5473
5473 5474 .. container:: verbose
5474 5475
5475 5476 The -C/--clean, -c/--check, and -m/--merge options control what
5476 5477 happens if the working directory contains uncommitted changes.
5477 5478 At most one of them can be specified.
5478 5479
5479 5480 1. If no option is specified, and if
5480 5481 the requested changeset is an ancestor or descendant of
5481 5482 the working directory's parent, the uncommitted changes
5482 5483 are merged into the requested changeset and the merged
5483 5484 result is left uncommitted. If the requested changeset is
5484 5485 not an ancestor or descendant (that is, it is on another
5485 5486 branch), the update is aborted and the uncommitted changes
5486 5487 are preserved.
5487 5488
5488 5489 2. With the -m/--merge option, the update is allowed even if the
5489 5490 requested changeset is not an ancestor or descendant of
5490 5491 the working directory's parent.
5491 5492
5492 5493 3. With the -c/--check option, the update is aborted and the
5493 5494 uncommitted changes are preserved.
5494 5495
5495 5496 4. With the -C/--clean option, uncommitted changes are discarded and
5496 5497 the working directory is updated to the requested changeset.
5497 5498
5498 5499 To cancel an uncommitted merge (and lose your changes), use
5499 5500 :hg:`merge --abort`.
5500 5501
5501 5502 Use null as the changeset to remove the working directory (like
5502 5503 :hg:`clone -U`).
5503 5504
5504 5505 If you want to revert just one file to an older revision, use
5505 5506 :hg:`revert [-r REV] NAME`.
5506 5507
5507 5508 See :hg:`help dates` for a list of formats valid for -d/--date.
5508 5509
5509 5510 Returns 0 on success, 1 if there are unresolved files.
5510 5511 """
5511 5512 rev = opts.get(r'rev')
5512 5513 date = opts.get(r'date')
5513 5514 clean = opts.get(r'clean')
5514 5515 check = opts.get(r'check')
5515 5516 merge = opts.get(r'merge')
5516 5517 if rev and node:
5517 5518 raise error.Abort(_("please specify just one revision"))
5518 5519
5519 5520 if ui.configbool('commands', 'update.requiredest'):
5520 5521 if not node and not rev and not date:
5521 5522 raise error.Abort(_('you must specify a destination'),
5522 5523 hint=_('for example: hg update ".::"'))
5523 5524
5524 5525 if rev is None or rev == '':
5525 5526 rev = node
5526 5527
5527 5528 if date and rev is not None:
5528 5529 raise error.Abort(_("you can't specify a revision and a date"))
5529 5530
5530 5531 if len([x for x in (clean, check, merge) if x]) > 1:
5531 5532 raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
5532 5533 "or -m/--merge"))
5533 5534
5534 5535 updatecheck = None
5535 5536 if check:
5536 5537 updatecheck = 'abort'
5537 5538 elif merge:
5538 5539 updatecheck = 'none'
5539 5540
5540 5541 with repo.wlock():
5541 5542 cmdutil.clearunfinished(repo)
5542 5543
5543 5544 if date:
5544 5545 rev = cmdutil.finddate(ui, repo, date)
5545 5546
5546 5547 # if we defined a bookmark, we have to remember the original name
5547 5548 brev = rev
5548 5549 if rev:
5549 5550 repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
5550 5551 ctx = scmutil.revsingle(repo, rev, rev)
5551 5552 rev = ctx.rev()
5552 5553 if ctx.hidden():
5553 5554 ctxstr = ctx.hex()[:12]
5554 5555 ui.warn(_("updating to a hidden changeset %s\n") % ctxstr)
5555 5556
5556 5557 if ctx.obsolete():
5557 5558 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
5558 5559 ui.warn("(%s)\n" % obsfatemsg)
5559 5560
5560 5561 repo.ui.setconfig('ui', 'forcemerge', opts.get(r'tool'), 'update')
5561 5562
5562 5563 return hg.updatetotally(ui, repo, rev, brev, clean=clean,
5563 5564 updatecheck=updatecheck)
5564 5565
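Programmatically, the flag handling above reduces to a single call to hg.updatetotally(). A sketch under the assumption that passing the resolved revision for both the rev and brev arguments is acceptable (the helper name is hypothetical):

    from mercurial import hg, scmutil

    def safeupdate(ui, repo, rev):
        # resolve the user-supplied revision the same way the command does
        rev = scmutil.revsingle(repo, rev).rev()
        # behaves like 'hg update --check REV': abort on uncommitted changes
        return hg.updatetotally(ui, repo, rev, rev, clean=False,
                                updatecheck='abort')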
5565 5566 @command('verify', [])
5566 5567 def verify(ui, repo):
5567 5568 """verify the integrity of the repository
5568 5569
5569 5570 Verify the integrity of the current repository.
5570 5571
5571 5572 This will perform an extensive check of the repository's
5572 5573 integrity, validating the hashes and checksums of each entry in
5573 5574 the changelog, manifest, and tracked files, as well as the
5574 5575 integrity of their crosslinks and indices.
5575 5576
5576 5577 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
5577 5578 for more information about recovery from corruption of the
5578 5579 repository.
5579 5580
5580 5581 Returns 0 on success, 1 if errors are encountered.
5581 5582 """
5582 5583 return hg.verify(repo)
5583 5584
5584 5585 @command('version', [] + formatteropts, norepo=True, cmdtype=readonly)
5585 5586 def version_(ui, **opts):
5586 5587 """output version and copyright information"""
5587 5588 opts = pycompat.byteskwargs(opts)
5588 5589 if ui.verbose:
5589 5590 ui.pager('version')
5590 5591 fm = ui.formatter("version", opts)
5591 5592 fm.startitem()
5592 5593 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
5593 5594 util.version())
5594 5595 license = _(
5595 5596 "(see https://mercurial-scm.org for more information)\n"
5596 5597 "\nCopyright (C) 2005-2018 Matt Mackall and others\n"
5597 5598 "This is free software; see the source for copying conditions. "
5598 5599 "There is NO\nwarranty; "
5599 5600 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5600 5601 )
5601 5602 if not ui.quiet:
5602 5603 fm.plain(license)
5603 5604
5604 5605 if ui.verbose:
5605 5606 fm.plain(_("\nEnabled extensions:\n\n"))
5606 5607 # format names and versions into columns
5607 5608 names = []
5608 5609 vers = []
5609 5610 isinternals = []
5610 5611 for name, module in extensions.extensions():
5611 5612 names.append(name)
5612 5613 vers.append(extensions.moduleversion(module) or None)
5613 5614 isinternals.append(extensions.ismoduleinternal(module))
5614 5615 fn = fm.nested("extensions")
5615 5616 if names:
5616 5617 namefmt = " %%-%ds " % max(len(n) for n in names)
5617 5618 places = [_("external"), _("internal")]
5618 5619 for n, v, p in zip(names, vers, isinternals):
5619 5620 fn.startitem()
5620 5621 fn.condwrite(ui.verbose, "name", namefmt, n)
5621 5622 if ui.verbose:
5622 5623 fn.plain("%s " % places[p])
5623 5624 fn.data(bundled=p)
5624 5625 fn.condwrite(ui.verbose and v, "ver", "%s", v)
5625 5626 if ui.verbose:
5626 5627 fn.plain("\n")
5627 5628 fn.end()
5628 5629 fm.end()
5629 5630
5630 5631 def loadcmdtable(ui, name, cmdtable):
5631 5632 """Load command functions from specified cmdtable
5632 5633 """
5633 5634 overrides = [cmd for cmd in cmdtable if cmd in table]
5634 5635 if overrides:
5635 5636 ui.warn(_("extension '%s' overrides commands: %s\n")
5636 5637 % (name, " ".join(overrides)))
5637 5638 table.update(cmdtable)
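loadcmdtable() is what makes third-party commands appear in this table. A minimal extension sketch using the standard registrar.command pattern; the extension command name and body are made up for illustration:

    from mercurial import registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('hello', [], 'hg hello')
    def hello(ui, repo, **opts):
        # command tables registered this way are merged into commands.table
        # by loadcmdtable(), which warns when a core command is overridden
        ui.write('hello from %s\n' % repo.root)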
@@ -1,2297 +1,2310
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import collections
11 11 import errno
12 12 import hashlib
13 13
14 14 from .i18n import _
15 15 from .node import (
16 16 bin,
17 17 hex,
18 18 nullid,
19 19 )
20 from .thirdparty import (
21 attr,
22 )
20 23 from . import (
21 24 bookmarks as bookmod,
22 25 bundle2,
23 26 changegroup,
24 27 discovery,
25 28 error,
26 29 lock as lockmod,
27 30 logexchange,
28 31 obsolete,
29 32 phases,
30 33 pushkey,
31 34 pycompat,
32 35 scmutil,
33 36 sslutil,
34 37 streamclone,
35 38 url as urlmod,
36 39 util,
37 40 )
38 41 from .utils import (
39 42 stringutil,
40 43 )
41 44
42 45 urlerr = util.urlerr
43 46 urlreq = util.urlreq
44 47
45 48 # Maps bundle version human names to changegroup versions.
46 49 _bundlespeccgversions = {'v1': '01',
47 50 'v2': '02',
48 51 'packed1': 's1',
49 52 'bundle2': '02', #legacy
50 53 }
51 54
52 55 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
53 56 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
54 57
58 @attr.s
59 class bundlespec(object):
60 compression = attr.ib()
61 version = attr.ib()
62 params = attr.ib()
63
55 64 def parsebundlespec(repo, spec, strict=True, externalnames=False):
56 65 """Parse a bundle string specification into parts.
57 66
58 67 Bundle specifications denote a well-defined bundle/exchange format.
59 68 The content of a given specification should not change over time in
60 69 order to ensure that bundles produced by a newer version of Mercurial are
61 70 readable from an older version.
62 71
63 72 The string currently has the form:
64 73
65 74 <compression>-<type>[;<parameter0>[;<parameter1>]]
66 75
67 76 Where <compression> is one of the supported compression formats
68 77 and <type> is (currently) a version string. A ";" can follow the type and
69 78 all text afterwards is interpreted as URI encoded, ";" delimited key=value
70 79 pairs.
71 80
72 81 If ``strict`` is True (the default) <compression> is required. Otherwise,
73 82 it is optional.
74 83
75 84 If ``externalnames`` is False (the default), the human-centric names will
76 85 be converted to their internal representation.
77 86
78 Returns a 3-tuple of (compression, version, parameters). Compression will
79 be ``None`` if not in strict mode and a compression isn't defined.
87 Returns a bundlespec object with compression, version and params attributes.
88 Compression will be ``None`` if not in strict mode and a compression isn't
89 defined.
80 90
81 91 An ``InvalidBundleSpecification`` is raised when the specification is
82 92 not syntactically well formed.
83 93
84 94 An ``UnsupportedBundleSpecification`` is raised when the compression or
85 95 bundle type/version is not recognized.
86 96
87 97 Note: this function will likely eventually return a more complex data
88 98 structure, including bundle2 part information.
89 99 """
90 100 def parseparams(s):
91 101 if ';' not in s:
92 102 return s, {}
93 103
94 104 params = {}
95 105 version, paramstr = s.split(';', 1)
96 106
97 107 for p in paramstr.split(';'):
98 108 if '=' not in p:
99 109 raise error.InvalidBundleSpecification(
100 110 _('invalid bundle specification: '
101 111 'missing "=" in parameter: %s') % p)
102 112
103 113 key, value = p.split('=', 1)
104 114 key = urlreq.unquote(key)
105 115 value = urlreq.unquote(value)
106 116 params[key] = value
107 117
108 118 return version, params
109 119
110 120
111 121 if strict and '-' not in spec:
112 122 raise error.InvalidBundleSpecification(
113 123 _('invalid bundle specification; '
114 124 'must be prefixed with compression: %s') % spec)
115 125
116 126 if '-' in spec:
117 127 compression, version = spec.split('-', 1)
118 128
119 129 if compression not in util.compengines.supportedbundlenames:
120 130 raise error.UnsupportedBundleSpecification(
121 131 _('%s compression is not supported') % compression)
122 132
123 133 version, params = parseparams(version)
124 134
125 135 if version not in _bundlespeccgversions:
126 136 raise error.UnsupportedBundleSpecification(
127 137 _('%s is not a recognized bundle version') % version)
128 138 else:
129 139 # Value could be just the compression or just the version, in which
130 140 # case some defaults are assumed (but only when not in strict mode).
131 141 assert not strict
132 142
133 143 spec, params = parseparams(spec)
134 144
135 145 if spec in util.compengines.supportedbundlenames:
136 146 compression = spec
137 147 version = 'v1'
138 148 # Generaldelta repos require v2.
139 149 if 'generaldelta' in repo.requirements:
140 150 version = 'v2'
141 151 # Modern compression engines require v2.
142 152 if compression not in _bundlespecv1compengines:
143 153 version = 'v2'
144 154 elif spec in _bundlespeccgversions:
145 155 if spec == 'packed1':
146 156 compression = 'none'
147 157 else:
148 158 compression = 'bzip2'
149 159 version = spec
150 160 else:
151 161 raise error.UnsupportedBundleSpecification(
152 162 _('%s is not a recognized bundle specification') % spec)
153 163
154 164 # Bundle version 1 only supports a known set of compression engines.
155 165 if version == 'v1' and compression not in _bundlespecv1compengines:
156 166 raise error.UnsupportedBundleSpecification(
157 167 _('compression engine %s is not supported on v1 bundles') %
158 168 compression)
159 169
160 170 # The specification for packed1 can optionally declare the data formats
161 171 # required to apply it. If we see this metadata, compare against what the
162 172 # repo supports and error if the bundle isn't compatible.
163 173 if version == 'packed1' and 'requirements' in params:
164 174 requirements = set(params['requirements'].split(','))
165 175 missingreqs = requirements - repo.supportedformats
166 176 if missingreqs:
167 177 raise error.UnsupportedBundleSpecification(
168 178 _('missing support for repository features: %s') %
169 179 ', '.join(sorted(missingreqs)))
170 180
171 181 if not externalnames:
172 182 engine = util.compengines.forbundlename(compression)
173 183 compression = engine.bundletype()[1]
174 184 version = _bundlespeccgversions[version]
175 return compression, version, params
185
186 return bundlespec(compression, version, params)
176 187
177 188 def readbundle(ui, fh, fname, vfs=None):
178 189 header = changegroup.readexactly(fh, 4)
179 190
180 191 alg = None
181 192 if not fname:
182 193 fname = "stream"
183 194 if not header.startswith('HG') and header.startswith('\0'):
184 195 fh = changegroup.headerlessfixup(fh, header)
185 196 header = "HG10"
186 197 alg = 'UN'
187 198 elif vfs:
188 199 fname = vfs.join(fname)
189 200
190 201 magic, version = header[0:2], header[2:4]
191 202
192 203 if magic != 'HG':
193 204 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
194 205 if version == '10':
195 206 if alg is None:
196 207 alg = changegroup.readexactly(fh, 2)
197 208 return changegroup.cg1unpacker(fh, alg)
198 209 elif version.startswith('2'):
199 210 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
200 211 elif version == 'S1':
201 212 return streamclone.streamcloneapplier(fh)
202 213 else:
203 214 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
204 215
205 216 def _formatrequirementsspec(requirements):
206 217 return urlreq.quote(','.join(sorted(requirements)))
207 218
208 219 def _formatrequirementsparams(requirements):
209 220 requirements = _formatrequirementsspec(requirements)
210 221 params = "%s%s" % (urlreq.quote("requirements="), requirements)
211 222 return params
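# For illustration (assuming standard URL quoting): with requirements
# {'generaldelta', 'revlogv1'} the helpers above yield
#   _formatrequirementsspec(...)   -> 'generaldelta%2Crevlogv1'
#   _formatrequirementsparams(...) -> 'requirements%3Dgeneraldelta%2Crevlogv1'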
212 223
213 224 def getbundlespec(ui, fh):
214 225 """Infer the bundlespec from a bundle file handle.
215 226
216 227 The input file handle is seeked and the original seek position is not
217 228 restored.
218 229 """
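    # Typical return values (illustrative): 'gzip-v1' for a changegroup-1
    # bundle, 'zstd-v2' or 'none-v2' for a bundle2 file, and
    # 'none-packed1;requirements%3D...' for a stream clone bundle.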
219 230 def speccompression(alg):
220 231 try:
221 232 return util.compengines.forbundletype(alg).bundletype()[0]
222 233 except KeyError:
223 234 return None
224 235
225 236 b = readbundle(ui, fh, None)
226 237 if isinstance(b, changegroup.cg1unpacker):
227 238 alg = b._type
228 239 if alg == '_truncatedBZ':
229 240 alg = 'BZ'
230 241 comp = speccompression(alg)
231 242 if not comp:
232 243 raise error.Abort(_('unknown compression algorithm: %s') % alg)
233 244 return '%s-v1' % comp
234 245 elif isinstance(b, bundle2.unbundle20):
235 246 if 'Compression' in b.params:
236 247 comp = speccompression(b.params['Compression'])
237 248 if not comp:
238 249 raise error.Abort(_('unknown compression algorithm: %s') % comp)
239 250 else:
240 251 comp = 'none'
241 252
242 253 version = None
243 254 for part in b.iterparts():
244 255 if part.type == 'changegroup':
245 256 version = part.params['version']
246 257 if version in ('01', '02'):
247 258 version = 'v2'
248 259 else:
249 260 raise error.Abort(_('changegroup version %s does not have '
250 261 'a known bundlespec') % version,
251 262 hint=_('try upgrading your Mercurial '
252 263 'client'))
253 264
254 265 if not version:
255 266 raise error.Abort(_('could not identify changegroup version in '
256 267 'bundle'))
257 268
258 269 return '%s-%s' % (comp, version)
259 270 elif isinstance(b, streamclone.streamcloneapplier):
260 271 requirements = streamclone.readbundle1header(fh)[2]
261 272 return 'none-packed1;%s' % _formatrequirementsparams(requirements)
262 273 else:
263 274 raise error.Abort(_('unknown bundle type: %s') % b)
264 275
265 276 def _computeoutgoing(repo, heads, common):
266 277 """Computes which revs are outgoing given a set of common
267 278 and a set of heads.
268 279
269 280 This is a separate function so extensions can have access to
270 281 the logic.
271 282
272 283 Returns a discovery.outgoing object.
273 284 """
274 285 cl = repo.changelog
275 286 if common:
276 287 hasnode = cl.hasnode
277 288 common = [n for n in common if hasnode(n)]
278 289 else:
279 290 common = [nullid]
280 291 if not heads:
281 292 heads = cl.heads()
282 293 return discovery.outgoing(repo, common, heads)
283 294
284 295 def _forcebundle1(op):
285 296 """return true if a pull/push must use bundle1
286 297
287 298 This function is used to allow testing of the older bundle version"""
288 299 ui = op.repo.ui
289 300 # The goal of this config is to allow developers to choose the bundle
290 301 # version used during exchange. This is especially handy during tests.
291 302 # Value is a list of bundle versions to pick from; the highest supported
292 303 # version should be used.
293 304 #
294 305 # developer config: devel.legacy.exchange
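    # For instance (illustrative), an hgrc containing:
    #   [devel]
    #   legacy.exchange = bundle1
    # forces bundle1 below, while listing both 'bundle1' and 'bundle2'
    # lets the highest version supported by both sides win.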
295 306 exchange = ui.configlist('devel', 'legacy.exchange')
296 307 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
297 308 return forcebundle1 or not op.remote.capable('bundle2')
298 309
299 310 class pushoperation(object):
300 311 """An object that represents a single push operation
301 312
302 313 Its purpose is to carry push related state and very common operations.
303 314
304 315 A new pushoperation should be created at the beginning of each push and
305 316 discarded afterward.
306 317 """
307 318
308 319 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
309 320 bookmarks=(), pushvars=None):
310 321 # repo we push from
311 322 self.repo = repo
312 323 self.ui = repo.ui
313 324 # repo we push to
314 325 self.remote = remote
315 326 # force option provided
316 327 self.force = force
317 328 # revs to be pushed (None is "all")
318 329 self.revs = revs
319 330 # bookmark explicitly pushed
320 331 self.bookmarks = bookmarks
321 332 # allow push of new branch
322 333 self.newbranch = newbranch
323 334 # steps already performed
324 335 # (used to check what steps have already been performed through bundle2)
325 336 self.stepsdone = set()
326 337 # Integer version of the changegroup push result
327 338 # - None means nothing to push
328 339 # - 0 means HTTP error
329 340 # - 1 means we pushed and remote head count is unchanged *or*
330 341 # we have outgoing changesets but refused to push
331 342 # - other values as described by addchangegroup()
332 343 self.cgresult = None
333 344 # Boolean value for the bookmark push
334 345 self.bkresult = None
335 346 # discover.outgoing object (contains common and outgoing data)
336 347 self.outgoing = None
337 348 # all remote topological heads before the push
338 349 self.remoteheads = None
339 350 # Details of the remote branch pre and post push
340 351 #
341 352 # mapping: {'branch': ([remoteheads],
342 353 # [newheads],
343 354 # [unsyncedheads],
344 355 # [discardedheads])}
345 356 # - branch: the branch name
346 357 # - remoteheads: the list of remote heads known locally
347 358 # None if the branch is new
348 359 # - newheads: the new remote heads (known locally) with outgoing pushed
349 360 # - unsyncedheads: the list of remote heads unknown locally.
350 361 # - discardedheads: the list of remote heads made obsolete by the push
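        # e.g. (illustrative, with made-up nodes) {'default': ([oldnode],
        # [newnode], [], [])} describes a push that advances the single
        # head of 'default' from oldnode to newnode, with nothing
        # unsynced or discarded.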
351 362 self.pushbranchmap = None
352 363 # testable as a boolean indicating if any nodes are missing locally.
353 364 self.incoming = None
354 365 # summary of the remote phase situation
355 366 self.remotephases = None
356 367 # phase changes that must be pushed alongside the changesets
357 368 self.outdatedphases = None
358 369 # phase changes that must be pushed if the changeset push fails
359 370 self.fallbackoutdatedphases = None
360 371 # outgoing obsmarkers
361 372 self.outobsmarkers = set()
362 373 # outgoing bookmarks
363 374 self.outbookmarks = []
364 375 # transaction manager
365 376 self.trmanager = None
366 377 # map { pushkey partid -> callback handling failure}
367 378 # used to handle exception from mandatory pushkey part failure
368 379 self.pkfailcb = {}
369 380 # an iterable of pushvars or None
370 381 self.pushvars = pushvars
371 382
372 383 @util.propertycache
373 384 def futureheads(self):
374 385 """future remote heads if the changeset push succeeds"""
375 386 return self.outgoing.missingheads
376 387
377 388 @util.propertycache
378 389 def fallbackheads(self):
379 390 """future remote heads if the changeset push fails"""
380 391 if self.revs is None:
381 392 # no revs targeted for push, all common heads are relevant
382 393 return self.outgoing.commonheads
383 394 unfi = self.repo.unfiltered()
384 395 # I want cheads = heads(::missingheads and ::commonheads)
385 396 # (missingheads is revs with secret changeset filtered out)
386 397 #
387 398 # This can be expressed as:
388 399 # cheads = ( (missingheads and ::commonheads)
389 400 # + (commonheads and ::missingheads)
390 401 # )
391 402 #
392 403 # while trying to push we already computed the following:
393 404 # common = (::commonheads)
394 405 # missing = ((commonheads::missingheads) - commonheads)
395 406 #
396 407 # We can pick:
397 408 # * missingheads part of common (::commonheads)
398 409 common = self.outgoing.common
399 410 nm = self.repo.changelog.nodemap
400 411 cheads = [node for node in self.revs if nm[node] in common]
401 412 # and
402 413 # * commonheads parents on missing
403 414 revset = unfi.set('%ln and parents(roots(%ln))',
404 415 self.outgoing.commonheads,
405 416 self.outgoing.missing)
406 417 cheads.extend(c.node() for c in revset)
407 418 return cheads
408 419
409 420 @property
410 421 def commonheads(self):
411 422 """set of all common heads after changeset bundle push"""
412 423 if self.cgresult:
413 424 return self.futureheads
414 425 else:
415 426 return self.fallbackheads
416 427
417 428 # mapping of messages used when pushing bookmarks
418 429 bookmsgmap = {'update': (_("updating bookmark %s\n"),
419 430 _('updating bookmark %s failed!\n')),
420 431 'export': (_("exporting bookmark %s\n"),
421 432 _('exporting bookmark %s failed!\n')),
422 433 'delete': (_("deleting remote bookmark %s\n"),
423 434 _('deleting remote bookmark %s failed!\n')),
424 435 }
425 436
426 437
427 438 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
428 439 opargs=None):
429 440 '''Push outgoing changesets (limited by revs) from a local
430 441 repository to remote. Return an integer:
431 442 - None means nothing to push
432 443 - 0 means HTTP error
433 444 - 1 means we pushed and remote head count is unchanged *or*
434 445 we have outgoing changesets but refused to push
435 446 - other values as described by addchangegroup()
436 447 '''
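    # Typical call (illustrative sketch, names are examples):
    #   pushop = exchange.push(repo, other, revs=[node], bookmarks=('@',))
    #   pushop.cgresult, pushop.bkresult  # changeset / bookmark outcomes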
437 448 if opargs is None:
438 449 opargs = {}
439 450 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
440 451 **pycompat.strkwargs(opargs))
441 452 if pushop.remote.local():
442 453 missing = (set(pushop.repo.requirements)
443 454 - pushop.remote.local().supported)
444 455 if missing:
445 456 msg = _("required features are not"
446 457 " supported in the destination:"
447 458 " %s") % (', '.join(sorted(missing)))
448 459 raise error.Abort(msg)
449 460
450 461 if not pushop.remote.canpush():
451 462 raise error.Abort(_("destination does not support push"))
452 463
453 464 if not pushop.remote.capable('unbundle'):
454 465 raise error.Abort(_('cannot push: destination does not support the '
455 466 'unbundle wire protocol command'))
456 467
457 468 # get lock as we might write phase data
458 469 wlock = lock = None
459 470 try:
460 471 # bundle2 push may receive a reply bundle touching bookmarks or other
461 472 # things requiring the wlock. Take it now to ensure proper ordering.
462 473 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
463 474 if (not _forcebundle1(pushop)) and maypushback:
464 475 wlock = pushop.repo.wlock()
465 476 lock = pushop.repo.lock()
466 477 pushop.trmanager = transactionmanager(pushop.repo,
467 478 'push-response',
468 479 pushop.remote.url())
469 480 except IOError as err:
470 481 if err.errno != errno.EACCES:
471 482 raise
472 483 # source repo cannot be locked.
473 484 # We do not abort the push, but just disable the local phase
474 485 # synchronisation.
475 486 msg = 'cannot lock source repository: %s\n' % err
476 487 pushop.ui.debug(msg)
477 488
478 489 with wlock or util.nullcontextmanager(), \
479 490 lock or util.nullcontextmanager(), \
480 491 pushop.trmanager or util.nullcontextmanager():
481 492 pushop.repo.checkpush(pushop)
482 493 _pushdiscovery(pushop)
483 494 if not _forcebundle1(pushop):
484 495 _pushbundle2(pushop)
485 496 _pushchangeset(pushop)
486 497 _pushsyncphase(pushop)
487 498 _pushobsolete(pushop)
488 499 _pushbookmark(pushop)
489 500
490 501 return pushop
491 502
492 503 # list of steps to perform discovery before push
493 504 pushdiscoveryorder = []
494 505
495 506 # Mapping between step name and function
496 507 #
497 508 # This exists to help extensions wrap steps if necessary
498 509 pushdiscoverymapping = {}
499 510
500 511 def pushdiscovery(stepname):
501 512 """decorator for function performing discovery before push
502 513
503 514 The function is added to the step -> function mapping and appended to the
504 515 list of steps. Beware that decorated functions will be added in order (this
505 516 may matter).
506 517
507 518 You can only use this decorator for a new step; if you want to wrap a step
508 519 from an extension, change the pushdiscoverymapping dictionary directly."""
509 520 def dec(func):
510 521 assert stepname not in pushdiscoverymapping
511 522 pushdiscoverymapping[stepname] = func
512 523 pushdiscoveryorder.append(stepname)
513 524 return func
514 525 return dec
515 526
516 527 def _pushdiscovery(pushop):
517 528 """Run all discovery steps"""
518 529 for stepname in pushdiscoveryorder:
519 530 step = pushdiscoverymapping[stepname]
520 531 step(pushop)
521 532
522 533 @pushdiscovery('changeset')
523 534 def _pushdiscoverychangeset(pushop):
524 535 """discover the changesets that need to be pushed"""
525 536 fci = discovery.findcommonincoming
526 537 if pushop.revs:
527 538 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
528 539 ancestorsof=pushop.revs)
529 540 else:
530 541 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
531 542 common, inc, remoteheads = commoninc
532 543 fco = discovery.findcommonoutgoing
533 544 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
534 545 commoninc=commoninc, force=pushop.force)
535 546 pushop.outgoing = outgoing
536 547 pushop.remoteheads = remoteheads
537 548 pushop.incoming = inc
538 549
539 550 @pushdiscovery('phase')
540 551 def _pushdiscoveryphase(pushop):
541 552 """discover the phases that need to be pushed
542 553
543 554 (computed for both the success and failure cases of the changeset push)"""
544 555 outgoing = pushop.outgoing
545 556 unfi = pushop.repo.unfiltered()
546 557 remotephases = pushop.remote.listkeys('phases')
547 558 if (pushop.ui.configbool('ui', '_usedassubrepo')
548 559 and remotephases # server supports phases
549 560 and not pushop.outgoing.missing # no changesets to be pushed
550 561 and remotephases.get('publishing', False)):
551 562 # When:
552 563 # - this is a subrepo push
553 564 # - and the remote supports phases
554 565 # - and no changesets are to be pushed
555 566 # - and the remote is publishing
556 567 # We may be in issue 3781 case!
557 568 # We drop the possible phase synchronisation done by
558 569 # courtesy to publish changesets possibly locally draft
559 570 # on the remote.
560 571 pushop.outdatedphases = []
561 572 pushop.fallbackoutdatedphases = []
562 573 return
563 574
564 575 pushop.remotephases = phases.remotephasessummary(pushop.repo,
565 576 pushop.fallbackheads,
566 577 remotephases)
567 578 droots = pushop.remotephases.draftroots
568 579
569 580 extracond = ''
570 581 if not pushop.remotephases.publishing:
571 582 extracond = ' and public()'
572 583 revset = 'heads((%%ln::%%ln) %s)' % extracond
573 584 # Get the list of all revs draft on remote but public here.
574 585 # XXX Beware that this revset breaks if droots are not strictly
575 586 # XXX roots; we may want to ensure they are, but it is costly
576 587 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
577 588 if not outgoing.missing:
578 589 future = fallback
579 590 else:
580 591 # adds changeset we are going to push as draft
581 592 #
582 593 # should not be necessary for a publishing server, but because of an
583 594 # issue fixed in xxxxx we have to do it anyway.
584 595 fdroots = list(unfi.set('roots(%ln + %ln::)',
585 596 outgoing.missing, droots))
586 597 fdroots = [f.node() for f in fdroots]
587 598 future = list(unfi.set(revset, fdroots, pushop.futureheads))
588 599 pushop.outdatedphases = future
589 600 pushop.fallbackoutdatedphases = fallback
590 601
591 602 @pushdiscovery('obsmarker')
592 603 def _pushdiscoveryobsmarkers(pushop):
593 604 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
594 605 and pushop.repo.obsstore
595 606 and 'obsolete' in pushop.remote.listkeys('namespaces')):
596 607 repo = pushop.repo
597 608 # very naive computation that can be quite expensive on big repos.
598 609 # However, evolution is currently slow on them anyway.
599 610 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
600 611 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
601 612
602 613 @pushdiscovery('bookmarks')
603 614 def _pushdiscoverybookmarks(pushop):
604 615 ui = pushop.ui
605 616 repo = pushop.repo.unfiltered()
606 617 remote = pushop.remote
607 618 ui.debug("checking for updated bookmarks\n")
608 619 ancestors = ()
609 620 if pushop.revs:
610 621 revnums = map(repo.changelog.rev, pushop.revs)
611 622 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
612 623 remotebookmark = remote.listkeys('bookmarks')
613 624
614 625 explicit = set([repo._bookmarks.expandname(bookmark)
615 626 for bookmark in pushop.bookmarks])
616 627
617 628 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
618 629 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
619 630
620 631 def safehex(x):
621 632 if x is None:
622 633 return x
623 634 return hex(x)
624 635
625 636 def hexifycompbookmarks(bookmarks):
626 637 return [(b, safehex(scid), safehex(dcid))
627 638 for (b, scid, dcid) in bookmarks]
628 639
629 640 comp = [hexifycompbookmarks(marks) for marks in comp]
630 641 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
631 642
632 643 def _processcompared(pushop, pushed, explicit, remotebms, comp):
633 644 """decide which bookmark updates to push to the remote
634 645
635 646 Exists to help extensions that want to alter this behavior.
636 647 """
637 648 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
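    # Descriptive note (not in the original): each category is a list of
    # (bookmark, source-node, destination-node) tuples produced by
    # comparebookmarks(); addsrc/adddst exist on only one side,
    # advsrc/advdst advanced on one side, diverge/differ moved in
    # incompatible or unknown ways, invalid are unknown on both sides,
    # and same are identical.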
638 649
639 650 repo = pushop.repo
640 651
641 652 for b, scid, dcid in advsrc:
642 653 if b in explicit:
643 654 explicit.remove(b)
644 655 if not pushed or repo[scid].rev() in pushed:
645 656 pushop.outbookmarks.append((b, dcid, scid))
646 657 # search added bookmark
647 658 for b, scid, dcid in addsrc:
648 659 if b in explicit:
649 660 explicit.remove(b)
650 661 pushop.outbookmarks.append((b, '', scid))
651 662 # search for overwritten bookmark
652 663 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
653 664 if b in explicit:
654 665 explicit.remove(b)
655 666 pushop.outbookmarks.append((b, dcid, scid))
656 667 # search for bookmark to delete
657 668 for b, scid, dcid in adddst:
658 669 if b in explicit:
659 670 explicit.remove(b)
660 671 # treat as "deleted locally"
661 672 pushop.outbookmarks.append((b, dcid, ''))
662 673 # identical bookmarks shouldn't get reported
663 674 for b, scid, dcid in same:
664 675 if b in explicit:
665 676 explicit.remove(b)
666 677
667 678 if explicit:
668 679 explicit = sorted(explicit)
669 680 # we should probably list all of them
670 681 pushop.ui.warn(_('bookmark %s does not exist on the local '
671 682 'or remote repository!\n') % explicit[0])
672 683 pushop.bkresult = 2
673 684
674 685 pushop.outbookmarks.sort()
675 686
676 687 def _pushcheckoutgoing(pushop):
677 688 outgoing = pushop.outgoing
678 689 unfi = pushop.repo.unfiltered()
679 690 if not outgoing.missing:
680 691 # nothing to push
681 692 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
682 693 return False
683 694 # something to push
684 695 if not pushop.force:
685 696 # if repo.obsstore is false (empty) --> no obsolete changesets,
686 697 # so we can skip the iteration
687 698 if unfi.obsstore:
688 699 # these messages are defined here to stay within the 80-char limit
689 700 mso = _("push includes obsolete changeset: %s!")
690 701 mspd = _("push includes phase-divergent changeset: %s!")
691 702 mscd = _("push includes content-divergent changeset: %s!")
692 703 mst = {"orphan": _("push includes orphan changeset: %s!"),
693 704 "phase-divergent": mspd,
694 705 "content-divergent": mscd}
695 706 # If there is at least one obsolete or unstable
696 707 # changeset in missing, then at least one of the
697 708 # missing heads will be obsolete or unstable. So
698 709 # checking only the heads is ok.
699 710 for node in outgoing.missingheads:
700 711 ctx = unfi[node]
701 712 if ctx.obsolete():
702 713 raise error.Abort(mso % ctx)
703 714 elif ctx.isunstable():
704 715 # TODO print more than one instability in the abort
705 716 # message
706 717 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
707 718
708 719 discovery.checkheads(pushop)
709 720 return True
710 721
711 722 # List of names of steps to perform for an outgoing bundle2, order matters.
712 723 b2partsgenorder = []
713 724
714 725 # Mapping between step name and function
715 726 #
716 727 # This exists to help extensions wrap steps if necessary
717 728 b2partsgenmapping = {}
718 729
719 730 def b2partsgenerator(stepname, idx=None):
720 731 """decorator for function generating bundle2 part
721 732
722 733 The function is added to the step -> function mapping and appended to the
723 734 list of steps. Beware that decorated functions will be added in order
724 735 (this may matter).
725 736
726 737 You can only use this decorator for new steps; if you want to wrap a step
727 738 from an extension, change the b2partsgenmapping dictionary directly."""
728 739 def dec(func):
729 740 assert stepname not in b2partsgenmapping
730 741 b2partsgenmapping[stepname] = func
731 742 if idx is None:
732 743 b2partsgenorder.append(stepname)
733 744 else:
734 745 b2partsgenorder.insert(idx, stepname)
735 746 return func
736 747 return dec
737 748
738 749 def _pushb2ctxcheckheads(pushop, bundler):
739 750 """Generate race condition checking parts
740 751
741 752 Exists as an independent function to aid extensions
742 753 """
743 754 # * 'force' does not check for push races,
744 755 # * if we don't push anything, there is nothing to check.
745 756 if not pushop.force and pushop.outgoing.missingheads:
746 757 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
747 758 emptyremote = pushop.pushbranchmap is None
748 759 if not allowunrelated or emptyremote:
749 760 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
750 761 else:
751 762 affected = set()
752 763 for branch, heads in pushop.pushbranchmap.iteritems():
753 764 remoteheads, newheads, unsyncedheads, discardedheads = heads
754 765 if remoteheads is not None:
755 766 remote = set(remoteheads)
756 767 affected |= set(discardedheads) & remote
757 768 affected |= remote - set(newheads)
758 769 if affected:
759 770 data = iter(sorted(affected))
760 771 bundler.newpart('check:updated-heads', data=data)
761 772
762 773 def _pushing(pushop):
763 774 """return True if we are pushing anything"""
764 775 return bool(pushop.outgoing.missing
765 776 or pushop.outdatedphases
766 777 or pushop.outobsmarkers
767 778 or pushop.outbookmarks)
768 779
769 780 @b2partsgenerator('check-bookmarks')
770 781 def _pushb2checkbookmarks(pushop, bundler):
771 782 """insert bookmark move checking"""
772 783 if not _pushing(pushop) or pushop.force:
773 784 return
774 785 b2caps = bundle2.bundle2caps(pushop.remote)
775 786 hasbookmarkcheck = 'bookmarks' in b2caps
776 787 if not (pushop.outbookmarks and hasbookmarkcheck):
777 788 return
778 789 data = []
779 790 for book, old, new in pushop.outbookmarks:
780 791 old = bin(old)
781 792 data.append((book, old))
782 793 checkdata = bookmod.binaryencode(data)
783 794 bundler.newpart('check:bookmarks', data=checkdata)
784 795
785 796 @b2partsgenerator('check-phases')
786 797 def _pushb2checkphases(pushop, bundler):
787 798 """insert phase move checking"""
788 799 if not _pushing(pushop) or pushop.force:
789 800 return
790 801 b2caps = bundle2.bundle2caps(pushop.remote)
791 802 hasphaseheads = 'heads' in b2caps.get('phases', ())
792 803 if pushop.remotephases is not None and hasphaseheads:
793 804 # check that the remote phase has not changed
794 805 checks = [[] for p in phases.allphases]
795 806 checks[phases.public].extend(pushop.remotephases.publicheads)
796 807 checks[phases.draft].extend(pushop.remotephases.draftroots)
797 808 if any(checks):
798 809 for nodes in checks:
799 810 nodes.sort()
800 811 checkdata = phases.binaryencode(checks)
801 812 bundler.newpart('check:phases', data=checkdata)
802 813
803 814 @b2partsgenerator('changeset')
804 815 def _pushb2ctx(pushop, bundler):
805 816 """handle changegroup push through bundle2
806 817
807 818 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
808 819 """
809 820 if 'changesets' in pushop.stepsdone:
810 821 return
811 822 pushop.stepsdone.add('changesets')
812 823 # Send known heads to the server for race detection.
813 824 if not _pushcheckoutgoing(pushop):
814 825 return
815 826 pushop.repo.prepushoutgoinghooks(pushop)
816 827
817 828 _pushb2ctxcheckheads(pushop, bundler)
818 829
819 830 b2caps = bundle2.bundle2caps(pushop.remote)
820 831 version = '01'
821 832 cgversions = b2caps.get('changegroup')
822 833 if cgversions: # 3.1 and 3.2 ship with an empty value
823 834 cgversions = [v for v in cgversions
824 835 if v in changegroup.supportedoutgoingversions(
825 836 pushop.repo)]
826 837 if not cgversions:
827 838 raise ValueError(_('no common changegroup version'))
828 839 version = max(cgversions)
829 840 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
830 841 'push')
831 842 cgpart = bundler.newpart('changegroup', data=cgstream)
832 843 if cgversions:
833 844 cgpart.addparam('version', version)
834 845 if 'treemanifest' in pushop.repo.requirements:
835 846 cgpart.addparam('treemanifest', '1')
836 847 def handlereply(op):
837 848 """extract addchangegroup returns from server reply"""
838 849 cgreplies = op.records.getreplies(cgpart.id)
839 850 assert len(cgreplies['changegroup']) == 1
840 851 pushop.cgresult = cgreplies['changegroup'][0]['return']
841 852 return handlereply
842 853
843 854 @b2partsgenerator('phase')
844 855 def _pushb2phases(pushop, bundler):
845 856 """handle phase push through bundle2"""
846 857 if 'phases' in pushop.stepsdone:
847 858 return
848 859 b2caps = bundle2.bundle2caps(pushop.remote)
849 860 ui = pushop.repo.ui
850 861
851 862 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
852 863 haspushkey = 'pushkey' in b2caps
853 864 hasphaseheads = 'heads' in b2caps.get('phases', ())
854 865
855 866 if hasphaseheads and not legacyphase:
856 867 return _pushb2phaseheads(pushop, bundler)
857 868 elif haspushkey:
858 869 return _pushb2phasespushkey(pushop, bundler)
859 870
860 871 def _pushb2phaseheads(pushop, bundler):
861 872 """push phase information through a bundle2 - binary part"""
862 873 pushop.stepsdone.add('phases')
863 874 if pushop.outdatedphases:
864 875 updates = [[] for p in phases.allphases]
865 876 updates[0].extend(h.node() for h in pushop.outdatedphases)
866 877 phasedata = phases.binaryencode(updates)
867 878 bundler.newpart('phase-heads', data=phasedata)
868 879
869 880 def _pushb2phasespushkey(pushop, bundler):
870 881 """push phase information through a bundle2 - pushkey part"""
871 882 pushop.stepsdone.add('phases')
872 883 part2node = []
873 884
874 885 def handlefailure(pushop, exc):
875 886 targetid = int(exc.partid)
876 887 for partid, node in part2node:
877 888 if partid == targetid:
878 889 raise error.Abort(_('updating %s to public failed') % node)
879 890
880 891 enc = pushkey.encode
881 892 for newremotehead in pushop.outdatedphases:
882 893 part = bundler.newpart('pushkey')
883 894 part.addparam('namespace', enc('phases'))
884 895 part.addparam('key', enc(newremotehead.hex()))
885 896 part.addparam('old', enc('%d' % phases.draft))
886 897 part.addparam('new', enc('%d' % phases.public))
887 898 part2node.append((part.id, newremotehead))
888 899 pushop.pkfailcb[part.id] = handlefailure
889 900
890 901 def handlereply(op):
891 902 for partid, node in part2node:
892 903 partrep = op.records.getreplies(partid)
893 904 results = partrep['pushkey']
894 905 assert len(results) <= 1
895 906 msg = None
896 907 if not results:
897 908 msg = _('server ignored update of %s to public!\n') % node
898 909 elif not int(results[0]['return']):
899 910 msg = _('updating %s to public failed!\n') % node
900 911 if msg is not None:
901 912 pushop.ui.warn(msg)
902 913 return handlereply
903 914
904 915 @b2partsgenerator('obsmarkers')
905 916 def _pushb2obsmarkers(pushop, bundler):
906 917 if 'obsmarkers' in pushop.stepsdone:
907 918 return
908 919 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
909 920 if obsolete.commonversion(remoteversions) is None:
910 921 return
911 922 pushop.stepsdone.add('obsmarkers')
912 923 if pushop.outobsmarkers:
913 924 markers = sorted(pushop.outobsmarkers)
914 925 bundle2.buildobsmarkerspart(bundler, markers)
915 926
916 927 @b2partsgenerator('bookmarks')
917 928 def _pushb2bookmarks(pushop, bundler):
918 929 """handle bookmark push through bundle2"""
919 930 if 'bookmarks' in pushop.stepsdone:
920 931 return
921 932 b2caps = bundle2.bundle2caps(pushop.remote)
922 933
923 934 legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
924 935 legacybooks = 'bookmarks' in legacy
925 936
926 937 if not legacybooks and 'bookmarks' in b2caps:
927 938 return _pushb2bookmarkspart(pushop, bundler)
928 939 elif 'pushkey' in b2caps:
929 940 return _pushb2bookmarkspushkey(pushop, bundler)
930 941
931 942 def _bmaction(old, new):
932 943 """small utility for bookmark pushing"""
933 944 if not old:
934 945 return 'export'
935 946 elif not new:
936 947 return 'delete'
937 948 return 'update'
938 949
939 950 def _pushb2bookmarkspart(pushop, bundler):
940 951 pushop.stepsdone.add('bookmarks')
941 952 if not pushop.outbookmarks:
942 953 return
943 954
944 955 allactions = []
945 956 data = []
946 957 for book, old, new in pushop.outbookmarks:
947 958 new = bin(new)
948 959 data.append((book, new))
949 960 allactions.append((book, _bmaction(old, new)))
950 961 checkdata = bookmod.binaryencode(data)
951 962 bundler.newpart('bookmarks', data=checkdata)
952 963
953 964 def handlereply(op):
954 965 ui = pushop.ui
955 966 # if success
956 967 for book, action in allactions:
957 968 ui.status(bookmsgmap[action][0] % book)
958 969
959 970 return handlereply
960 971
961 972 def _pushb2bookmarkspushkey(pushop, bundler):
962 973 pushop.stepsdone.add('bookmarks')
963 974 part2book = []
964 975 enc = pushkey.encode
965 976
966 977 def handlefailure(pushop, exc):
967 978 targetid = int(exc.partid)
968 979 for partid, book, action in part2book:
969 980 if partid == targetid:
970 981 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
971 982 # we should not be called for a part we did not generate
972 983 assert False
973 984
974 985 for book, old, new in pushop.outbookmarks:
975 986 part = bundler.newpart('pushkey')
976 987 part.addparam('namespace', enc('bookmarks'))
977 988 part.addparam('key', enc(book))
978 989 part.addparam('old', enc(old))
979 990 part.addparam('new', enc(new))
980 991 action = 'update'
981 992 if not old:
982 993 action = 'export'
983 994 elif not new:
984 995 action = 'delete'
985 996 part2book.append((part.id, book, action))
986 997 pushop.pkfailcb[part.id] = handlefailure
987 998
988 999 def handlereply(op):
989 1000 ui = pushop.ui
990 1001 for partid, book, action in part2book:
991 1002 partrep = op.records.getreplies(partid)
992 1003 results = partrep['pushkey']
993 1004 assert len(results) <= 1
994 1005 if not results:
995 1006 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
996 1007 else:
997 1008 ret = int(results[0]['return'])
998 1009 if ret:
999 1010 ui.status(bookmsgmap[action][0] % book)
1000 1011 else:
1001 1012 ui.warn(bookmsgmap[action][1] % book)
1002 1013 if pushop.bkresult is not None:
1003 1014 pushop.bkresult = 1
1004 1015 return handlereply
1005 1016
1006 1017 @b2partsgenerator('pushvars', idx=0)
1007 1018 def _getbundlesendvars(pushop, bundler):
1008 1019 '''send shellvars via bundle2'''
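    # Illustrative only: with something like `hg push --pushvars DEBUG=1`
    # the loop below yields shellvars == {'DEBUG': '1'}, which is sent as
    # advisory parameters of the 'pushvars' bundle2 part.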
1009 1020 pushvars = pushop.pushvars
1010 1021 if pushvars:
1011 1022 shellvars = {}
1012 1023 for raw in pushvars:
1013 1024 if '=' not in raw:
1014 1025 msg = ("unable to parse variable '%s', should follow "
1015 1026 "'KEY=VALUE' or 'KEY=' format")
1016 1027 raise error.Abort(msg % raw)
1017 1028 k, v = raw.split('=', 1)
1018 1029 shellvars[k] = v
1019 1030
1020 1031 part = bundler.newpart('pushvars')
1021 1032
1022 1033 for key, value in shellvars.iteritems():
1023 1034 part.addparam(key, value, mandatory=False)
1024 1035
1025 1036 def _pushbundle2(pushop):
1026 1037 """push data to the remote using bundle2
1027 1038
1028 1039 The only currently supported type of data is changegroup but this will
1029 1040 evolve in the future."""
1030 1041 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1031 1042 pushback = (pushop.trmanager
1032 1043 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
1033 1044
1034 1045 # create reply capability
1035 1046 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
1036 1047 allowpushback=pushback,
1037 1048 role='client'))
1038 1049 bundler.newpart('replycaps', data=capsblob)
1039 1050 replyhandlers = []
1040 1051 for partgenname in b2partsgenorder:
1041 1052 partgen = b2partsgenmapping[partgenname]
1042 1053 ret = partgen(pushop, bundler)
1043 1054 if callable(ret):
1044 1055 replyhandlers.append(ret)
1045 1056 # do not push if nothing to push
1046 1057 if bundler.nbparts <= 1:
1047 1058 return
1048 1059 stream = util.chunkbuffer(bundler.getchunks())
1049 1060 try:
1050 1061 try:
1051 1062 reply = pushop.remote.unbundle(
1052 1063 stream, ['force'], pushop.remote.url())
1053 1064 except error.BundleValueError as exc:
1054 1065 raise error.Abort(_('missing support for %s') % exc)
1055 1066 try:
1056 1067 trgetter = None
1057 1068 if pushback:
1058 1069 trgetter = pushop.trmanager.transaction
1059 1070 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1060 1071 except error.BundleValueError as exc:
1061 1072 raise error.Abort(_('missing support for %s') % exc)
1062 1073 except bundle2.AbortFromPart as exc:
1063 1074 pushop.ui.status(_('remote: %s\n') % exc)
1064 1075 if exc.hint is not None:
1065 1076 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
1066 1077 raise error.Abort(_('push failed on remote'))
1067 1078 except error.PushkeyFailed as exc:
1068 1079 partid = int(exc.partid)
1069 1080 if partid not in pushop.pkfailcb:
1070 1081 raise
1071 1082 pushop.pkfailcb[partid](pushop, exc)
1072 1083 for rephand in replyhandlers:
1073 1084 rephand(op)
1074 1085
1075 1086 def _pushchangeset(pushop):
1076 1087 """Make the actual push of changeset bundle to remote repo"""
1077 1088 if 'changesets' in pushop.stepsdone:
1078 1089 return
1079 1090 pushop.stepsdone.add('changesets')
1080 1091 if not _pushcheckoutgoing(pushop):
1081 1092 return
1082 1093
1083 1094 # Should have verified this in push().
1084 1095 assert pushop.remote.capable('unbundle')
1085 1096
1086 1097 pushop.repo.prepushoutgoinghooks(pushop)
1087 1098 outgoing = pushop.outgoing
1088 1099 # TODO: get bundlecaps from remote
1089 1100 bundlecaps = None
1090 1101 # create a changegroup from local
1091 1102 if pushop.revs is None and not (outgoing.excluded
1092 1103 or pushop.repo.changelog.filteredrevs):
1093 1104 # push everything,
1094 1105 # use the fast path, no race possible on push
1095 1106 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
1096 1107 fastpath=True, bundlecaps=bundlecaps)
1097 1108 else:
1098 1109 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
1099 1110 'push', bundlecaps=bundlecaps)
1100 1111
1101 1112 # apply changegroup to remote
1102 1113 # local repo finds heads on server, finds out what
1103 1114 # revs it must push. once revs transferred, if server
1104 1115 # finds it has different heads (someone else won
1105 1116 # commit/push race), server aborts.
1106 1117 if pushop.force:
1107 1118 remoteheads = ['force']
1108 1119 else:
1109 1120 remoteheads = pushop.remoteheads
1110 1121 # ssh: return remote's addchangegroup()
1111 1122 # http: return remote's addchangegroup() or 0 for error
1112 1123 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
1113 1124 pushop.repo.url())
1114 1125
1115 1126 def _pushsyncphase(pushop):
1116 1127 """synchronise phase information locally and remotely"""
1117 1128 cheads = pushop.commonheads
1118 1129 # even when we don't push, exchanging phase data is useful
1119 1130 remotephases = pushop.remote.listkeys('phases')
1120 1131 if (pushop.ui.configbool('ui', '_usedassubrepo')
1121 1132 and remotephases # server supports phases
1122 1133 and pushop.cgresult is None # nothing was pushed
1123 1134 and remotephases.get('publishing', False)):
1124 1135 # When:
1125 1136 # - this is a subrepo push
1126 1137 # - and the remote supports phases
1127 1138 # - and no changeset was pushed
1128 1139 # - and the remote is publishing
1129 1140 # We may be in issue 3871 case!
1130 1141 # We drop the possible phase synchronisation done by
1131 1142 # courtesy to publish changesets possibly locally draft
1132 1143 # on the remote.
1133 1144 remotephases = {'publishing': 'True'}
1134 1145 if not remotephases: # old server or public only reply from non-publishing
1135 1146 _localphasemove(pushop, cheads)
1136 1147 # don't push any phase data as there is nothing to push
1137 1148 else:
1138 1149 ana = phases.analyzeremotephases(pushop.repo, cheads,
1139 1150 remotephases)
1140 1151 pheads, droots = ana
1141 1152 ### Apply remote phase on local
1142 1153 if remotephases.get('publishing', False):
1143 1154 _localphasemove(pushop, cheads)
1144 1155 else: # publish = False
1145 1156 _localphasemove(pushop, pheads)
1146 1157 _localphasemove(pushop, cheads, phases.draft)
1147 1158 ### Apply local phase on remote
1148 1159
1149 1160 if pushop.cgresult:
1150 1161 if 'phases' in pushop.stepsdone:
1151 1162 # phases already pushed through bundle2
1152 1163 return
1153 1164 outdated = pushop.outdatedphases
1154 1165 else:
1155 1166 outdated = pushop.fallbackoutdatedphases
1156 1167
1157 1168 pushop.stepsdone.add('phases')
1158 1169
1159 1170 # filter heads already turned public by the push
1160 1171 outdated = [c for c in outdated if c.node() not in pheads]
1161 1172 # fallback to independent pushkey command
1162 1173 for newremotehead in outdated:
1163 1174 r = pushop.remote.pushkey('phases',
1164 1175 newremotehead.hex(),
1165 1176 ('%d' % phases.draft),
1166 1177 ('%d' % phases.public))
1167 1178 if not r:
1168 1179 pushop.ui.warn(_('updating %s to public failed!\n')
1169 1180 % newremotehead)
1170 1181
1171 1182 def _localphasemove(pushop, nodes, phase=phases.public):
1172 1183 """move <nodes> to <phase> in the local source repo"""
1173 1184 if pushop.trmanager:
1174 1185 phases.advanceboundary(pushop.repo,
1175 1186 pushop.trmanager.transaction(),
1176 1187 phase,
1177 1188 nodes)
1178 1189 else:
1179 1190 # repo is not locked, do not change any phases!
1180 1191 # Inform the user that phases should have been moved when
1181 1192 # applicable.
1182 1193 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1183 1194 phasestr = phases.phasenames[phase]
1184 1195 if actualmoves:
1185 1196 pushop.ui.status(_('cannot lock source repo, skipping '
1186 1197 'local %s phase update\n') % phasestr)
1187 1198
1188 1199 def _pushobsolete(pushop):
1189 1200 """utility function to push obsolete markers to a remote"""
1190 1201 if 'obsmarkers' in pushop.stepsdone:
1191 1202 return
1192 1203 repo = pushop.repo
1193 1204 remote = pushop.remote
1194 1205 pushop.stepsdone.add('obsmarkers')
1195 1206 if pushop.outobsmarkers:
1196 1207 pushop.ui.debug('try to push obsolete markers to remote\n')
1197 1208 rslts = []
1198 1209 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1199 1210 for key in sorted(remotedata, reverse=True):
1200 1211 # reverse sort to ensure we end with dump0
1201 1212 data = remotedata[key]
1202 1213 rslts.append(remote.pushkey('obsolete', key, '', data))
1203 1214 if [r for r in rslts if not r]:
1204 1215 msg = _('failed to push some obsolete markers!\n')
1205 1216 repo.ui.warn(msg)
1206 1217
1207 1218 def _pushbookmark(pushop):
1208 1219 """Update bookmark position on remote"""
1209 1220 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1210 1221 return
1211 1222 pushop.stepsdone.add('bookmarks')
1212 1223 ui = pushop.ui
1213 1224 remote = pushop.remote
1214 1225
1215 1226 for b, old, new in pushop.outbookmarks:
1216 1227 action = 'update'
1217 1228 if not old:
1218 1229 action = 'export'
1219 1230 elif not new:
1220 1231 action = 'delete'
1221 1232 if remote.pushkey('bookmarks', b, old, new):
1222 1233 ui.status(bookmsgmap[action][0] % b)
1223 1234 else:
1224 1235 ui.warn(bookmsgmap[action][1] % b)
1225 1236 # discovery can have set the value from an invalid entry
1226 1237 if pushop.bkresult is not None:
1227 1238 pushop.bkresult = 1
1228 1239
1229 1240 class pulloperation(object):
1230 1241 """An object that represents a single pull operation
1231 1242
1232 1243 Its purpose is to carry pull-related state and very common operations.
1233 1244
1234 1245 A new pulloperation should be created at the beginning of each pull and
1235 1246 discarded afterward.
1236 1247 """
1237 1248
1238 1249 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1239 1250 remotebookmarks=None, streamclonerequested=None):
1240 1251 # repo we pull into
1241 1252 self.repo = repo
1242 1253 # repo we pull from
1243 1254 self.remote = remote
1244 1255 # revision we try to pull (None is "all")
1245 1256 self.heads = heads
1246 1257 # bookmark pulled explicitly
1247 1258 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1248 1259 for bookmark in bookmarks]
1249 1260 # do we force pull?
1250 1261 self.force = force
1251 1262 # whether a streaming clone was requested
1252 1263 self.streamclonerequested = streamclonerequested
1253 1264 # transaction manager
1254 1265 self.trmanager = None
1255 1266 # set of common changesets between local and remote before pull
1256 1267 self.common = None
1257 1268 # set of pulled heads
1258 1269 self.rheads = None
1259 1270 # list of missing changesets to fetch remotely
1260 1271 self.fetch = None
1261 1272 # remote bookmarks data
1262 1273 self.remotebookmarks = remotebookmarks
1263 1274 # result of changegroup pulling (used as return code by pull)
1264 1275 self.cgresult = None
1265 1276 # list of steps already done
1266 1277 self.stepsdone = set()
1267 1278 # Whether we attempted a clone from pre-generated bundles.
1268 1279 self.clonebundleattempted = False
1269 1280
1270 1281 @util.propertycache
1271 1282 def pulledsubset(self):
1272 1283 """heads of the set of changeset target by the pull"""
1273 1284 # compute target subset
1274 1285 if self.heads is None:
1275 1286 # We pulled every thing possible
1276 1287 # sync on everything common
1277 1288 c = set(self.common)
1278 1289 ret = list(self.common)
1279 1290 for n in self.rheads:
1280 1291 if n not in c:
1281 1292 ret.append(n)
1282 1293 return ret
1283 1294 else:
1284 1295 # We pulled a specific subset
1285 1296 # sync on this subset
1286 1297 return self.heads
1287 1298
1288 1299 @util.propertycache
1289 1300 def canusebundle2(self):
1290 1301 return not _forcebundle1(self)
1291 1302
1292 1303 @util.propertycache
1293 1304 def remotebundle2caps(self):
1294 1305 return bundle2.bundle2caps(self.remote)
1295 1306
1296 1307 def gettransaction(self):
1297 1308 # deprecated; talk to trmanager directly
1298 1309 return self.trmanager.transaction()
1299 1310
1300 1311 class transactionmanager(util.transactional):
1301 1312 """An object to manage the life cycle of a transaction
1302 1313
1303 1314 It creates the transaction on demand and calls the appropriate hooks when
1304 1315 closing the transaction."""
1305 1316 def __init__(self, repo, source, url):
1306 1317 self.repo = repo
1307 1318 self.source = source
1308 1319 self.url = url
1309 1320 self._tr = None
1310 1321
1311 1322 def transaction(self):
1312 1323 """Return an open transaction object, constructing if necessary"""
1313 1324 if not self._tr:
1314 1325 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1315 1326 self._tr = self.repo.transaction(trname)
1316 1327 self._tr.hookargs['source'] = self.source
1317 1328 self._tr.hookargs['url'] = self.url
1318 1329 return self._tr
1319 1330
1320 1331 def close(self):
1321 1332 """close transaction if created"""
1322 1333 if self._tr is not None:
1323 1334 self._tr.close()
1324 1335
1325 1336 def release(self):
1326 1337 """release transaction if created"""
1327 1338 if self._tr is not None:
1328 1339 self._tr.release()
1329 1340
1330 1341 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1331 1342 streamclonerequested=None):
1332 1343 """Fetch repository data from a remote.
1333 1344
1334 1345 This is the main function used to retrieve data from a remote repository.
1335 1346
1336 1347 ``repo`` is the local repository to clone into.
1337 1348 ``remote`` is a peer instance.
1338 1349 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1339 1350 default) means to pull everything from the remote.
1340 1351 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1341 1352 default, all remote bookmarks are pulled.
1342 1353 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1343 1354 initialization.
1344 1355 ``streamclonerequested`` is a boolean indicating whether a "streaming
1345 1356 clone" is requested. A "streaming clone" is essentially a raw file copy
1346 1357 of revlogs from the server. This only works when the local repository is
1347 1358 empty. The default value of ``None`` means to respect the server
1348 1359 configuration for preferring stream clones.
1349 1360
1350 1361 Returns the ``pulloperation`` created for this pull.
1351 1362 """
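    # Typical call (illustrative sketch): exchange.pull(repo, other) with
    # heads=None pulls everything; the returned pulloperation's cgresult
    # mirrors the changegroup application result.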
1352 1363 if opargs is None:
1353 1364 opargs = {}
1354 1365 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1355 1366 streamclonerequested=streamclonerequested,
1356 1367 **pycompat.strkwargs(opargs))
1357 1368
1358 1369 peerlocal = pullop.remote.local()
1359 1370 if peerlocal:
1360 1371 missing = set(peerlocal.requirements) - pullop.repo.supported
1361 1372 if missing:
1362 1373 msg = _("required features are not"
1363 1374 " supported in the destination:"
1364 1375 " %s") % (', '.join(sorted(missing)))
1365 1376 raise error.Abort(msg)
1366 1377
1367 1378 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1368 1379 with repo.wlock(), repo.lock(), pullop.trmanager:
1369 1380 # This should ideally be in _pullbundle2(). However, it needs to run
1370 1381 # before discovery to avoid extra work.
1371 1382 _maybeapplyclonebundle(pullop)
1372 1383 streamclone.maybeperformlegacystreamclone(pullop)
1373 1384 _pulldiscovery(pullop)
1374 1385 if pullop.canusebundle2:
1375 1386 _pullbundle2(pullop)
1376 1387 _pullchangeset(pullop)
1377 1388 _pullphase(pullop)
1378 1389 _pullbookmarks(pullop)
1379 1390 _pullobsolete(pullop)
1380 1391
1381 1392 # storing remotenames
1382 1393 if repo.ui.configbool('experimental', 'remotenames'):
1383 1394 logexchange.pullremotenames(repo, remote)
1384 1395
1385 1396 return pullop
1386 1397
1387 1398 # list of steps to perform discovery before pull
1388 1399 pulldiscoveryorder = []
1389 1400
1390 1401 # Mapping between step name and function
1391 1402 #
1392 1403 # This exists to help extensions wrap steps if necessary
1393 1404 pulldiscoverymapping = {}
1394 1405
1395 1406 def pulldiscovery(stepname):
1396 1407 """decorator for function performing discovery before pull
1397 1408
1398 1409 The function is added to the step -> function mapping and appended to the
1399 1410 list of steps. Beware that decorated function will be added in order (this
1400 1411 may matter).
1401 1412
1402 1413 You can only use this decorator for a new step, if you want to wrap a step
1403 1414 from an extension, change the pulldiscovery dictionary directly."""
1404 1415 def dec(func):
1405 1416 assert stepname not in pulldiscoverymapping
1406 1417 pulldiscoverymapping[stepname] = func
1407 1418 pulldiscoveryorder.append(stepname)
1408 1419 return func
1409 1420 return dec
1410 1421
1411 1422 def _pulldiscovery(pullop):
1412 1423 """Run all discovery steps"""
1413 1424 for stepname in pulldiscoveryorder:
1414 1425 step = pulldiscoverymapping[stepname]
1415 1426 step(pullop)
1416 1427
1417 1428 @pulldiscovery('b1:bookmarks')
1418 1429 def _pullbookmarkbundle1(pullop):
1419 1430 """fetch bookmark data in bundle1 case
1420 1431
1421 1432 If not using bundle2, we have to fetch bookmarks before changeset
1422 1433 discovery to reduce the chance and impact of race conditions."""
1423 1434 if pullop.remotebookmarks is not None:
1424 1435 return
1425 1436 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1426 1437 # all known bundle2 servers now support listkeys, but let's be nice with
1427 1438 # new implementations.
1428 1439 return
1429 1440 books = pullop.remote.listkeys('bookmarks')
1430 1441 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1431 1442
1432 1443
1433 1444 @pulldiscovery('changegroup')
1434 1445 def _pulldiscoverychangegroup(pullop):
1435 1446 """discovery phase for the pull
1436 1447
1437 1448 Currently handles changeset discovery only; will change to handle all
1438 1449 discovery at some point."""
1439 1450 tmp = discovery.findcommonincoming(pullop.repo,
1440 1451 pullop.remote,
1441 1452 heads=pullop.heads,
1442 1453 force=pullop.force)
1443 1454 common, fetch, rheads = tmp
1444 1455 nm = pullop.repo.unfiltered().changelog.nodemap
1445 1456 if fetch and rheads:
1446 1457 # If a remote head is filtered locally, put it back in common.
1447 1458 #
1448 1459 # This is a hackish solution to catch most "common but locally
1449 1460 # hidden" situations. We do not perform discovery on the unfiltered
1450 1461 # repository because it ends up doing a pathological amount of round
1451 1462 # trips for a huge amount of changesets we do not care about.
1452 1463 #
1453 1464 # If a set of such "common but filtered" changesets exists on the server
1454 1465 # but does not include a remote head, we will not be able to detect it,
1455 1466 scommon = set(common)
1456 1467 for n in rheads:
1457 1468 if n in nm:
1458 1469 if n not in scommon:
1459 1470 common.append(n)
1460 1471 if set(rheads).issubset(set(common)):
1461 1472 fetch = []
1462 1473 pullop.common = common
1463 1474 pullop.fetch = fetch
1464 1475 pullop.rheads = rheads
1465 1476
1466 1477 def _pullbundle2(pullop):
1467 1478 """pull data using bundle2
1468 1479
1469 1480 For now, the only supported data are changegroup."""
1470 1481 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1471 1482
1472 1483 # make ui easier to access
1473 1484 ui = pullop.repo.ui
1474 1485
1475 1486 # At the moment we don't do stream clones over bundle2. If that is
1476 1487 # implemented then here's where the check for that will go.
1477 1488 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1478 1489
1479 1490 # declare pull perimeters
1480 1491 kwargs['common'] = pullop.common
1481 1492 kwargs['heads'] = pullop.heads or pullop.rheads
1482 1493
1483 1494 if streaming:
1484 1495 kwargs['cg'] = False
1485 1496 kwargs['stream'] = True
1486 1497 pullop.stepsdone.add('changegroup')
1487 1498 pullop.stepsdone.add('phases')
1488 1499
1489 1500 else:
1490 1501 # pulling changegroup
1491 1502 pullop.stepsdone.add('changegroup')
1492 1503
1493 1504 kwargs['cg'] = pullop.fetch
1494 1505
1495 1506 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1496 1507 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1497 1508 if (not legacyphase and hasbinaryphase):
1498 1509 kwargs['phases'] = True
1499 1510 pullop.stepsdone.add('phases')
1500 1511
1501 1512 if 'listkeys' in pullop.remotebundle2caps:
1502 1513 if 'phases' not in pullop.stepsdone:
1503 1514 kwargs['listkeys'] = ['phases']
1504 1515
1505 1516 bookmarksrequested = False
1506 1517 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1507 1518 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1508 1519
1509 1520 if pullop.remotebookmarks is not None:
1510 1521 pullop.stepsdone.add('request-bookmarks')
1511 1522
1512 1523 if ('request-bookmarks' not in pullop.stepsdone
1513 1524 and pullop.remotebookmarks is None
1514 1525 and not legacybookmark and hasbinarybook):
1515 1526 kwargs['bookmarks'] = True
1516 1527 bookmarksrequested = True
1517 1528
1518 1529 if 'listkeys' in pullop.remotebundle2caps:
1519 1530 if 'request-bookmarks' not in pullop.stepsdone:
1520 1531 # make sure to always include bookmark data when migrating
1521 1532 # `hg incoming --bundle` to using this function.
1522 1533 pullop.stepsdone.add('request-bookmarks')
1523 1534 kwargs.setdefault('listkeys', []).append('bookmarks')
1524 1535
1525 1536 # If this is a full pull / clone and the server supports the clone bundles
1526 1537 # feature, tell the server whether we attempted a clone bundle. The
1527 1538 # presence of this flag indicates the client supports clone bundles. This
1528 1539 # will enable the server to treat clients that support clone bundles
1529 1540 # differently from those that don't.
1530 1541 if (pullop.remote.capable('clonebundles')
1531 1542 and pullop.heads is None and list(pullop.common) == [nullid]):
1532 1543 kwargs['cbattempted'] = pullop.clonebundleattempted
1533 1544
1534 1545 if streaming:
1535 1546 pullop.repo.ui.status(_('streaming all changes\n'))
1536 1547 elif not pullop.fetch:
1537 1548 pullop.repo.ui.status(_("no changes found\n"))
1538 1549 pullop.cgresult = 0
1539 1550 else:
1540 1551 if pullop.heads is None and list(pullop.common) == [nullid]:
1541 1552 pullop.repo.ui.status(_("requesting all changes\n"))
1542 1553 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1543 1554 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1544 1555 if obsolete.commonversion(remoteversions) is not None:
1545 1556 kwargs['obsmarkers'] = True
1546 1557 pullop.stepsdone.add('obsmarkers')
1547 1558 _pullbundle2extraprepare(pullop, kwargs)
1548 1559 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1549 1560 try:
1550 1561 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
1551 1562 op.modes['bookmarks'] = 'records'
1552 1563 bundle2.processbundle(pullop.repo, bundle, op=op)
1553 1564 except bundle2.AbortFromPart as exc:
1554 1565 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1555 1566 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1556 1567 except error.BundleValueError as exc:
1557 1568 raise error.Abort(_('missing support for %s') % exc)
1558 1569
1559 1570 if pullop.fetch:
1560 1571 pullop.cgresult = bundle2.combinechangegroupresults(op)
1561 1572
1562 1573 # processing phases change
1563 1574 for namespace, value in op.records['listkeys']:
1564 1575 if namespace == 'phases':
1565 1576 _pullapplyphases(pullop, value)
1566 1577
1567 1578 # processing bookmark update
1568 1579 if bookmarksrequested:
1569 1580 books = {}
1570 1581 for record in op.records['bookmarks']:
1571 1582 books[record['bookmark']] = record["node"]
1572 1583 pullop.remotebookmarks = books
1573 1584 else:
1574 1585 for namespace, value in op.records['listkeys']:
1575 1586 if namespace == 'bookmarks':
1576 1587 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1577 1588
1578 1589 # bookmark data were either already there or pulled in the bundle
1579 1590 if pullop.remotebookmarks is not None:
1580 1591 _pullbookmarks(pullop)
1581 1592
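The legacyphase/legacybookmark switches above read the devel.legacy.exchange config list; a minimal sketch of how that knob could be set for testing, with illustrative values:

# hgrc snippet (illustrative, for testing only):
#   [devel]
#   legacy.exchange = phases, bookmarks
#
# With this set, _pullbundle2 skips requesting the binary 'phases' and
# 'bookmarks' bundle2 parts and the pull falls back to the listkeys-based
# code paths defined later in this file.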
1582 1593 def _pullbundle2extraprepare(pullop, kwargs):
1583 1594 """hook function so that extensions can extend the getbundle call"""
1584 1595
1585 1596 def _pullchangeset(pullop):
1586 1597 """pull changeset from unbundle into the local repo"""
1587 1598 # We delay the open of the transaction as late as possible so we
1588 1599 # don't open transaction for nothing or you break future useful
1589 1600 # rollback call
1590 1601 if 'changegroup' in pullop.stepsdone:
1591 1602 return
1592 1603 pullop.stepsdone.add('changegroup')
1593 1604 if not pullop.fetch:
1594 1605 pullop.repo.ui.status(_("no changes found\n"))
1595 1606 pullop.cgresult = 0
1596 1607 return
1597 1608 tr = pullop.gettransaction()
1598 1609 if pullop.heads is None and list(pullop.common) == [nullid]:
1599 1610 pullop.repo.ui.status(_("requesting all changes\n"))
1600 1611 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1601 1612 # issue1320, avoid a race if remote changed after discovery
1602 1613 pullop.heads = pullop.rheads
1603 1614
1604 1615 if pullop.remote.capable('getbundle'):
1605 1616 # TODO: get bundlecaps from remote
1606 1617 cg = pullop.remote.getbundle('pull', common=pullop.common,
1607 1618 heads=pullop.heads or pullop.rheads)
1608 1619 elif pullop.heads is None:
1609 1620 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1610 1621 elif not pullop.remote.capable('changegroupsubset'):
1611 1622 raise error.Abort(_("partial pull cannot be done because "
1612 1623 "other repository doesn't support "
1613 1624 "changegroupsubset."))
1614 1625 else:
1615 1626 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1616 1627 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1617 1628 pullop.remote.url())
1618 1629 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1619 1630
1620 1631 def _pullphase(pullop):
1621 1632 # Get remote phases data from remote
1622 1633 if 'phases' in pullop.stepsdone:
1623 1634 return
1624 1635 remotephases = pullop.remote.listkeys('phases')
1625 1636 _pullapplyphases(pullop, remotephases)
1626 1637
1627 1638 def _pullapplyphases(pullop, remotephases):
1628 1639 """apply phase movement from observed remote state"""
1629 1640 if 'phases' in pullop.stepsdone:
1630 1641 return
1631 1642 pullop.stepsdone.add('phases')
1632 1643 publishing = bool(remotephases.get('publishing', False))
1633 1644 if remotephases and not publishing:
1634 1645 # remote is new and non-publishing
1635 1646 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1636 1647 pullop.pulledsubset,
1637 1648 remotephases)
1638 1649 dheads = pullop.pulledsubset
1639 1650 else:
1640 1651         # Remote is old or publishing; all common changesets
1641 1652         # should be seen as public
1642 1653 pheads = pullop.pulledsubset
1643 1654 dheads = []
1644 1655 unfi = pullop.repo.unfiltered()
1645 1656 phase = unfi._phasecache.phase
1646 1657 rev = unfi.changelog.nodemap.get
1647 1658 public = phases.public
1648 1659 draft = phases.draft
1649 1660
1650 1661 # exclude changesets already public locally and update the others
1651 1662 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1652 1663 if pheads:
1653 1664 tr = pullop.gettransaction()
1654 1665 phases.advanceboundary(pullop.repo, tr, public, pheads)
1655 1666
1656 1667 # exclude changesets already draft locally and update the others
1657 1668 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1658 1669 if dheads:
1659 1670 tr = pullop.gettransaction()
1660 1671 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1661 1672
1662 1673 def _pullbookmarks(pullop):
1663 1674 """process the remote bookmark information to update the local one"""
1664 1675 if 'bookmarks' in pullop.stepsdone:
1665 1676 return
1666 1677 pullop.stepsdone.add('bookmarks')
1667 1678 repo = pullop.repo
1668 1679 remotebookmarks = pullop.remotebookmarks
1669 1680 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1670 1681 pullop.remote.url(),
1671 1682 pullop.gettransaction,
1672 1683 explicit=pullop.explicitbookmarks)
1673 1684
1674 1685 def _pullobsolete(pullop):
1675 1686 """utility function to pull obsolete markers from a remote
1676 1687
1677 1688     The `gettransaction` argument is a function that returns the pull transaction, creating
1678 1689     one if necessary. We return the transaction to inform the calling code that
1679 1690     a new transaction has been created (when applicable).
1680 1691
1681 1692     Exists mostly to allow overriding for experimentation purposes"""
1682 1693 if 'obsmarkers' in pullop.stepsdone:
1683 1694 return
1684 1695 pullop.stepsdone.add('obsmarkers')
1685 1696 tr = None
1686 1697 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1687 1698 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1688 1699 remoteobs = pullop.remote.listkeys('obsolete')
1689 1700 if 'dump0' in remoteobs:
1690 1701 tr = pullop.gettransaction()
1691 1702 markers = []
1692 1703 for key in sorted(remoteobs, reverse=True):
1693 1704 if key.startswith('dump'):
1694 1705 data = util.b85decode(remoteobs[key])
1695 1706 version, newmarks = obsolete._readmarkers(data)
1696 1707 markers += newmarks
1697 1708 if markers:
1698 1709 pullop.repo.obsstore.add(tr, markers)
1699 1710 pullop.repo.invalidatevolatilesets()
1700 1711 return tr
1701 1712
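The legacy listkeys-based obsmarker exchange above stores markers under keys named 'dump0', 'dump1', ..., each holding base85-encoded data; a decoding sketch mirroring the loop above, with a hypothetical remoteobs dict:

# remoteobs = pullop.remote.listkeys('obsolete') would look roughly like:
#   {'dump0': '<base85 data>', 'dump1': '<base85 data>'}
# and each value decodes with:
#   data = util.b85decode(remoteobs['dump0'])
#   version, markers = obsolete._readmarkers(data)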
1702 1713 def caps20to10(repo, role):
1703 1714 """return a set with appropriate options to use bundle20 during getbundle"""
1704 1715 caps = {'HG20'}
1705 1716 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
1706 1717 caps.add('bundle2=' + urlreq.quote(capsblob))
1707 1718 return caps
1708 1719
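caps20to10 produces the bundlecaps set a bundle2-capable client sends to getbundle; a sketch of its shape (the quoted blob varies by repo and role):

# caps20to10(repo, role='client') returns something like:
#   {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
# i.e. a literal 'HG20' marker plus the URL-quoted bundle2 capability blob.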
1709 1720 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1710 1721 getbundle2partsorder = []
1711 1722
1712 1723 # Mapping between step name and function
1713 1724 #
1714 1725 # This exists to help extensions wrap steps if necessary
1715 1726 getbundle2partsmapping = {}
1716 1727
1717 1728 def getbundle2partsgenerator(stepname, idx=None):
1718 1729 """decorator for function generating bundle2 part for getbundle
1719 1730
1720 1731 The function is added to the step -> function mapping and appended to the
1721 1732 list of steps. Beware that decorated functions will be added in order
1722 1733 (this may matter).
1723 1734
1724 1735     You can only use this decorator for new steps; if you want to wrap a step
1725 1736     from an extension, modify the getbundle2partsmapping dictionary directly."""
1726 1737 def dec(func):
1727 1738 assert stepname not in getbundle2partsmapping
1728 1739 getbundle2partsmapping[stepname] = func
1729 1740 if idx is None:
1730 1741 getbundle2partsorder.append(stepname)
1731 1742 else:
1732 1743 getbundle2partsorder.insert(idx, stepname)
1733 1744 return func
1734 1745 return dec
1735 1746
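A minimal sketch of how an extension could register an extra part generator through this decorator; the step name 'example-part' and its payload are illustrative, and a real extension would also advertise a matching bundle2 capability:

@getbundle2partsgenerator('example-part')
def _getbundleexamplepart(bundler, repo, source, bundlecaps=None,
                          b2caps=None, **kwargs):
    # Only emit the part when the client advertised support for it.
    if 'example-part' not in b2caps:
        return
    bundler.newpart('example-part', data=b'hello', mandatory=False)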
1736 1747 def bundle2requested(bundlecaps):
1737 1748 if bundlecaps is not None:
1738 1749 return any(cap.startswith('HG2') for cap in bundlecaps)
1739 1750 return False
1740 1751
1741 1752 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1742 1753 **kwargs):
1743 1754 """Return chunks constituting a bundle's raw data.
1744 1755
1745 1756 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1746 1757 passed.
1747 1758
1748 1759 Returns a 2-tuple of a dict with metadata about the generated bundle
1749 1760 and an iterator over raw chunks (of varying sizes).
1750 1761 """
1751 1762 kwargs = pycompat.byteskwargs(kwargs)
1752 1763 info = {}
1753 1764 usebundle2 = bundle2requested(bundlecaps)
1754 1765 # bundle10 case
1755 1766 if not usebundle2:
1756 1767 if bundlecaps and not kwargs.get('cg', True):
1757 1768 raise ValueError(_('request for bundle10 must include changegroup'))
1758 1769
1759 1770 if kwargs:
1760 1771 raise ValueError(_('unsupported getbundle arguments: %s')
1761 1772 % ', '.join(sorted(kwargs.keys())))
1762 1773 outgoing = _computeoutgoing(repo, heads, common)
1763 1774 info['bundleversion'] = 1
1764 1775 return info, changegroup.makestream(repo, outgoing, '01', source,
1765 1776 bundlecaps=bundlecaps)
1766 1777
1767 1778 # bundle20 case
1768 1779 info['bundleversion'] = 2
1769 1780 b2caps = {}
1770 1781 for bcaps in bundlecaps:
1771 1782 if bcaps.startswith('bundle2='):
1772 1783 blob = urlreq.unquote(bcaps[len('bundle2='):])
1773 1784 b2caps.update(bundle2.decodecaps(blob))
1774 1785 bundler = bundle2.bundle20(repo.ui, b2caps)
1775 1786
1776 1787 kwargs['heads'] = heads
1777 1788 kwargs['common'] = common
1778 1789
1779 1790 for name in getbundle2partsorder:
1780 1791 func = getbundle2partsmapping[name]
1781 1792 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1782 1793 **pycompat.strkwargs(kwargs))
1783 1794
1784 1795 info['prefercompressed'] = bundler.prefercompressed
1785 1796
1786 1797 return info, bundler.getchunks()
1787 1798
1788 1799 @getbundle2partsgenerator('stream2')
1789 1800 def _getbundlestream2(bundler, repo, source, bundlecaps=None,
1790 1801 b2caps=None, heads=None, common=None, **kwargs):
1791 1802 if not kwargs.get('stream', False):
1792 1803 return
1793 1804
1794 1805 if not streamclone.allowservergeneration(repo):
1795 1806 raise error.Abort(_('stream data requested but server does not allow '
1796 1807 'this feature'),
1797 1808 hint=_('well-behaved clients should not be '
1798 1809 'requesting stream data from servers not '
1799 1810 'advertising it; the client may be buggy'))
1800 1811
1801 1812 # Stream clones don't compress well. And compression undermines a
1802 1813 # goal of stream clones, which is to be fast. Communicate the desire
1803 1814 # to avoid compression to consumers of the bundle.
1804 1815 bundler.prefercompressed = False
1805 1816
1806 1817 filecount, bytecount, it = streamclone.generatev2(repo)
1807 1818 requirements = _formatrequirementsspec(repo.requirements)
1808 1819 part = bundler.newpart('stream2', data=it)
1809 1820 part.addparam('bytecount', '%d' % bytecount, mandatory=True)
1810 1821 part.addparam('filecount', '%d' % filecount, mandatory=True)
1811 1822 part.addparam('requirements', requirements, mandatory=True)
1812 1823
1813 1824 @getbundle2partsgenerator('changegroup')
1814 1825 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1815 1826 b2caps=None, heads=None, common=None, **kwargs):
1816 1827 """add a changegroup part to the requested bundle"""
1817 1828 cgstream = None
1818 1829 if kwargs.get(r'cg', True):
1819 1830 # build changegroup bundle here.
1820 1831 version = '01'
1821 1832 cgversions = b2caps.get('changegroup')
1822 1833 if cgversions: # 3.1 and 3.2 ship with an empty value
1823 1834 cgversions = [v for v in cgversions
1824 1835 if v in changegroup.supportedoutgoingversions(repo)]
1825 1836 if not cgversions:
1826 1837 raise ValueError(_('no common changegroup version'))
1827 1838 version = max(cgversions)
1828 1839 outgoing = _computeoutgoing(repo, heads, common)
1829 1840 if outgoing.missing:
1830 1841 cgstream = changegroup.makestream(repo, outgoing, version, source,
1831 1842 bundlecaps=bundlecaps)
1832 1843
1833 1844 if cgstream:
1834 1845 part = bundler.newpart('changegroup', data=cgstream)
1835 1846 if cgversions:
1836 1847 part.addparam('version', version)
1837 1848 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1838 1849 mandatory=False)
1839 1850 if 'treemanifest' in repo.requirements:
1840 1851 part.addparam('treemanifest', '1')
1841 1852
1842 1853 @getbundle2partsgenerator('bookmarks')
1843 1854 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1844 1855 b2caps=None, **kwargs):
1845 1856 """add a bookmark part to the requested bundle"""
1846 1857 if not kwargs.get(r'bookmarks', False):
1847 1858 return
1848 1859 if 'bookmarks' not in b2caps:
1849 1860 raise ValueError(_('no common bookmarks exchange method'))
1850 1861 books = bookmod.listbinbookmarks(repo)
1851 1862 data = bookmod.binaryencode(books)
1852 1863 if data:
1853 1864 bundler.newpart('bookmarks', data=data)
1854 1865
1855 1866 @getbundle2partsgenerator('listkeys')
1856 1867 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1857 1868 b2caps=None, **kwargs):
1858 1869 """add parts containing listkeys namespaces to the requested bundle"""
1859 1870 listkeys = kwargs.get(r'listkeys', ())
1860 1871 for namespace in listkeys:
1861 1872 part = bundler.newpart('listkeys')
1862 1873 part.addparam('namespace', namespace)
1863 1874 keys = repo.listkeys(namespace).items()
1864 1875 part.data = pushkey.encodekeys(keys)
1865 1876
1866 1877 @getbundle2partsgenerator('obsmarkers')
1867 1878 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1868 1879 b2caps=None, heads=None, **kwargs):
1869 1880 """add an obsolescence markers part to the requested bundle"""
1870 1881 if kwargs.get(r'obsmarkers', False):
1871 1882 if heads is None:
1872 1883 heads = repo.heads()
1873 1884 subset = [c.node() for c in repo.set('::%ln', heads)]
1874 1885 markers = repo.obsstore.relevantmarkers(subset)
1875 1886 markers = sorted(markers)
1876 1887 bundle2.buildobsmarkerspart(bundler, markers)
1877 1888
1878 1889 @getbundle2partsgenerator('phases')
1879 1890 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1880 1891 b2caps=None, heads=None, **kwargs):
1881 1892 """add phase heads part to the requested bundle"""
1882 1893 if kwargs.get(r'phases', False):
1883 1894 if not 'heads' in b2caps.get('phases'):
1884 1895 raise ValueError(_('no common phases exchange method'))
1885 1896 if heads is None:
1886 1897 heads = repo.heads()
1887 1898
1888 1899 headsbyphase = collections.defaultdict(set)
1889 1900 if repo.publishing():
1890 1901 headsbyphase[phases.public] = heads
1891 1902 else:
1892 1903 # find the appropriate heads to move
1893 1904
1894 1905 phase = repo._phasecache.phase
1895 1906 node = repo.changelog.node
1896 1907 rev = repo.changelog.rev
1897 1908 for h in heads:
1898 1909 headsbyphase[phase(repo, rev(h))].add(h)
1899 1910 seenphases = list(headsbyphase.keys())
1900 1911
1901 1912         # We do not handle anything but public and draft phases for now
1902 1913 if seenphases:
1903 1914 assert max(seenphases) <= phases.draft
1904 1915
1905 1916 # if client is pulling non-public changesets, we need to find
1906 1917 # intermediate public heads.
1907 1918 draftheads = headsbyphase.get(phases.draft, set())
1908 1919 if draftheads:
1909 1920 publicheads = headsbyphase.get(phases.public, set())
1910 1921
1911 1922 revset = 'heads(only(%ln, %ln) and public())'
1912 1923 extraheads = repo.revs(revset, draftheads, publicheads)
1913 1924 for r in extraheads:
1914 1925 headsbyphase[phases.public].add(node(r))
1915 1926
1916 1927 # transform data in a format used by the encoding function
1917 1928 phasemapping = []
1918 1929 for phase in phases.allphases:
1919 1930 phasemapping.append(sorted(headsbyphase[phase]))
1920 1931
1921 1932 # generate the actual part
1922 1933 phasedata = phases.binaryencode(phasemapping)
1923 1934 bundler.newpart('phase-heads', data=phasedata)
1924 1935
1925 1936 @getbundle2partsgenerator('hgtagsfnodes')
1926 1937 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1927 1938 b2caps=None, heads=None, common=None,
1928 1939 **kwargs):
1929 1940 """Transfer the .hgtags filenodes mapping.
1930 1941
1931 1942 Only values for heads in this bundle will be transferred.
1932 1943
1933 1944 The part data consists of pairs of 20 byte changeset node and .hgtags
1934 1945 filenodes raw values.
1935 1946 """
1936 1947 # Don't send unless:
1937 1948     # - changesets are being exchanged,
1938 1949 # - the client supports it.
1939 1950 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
1940 1951 return
1941 1952
1942 1953 outgoing = _computeoutgoing(repo, heads, common)
1943 1954 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1944 1955
1945 1956 @getbundle2partsgenerator('cache:rev-branch-cache')
1946 1957 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
1947 1958 b2caps=None, heads=None, common=None,
1948 1959 **kwargs):
1949 1960 """Transfer the rev-branch-cache mapping
1950 1961
1951 1962     The payload is a series of records, one per branch:
1952 1963
1953 1964 1) branch name length
1954 1965 2) number of open heads
1955 1966 3) number of closed heads
1956 1967 4) open heads nodes
1957 1968 5) closed heads nodes
1958 1969 """
1959 1970 # Don't send unless:
1960 1971     # - changesets are being exchanged,
1961 1972 # - the client supports it.
1962 1973 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
1963 1974 return
1964 1975 outgoing = _computeoutgoing(repo, heads, common)
1965 1976 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
1966 1977
1967 1978 def check_heads(repo, their_heads, context):
1968 1979 """check if the heads of a repo have been modified
1969 1980
1970 1981 Used by peer for unbundling.
1971 1982 """
1972 1983 heads = repo.heads()
1973 1984 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1974 1985 if not (their_heads == ['force'] or their_heads == heads or
1975 1986 their_heads == ['hashed', heads_hash]):
1976 1987 # someone else committed/pushed/unbundled while we
1977 1988 # were transferring data
1978 1989 raise error.PushRaced('repository changed while %s - '
1979 1990 'please try again' % context)
1980 1991
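The 'hashed' form compared above is a SHA-1 over the concatenation of the sorted binary heads; a minimal sketch with made-up node ids:

import hashlib
# two made-up 20-byte node ids standing in for repo.heads()
heads = [b'\x11' * 20, b'\x22' * 20]
heads_hash = hashlib.sha1(b''.join(sorted(heads))).digest()
their_heads = ['hashed', heads_hash]   # what a pushing peer would send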
1981 1992 def unbundle(repo, cg, heads, source, url):
1982 1993 """Apply a bundle to a repo.
1983 1994
1984 1995     This function makes sure the repo is locked during the application and has a
1985 1996     mechanism to check that no push race occurred between the creation of the
1986 1997 bundle and its application.
1987 1998
1988 1999     If the push was raced, a PushRaced exception is raised."""
1989 2000 r = 0
1990 2001 # need a transaction when processing a bundle2 stream
1991 2002 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1992 2003 lockandtr = [None, None, None]
1993 2004 recordout = None
1994 2005 # quick fix for output mismatch with bundle2 in 3.4
1995 2006 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1996 2007 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1997 2008 captureoutput = True
1998 2009 try:
1999 2010 # note: outside bundle1, 'heads' is expected to be empty and this
2000 2011             # 'check_heads' call will be a no-op
2001 2012 check_heads(repo, heads, 'uploading changes')
2002 2013 # push can proceed
2003 2014 if not isinstance(cg, bundle2.unbundle20):
2004 2015 # legacy case: bundle1 (changegroup 01)
2005 2016 txnname = "\n".join([source, util.hidepassword(url)])
2006 2017 with repo.lock(), repo.transaction(txnname) as tr:
2007 2018 op = bundle2.applybundle(repo, cg, tr, source, url)
2008 2019 r = bundle2.combinechangegroupresults(op)
2009 2020 else:
2010 2021 r = None
2011 2022 try:
2012 2023 def gettransaction():
2013 2024 if not lockandtr[2]:
2014 2025 lockandtr[0] = repo.wlock()
2015 2026 lockandtr[1] = repo.lock()
2016 2027 lockandtr[2] = repo.transaction(source)
2017 2028 lockandtr[2].hookargs['source'] = source
2018 2029 lockandtr[2].hookargs['url'] = url
2019 2030 lockandtr[2].hookargs['bundle2'] = '1'
2020 2031 return lockandtr[2]
2021 2032
2022 2033 # Do greedy locking by default until we're satisfied with lazy
2023 2034 # locking.
2024 2035 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2025 2036 gettransaction()
2026 2037
2027 2038 op = bundle2.bundleoperation(repo, gettransaction,
2028 2039 captureoutput=captureoutput)
2029 2040 try:
2030 2041 op = bundle2.processbundle(repo, cg, op=op)
2031 2042 finally:
2032 2043 r = op.reply
2033 2044 if captureoutput and r is not None:
2034 2045 repo.ui.pushbuffer(error=True, subproc=True)
2035 2046 def recordout(output):
2036 2047 r.newpart('output', data=output, mandatory=False)
2037 2048 if lockandtr[2] is not None:
2038 2049 lockandtr[2].close()
2039 2050 except BaseException as exc:
2040 2051 exc.duringunbundle2 = True
2041 2052 if captureoutput and r is not None:
2042 2053 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2043 2054 def recordout(output):
2044 2055 part = bundle2.bundlepart('output', data=output,
2045 2056 mandatory=False)
2046 2057 parts.append(part)
2047 2058 raise
2048 2059 finally:
2049 2060 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2050 2061 if recordout is not None:
2051 2062 recordout(repo.ui.popbuffer())
2052 2063 return r
2053 2064
2054 2065 def _maybeapplyclonebundle(pullop):
2055 2066 """Apply a clone bundle from a remote, if possible."""
2056 2067
2057 2068 repo = pullop.repo
2058 2069 remote = pullop.remote
2059 2070
2060 2071 if not repo.ui.configbool('ui', 'clonebundles'):
2061 2072 return
2062 2073
2063 2074 # Only run if local repo is empty.
2064 2075 if len(repo):
2065 2076 return
2066 2077
2067 2078 if pullop.heads:
2068 2079 return
2069 2080
2070 2081 if not remote.capable('clonebundles'):
2071 2082 return
2072 2083
2073 2084 res = remote._call('clonebundles')
2074 2085
2075 2086 # If we call the wire protocol command, that's good enough to record the
2076 2087 # attempt.
2077 2088 pullop.clonebundleattempted = True
2078 2089
2079 2090 entries = parseclonebundlesmanifest(repo, res)
2080 2091 if not entries:
2081 2092 repo.ui.note(_('no clone bundles available on remote; '
2082 2093 'falling back to regular clone\n'))
2083 2094 return
2084 2095
2085 2096 entries = filterclonebundleentries(
2086 2097 repo, entries, streamclonerequested=pullop.streamclonerequested)
2087 2098
2088 2099 if not entries:
2089 2100 # There is a thundering herd concern here. However, if a server
2090 2101 # operator doesn't advertise bundles appropriate for its clients,
2091 2102 # they deserve what's coming. Furthermore, from a client's
2092 2103 # perspective, no automatic fallback would mean not being able to
2093 2104 # clone!
2094 2105 repo.ui.warn(_('no compatible clone bundles available on server; '
2095 2106 'falling back to regular clone\n'))
2096 2107 repo.ui.warn(_('(you may want to report this to the server '
2097 2108 'operator)\n'))
2098 2109 return
2099 2110
2100 2111 entries = sortclonebundleentries(repo.ui, entries)
2101 2112
2102 2113 url = entries[0]['URL']
2103 2114 repo.ui.status(_('applying clone bundle from %s\n') % url)
2104 2115 if trypullbundlefromurl(repo.ui, repo, url):
2105 2116 repo.ui.status(_('finished applying clone bundle\n'))
2106 2117 # Bundle failed.
2107 2118 #
2108 2119 # We abort by default to avoid the thundering herd of
2109 2120 # clients flooding a server that was expecting expensive
2110 2121 # clone load to be offloaded.
2111 2122 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2112 2123 repo.ui.warn(_('falling back to normal clone\n'))
2113 2124 else:
2114 2125 raise error.Abort(_('error applying bundle'),
2115 2126 hint=_('if this error persists, consider contacting '
2116 2127 'the server operator or disable clone '
2117 2128 'bundles via '
2118 2129 '"--config ui.clonebundles=false"'))
2119 2130
2120 2131 def parseclonebundlesmanifest(repo, s):
2121 2132 """Parses the raw text of a clone bundles manifest.
2122 2133
2123 2134 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2124 2135 to the URL and other keys are the attributes for the entry.
2125 2136 """
2126 2137 m = []
2127 2138 for line in s.splitlines():
2128 2139 fields = line.split()
2129 2140 if not fields:
2130 2141 continue
2131 2142 attrs = {'URL': fields[0]}
2132 2143 for rawattr in fields[1:]:
2133 2144 key, value = rawattr.split('=', 1)
2134 2145 key = urlreq.unquote(key)
2135 2146 value = urlreq.unquote(value)
2136 2147 attrs[key] = value
2137 2148
2138 2149 # Parse BUNDLESPEC into components. This makes client-side
2139 2150 # preferences easier to specify since you can prefer a single
2140 2151 # component of the BUNDLESPEC.
2141 2152 if key == 'BUNDLESPEC':
2142 2153 try:
2143 comp, version, params = parsebundlespec(repo, value,
2144 externalnames=True)
2145 attrs['COMPRESSION'] = comp
2146 attrs['VERSION'] = version
2154 bundlespec = parsebundlespec(repo, value,
2155 externalnames=True)
2156 attrs['COMPRESSION'] = bundlespec.compression
2157 attrs['VERSION'] = bundlespec.version
2147 2158 except error.InvalidBundleSpecification:
2148 2159 pass
2149 2160 except error.UnsupportedBundleSpecification:
2150 2161 pass
2151 2162
2152 2163 m.append(attrs)
2153 2164
2154 2165 return m
2155 2166
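A sketch of the manifest format this parser consumes: each line is '<URL> KEY=VALUE ...'; the URL and values below are made up:

# One manifest line (illustrative values):
#   https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true
# would be parsed into roughly:
#   {'URL': 'https://example.com/full.hg',
#    'BUNDLESPEC': 'gzip-v2',
#    'REQUIRESNI': 'true',
#    'COMPRESSION': 'gzip',   # derived from the parsed bundlespec
#    'VERSION': 'v2'}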
2156 2167 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2157 2168 """Remove incompatible clone bundle manifest entries.
2158 2169
2159 2170 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2160 2171 and returns a new list consisting of only the entries that this client
2161 2172 should be able to apply.
2162 2173
2163 2174 There is no guarantee we'll be able to apply all returned entries because
2164 2175 the metadata we use to filter on may be missing or wrong.
2165 2176 """
2166 2177 newentries = []
2167 2178 for entry in entries:
2168 2179 spec = entry.get('BUNDLESPEC')
2169 2180 if spec:
2170 2181 try:
2171 comp, version, params = parsebundlespec(repo, spec, strict=True)
2182 bundlespec = parsebundlespec(repo, spec, strict=True)
2172 2183
2173 2184 # If a stream clone was requested, filter out non-streamclone
2174 2185 # entries.
2186 comp = bundlespec.compression
2187 version = bundlespec.version
2175 2188 if streamclonerequested and (comp != 'UN' or version != 's1'):
2176 2189 repo.ui.debug('filtering %s because not a stream clone\n' %
2177 2190 entry['URL'])
2178 2191 continue
2179 2192
2180 2193 except error.InvalidBundleSpecification as e:
2181 2194 repo.ui.debug(str(e) + '\n')
2182 2195 continue
2183 2196 except error.UnsupportedBundleSpecification as e:
2184 2197 repo.ui.debug('filtering %s because unsupported bundle '
2185 2198 'spec: %s\n' % (
2186 2199 entry['URL'], stringutil.forcebytestr(e)))
2187 2200 continue
2188 2201 # If we don't have a spec and requested a stream clone, we don't know
2189 2202 # what the entry is so don't attempt to apply it.
2190 2203 elif streamclonerequested:
2191 2204 repo.ui.debug('filtering %s because cannot determine if a stream '
2192 2205 'clone bundle\n' % entry['URL'])
2193 2206 continue
2194 2207
2195 2208 if 'REQUIRESNI' in entry and not sslutil.hassni:
2196 2209 repo.ui.debug('filtering %s because SNI not supported\n' %
2197 2210 entry['URL'])
2198 2211 continue
2199 2212
2200 2213 newentries.append(entry)
2201 2214
2202 2215 return newentries
2203 2216
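The hunks above switch callers from unpacking parsebundlespec()'s old tuple result to attribute access on the new attr-based return value; a before/after sketch with an illustrative spec string:

# before this change:
#   comp, version, params = parsebundlespec(repo, 'gzip-v2', strict=True)
# after this change:
#   bundlespec = parsebundlespec(repo, 'gzip-v2', strict=True)
#   comp = bundlespec.compression
#   version = bundlespec.version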
2204 2217 class clonebundleentry(object):
2205 2218 """Represents an item in a clone bundles manifest.
2206 2219
2207 2220 This rich class is needed to support sorting since sorted() in Python 3
2208 2221 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2209 2222 won't work.
2210 2223 """
2211 2224
2212 2225 def __init__(self, value, prefers):
2213 2226 self.value = value
2214 2227 self.prefers = prefers
2215 2228
2216 2229 def _cmp(self, other):
2217 2230 for prefkey, prefvalue in self.prefers:
2218 2231 avalue = self.value.get(prefkey)
2219 2232 bvalue = other.value.get(prefkey)
2220 2233
2221 2234 # Special case for b missing attribute and a matches exactly.
2222 2235 if avalue is not None and bvalue is None and avalue == prefvalue:
2223 2236 return -1
2224 2237
2225 2238 # Special case for a missing attribute and b matches exactly.
2226 2239 if bvalue is not None and avalue is None and bvalue == prefvalue:
2227 2240 return 1
2228 2241
2229 2242 # We can't compare unless attribute present on both.
2230 2243 if avalue is None or bvalue is None:
2231 2244 continue
2232 2245
2233 2246 # Same values should fall back to next attribute.
2234 2247 if avalue == bvalue:
2235 2248 continue
2236 2249
2237 2250 # Exact matches come first.
2238 2251 if avalue == prefvalue:
2239 2252 return -1
2240 2253 if bvalue == prefvalue:
2241 2254 return 1
2242 2255
2243 2256 # Fall back to next attribute.
2244 2257 continue
2245 2258
2246 2259 # If we got here we couldn't sort by attributes and prefers. Fall
2247 2260 # back to index order.
2248 2261 return 0
2249 2262
2250 2263 def __lt__(self, other):
2251 2264 return self._cmp(other) < 0
2252 2265
2253 2266 def __gt__(self, other):
2254 2267 return self._cmp(other) > 0
2255 2268
2256 2269 def __eq__(self, other):
2257 2270 return self._cmp(other) == 0
2258 2271
2259 2272 def __le__(self, other):
2260 2273 return self._cmp(other) <= 0
2261 2274
2262 2275 def __ge__(self, other):
2263 2276 return self._cmp(other) >= 0
2264 2277
2265 2278 def __ne__(self, other):
2266 2279 return self._cmp(other) != 0
2267 2280
2268 2281 def sortclonebundleentries(ui, entries):
2269 2282 prefers = ui.configlist('ui', 'clonebundleprefers')
2270 2283 if not prefers:
2271 2284 return list(entries)
2272 2285
2273 2286 prefers = [p.split('=', 1) for p in prefers]
2274 2287
2275 2288 items = sorted(clonebundleentry(v, prefers) for v in entries)
2276 2289 return [i.value for i in items]
2277 2290
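sortclonebundleentries orders entries by the ui.clonebundleprefers config list; a sketch with illustrative values:

# hgrc snippet (illustrative values):
#   [ui]
#   clonebundleprefers = COMPRESSION=zstd, VERSION=v2
#
# prefers then becomes [['COMPRESSION', 'zstd'], ['VERSION', 'v2']]:
# entries whose COMPRESSION is 'zstd' sort first, ties are broken by
# VERSION, and anything still tied keeps the server's manifest order.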
2278 2291 def trypullbundlefromurl(ui, repo, url):
2279 2292 """Attempt to apply a bundle from a URL."""
2280 2293 with repo.lock(), repo.transaction('bundleurl') as tr:
2281 2294 try:
2282 2295 fh = urlmod.open(ui, url)
2283 2296 cg = readbundle(ui, fh, 'stream')
2284 2297
2285 2298 if isinstance(cg, streamclone.streamcloneapplier):
2286 2299 cg.apply(repo)
2287 2300 else:
2288 2301 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2289 2302 return True
2290 2303 except urlerr.httperror as e:
2291 2304 ui.warn(_('HTTP error fetching bundle: %s\n') %
2292 2305 stringutil.forcebytestr(e))
2293 2306 except urlerr.urlerror as e:
2294 2307 ui.warn(_('error fetching bundle: %s\n') %
2295 2308 stringutil.forcebytestr(e.reason))
2296 2309
2297 2310 return False