exchange: support parameters in bundle specification strings...
Gregory Szorc
r26759:c0f475ac default
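The single functional change in this diff is the call to exchange.parsebundlespec() inside 'hg bundle', which now returns a third value, params, alongside the compression and changegroup version. Below is a minimal sketch (not part of the changeset) of how a caller unpacks the new three-value return; the helper name describebundlespec is hypothetical, and the idea that params carries key/value options parsed from a spec string such as 'gzip-v2;key=value' is an assumption based on the commit summary rather than anything shown in the hunk.

    # Illustrative sketch only -- not code from this changeset.
    # error.UnsupportedBundleSpecification and the strict=False keyword are
    # taken from the hunk below; everything else is assumed.
    from mercurial import error, exchange

    def describebundlespec(ui, repo, spec):
        try:
            compression, cgversion, params = exchange.parsebundlespec(
                repo, spec, strict=False)
        except error.UnsupportedBundleSpecification as inst:
            raise error.Abort(str(inst))
        # params is assumed to hold any options embedded in the spec string;
        # existing callers such as 'hg bundle' are free to ignore it.
        ui.write('compression: %s, version: %s, params: %r\n'
                 % (compression, cgversion, params))

In the hunk itself, 'hg bundle' only widens its unpacking to three names and never reads params afterwards, so the command's behaviour is unchanged.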
@@ -1,6716 +1,6716 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys, socket
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb
17 17 import extensions
18 18 from hgweb import server as hgweb_server
19 19 import merge as mergemod
20 20 import minirst, revset, fileset
21 21 import dagparser, context, simplemerge, graphmod, copies
22 22 import random, operator
23 23 import setdiscovery, treediscovery, dagutil, pvec, localrepo, destutil
24 24 import phases, obsolete, exchange, bundle2, repair, lock as lockmod
25 25 import ui as uimod
26 26 import streamclone
27 27
28 28 table = {}
29 29
30 30 command = cmdutil.command(table)
31 31
32 32 # Space delimited list of commands that don't require local repositories.
33 33 # This should be populated by passing norepo=True into the @command decorator.
34 34 norepo = ''
35 35 # Space delimited list of commands that optionally require local repositories.
36 36 # This should be populated by passing optionalrepo=True into the @command
37 37 # decorator.
38 38 optionalrepo = ''
39 39 # Space delimited list of commands that will examine arguments looking for
40 40 # a repository. This should be populated by passing inferrepo=True into the
41 41 # @command decorator.
42 42 inferrepo = ''
43 43
44 44 # label constants
45 45 # until 3.5, bookmarks.current was the advertised name, not
46 46 # bookmarks.active, so we must use both to avoid breaking old
47 47 # custom styles
48 48 activebookmarklabel = 'bookmarks.active bookmarks.current'
49 49
50 50 # common command options
51 51
52 52 globalopts = [
53 53 ('R', 'repository', '',
54 54 _('repository root directory or name of overlay bundle file'),
55 55 _('REPO')),
56 56 ('', 'cwd', '',
57 57 _('change working directory'), _('DIR')),
58 58 ('y', 'noninteractive', None,
59 59 _('do not prompt, automatically pick the first choice for all prompts')),
60 60 ('q', 'quiet', None, _('suppress output')),
61 61 ('v', 'verbose', None, _('enable additional output')),
62 62 ('', 'config', [],
63 63 _('set/override config option (use \'section.name=value\')'),
64 64 _('CONFIG')),
65 65 ('', 'debug', None, _('enable debugging output')),
66 66 ('', 'debugger', None, _('start debugger')),
67 67 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
68 68 _('ENCODE')),
69 69 ('', 'encodingmode', encoding.encodingmode,
70 70 _('set the charset encoding mode'), _('MODE')),
71 71 ('', 'traceback', None, _('always print a traceback on exception')),
72 72 ('', 'time', None, _('time how long the command takes')),
73 73 ('', 'profile', None, _('print command execution profile')),
74 74 ('', 'version', None, _('output version information and exit')),
75 75 ('h', 'help', None, _('display help and exit')),
76 76 ('', 'hidden', False, _('consider hidden changesets')),
77 77 ]
78 78
79 79 dryrunopts = [('n', 'dry-run', None,
80 80 _('do not perform actions, just print output'))]
81 81
82 82 remoteopts = [
83 83 ('e', 'ssh', '',
84 84 _('specify ssh command to use'), _('CMD')),
85 85 ('', 'remotecmd', '',
86 86 _('specify hg command to run on the remote side'), _('CMD')),
87 87 ('', 'insecure', None,
88 88 _('do not verify server certificate (ignoring web.cacerts config)')),
89 89 ]
90 90
91 91 walkopts = [
92 92 ('I', 'include', [],
93 93 _('include names matching the given patterns'), _('PATTERN')),
94 94 ('X', 'exclude', [],
95 95 _('exclude names matching the given patterns'), _('PATTERN')),
96 96 ]
97 97
98 98 commitopts = [
99 99 ('m', 'message', '',
100 100 _('use text as commit message'), _('TEXT')),
101 101 ('l', 'logfile', '',
102 102 _('read commit message from file'), _('FILE')),
103 103 ]
104 104
105 105 commitopts2 = [
106 106 ('d', 'date', '',
107 107 _('record the specified date as commit date'), _('DATE')),
108 108 ('u', 'user', '',
109 109 _('record the specified user as committer'), _('USER')),
110 110 ]
111 111
112 112 # hidden for now
113 113 formatteropts = [
114 114 ('T', 'template', '',
115 115 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
116 116 ]
117 117
118 118 templateopts = [
119 119 ('', 'style', '',
120 120 _('display using template map file (DEPRECATED)'), _('STYLE')),
121 121 ('T', 'template', '',
122 122 _('display with template'), _('TEMPLATE')),
123 123 ]
124 124
125 125 logopts = [
126 126 ('p', 'patch', None, _('show patch')),
127 127 ('g', 'git', None, _('use git extended diff format')),
128 128 ('l', 'limit', '',
129 129 _('limit number of changes displayed'), _('NUM')),
130 130 ('M', 'no-merges', None, _('do not show merges')),
131 131 ('', 'stat', None, _('output diffstat-style summary of changes')),
132 132 ('G', 'graph', None, _("show the revision DAG")),
133 133 ] + templateopts
134 134
135 135 diffopts = [
136 136 ('a', 'text', None, _('treat all files as text')),
137 137 ('g', 'git', None, _('use git extended diff format')),
138 138 ('', 'nodates', None, _('omit dates from diff headers'))
139 139 ]
140 140
141 141 diffwsopts = [
142 142 ('w', 'ignore-all-space', None,
143 143 _('ignore white space when comparing lines')),
144 144 ('b', 'ignore-space-change', None,
145 145 _('ignore changes in the amount of white space')),
146 146 ('B', 'ignore-blank-lines', None,
147 147 _('ignore changes whose lines are all blank')),
148 148 ]
149 149
150 150 diffopts2 = [
151 151 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
152 152 ('p', 'show-function', None, _('show which function each change is in')),
153 153 ('', 'reverse', None, _('produce a diff that undoes the changes')),
154 154 ] + diffwsopts + [
155 155 ('U', 'unified', '',
156 156 _('number of lines of context to show'), _('NUM')),
157 157 ('', 'stat', None, _('output diffstat-style summary of changes')),
158 158 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
159 159 ]
160 160
161 161 mergetoolopts = [
162 162 ('t', 'tool', '', _('specify merge tool')),
163 163 ]
164 164
165 165 similarityopts = [
166 166 ('s', 'similarity', '',
167 167 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
168 168 ]
169 169
170 170 subrepoopts = [
171 171 ('S', 'subrepos', None,
172 172 _('recurse into subrepositories'))
173 173 ]
174 174
175 175 # Commands start here, listed alphabetically
176 176
177 177 @command('^add',
178 178 walkopts + subrepoopts + dryrunopts,
179 179 _('[OPTION]... [FILE]...'),
180 180 inferrepo=True)
181 181 def add(ui, repo, *pats, **opts):
182 182 """add the specified files on the next commit
183 183
184 184 Schedule files to be version controlled and added to the
185 185 repository.
186 186
187 187 The files will be added to the repository at the next commit. To
188 188 undo an add before that, see :hg:`forget`.
189 189
190 190 If no names are given, add all files to the repository.
191 191
192 192 .. container:: verbose
193 193
194 194 An example showing how new (unknown) files are added
195 195 automatically by :hg:`add`::
196 196
197 197 $ ls
198 198 foo.c
199 199 $ hg status
200 200 ? foo.c
201 201 $ hg add
202 202 adding foo.c
203 203 $ hg status
204 204 A foo.c
205 205
206 206 Returns 0 if all files are successfully added.
207 207 """
208 208
209 209 m = scmutil.match(repo[None], pats, opts)
210 210 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
211 211 return rejected and 1 or 0
212 212
213 213 @command('addremove',
214 214 similarityopts + subrepoopts + walkopts + dryrunopts,
215 215 _('[OPTION]... [FILE]...'),
216 216 inferrepo=True)
217 217 def addremove(ui, repo, *pats, **opts):
218 218 """add all new files, delete all missing files
219 219
220 220 Add all new files and remove all missing files from the
221 221 repository.
222 222
223 223 New files are ignored if they match any of the patterns in
224 224 ``.hgignore``. As with add, these changes take effect at the next
225 225 commit.
226 226
227 227 Use the -s/--similarity option to detect renamed files. This
228 228 option takes a percentage between 0 (disabled) and 100 (files must
229 229 be identical) as its parameter. With a parameter greater than 0,
230 230 this compares every removed file with every added file and records
231 231 those similar enough as renames. Detecting renamed files this way
232 232 can be expensive. After using this option, :hg:`status -C` can be
233 233 used to check which files were identified as moved or renamed. If
234 234 not specified, -s/--similarity defaults to 100 and only renames of
235 235 identical files are detected.
236 236
237 237 Returns 0 if all files are successfully added.
238 238 """
239 239 try:
240 240 sim = float(opts.get('similarity') or 100)
241 241 except ValueError:
242 242 raise error.Abort(_('similarity must be a number'))
243 243 if sim < 0 or sim > 100:
244 244 raise error.Abort(_('similarity must be between 0 and 100'))
245 245 matcher = scmutil.match(repo[None], pats, opts)
246 246 return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)
247 247
248 248 @command('^annotate|blame',
249 249 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
250 250 ('', 'follow', None,
251 251 _('follow copies/renames and list the filename (DEPRECATED)')),
252 252 ('', 'no-follow', None, _("don't follow copies and renames")),
253 253 ('a', 'text', None, _('treat all files as text')),
254 254 ('u', 'user', None, _('list the author (long with -v)')),
255 255 ('f', 'file', None, _('list the filename')),
256 256 ('d', 'date', None, _('list the date (short with -q)')),
257 257 ('n', 'number', None, _('list the revision number (default)')),
258 258 ('c', 'changeset', None, _('list the changeset')),
259 259 ('l', 'line-number', None, _('show line number at the first appearance'))
260 260 ] + diffwsopts + walkopts + formatteropts,
261 261 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
262 262 inferrepo=True)
263 263 def annotate(ui, repo, *pats, **opts):
264 264 """show changeset information by line for each file
265 265
266 266 List changes in files, showing the revision id responsible for
267 267 each line
268 268
269 269 This command is useful for discovering when a change was made and
270 270 by whom.
271 271
272 272 Without the -a/--text option, annotate will avoid processing files
273 273 it detects as binary. With -a, annotate will annotate the file
274 274 anyway, although the results will probably be neither useful
275 275 nor desirable.
276 276
277 277 Returns 0 on success.
278 278 """
279 279 if not pats:
280 280 raise error.Abort(_('at least one filename or pattern is required'))
281 281
282 282 if opts.get('follow'):
283 283 # --follow is deprecated and now just an alias for -f/--file
284 284 # to mimic the behavior of Mercurial before version 1.5
285 285 opts['file'] = True
286 286
287 287 ctx = scmutil.revsingle(repo, opts.get('rev'))
288 288
289 289 fm = ui.formatter('annotate', opts)
290 290 if ui.quiet:
291 291 datefunc = util.shortdate
292 292 else:
293 293 datefunc = util.datestr
294 294 if ctx.rev() is None:
295 295 def hexfn(node):
296 296 if node is None:
297 297 return None
298 298 else:
299 299 return fm.hexfunc(node)
300 300 if opts.get('changeset'):
301 301 # omit "+" suffix which is appended to node hex
302 302 def formatrev(rev):
303 303 if rev is None:
304 304 return '%d' % ctx.p1().rev()
305 305 else:
306 306 return '%d' % rev
307 307 else:
308 308 def formatrev(rev):
309 309 if rev is None:
310 310 return '%d+' % ctx.p1().rev()
311 311 else:
312 312 return '%d ' % rev
313 313 def formathex(hex):
314 314 if hex is None:
315 315 return '%s+' % fm.hexfunc(ctx.p1().node())
316 316 else:
317 317 return '%s ' % hex
318 318 else:
319 319 hexfn = fm.hexfunc
320 320 formatrev = formathex = str
321 321
322 322 opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
323 323 ('number', ' ', lambda x: x[0].rev(), formatrev),
324 324 ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
325 325 ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
326 326 ('file', ' ', lambda x: x[0].path(), str),
327 327 ('line_number', ':', lambda x: x[1], str),
328 328 ]
329 329 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
330 330
331 331 if (not opts.get('user') and not opts.get('changeset')
332 332 and not opts.get('date') and not opts.get('file')):
333 333 opts['number'] = True
334 334
335 335 linenumber = opts.get('line_number') is not None
336 336 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
337 337 raise error.Abort(_('at least one of -n/-c is required for -l'))
338 338
339 339 if fm:
340 340 def makefunc(get, fmt):
341 341 return get
342 342 else:
343 343 def makefunc(get, fmt):
344 344 return lambda x: fmt(get(x))
345 345 funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
346 346 if opts.get(op)]
347 347 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
348 348 fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
349 349 if opts.get(op))
350 350
351 351 def bad(x, y):
352 352 raise error.Abort("%s: %s" % (x, y))
353 353
354 354 m = scmutil.match(ctx, pats, opts, badfn=bad)
355 355
356 356 follow = not opts.get('no_follow')
357 357 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
358 358 whitespace=True)
359 359 for abs in ctx.walk(m):
360 360 fctx = ctx[abs]
361 361 if not opts.get('text') and util.binary(fctx.data()):
362 362 fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
363 363 continue
364 364
365 365 lines = fctx.annotate(follow=follow, linenumber=linenumber,
366 366 diffopts=diffopts)
367 367 formats = []
368 368 pieces = []
369 369
370 370 for f, sep in funcmap:
371 371 l = [f(n) for n, dummy in lines]
372 372 if l:
373 373 if fm:
374 374 formats.append(['%s' for x in l])
375 375 else:
376 376 sizes = [encoding.colwidth(x) for x in l]
377 377 ml = max(sizes)
378 378 formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
379 379 pieces.append(l)
380 380
381 381 for f, p, l in zip(zip(*formats), zip(*pieces), lines):
382 382 fm.startitem()
383 383 fm.write(fields, "".join(f), *p)
384 384 fm.write('line', ": %s", l[1])
385 385
386 386 if lines and not lines[-1][1].endswith('\n'):
387 387 fm.plain('\n')
388 388
389 389 fm.end()
390 390
391 391 @command('archive',
392 392 [('', 'no-decode', None, _('do not pass files through decoders')),
393 393 ('p', 'prefix', '', _('directory prefix for files in archive'),
394 394 _('PREFIX')),
395 395 ('r', 'rev', '', _('revision to distribute'), _('REV')),
396 396 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
397 397 ] + subrepoopts + walkopts,
398 398 _('[OPTION]... DEST'))
399 399 def archive(ui, repo, dest, **opts):
400 400 '''create an unversioned archive of a repository revision
401 401
402 402 By default, the revision used is the parent of the working
403 403 directory; use -r/--rev to specify a different revision.
404 404
405 405 The archive type is automatically detected based on file
406 406 extension (or override using -t/--type).
407 407
408 408 .. container:: verbose
409 409
410 410 Examples:
411 411
412 412 - create a zip file containing the 1.0 release::
413 413
414 414 hg archive -r 1.0 project-1.0.zip
415 415
416 416 - create a tarball excluding .hg files::
417 417
418 418 hg archive project.tar.gz -X ".hg*"
419 419
420 420 Valid types are:
421 421
422 422 :``files``: a directory full of files (default)
423 423 :``tar``: tar archive, uncompressed
424 424 :``tbz2``: tar archive, compressed using bzip2
425 425 :``tgz``: tar archive, compressed using gzip
426 426 :``uzip``: zip archive, uncompressed
427 427 :``zip``: zip archive, compressed using deflate
428 428
429 429 The exact name of the destination archive or directory is given
430 430 using a format string; see :hg:`help export` for details.
431 431
432 432 Each member added to an archive file has a directory prefix
433 433 prepended. Use -p/--prefix to specify a format string for the
434 434 prefix. The default is the basename of the archive, with suffixes
435 435 removed.
436 436
437 437 Returns 0 on success.
438 438 '''
439 439
440 440 ctx = scmutil.revsingle(repo, opts.get('rev'))
441 441 if not ctx:
442 442 raise error.Abort(_('no working directory: please specify a revision'))
443 443 node = ctx.node()
444 444 dest = cmdutil.makefilename(repo, dest, node)
445 445 if os.path.realpath(dest) == repo.root:
446 446 raise error.Abort(_('repository root cannot be destination'))
447 447
448 448 kind = opts.get('type') or archival.guesskind(dest) or 'files'
449 449 prefix = opts.get('prefix')
450 450
451 451 if dest == '-':
452 452 if kind == 'files':
453 453 raise error.Abort(_('cannot archive plain files to stdout'))
454 454 dest = cmdutil.makefileobj(repo, dest)
455 455 if not prefix:
456 456 prefix = os.path.basename(repo.root) + '-%h'
457 457
458 458 prefix = cmdutil.makefilename(repo, prefix, node)
459 459 matchfn = scmutil.match(ctx, [], opts)
460 460 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
461 461 matchfn, prefix, subrepos=opts.get('subrepos'))
462 462
463 463 @command('backout',
464 464 [('', 'merge', None, _('merge with old dirstate parent after backout')),
465 465 ('', 'commit', None, _('commit if no conflicts were encountered')),
466 466 ('', 'parent', '',
467 467 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
468 468 ('r', 'rev', '', _('revision to backout'), _('REV')),
469 469 ('e', 'edit', False, _('invoke editor on commit messages')),
470 470 ] + mergetoolopts + walkopts + commitopts + commitopts2,
471 471 _('[OPTION]... [-r] REV'))
472 472 def backout(ui, repo, node=None, rev=None, commit=False, **opts):
473 473 '''reverse effect of earlier changeset
474 474
475 475 Prepare a new changeset with the effect of REV undone in the
476 476 current working directory.
477 477
478 478 If REV is the parent of the working directory, then this new changeset
479 479 is committed automatically. Otherwise, hg needs to merge the
480 480 changes and the merged result is left uncommitted.
481 481
482 482 .. note::
483 483
484 484 backout cannot be used to fix either an unwanted or
485 485 incorrect merge.
486 486
487 487 .. container:: verbose
488 488
489 489 By default, the pending changeset will have one parent,
490 490 maintaining a linear history. With --merge, the pending
491 491 changeset will instead have two parents: the old parent of the
492 492 working directory and a new child of REV that simply undoes REV.
493 493
494 494 Before version 1.7, the behavior without --merge was equivalent
495 495 to specifying --merge followed by :hg:`update --clean .` to
496 496 cancel the merge and leave the child of REV as a head to be
497 497 merged separately.
498 498
499 499 See :hg:`help dates` for a list of formats valid for -d/--date.
500 500
501 501 See :hg:`help revert` for a way to restore files to the state
502 502 of another revision.
503 503
504 504 Returns 0 on success, 1 if nothing to backout or there are unresolved
505 505 files.
506 506 '''
507 507 if rev and node:
508 508 raise error.Abort(_("please specify just one revision"))
509 509
510 510 if not rev:
511 511 rev = node
512 512
513 513 if not rev:
514 514 raise error.Abort(_("please specify a revision to backout"))
515 515
516 516 date = opts.get('date')
517 517 if date:
518 518 opts['date'] = util.parsedate(date)
519 519
520 520 cmdutil.checkunfinished(repo)
521 521 cmdutil.bailifchanged(repo)
522 522 node = scmutil.revsingle(repo, rev).node()
523 523
524 524 op1, op2 = repo.dirstate.parents()
525 525 if not repo.changelog.isancestor(node, op1):
526 526 raise error.Abort(_('cannot backout change that is not an ancestor'))
527 527
528 528 p1, p2 = repo.changelog.parents(node)
529 529 if p1 == nullid:
530 530 raise error.Abort(_('cannot backout a change with no parents'))
531 531 if p2 != nullid:
532 532 if not opts.get('parent'):
533 533 raise error.Abort(_('cannot backout a merge changeset'))
534 534 p = repo.lookup(opts['parent'])
535 535 if p not in (p1, p2):
536 536 raise error.Abort(_('%s is not a parent of %s') %
537 537 (short(p), short(node)))
538 538 parent = p
539 539 else:
540 540 if opts.get('parent'):
541 541 raise error.Abort(_('cannot use --parent on non-merge changeset'))
542 542 parent = p1
543 543
544 544 # the backout should appear on the same branch
545 545 wlock = repo.wlock()
546 546 try:
547 547 branch = repo.dirstate.branch()
548 548 bheads = repo.branchheads(branch)
549 549 rctx = scmutil.revsingle(repo, hex(parent))
550 550 if not opts.get('merge') and op1 != node:
551 551 dsguard = cmdutil.dirstateguard(repo, 'backout')
552 552 try:
553 553 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
554 554 'backout')
555 555 stats = mergemod.update(repo, parent, True, True, False,
556 556 node, False)
557 557 repo.setparents(op1, op2)
558 558 dsguard.close()
559 559 hg._showstats(repo, stats)
560 560 if stats[3]:
561 561 repo.ui.status(_("use 'hg resolve' to retry unresolved "
562 562 "file merges\n"))
563 563 return 1
564 564 elif not commit:
565 565 msg = _("changeset %s backed out, "
566 566 "don't forget to commit.\n")
567 567 ui.status(msg % short(node))
568 568 return 0
569 569 finally:
570 570 ui.setconfig('ui', 'forcemerge', '', '')
571 571 lockmod.release(dsguard)
572 572 else:
573 573 hg.clean(repo, node, show_stats=False)
574 574 repo.dirstate.setbranch(branch)
575 575 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
576 576
577 577
578 578 def commitfunc(ui, repo, message, match, opts):
579 579 editform = 'backout'
580 580 e = cmdutil.getcommiteditor(editform=editform, **opts)
581 581 if not message:
582 582 # we don't translate commit messages
583 583 message = "Backed out changeset %s" % short(node)
584 584 e = cmdutil.getcommiteditor(edit=True, editform=editform)
585 585 return repo.commit(message, opts.get('user'), opts.get('date'),
586 586 match, editor=e)
587 587 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
588 588 if not newnode:
589 589 ui.status(_("nothing changed\n"))
590 590 return 1
591 591 cmdutil.commitstatus(repo, newnode, branch, bheads)
592 592
593 593 def nice(node):
594 594 return '%d:%s' % (repo.changelog.rev(node), short(node))
595 595 ui.status(_('changeset %s backs out changeset %s\n') %
596 596 (nice(repo.changelog.tip()), nice(node)))
597 597 if opts.get('merge') and op1 != node:
598 598 hg.clean(repo, op1, show_stats=False)
599 599 ui.status(_('merging with changeset %s\n')
600 600 % nice(repo.changelog.tip()))
601 601 try:
602 602 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
603 603 'backout')
604 604 return hg.merge(repo, hex(repo.changelog.tip()))
605 605 finally:
606 606 ui.setconfig('ui', 'forcemerge', '', '')
607 607 finally:
608 608 wlock.release()
609 609 return 0
610 610
611 611 @command('bisect',
612 612 [('r', 'reset', False, _('reset bisect state')),
613 613 ('g', 'good', False, _('mark changeset good')),
614 614 ('b', 'bad', False, _('mark changeset bad')),
615 615 ('s', 'skip', False, _('skip testing changeset')),
616 616 ('e', 'extend', False, _('extend the bisect range')),
617 617 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
618 618 ('U', 'noupdate', False, _('do not update to target'))],
619 619 _("[-gbsr] [-U] [-c CMD] [REV]"))
620 620 def bisect(ui, repo, rev=None, extra=None, command=None,
621 621 reset=None, good=None, bad=None, skip=None, extend=None,
622 622 noupdate=None):
623 623 """subdivision search of changesets
624 624
625 625 This command helps to find changesets which introduce problems. To
626 626 use, mark the earliest changeset you know exhibits the problem as
627 627 bad, then mark the latest changeset which is free from the problem
628 628 as good. Bisect will update your working directory to a revision
629 629 for testing (unless the -U/--noupdate option is specified). Once
630 630 you have performed tests, mark the working directory as good or
631 631 bad, and bisect will either update to another candidate changeset
632 632 or announce that it has found the bad revision.
633 633
634 634 As a shortcut, you can also use the revision argument to mark a
635 635 revision as good or bad without checking it out first.
636 636
637 637 If you supply a command, it will be used for automatic bisection.
638 638 The environment variable HG_NODE will contain the ID of the
639 639 changeset being tested. The exit status of the command will be
640 640 used to mark revisions as good or bad: status 0 means good, 125
641 641 means to skip the revision, 127 (command not found) will abort the
642 642 bisection, and any other non-zero exit status means the revision
643 643 is bad.
644 644
645 645 .. container:: verbose
646 646
647 647 Some examples:
648 648
649 649 - start a bisection with known bad revision 34, and good revision 12::
650 650
651 651 hg bisect --bad 34
652 652 hg bisect --good 12
653 653
654 654 - advance the current bisection by marking current revision as good or
655 655 bad::
656 656
657 657 hg bisect --good
658 658 hg bisect --bad
659 659
660 660 - mark the current revision, or a known revision, to be skipped (e.g. if
661 661 that revision is not usable because of another issue)::
662 662
663 663 hg bisect --skip
664 664 hg bisect --skip 23
665 665
666 666 - skip all revisions that do not touch directories ``foo`` or ``bar``::
667 667
668 668 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
669 669
670 670 - forget the current bisection::
671 671
672 672 hg bisect --reset
673 673
674 674 - use 'make && make tests' to automatically find the first broken
675 675 revision::
676 676
677 677 hg bisect --reset
678 678 hg bisect --bad 34
679 679 hg bisect --good 12
680 680 hg bisect --command "make && make tests"
681 681
682 682 - see all changesets whose states are already known in the current
683 683 bisection::
684 684
685 685 hg log -r "bisect(pruned)"
686 686
687 687 - see the changeset currently being bisected (especially useful
688 688 if running with -U/--noupdate)::
689 689
690 690 hg log -r "bisect(current)"
691 691
692 692 - see all changesets that took part in the current bisection::
693 693
694 694 hg log -r "bisect(range)"
695 695
696 696 - you can even get a nice graph::
697 697
698 698 hg log --graph -r "bisect(range)"
699 699
700 700 See :hg:`help revsets` for more about the `bisect()` keyword.
701 701
702 702 Returns 0 on success.
703 703 """
704 704 def extendbisectrange(nodes, good):
705 705 # bisect is incomplete when it ends on a merge node and
706 706 # one of the parent was not checked.
707 707 parents = repo[nodes[0]].parents()
708 708 if len(parents) > 1:
709 709 if good:
710 710 side = state['bad']
711 711 else:
712 712 side = state['good']
713 713 num = len(set(i.node() for i in parents) & set(side))
714 714 if num == 1:
715 715 return parents[0].ancestor(parents[1])
716 716 return None
717 717
718 718 def print_result(nodes, good):
719 719 displayer = cmdutil.show_changeset(ui, repo, {})
720 720 if len(nodes) == 1:
721 721 # narrowed it down to a single revision
722 722 if good:
723 723 ui.write(_("The first good revision is:\n"))
724 724 else:
725 725 ui.write(_("The first bad revision is:\n"))
726 726 displayer.show(repo[nodes[0]])
727 727 extendnode = extendbisectrange(nodes, good)
728 728 if extendnode is not None:
729 729 ui.write(_('Not all ancestors of this changeset have been'
730 730 ' checked.\nUse bisect --extend to continue the '
731 731 'bisection from\nthe common ancestor, %s.\n')
732 732 % extendnode)
733 733 else:
734 734 # multiple possible revisions
735 735 if good:
736 736 ui.write(_("Due to skipped revisions, the first "
737 737 "good revision could be any of:\n"))
738 738 else:
739 739 ui.write(_("Due to skipped revisions, the first "
740 740 "bad revision could be any of:\n"))
741 741 for n in nodes:
742 742 displayer.show(repo[n])
743 743 displayer.close()
744 744
745 745 def check_state(state, interactive=True):
746 746 if not state['good'] or not state['bad']:
747 747 if (good or bad or skip or reset) and interactive:
748 748 return
749 749 if not state['good']:
750 750 raise error.Abort(_('cannot bisect (no known good revisions)'))
751 751 else:
752 752 raise error.Abort(_('cannot bisect (no known bad revisions)'))
753 753 return True
754 754
755 755 # backward compatibility
756 756 if rev in "good bad reset init".split():
757 757 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
758 758 cmd, rev, extra = rev, extra, None
759 759 if cmd == "good":
760 760 good = True
761 761 elif cmd == "bad":
762 762 bad = True
763 763 else:
764 764 reset = True
765 765 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
766 766 raise error.Abort(_('incompatible arguments'))
767 767
768 768 cmdutil.checkunfinished(repo)
769 769
770 770 if reset:
771 771 p = repo.join("bisect.state")
772 772 if os.path.exists(p):
773 773 os.unlink(p)
774 774 return
775 775
776 776 state = hbisect.load_state(repo)
777 777
778 778 if command:
779 779 changesets = 1
780 780 if noupdate:
781 781 try:
782 782 node = state['current'][0]
783 783 except LookupError:
784 784 raise error.Abort(_('current bisect revision is unknown - '
785 785 'start a new bisect to fix'))
786 786 else:
787 787 node, p2 = repo.dirstate.parents()
788 788 if p2 != nullid:
789 789 raise error.Abort(_('current bisect revision is a merge'))
790 790 try:
791 791 while changesets:
792 792 # update state
793 793 state['current'] = [node]
794 794 hbisect.save_state(repo, state)
795 795 status = ui.system(command, environ={'HG_NODE': hex(node)})
796 796 if status == 125:
797 797 transition = "skip"
798 798 elif status == 0:
799 799 transition = "good"
800 800 # status < 0 means process was killed
801 801 elif status == 127:
802 802 raise error.Abort(_("failed to execute %s") % command)
803 803 elif status < 0:
804 804 raise error.Abort(_("%s killed") % command)
805 805 else:
806 806 transition = "bad"
807 807 ctx = scmutil.revsingle(repo, rev, node)
808 808 rev = None # clear for future iterations
809 809 state[transition].append(ctx.node())
810 810 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
811 811 check_state(state, interactive=False)
812 812 # bisect
813 813 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
814 814 # update to next check
815 815 node = nodes[0]
816 816 if not noupdate:
817 817 cmdutil.bailifchanged(repo)
818 818 hg.clean(repo, node, show_stats=False)
819 819 finally:
820 820 state['current'] = [node]
821 821 hbisect.save_state(repo, state)
822 822 print_result(nodes, bgood)
823 823 return
824 824
825 825 # update state
826 826
827 827 if rev:
828 828 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
829 829 else:
830 830 nodes = [repo.lookup('.')]
831 831
832 832 if good or bad or skip:
833 833 if good:
834 834 state['good'] += nodes
835 835 elif bad:
836 836 state['bad'] += nodes
837 837 elif skip:
838 838 state['skip'] += nodes
839 839 hbisect.save_state(repo, state)
840 840
841 841 if not check_state(state):
842 842 return
843 843
844 844 # actually bisect
845 845 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
846 846 if extend:
847 847 if not changesets:
848 848 extendnode = extendbisectrange(nodes, good)
849 849 if extendnode is not None:
850 850 ui.write(_("Extending search to changeset %d:%s\n")
851 851 % (extendnode.rev(), extendnode))
852 852 state['current'] = [extendnode.node()]
853 853 hbisect.save_state(repo, state)
854 854 if noupdate:
855 855 return
856 856 cmdutil.bailifchanged(repo)
857 857 return hg.clean(repo, extendnode.node())
858 858 raise error.Abort(_("nothing to extend"))
859 859
860 860 if changesets == 0:
861 861 print_result(nodes, good)
862 862 else:
863 863 assert len(nodes) == 1 # only a single node can be tested next
864 864 node = nodes[0]
865 865 # compute the approximate number of remaining tests
866 866 tests, size = 0, 2
867 867 while size <= changesets:
868 868 tests, size = tests + 1, size * 2
869 869 rev = repo.changelog.rev(node)
870 870 ui.write(_("Testing changeset %d:%s "
871 871 "(%d changesets remaining, ~%d tests)\n")
872 872 % (rev, short(node), changesets, tests))
873 873 state['current'] = [node]
874 874 hbisect.save_state(repo, state)
875 875 if not noupdate:
876 876 cmdutil.bailifchanged(repo)
877 877 return hg.clean(repo, node)
878 878
879 879 @command('bookmarks|bookmark',
880 880 [('f', 'force', False, _('force')),
881 881 ('r', 'rev', '', _('revision'), _('REV')),
882 882 ('d', 'delete', False, _('delete a given bookmark')),
883 883 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
884 884 ('i', 'inactive', False, _('mark a bookmark inactive')),
885 885 ] + formatteropts,
886 886 _('hg bookmarks [OPTIONS]... [NAME]...'))
887 887 def bookmark(ui, repo, *names, **opts):
888 888 '''create a new bookmark or list existing bookmarks
889 889
890 890 Bookmarks are labels on changesets to help track lines of development.
891 891 Bookmarks are unversioned and can be moved, renamed and deleted.
892 892 Deleting or moving a bookmark has no effect on the associated changesets.
893 893
894 894 Creating or updating to a bookmark causes it to be marked as 'active'.
895 895 The active bookmark is indicated with a '*'.
896 896 When a commit is made, the active bookmark will advance to the new commit.
897 897 A plain :hg:`update` will also advance an active bookmark, if possible.
898 898 Updating away from a bookmark will cause it to be deactivated.
899 899
900 900 Bookmarks can be pushed and pulled between repositories (see
901 901 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
902 902 diverged, a new 'divergent bookmark' of the form 'name@path' will
903 903 be created. Using :hg:`merge` will resolve the divergence.
904 904
905 905 A bookmark named '@' has the special property that :hg:`clone` will
906 906 check it out by default if it exists.
907 907
908 908 .. container:: verbose
909 909
910 910 Examples:
911 911
912 912 - create an active bookmark for a new line of development::
913 913
914 914 hg book new-feature
915 915
916 916 - create an inactive bookmark as a place marker::
917 917
918 918 hg book -i reviewed
919 919
920 920 - create an inactive bookmark on another changeset::
921 921
922 922 hg book -r .^ tested
923 923
924 924 - rename bookmark turkey to dinner::
925 925
926 926 hg book -m turkey dinner
927 927
928 928 - move the '@' bookmark from another branch::
929 929
930 930 hg book -f @
931 931 '''
932 932 force = opts.get('force')
933 933 rev = opts.get('rev')
934 934 delete = opts.get('delete')
935 935 rename = opts.get('rename')
936 936 inactive = opts.get('inactive')
937 937
938 938 def checkformat(mark):
939 939 mark = mark.strip()
940 940 if not mark:
941 941 raise error.Abort(_("bookmark names cannot consist entirely of "
942 942 "whitespace"))
943 943 scmutil.checknewlabel(repo, mark, 'bookmark')
944 944 return mark
945 945
946 946 def checkconflict(repo, mark, cur, force=False, target=None):
947 947 if mark in marks and not force:
948 948 if target:
949 949 if marks[mark] == target and target == cur:
950 950 # re-activating a bookmark
951 951 return
952 952 anc = repo.changelog.ancestors([repo[target].rev()])
953 953 bmctx = repo[marks[mark]]
954 954 divs = [repo[b].node() for b in marks
955 955 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
956 956
957 957 # allow resolving a single divergent bookmark even if moving
958 958 # the bookmark across branches when a revision is specified
959 959 # that contains a divergent bookmark
960 960 if bmctx.rev() not in anc and target in divs:
961 961 bookmarks.deletedivergent(repo, [target], mark)
962 962 return
963 963
964 964 deletefrom = [b for b in divs
965 965 if repo[b].rev() in anc or b == target]
966 966 bookmarks.deletedivergent(repo, deletefrom, mark)
967 967 if bookmarks.validdest(repo, bmctx, repo[target]):
968 968 ui.status(_("moving bookmark '%s' forward from %s\n") %
969 969 (mark, short(bmctx.node())))
970 970 return
971 971 raise error.Abort(_("bookmark '%s' already exists "
972 972 "(use -f to force)") % mark)
973 973 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
974 974 and not force):
975 975 raise error.Abort(
976 976 _("a bookmark cannot have the name of an existing branch"))
977 977
978 978 if delete and rename:
979 979 raise error.Abort(_("--delete and --rename are incompatible"))
980 980 if delete and rev:
981 981 raise error.Abort(_("--rev is incompatible with --delete"))
982 982 if rename and rev:
983 983 raise error.Abort(_("--rev is incompatible with --rename"))
984 984 if not names and (delete or rev):
985 985 raise error.Abort(_("bookmark name required"))
986 986
987 987 if delete or rename or names or inactive:
988 988 wlock = lock = tr = None
989 989 try:
990 990 wlock = repo.wlock()
991 991 lock = repo.lock()
992 992 cur = repo.changectx('.').node()
993 993 marks = repo._bookmarks
994 994 if delete:
995 995 tr = repo.transaction('bookmark')
996 996 for mark in names:
997 997 if mark not in marks:
998 998 raise error.Abort(_("bookmark '%s' does not exist") %
999 999 mark)
1000 1000 if mark == repo._activebookmark:
1001 1001 bookmarks.deactivate(repo)
1002 1002 del marks[mark]
1003 1003
1004 1004 elif rename:
1005 1005 tr = repo.transaction('bookmark')
1006 1006 if not names:
1007 1007 raise error.Abort(_("new bookmark name required"))
1008 1008 elif len(names) > 1:
1009 1009 raise error.Abort(_("only one new bookmark name allowed"))
1010 1010 mark = checkformat(names[0])
1011 1011 if rename not in marks:
1012 1012 raise error.Abort(_("bookmark '%s' does not exist")
1013 1013 % rename)
1014 1014 checkconflict(repo, mark, cur, force)
1015 1015 marks[mark] = marks[rename]
1016 1016 if repo._activebookmark == rename and not inactive:
1017 1017 bookmarks.activate(repo, mark)
1018 1018 del marks[rename]
1019 1019 elif names:
1020 1020 tr = repo.transaction('bookmark')
1021 1021 newact = None
1022 1022 for mark in names:
1023 1023 mark = checkformat(mark)
1024 1024 if newact is None:
1025 1025 newact = mark
1026 1026 if inactive and mark == repo._activebookmark:
1027 1027 bookmarks.deactivate(repo)
1028 1028 return
1029 1029 tgt = cur
1030 1030 if rev:
1031 1031 tgt = scmutil.revsingle(repo, rev).node()
1032 1032 checkconflict(repo, mark, cur, force, tgt)
1033 1033 marks[mark] = tgt
1034 1034 if not inactive and cur == marks[newact] and not rev:
1035 1035 bookmarks.activate(repo, newact)
1036 1036 elif cur != tgt and newact == repo._activebookmark:
1037 1037 bookmarks.deactivate(repo)
1038 1038 elif inactive:
1039 1039 if len(marks) == 0:
1040 1040 ui.status(_("no bookmarks set\n"))
1041 1041 elif not repo._activebookmark:
1042 1042 ui.status(_("no active bookmark\n"))
1043 1043 else:
1044 1044 bookmarks.deactivate(repo)
1045 1045 if tr is not None:
1046 1046 marks.recordchange(tr)
1047 1047 tr.close()
1048 1048 finally:
1049 1049 lockmod.release(tr, lock, wlock)
1050 1050 else: # show bookmarks
1051 1051 fm = ui.formatter('bookmarks', opts)
1052 1052 hexfn = fm.hexfunc
1053 1053 marks = repo._bookmarks
1054 1054 if len(marks) == 0 and not fm:
1055 1055 ui.status(_("no bookmarks set\n"))
1056 1056 for bmark, n in sorted(marks.iteritems()):
1057 1057 active = repo._activebookmark
1058 1058 if bmark == active:
1059 1059 prefix, label = '*', activebookmarklabel
1060 1060 else:
1061 1061 prefix, label = ' ', ''
1062 1062
1063 1063 fm.startitem()
1064 1064 if not ui.quiet:
1065 1065 fm.plain(' %s ' % prefix, label=label)
1066 1066 fm.write('bookmark', '%s', bmark, label=label)
1067 1067 pad = " " * (25 - encoding.colwidth(bmark))
1068 1068 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1069 1069 repo.changelog.rev(n), hexfn(n), label=label)
1070 1070 fm.data(active=(bmark == active))
1071 1071 fm.plain('\n')
1072 1072 fm.end()
1073 1073
1074 1074 @command('branch',
1075 1075 [('f', 'force', None,
1076 1076 _('set branch name even if it shadows an existing branch')),
1077 1077 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1078 1078 _('[-fC] [NAME]'))
1079 1079 def branch(ui, repo, label=None, **opts):
1080 1080 """set or show the current branch name
1081 1081
1082 1082 .. note::
1083 1083
1084 1084 Branch names are permanent and global. Use :hg:`bookmark` to create a
1085 1085 light-weight bookmark instead. See :hg:`help glossary` for more
1086 1086 information about named branches and bookmarks.
1087 1087
1088 1088 With no argument, show the current branch name. With one argument,
1089 1089 set the working directory branch name (the branch will not exist
1090 1090 in the repository until the next commit). Standard practice
1091 1091 recommends that primary development take place on the 'default'
1092 1092 branch.
1093 1093
1094 1094 Unless -f/--force is specified, branch will not let you set a
1095 1095 branch name that already exists.
1096 1096
1097 1097 Use -C/--clean to reset the working directory branch to that of
1098 1098 the parent of the working directory, negating a previous branch
1099 1099 change.
1100 1100
1101 1101 Use the command :hg:`update` to switch to an existing branch. Use
1102 1102 :hg:`commit --close-branch` to mark this branch head as closed.
1103 1103 When all heads of the branch are closed, the branch will be
1104 1104 considered closed.
1105 1105
1106 1106 Returns 0 on success.
1107 1107 """
1108 1108 if label:
1109 1109 label = label.strip()
1110 1110
1111 1111 if not opts.get('clean') and not label:
1112 1112 ui.write("%s\n" % repo.dirstate.branch())
1113 1113 return
1114 1114
1115 1115 wlock = repo.wlock()
1116 1116 try:
1117 1117 if opts.get('clean'):
1118 1118 label = repo[None].p1().branch()
1119 1119 repo.dirstate.setbranch(label)
1120 1120 ui.status(_('reset working directory to branch %s\n') % label)
1121 1121 elif label:
1122 1122 if not opts.get('force') and label in repo.branchmap():
1123 1123 if label not in [p.branch() for p in repo.parents()]:
1124 1124 raise error.Abort(_('a branch of the same name already'
1125 1125 ' exists'),
1126 1126 # i18n: "it" refers to an existing branch
1127 1127 hint=_("use 'hg update' to switch to it"))
1128 1128 scmutil.checknewlabel(repo, label, 'branch')
1129 1129 repo.dirstate.setbranch(label)
1130 1130 ui.status(_('marked working directory as branch %s\n') % label)
1131 1131
1132 1132 # find any open named branches aside from default
1133 1133 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1134 1134 if n != "default" and not c]
1135 1135 if not others:
1136 1136 ui.status(_('(branches are permanent and global, '
1137 1137 'did you want a bookmark?)\n'))
1138 1138 finally:
1139 1139 wlock.release()
1140 1140
1141 1141 @command('branches',
1142 1142 [('a', 'active', False,
1143 1143 _('show only branches that have unmerged heads (DEPRECATED)')),
1144 1144 ('c', 'closed', False, _('show normal and closed branches')),
1145 1145 ] + formatteropts,
1146 1146 _('[-ac]'))
1147 1147 def branches(ui, repo, active=False, closed=False, **opts):
1148 1148 """list repository named branches
1149 1149
1150 1150 List the repository's named branches, indicating which ones are
1151 1151 inactive. If -c/--closed is specified, also list branches which have
1152 1152 been marked closed (see :hg:`commit --close-branch`).
1153 1153
1154 1154 Use the command :hg:`update` to switch to an existing branch.
1155 1155
1156 1156 Returns 0.
1157 1157 """
1158 1158
1159 1159 fm = ui.formatter('branches', opts)
1160 1160 hexfunc = fm.hexfunc
1161 1161
1162 1162 allheads = set(repo.heads())
1163 1163 branches = []
1164 1164 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1165 1165 isactive = not isclosed and bool(set(heads) & allheads)
1166 1166 branches.append((tag, repo[tip], isactive, not isclosed))
1167 1167 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1168 1168 reverse=True)
1169 1169
1170 1170 for tag, ctx, isactive, isopen in branches:
1171 1171 if active and not isactive:
1172 1172 continue
1173 1173 if isactive:
1174 1174 label = 'branches.active'
1175 1175 notice = ''
1176 1176 elif not isopen:
1177 1177 if not closed:
1178 1178 continue
1179 1179 label = 'branches.closed'
1180 1180 notice = _(' (closed)')
1181 1181 else:
1182 1182 label = 'branches.inactive'
1183 1183 notice = _(' (inactive)')
1184 1184 current = (tag == repo.dirstate.branch())
1185 1185 if current:
1186 1186 label = 'branches.current'
1187 1187
1188 1188 fm.startitem()
1189 1189 fm.write('branch', '%s', tag, label=label)
1190 1190 rev = ctx.rev()
1191 1191 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1192 1192 fmt = ' ' * padsize + ' %d:%s'
1193 1193 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1194 1194 label='log.changeset changeset.%s' % ctx.phasestr())
1195 1195 fm.data(active=isactive, closed=not isopen, current=current)
1196 1196 if not ui.quiet:
1197 1197 fm.plain(notice)
1198 1198 fm.plain('\n')
1199 1199 fm.end()
1200 1200
1201 1201 @command('bundle',
1202 1202 [('f', 'force', None, _('run even when the destination is unrelated')),
1203 1203 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1204 1204 _('REV')),
1205 1205 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1206 1206 _('BRANCH')),
1207 1207 ('', 'base', [],
1208 1208 _('a base changeset assumed to be available at the destination'),
1209 1209 _('REV')),
1210 1210 ('a', 'all', None, _('bundle all changesets in the repository')),
1211 1211 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1212 1212 ] + remoteopts,
1213 1213 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1214 1214 def bundle(ui, repo, fname, dest=None, **opts):
1215 1215 """create a changegroup file
1216 1216
1217 1217 Generate a compressed changegroup file collecting changesets not
1218 1218 known to be in another repository.
1219 1219
1220 1220 If you omit the destination repository, then hg assumes the
1221 1221 destination will have all the nodes you specify with --base
1222 1222 parameters. To create a bundle containing all changesets, use
1223 1223 -a/--all (or --base null).
1224 1224
1225 1225 You can change bundle format with the -t/--type option. You can
1226 1226 specify a compression, a bundle version or both using a dash
1227 1227 (comp-version). The available compression methods are: none, bzip2,
1228 1228 and gzip (by default, bundles are compressed using bzip2). The
1229 1229 available formats are: v1, v2 (default to most suitable).
1230 1230
1231 1231 The bundle file can then be transferred using conventional means
1232 1232 and applied to another repository with the unbundle or pull
1233 1233 command. This is useful when direct push and pull are not
1234 1234 available or when exporting an entire repository is undesirable.
1235 1235
1236 1236 Applying bundles preserves all changeset contents including
1237 1237 permissions, copy/rename information, and revision history.
1238 1238
1239 1239 Returns 0 on success, 1 if no changes found.
1240 1240 """
1241 1241 revs = None
1242 1242 if 'rev' in opts:
1243 1243 revs = scmutil.revrange(repo, opts['rev'])
1244 1244
1245 1245 bundletype = opts.get('type', 'bzip2').lower()
1246 1246 try:
1247 bcompression, cgversion = exchange.parsebundlespec(
1247 bcompression, cgversion, params = exchange.parsebundlespec(
1248 1248 repo, bundletype, strict=False)
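    # Editor's annotation (not part of the changeset): parsebundlespec() now
    # also returns 'params' -- per the commit summary, presumably the
    # parameters embedded in the bundle spec string -- but this command only
    # widens the unpacking above and never consumes 'params' below.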
1249 1249 except error.UnsupportedBundleSpecification as e:
1250 1250 raise error.Abort(str(e),
1251 1251 hint=_('see "hg help bundle" for supported '
1252 1252 'values for --type'))
1253 1253
1254 1254 # Packed bundles are a pseudo bundle format for now.
1255 1255 if cgversion == 's1':
1256 1256 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1257 1257 hint=_('use "hg debugcreatestreamclonebundle"'))
1258 1258
1259 1259 if opts.get('all'):
1260 1260 base = ['null']
1261 1261 else:
1262 1262 base = scmutil.revrange(repo, opts.get('base'))
1263 1263 # TODO: get desired bundlecaps from command line.
1264 1264 bundlecaps = None
1265 1265 if base:
1266 1266 if dest:
1267 1267 raise error.Abort(_("--base is incompatible with specifying "
1268 1268 "a destination"))
1269 1269 common = [repo.lookup(rev) for rev in base]
1270 1270 heads = revs and map(repo.lookup, revs) or revs
1271 1271 cg = changegroup.getchangegroup(repo, 'bundle', heads=heads,
1272 1272 common=common, bundlecaps=bundlecaps,
1273 1273 version=cgversion)
1274 1274 outgoing = None
1275 1275 else:
1276 1276 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1277 1277 dest, branches = hg.parseurl(dest, opts.get('branch'))
1278 1278 other = hg.peer(repo, opts, dest)
1279 1279 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1280 1280 heads = revs and map(repo.lookup, revs) or revs
1281 1281 outgoing = discovery.findcommonoutgoing(repo, other,
1282 1282 onlyheads=heads,
1283 1283 force=opts.get('force'),
1284 1284 portable=True)
1285 1285 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1286 1286 bundlecaps, version=cgversion)
1287 1287 if not cg:
1288 1288 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1289 1289 return 1
1290 1290
1291 1291 if cgversion == '01': #bundle1
1292 1292 if bcompression is None:
1293 1293 bcompression = 'UN'
1294 1294 bversion = 'HG10' + bcompression
1295 1295 bcompression = None
1296 1296 else:
1297 1297 assert cgversion == '02'
1298 1298 bversion = 'HG20'
1299 1299
1300 1300
1301 1301 changegroup.writebundle(ui, cg, fname, bversion, compression=bcompression)
1302 1302
1303 1303 @command('cat',
1304 1304 [('o', 'output', '',
1305 1305 _('print output to file with formatted name'), _('FORMAT')),
1306 1306 ('r', 'rev', '', _('print the given revision'), _('REV')),
1307 1307 ('', 'decode', None, _('apply any matching decode filter')),
1308 1308 ] + walkopts,
1309 1309 _('[OPTION]... FILE...'),
1310 1310 inferrepo=True)
1311 1311 def cat(ui, repo, file1, *pats, **opts):
1312 1312 """output the current or given revision of files
1313 1313
1314 1314 Print the specified files as they were at the given revision. If
1315 1315 no revision is given, the parent of the working directory is used.
1316 1316
1317 1317 Output may be to a file, in which case the name of the file is
1318 1318 given using a format string. The formatting rules are as follows:
1319 1319
1320 1320 :``%%``: literal "%" character
1321 1321 :``%s``: basename of file being printed
1322 1322 :``%d``: dirname of file being printed, or '.' if in repository root
1323 1323 :``%p``: root-relative path name of file being printed
1324 1324 :``%H``: changeset hash (40 hexadecimal digits)
1325 1325 :``%R``: changeset revision number
1326 1326 :``%h``: short-form changeset hash (12 hexadecimal digits)
1327 1327 :``%r``: zero-padded changeset revision number
1328 1328 :``%b``: basename of the exporting repository
1329 1329
1330 1330 Returns 0 on success.
1331 1331 """
1332 1332 ctx = scmutil.revsingle(repo, opts.get('rev'))
1333 1333 m = scmutil.match(ctx, (file1,) + pats, opts)
1334 1334
1335 1335 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1336 1336
1337 1337 @command('^clone',
1338 1338 [('U', 'noupdate', None, _('the clone will include an empty working '
1339 1339 'directory (only a repository)')),
1340 1340 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1341 1341 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1342 1342 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1343 1343 ('', 'pull', None, _('use pull protocol to copy metadata')),
1344 1344 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1345 1345 ] + remoteopts,
1346 1346 _('[OPTION]... SOURCE [DEST]'),
1347 1347 norepo=True)
1348 1348 def clone(ui, source, dest=None, **opts):
1349 1349 """make a copy of an existing repository
1350 1350
1351 1351 Create a copy of an existing repository in a new directory.
1352 1352
1353 1353 If no destination directory name is specified, it defaults to the
1354 1354 basename of the source.
1355 1355
1356 1356 The location of the source is added to the new repository's
1357 1357 ``.hg/hgrc`` file, as the default to be used for future pulls.
1358 1358
1359 1359 Only local paths and ``ssh://`` URLs are supported as
1360 1360 destinations. For ``ssh://`` destinations, no working directory or
1361 1361 ``.hg/hgrc`` will be created on the remote side.
1362 1362
1363 1363 To pull only a subset of changesets, specify one or more revisions
1364 1364 identifiers with -r/--rev or branches with -b/--branch. The
1365 1365 resulting clone will contain only the specified changesets and
1366 1366 their ancestors. These options (or 'clone src#rev dest') imply
1367 1367 --pull, even for local source repositories. Note that specifying a
1368 1368 tag will include the tagged changeset but not the changeset
1369 1369 containing the tag.
1370 1370
1371 1371 If the source repository has a bookmark called '@' set, that
1372 1372 revision will be checked out in the new repository by default.
1373 1373
1374 1374 To check out a particular version, use -u/--update, or
1375 1375 -U/--noupdate to create a clone with no working directory.
1376 1376
1377 1377 .. container:: verbose
1378 1378
1379 1379 For efficiency, hardlinks are used for cloning whenever the
1380 1380 source and destination are on the same filesystem (note this
1381 1381 applies only to the repository data, not to the working
1382 1382 directory). Some filesystems, such as AFS, implement hardlinking
1383 1383 incorrectly, but do not report errors. In these cases, use the
1384 1384 --pull option to avoid hardlinking.
1385 1385
1386 1386 In some cases, you can clone repositories and the working
1387 1387 directory using full hardlinks with ::
1388 1388
1389 1389 $ cp -al REPO REPOCLONE
1390 1390
1391 1391 This is the fastest way to clone, but it is not always safe. The
1392 1392 operation is not atomic (making sure REPO is not modified during
1393 1393 the operation is up to you) and you have to make sure your
1394 1394 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1395 1395 so). Also, this is not compatible with certain extensions that
1396 1396 place their metadata under the .hg directory, such as mq.
1397 1397
1398 1398 Mercurial will update the working directory to the first applicable
1399 1399 revision from this list:
1400 1400
1401 1401 a) null if -U or the source repository has no changesets
1402 1402 b) if -u . and the source repository is local, the first parent of
1403 1403 the source repository's working directory
1404 1404 c) the changeset specified with -u (if a branch name, this means the
1405 1405 latest head of that branch)
1406 1406 d) the changeset specified with -r
1407 1407 e) the tipmost head specified with -b
1408 1408 f) the tipmost head specified with the url#branch source syntax
1409 1409 g) the revision marked with the '@' bookmark, if present
1410 1410 h) the tipmost head of the default branch
1411 1411 i) tip
1412 1412
1413 1413 Examples:
1414 1414
1415 1415 - clone a remote repository to a new directory named hg/::
1416 1416
1417 1417 hg clone http://selenic.com/hg
1418 1418
1419 1419 - create a lightweight local clone::
1420 1420
1421 1421 hg clone project/ project-feature/
1422 1422
1423 1423 - clone from an absolute path on an ssh server (note double-slash)::
1424 1424
1425 1425 hg clone ssh://user@server//home/projects/alpha/
1426 1426
1427 1427 - do a high-speed clone over a LAN while checking out a
1428 1428 specified version::
1429 1429
1430 1430 hg clone --uncompressed http://server/repo -u 1.5
1431 1431
1432 1432 - create a repository without changesets after a particular revision::
1433 1433
1434 1434 hg clone -r 04e544 experimental/ good/
1435 1435
1436 1436 - clone (and track) a particular named branch::
1437 1437
1438 1438 hg clone http://selenic.com/hg#stable
1439 1439
1440 1440 See :hg:`help urls` for details on specifying URLs.
1441 1441
1442 1442 Returns 0 on success.
1443 1443 """
1444 1444 if opts.get('noupdate') and opts.get('updaterev'):
1445 1445 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1446 1446
1447 1447 r = hg.clone(ui, opts, source, dest,
1448 1448 pull=opts.get('pull'),
1449 1449 stream=opts.get('uncompressed'),
1450 1450 rev=opts.get('rev'),
1451 1451 update=opts.get('updaterev') or not opts.get('noupdate'),
1452 1452 branch=opts.get('branch'),
1453 1453 shareopts=opts.get('shareopts'))
1454 1454
1455 1455 return r is None
1456 1456
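# Illustrative sketch (editor's addition, not part of the original module): the
# clone docstring above lists an a)-i) precedence for picking the revision to
# check out.  The real logic lives inside hg.clone(); the helper below is only
# a simplified, hypothetical restatement of that precedence over plain values,
# so the ordering is easy to follow.  All parameter names are assumptions.
def _sketch_clone_checkout_choice(noupdate, updaterev, rev, branch, urlbranch,
                                  atbookmark, defaultbranchhead, tip):
    """Return the first applicable checkout hint, mirroring rules a)-i)."""
    if noupdate:
        return 'null'                       # a) -U requested or empty source
    for candidate in (updaterev,            # b)/c) -u
                      rev,                  # d) -r
                      branch,               # e) -b
                      urlbranch,            # f) url#branch source syntax
                      atbookmark,           # g) the '@' bookmark
                      defaultbranchhead):   # h) tipmost head of default
        if candidate:
            return candidate
    return tip                              # i) fall back to tip
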
1457 1457 @command('^commit|ci',
1458 1458 [('A', 'addremove', None,
1459 1459 _('mark new/missing files as added/removed before committing')),
1460 1460 ('', 'close-branch', None,
1461 1461 _('mark a branch head as closed')),
1462 1462 ('', 'amend', None, _('amend the parent of the working directory')),
1463 1463 ('s', 'secret', None, _('use the secret phase for committing')),
1464 1464 ('e', 'edit', None, _('invoke editor on commit messages')),
1465 1465 ('i', 'interactive', None, _('use interactive mode')),
1466 1466 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1467 1467 _('[OPTION]... [FILE]...'),
1468 1468 inferrepo=True)
1469 1469 def commit(ui, repo, *pats, **opts):
1470 1470 """commit the specified files or all outstanding changes
1471 1471
1472 1472 Commit changes to the given files into the repository. Unlike a
1473 1473 centralized SCM, this operation is a local operation. See
1474 1474 :hg:`push` for a way to actively distribute your changes.
1475 1475
1476 1476 If a list of files is omitted, all changes reported by :hg:`status`
1477 1477 will be committed.
1478 1478
1479 1479 If you are committing the result of a merge, do not provide any
1480 1480 filenames or -I/-X filters.
1481 1481
1482 1482 If no commit message is specified, Mercurial starts your
1483 1483 configured editor where you can enter a message. In case your
1484 1484 commit fails, you will find a backup of your message in
1485 1485 ``.hg/last-message.txt``.
1486 1486
1487 1487 The --close-branch flag can be used to mark the current branch
1488 1488 head closed. When all heads of a branch are closed, the branch
1489 1489 will be considered closed and no longer listed.
1490 1490
1491 1491 The --amend flag can be used to amend the parent of the
1492 1492 working directory with a new commit that contains the changes
1493 1493 in the parent in addition to those currently reported by :hg:`status`,
1494 1494 if there are any. The old commit is stored in a backup bundle in
1495 1495 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1496 1496 on how to restore it).
1497 1497
1498 1498 Message, user and date are taken from the amended commit unless
1499 1499 specified. When a message isn't specified on the command line,
1500 1500 the editor will open with the message of the amended commit.
1501 1501
1502 1502 It is not possible to amend public changesets (see :hg:`help phases`)
1503 1503 or changesets that have children.
1504 1504
1505 1505 See :hg:`help dates` for a list of formats valid for -d/--date.
1506 1506
1507 1507 Returns 0 on success, 1 if nothing changed.
1508 1508 """
1509 1509 if opts.get('interactive'):
1510 1510 opts.pop('interactive')
1511 1511 cmdutil.dorecord(ui, repo, commit, None, False,
1512 1512 cmdutil.recordfilter, *pats, **opts)
1513 1513 return
1514 1514
1515 1515 if opts.get('subrepos'):
1516 1516 if opts.get('amend'):
1517 1517 raise error.Abort(_('cannot amend with --subrepos'))
1518 1518 # Let --subrepos on the command line override config setting.
1519 1519 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1520 1520
1521 1521 cmdutil.checkunfinished(repo, commit=True)
1522 1522
1523 1523 branch = repo[None].branch()
1524 1524 bheads = repo.branchheads(branch)
1525 1525
1526 1526 extra = {}
1527 1527 if opts.get('close_branch'):
1528 1528 extra['close'] = 1
1529 1529
1530 1530 if not bheads:
1531 1531 raise error.Abort(_('can only close branch heads'))
1532 1532 elif opts.get('amend'):
1533 1533 if repo.parents()[0].p1().branch() != branch and \
1534 1534 repo.parents()[0].p2().branch() != branch:
1535 1535 raise error.Abort(_('can only close branch heads'))
1536 1536
1537 1537 if opts.get('amend'):
1538 1538 if ui.configbool('ui', 'commitsubrepos'):
1539 1539 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1540 1540
1541 1541 old = repo['.']
1542 1542 if not old.mutable():
1543 1543 raise error.Abort(_('cannot amend public changesets'))
1544 1544 if len(repo[None].parents()) > 1:
1545 1545 raise error.Abort(_('cannot amend while merging'))
1546 1546 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1547 1547 if not allowunstable and old.children():
1548 1548 raise error.Abort(_('cannot amend changeset with children'))
1549 1549
1550 1550 # commitfunc is used only for temporary amend commit by cmdutil.amend
1551 1551 def commitfunc(ui, repo, message, match, opts):
1552 1552 return repo.commit(message,
1553 1553 opts.get('user') or old.user(),
1554 1554 opts.get('date') or old.date(),
1555 1555 match,
1556 1556 extra=extra)
1557 1557
1558 1558 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1559 1559 if node == old.node():
1560 1560 ui.status(_("nothing changed\n"))
1561 1561 return 1
1562 1562 else:
1563 1563 def commitfunc(ui, repo, message, match, opts):
1564 1564 backup = ui.backupconfig('phases', 'new-commit')
1565 1565 baseui = repo.baseui
1566 1566 basebackup = baseui.backupconfig('phases', 'new-commit')
1567 1567 try:
1568 1568 if opts.get('secret'):
1569 1569 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1570 1570 # Propagate to subrepos
1571 1571 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1572 1572
1573 1573 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1574 1574 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1575 1575 return repo.commit(message, opts.get('user'), opts.get('date'),
1576 1576 match,
1577 1577 editor=editor,
1578 1578 extra=extra)
1579 1579 finally:
1580 1580 ui.restoreconfig(backup)
1581 1581 repo.baseui.restoreconfig(basebackup)
1582 1582
1583 1583
1584 1584 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1585 1585
1586 1586 if not node:
1587 1587 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1588 1588 if stat[3]:
1589 1589 ui.status(_("nothing changed (%d missing files, see "
1590 1590 "'hg status')\n") % len(stat[3]))
1591 1591 else:
1592 1592 ui.status(_("nothing changed\n"))
1593 1593 return 1
1594 1594
1595 1595 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1596 1596
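# Illustrative sketch (editor's addition): the non-amend commitfunc above
# temporarily forces 'phases.new-commit' to 'secret' and restores the previous
# value afterwards.  The helper below shows that backupconfig/setconfig/
# restoreconfig pattern in isolation, using only the ui methods already used
# in commit(); it is a sketch, not a new Mercurial helper.
def _sketch_with_config_override(ui, section, name, value, source, fn):
    """Call fn() with (section, name) overridden to value, then restore it."""
    backup = ui.backupconfig(section, name)
    try:
        ui.setconfig(section, name, value, source)
        return fn()
    finally:
        ui.restoreconfig(backup)
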
1597 1597 @command('config|showconfig|debugconfig',
1598 1598 [('u', 'untrusted', None, _('show untrusted configuration options')),
1599 1599 ('e', 'edit', None, _('edit user config')),
1600 1600 ('l', 'local', None, _('edit repository config')),
1601 1601 ('g', 'global', None, _('edit global config'))],
1602 1602 _('[-u] [NAME]...'),
1603 1603 optionalrepo=True)
1604 1604 def config(ui, repo, *values, **opts):
1605 1605 """show combined config settings from all hgrc files
1606 1606
1607 1607 With no arguments, print names and values of all config items.
1608 1608
1609 1609 With one argument of the form section.name, print just the value
1610 1610 of that config item.
1611 1611
1612 1612 With multiple arguments, print names and values of all config
1613 1613 items with matching section names.
1614 1614
1615 1615 With --edit, start an editor on the user-level config file. With
1616 1616 --global, edit the system-wide config file. With --local, edit the
1617 1617 repository-level config file.
1618 1618
1619 1619 With --debug, the source (filename and line number) is printed
1620 1620 for each config item.
1621 1621
1622 1622 See :hg:`help config` for more information about config files.
1623 1623
1624 1624 Returns 0 on success, 1 if NAME does not exist.
1625 1625
1626 1626 """
1627 1627
1628 1628 if opts.get('edit') or opts.get('local') or opts.get('global'):
1629 1629 if opts.get('local') and opts.get('global'):
1630 1630 raise error.Abort(_("can't use --local and --global together"))
1631 1631
1632 1632 if opts.get('local'):
1633 1633 if not repo:
1634 1634 raise error.Abort(_("can't use --local outside a repository"))
1635 1635 paths = [repo.join('hgrc')]
1636 1636 elif opts.get('global'):
1637 1637 paths = scmutil.systemrcpath()
1638 1638 else:
1639 1639 paths = scmutil.userrcpath()
1640 1640
1641 1641 for f in paths:
1642 1642 if os.path.exists(f):
1643 1643 break
1644 1644 else:
1645 1645 if opts.get('global'):
1646 1646 samplehgrc = uimod.samplehgrcs['global']
1647 1647 elif opts.get('local'):
1648 1648 samplehgrc = uimod.samplehgrcs['local']
1649 1649 else:
1650 1650 samplehgrc = uimod.samplehgrcs['user']
1651 1651
1652 1652 f = paths[0]
1653 1653 fp = open(f, "w")
1654 1654 fp.write(samplehgrc)
1655 1655 fp.close()
1656 1656
1657 1657 editor = ui.geteditor()
1658 1658 ui.system("%s \"%s\"" % (editor, f),
1659 1659 onerr=error.Abort, errprefix=_("edit failed"))
1660 1660 return
1661 1661
1662 1662 for f in scmutil.rcpath():
1663 1663 ui.debug('read config from: %s\n' % f)
1664 1664 untrusted = bool(opts.get('untrusted'))
1665 1665 if values:
1666 1666 sections = [v for v in values if '.' not in v]
1667 1667 items = [v for v in values if '.' in v]
1668 1668 if len(items) > 1 or items and sections:
1669 1669 raise error.Abort(_('only one config item permitted'))
1670 1670 matched = False
1671 1671 for section, name, value in ui.walkconfig(untrusted=untrusted):
1672 1672 value = str(value).replace('\n', '\\n')
1673 1673 sectname = section + '.' + name
1674 1674 if values:
1675 1675 for v in values:
1676 1676 if v == section:
1677 1677 ui.debug('%s: ' %
1678 1678 ui.configsource(section, name, untrusted))
1679 1679 ui.write('%s=%s\n' % (sectname, value))
1680 1680 matched = True
1681 1681 elif v == sectname:
1682 1682 ui.debug('%s: ' %
1683 1683 ui.configsource(section, name, untrusted))
1684 1684 ui.write(value, '\n')
1685 1685 matched = True
1686 1686 else:
1687 1687 ui.debug('%s: ' %
1688 1688 ui.configsource(section, name, untrusted))
1689 1689 ui.write('%s=%s\n' % (sectname, value))
1690 1690 matched = True
1691 1691 if matched:
1692 1692 return 0
1693 1693 return 1
1694 1694
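# Illustrative sketch (editor's addition): config() above splits its NAME
# arguments into whole sections (no dot) and single section.name items (with a
# dot), and rejects more than one item per invocation.  The helper below is a
# hypothetical extraction of that classification, kept separate purely for
# readability.
def _sketch_classify_config_args(values):
    """Split config query arguments into (sections, items)."""
    sections = [v for v in values if '.' not in v]
    items = [v for v in values if '.' in v]
    if len(items) > 1 or (items and sections):
        raise ValueError('only one config item permitted')
    return sections, items
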
1695 1695 @command('copy|cp',
1696 1696 [('A', 'after', None, _('record a copy that has already occurred')),
1697 1697 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1698 1698 ] + walkopts + dryrunopts,
1699 1699 _('[OPTION]... [SOURCE]... DEST'))
1700 1700 def copy(ui, repo, *pats, **opts):
1701 1701 """mark files as copied for the next commit
1702 1702
1703 1703 Mark dest as having copies of source files. If dest is a
1704 1704 directory, copies are put in that directory. If dest is a file,
1705 1705 the source must be a single file.
1706 1706
1707 1707 By default, this command copies the contents of files as they
1708 1708 exist in the working directory. If invoked with -A/--after, the
1709 1709 operation is recorded, but no copying is performed.
1710 1710
1711 1711 This command takes effect with the next commit. To undo a copy
1712 1712 before that, see :hg:`revert`.
1713 1713
1714 1714 Returns 0 on success, 1 if errors are encountered.
1715 1715 """
1716 1716 wlock = repo.wlock(False)
1717 1717 try:
1718 1718 return cmdutil.copy(ui, repo, pats, opts)
1719 1719 finally:
1720 1720 wlock.release()
1721 1721
1722 1722 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
1723 1723 def debugancestor(ui, repo, *args):
1724 1724 """find the ancestor revision of two revisions in a given index"""
1725 1725 if len(args) == 3:
1726 1726 index, rev1, rev2 = args
1727 1727 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1728 1728 lookup = r.lookup
1729 1729 elif len(args) == 2:
1730 1730 if not repo:
1731 1731 raise error.Abort(_("there is no Mercurial repository here "
1732 1732 "(.hg not found)"))
1733 1733 rev1, rev2 = args
1734 1734 r = repo.changelog
1735 1735 lookup = repo.lookup
1736 1736 else:
1737 1737 raise error.Abort(_('either two or three arguments required'))
1738 1738 a = r.ancestor(lookup(rev1), lookup(rev2))
1739 1739 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1740 1740
1741 1741 @command('debugbuilddag',
1742 1742 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1743 1743 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1744 1744 ('n', 'new-file', None, _('add new file at each rev'))],
1745 1745 _('[OPTION]... [TEXT]'))
1746 1746 def debugbuilddag(ui, repo, text=None,
1747 1747 mergeable_file=False,
1748 1748 overwritten_file=False,
1749 1749 new_file=False):
1750 1750 """builds a repo with a given DAG from scratch in the current empty repo
1751 1751
1752 1752 The description of the DAG is read from stdin if not given on the
1753 1753 command line.
1754 1754
1755 1755 Elements:
1756 1756
1757 1757 - "+n" is a linear run of n nodes based on the current default parent
1758 1758 - "." is a single node based on the current default parent
1759 1759 - "$" resets the default parent to null (implied at the start);
1760 1760 otherwise the default parent is always the last node created
1761 1761 - "<p" sets the default parent to the backref p
1762 1762 - "*p" is a fork at parent p, which is a backref
1763 1763 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1764 1764 - "/p2" is a merge of the preceding node and p2
1765 1765 - ":tag" defines a local tag for the preceding node
1766 1766 - "@branch" sets the named branch for subsequent nodes
1767 1767 - "#...\\n" is a comment up to the end of the line
1768 1768
1769 1769 Whitespace between the above elements is ignored.
1770 1770
1771 1771 A backref is either
1772 1772
1773 1773 - a number n, which references the node curr-n, where curr is the current
1774 1774 node, or
1775 1775 - the name of a local tag you placed earlier using ":tag", or
1776 1776 - empty to denote the default parent.
1777 1777
1778 1778     All string-valued elements are either strictly alphanumeric, or must
1779 1779 be enclosed in double quotes ("..."), with "\\" as escape character.
1780 1780 """
1781 1781
1782 1782 if text is None:
1783 1783 ui.status(_("reading DAG from stdin\n"))
1784 1784 text = ui.fin.read()
1785 1785
1786 1786 cl = repo.changelog
1787 1787 if len(cl) > 0:
1788 1788 raise error.Abort(_('repository is not empty'))
1789 1789
1790 1790 # determine number of revs in DAG
1791 1791 total = 0
1792 1792 for type, data in dagparser.parsedag(text):
1793 1793 if type == 'n':
1794 1794 total += 1
1795 1795
1796 1796 if mergeable_file:
1797 1797 linesperrev = 2
1798 1798 # make a file with k lines per rev
1799 1799 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1800 1800 initialmergedlines.append("")
1801 1801
1802 1802 tags = []
1803 1803
1804 1804 lock = tr = None
1805 1805 try:
1806 1806 lock = repo.lock()
1807 1807 tr = repo.transaction("builddag")
1808 1808
1809 1809 at = -1
1810 1810 atbranch = 'default'
1811 1811 nodeids = []
1812 1812 id = 0
1813 1813 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1814 1814 for type, data in dagparser.parsedag(text):
1815 1815 if type == 'n':
1816 1816 ui.note(('node %s\n' % str(data)))
1817 1817 id, ps = data
1818 1818
1819 1819 files = []
1820 1820 fctxs = {}
1821 1821
1822 1822 p2 = None
1823 1823 if mergeable_file:
1824 1824 fn = "mf"
1825 1825 p1 = repo[ps[0]]
1826 1826 if len(ps) > 1:
1827 1827 p2 = repo[ps[1]]
1828 1828 pa = p1.ancestor(p2)
1829 1829 base, local, other = [x[fn].data() for x in (pa, p1,
1830 1830 p2)]
1831 1831 m3 = simplemerge.Merge3Text(base, local, other)
1832 1832 ml = [l.strip() for l in m3.merge_lines()]
1833 1833 ml.append("")
1834 1834 elif at > 0:
1835 1835 ml = p1[fn].data().split("\n")
1836 1836 else:
1837 1837 ml = initialmergedlines
1838 1838 ml[id * linesperrev] += " r%i" % id
1839 1839 mergedtext = "\n".join(ml)
1840 1840 files.append(fn)
1841 1841 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
1842 1842
1843 1843 if overwritten_file:
1844 1844 fn = "of"
1845 1845 files.append(fn)
1846 1846 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1847 1847
1848 1848 if new_file:
1849 1849 fn = "nf%i" % id
1850 1850 files.append(fn)
1851 1851 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
1852 1852 if len(ps) > 1:
1853 1853 if not p2:
1854 1854 p2 = repo[ps[1]]
1855 1855 for fn in p2:
1856 1856 if fn.startswith("nf"):
1857 1857 files.append(fn)
1858 1858 fctxs[fn] = p2[fn]
1859 1859
1860 1860 def fctxfn(repo, cx, path):
1861 1861 return fctxs.get(path)
1862 1862
1863 1863 if len(ps) == 0 or ps[0] < 0:
1864 1864 pars = [None, None]
1865 1865 elif len(ps) == 1:
1866 1866 pars = [nodeids[ps[0]], None]
1867 1867 else:
1868 1868 pars = [nodeids[p] for p in ps]
1869 1869 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1870 1870 date=(id, 0),
1871 1871 user="debugbuilddag",
1872 1872 extra={'branch': atbranch})
1873 1873 nodeid = repo.commitctx(cx)
1874 1874 nodeids.append(nodeid)
1875 1875 at = id
1876 1876 elif type == 'l':
1877 1877 id, name = data
1878 1878 ui.note(('tag %s\n' % name))
1879 1879 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1880 1880 elif type == 'a':
1881 1881 ui.note(('branch %s\n' % data))
1882 1882 atbranch = data
1883 1883 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1884 1884 tr.close()
1885 1885
1886 1886 if tags:
1887 1887 repo.vfs.write("localtags", "".join(tags))
1888 1888 finally:
1889 1889 ui.progress(_('building'), None)
1890 1890 release(tr, lock)
1891 1891
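# Illustrative sketch (editor's addition): the DAG mini-language documented in
# the debugbuilddag docstring is parsed by dagparser.parsedag(), which yields
# typed events -- 'n' for nodes, 'l' for local tags, 'a' for branch changes.
# The helper below tallies those events for a small sample text, mirroring the
# counting loop at the top of debugbuilddag.  The sample text is only an
# illustration of the syntax above, not a meaningful repository layout.
def _sketch_count_dag_events(text="+3 :foo *foo +2"):
    """Return a dict mapping parsedag event types to counts for text."""
    counts = {}
    for kind, data in dagparser.parsedag(text):
        counts[kind] = counts.get(kind, 0) + 1
    return counts
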
1892 1892 @command('debugbundle',
1893 1893 [('a', 'all', None, _('show all details'))],
1894 1894 _('FILE'),
1895 1895 norepo=True)
1896 1896 def debugbundle(ui, bundlepath, all=None, **opts):
1897 1897 """lists the contents of a bundle"""
1898 1898 f = hg.openpath(ui, bundlepath)
1899 1899 try:
1900 1900 gen = exchange.readbundle(ui, f, bundlepath)
1901 1901 if isinstance(gen, bundle2.unbundle20):
1902 1902 return _debugbundle2(ui, gen, all=all, **opts)
1903 1903 if all:
1904 1904 ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))
1905 1905
1906 1906 def showchunks(named):
1907 1907 ui.write("\n%s\n" % named)
1908 1908 chain = None
1909 1909 while True:
1910 1910 chunkdata = gen.deltachunk(chain)
1911 1911 if not chunkdata:
1912 1912 break
1913 1913 node = chunkdata['node']
1914 1914 p1 = chunkdata['p1']
1915 1915 p2 = chunkdata['p2']
1916 1916 cs = chunkdata['cs']
1917 1917 deltabase = chunkdata['deltabase']
1918 1918 delta = chunkdata['delta']
1919 1919 ui.write("%s %s %s %s %s %s\n" %
1920 1920 (hex(node), hex(p1), hex(p2),
1921 1921 hex(cs), hex(deltabase), len(delta)))
1922 1922 chain = node
1923 1923
1924 1924 chunkdata = gen.changelogheader()
1925 1925 showchunks("changelog")
1926 1926 chunkdata = gen.manifestheader()
1927 1927 showchunks("manifest")
1928 1928 while True:
1929 1929 chunkdata = gen.filelogheader()
1930 1930 if not chunkdata:
1931 1931 break
1932 1932 fname = chunkdata['filename']
1933 1933 showchunks(fname)
1934 1934 else:
1935 1935 if isinstance(gen, bundle2.unbundle20):
1936 1936 raise error.Abort(_('use debugbundle2 for this file'))
1937 1937 chunkdata = gen.changelogheader()
1938 1938 chain = None
1939 1939 while True:
1940 1940 chunkdata = gen.deltachunk(chain)
1941 1941 if not chunkdata:
1942 1942 break
1943 1943 node = chunkdata['node']
1944 1944 ui.write("%s\n" % hex(node))
1945 1945 chain = node
1946 1946 finally:
1947 1947 f.close()
1948 1948
1949 1949 def _debugbundle2(ui, gen, **opts):
1950 1950 """lists the contents of a bundle2"""
1951 1951 if not isinstance(gen, bundle2.unbundle20):
1952 1952 raise error.Abort(_('not a bundle2 file'))
1953 1953 ui.write(('Stream params: %s\n' % repr(gen.params)))
1954 1954 for part in gen.iterparts():
1955 1955 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
1956 1956 if part.type == 'changegroup':
1957 1957 version = part.params.get('version', '01')
1958 1958 cg = changegroup.packermap[version][1](part, 'UN')
1959 1959 chunkdata = cg.changelogheader()
1960 1960 chain = None
1961 1961 while True:
1962 1962 chunkdata = cg.deltachunk(chain)
1963 1963 if not chunkdata:
1964 1964 break
1965 1965 node = chunkdata['node']
1966 1966 ui.write(" %s\n" % hex(node))
1967 1967 chain = node
1968 1968
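# Illustrative sketch (editor's addition): both branches of debugbundle above
# follow the same pattern -- open the file with hg.openpath(), hand it to
# exchange.readbundle(), then pull delta chunks with deltachunk() until it
# returns a false value.  The helper below condenses that pattern for a plain
# changegroup (bundle1) file; bundle2 files need the part iteration shown in
# _debugbundle2 instead.
def _sketch_changelog_nodes(ui, bundlepath):
    """Return the hex changelog node ids contained in a bundle1 file."""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('bundle2 files need per-part handling'))
        gen.changelogheader()
        nodes = []
        chain = None
        while True:
            chunkdata = gen.deltachunk(chain)
            if not chunkdata:
                break
            chain = chunkdata['node']
            nodes.append(hex(chain))
        return nodes
    finally:
        f.close()
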
1969 1969 @command('debugcreatestreamclonebundle', [], 'FILE')
1970 1970 def debugcreatestreamclonebundle(ui, repo, fname):
1971 1971 """create a stream clone bundle file
1972 1972
1973 1973 Stream bundles are special bundles that are essentially archives of
1974 1974 revlog files. They are commonly used for cloning very quickly.
1975 1975 """
1976 1976 requirements, gen = streamclone.generatebundlev1(repo)
1977 1977 changegroup.writechunks(ui, gen, fname)
1978 1978
1979 1979 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
1980 1980
1981 1981 @command('debugapplystreamclonebundle', [], 'FILE')
1982 1982 def debugapplystreamclonebundle(ui, repo, fname):
1983 1983 """apply a stream clone bundle file"""
1984 1984 f = hg.openpath(ui, fname)
1985 1985 gen = exchange.readbundle(ui, f, fname)
1986 1986 gen.apply(repo)
1987 1987
1988 1988 @command('debugcheckstate', [], '')
1989 1989 def debugcheckstate(ui, repo):
1990 1990 """validate the correctness of the current dirstate"""
1991 1991 parent1, parent2 = repo.dirstate.parents()
1992 1992 m1 = repo[parent1].manifest()
1993 1993 m2 = repo[parent2].manifest()
1994 1994 errors = 0
1995 1995 for f in repo.dirstate:
1996 1996 state = repo.dirstate[f]
1997 1997 if state in "nr" and f not in m1:
1998 1998 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1999 1999 errors += 1
2000 2000 if state in "a" and f in m1:
2001 2001 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
2002 2002 errors += 1
2003 2003 if state in "m" and f not in m1 and f not in m2:
2004 2004 ui.warn(_("%s in state %s, but not in either manifest\n") %
2005 2005 (f, state))
2006 2006 errors += 1
2007 2007 for f in m1:
2008 2008 state = repo.dirstate[f]
2009 2009 if state not in "nrm":
2010 2010             ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
2011 2011 errors += 1
2012 2012 if errors:
2013 2013         errstr = _(".hg/dirstate inconsistent with current parent's manifest")
2014 2014         raise error.Abort(errstr)
2015 2015
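# Reading aid (editor's addition): the single-letter codes tested in
# debugcheckstate are dirstate entry states.  The informal glosses below are
# the editor's summary, not authoritative definitions -- those live in the
# dirstate implementation.
_SKETCH_DIRSTATE_STATES = {
    'n': 'normal (tracked)',
    'a': 'added',
    'r': 'removed',
    'm': 'merged from the second parent',
}
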
2016 2016 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
2017 2017 def debugcommands(ui, cmd='', *args):
2018 2018 """list all available commands and options"""
2019 2019 for cmd, vals in sorted(table.iteritems()):
2020 2020 cmd = cmd.split('|')[0].strip('^')
2021 2021 opts = ', '.join([i[1] for i in vals[1]])
2022 2022 ui.write('%s: %s\n' % (cmd, opts))
2023 2023
2024 2024 @command('debugcomplete',
2025 2025 [('o', 'options', None, _('show the command options'))],
2026 2026 _('[-o] CMD'),
2027 2027 norepo=True)
2028 2028 def debugcomplete(ui, cmd='', **opts):
2029 2029 """returns the completion list associated with the given command"""
2030 2030
2031 2031 if opts.get('options'):
2032 2032 options = []
2033 2033 otables = [globalopts]
2034 2034 if cmd:
2035 2035 aliases, entry = cmdutil.findcmd(cmd, table, False)
2036 2036 otables.append(entry[1])
2037 2037 for t in otables:
2038 2038 for o in t:
2039 2039 if "(DEPRECATED)" in o[3]:
2040 2040 continue
2041 2041 if o[0]:
2042 2042 options.append('-%s' % o[0])
2043 2043 options.append('--%s' % o[1])
2044 2044 ui.write("%s\n" % "\n".join(options))
2045 2045 return
2046 2046
2047 2047 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2048 2048 if ui.verbose:
2049 2049 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
2050 2050 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
2051 2051
2052 2052 @command('debugdag',
2053 2053 [('t', 'tags', None, _('use tags as labels')),
2054 2054 ('b', 'branches', None, _('annotate with branch names')),
2055 2055 ('', 'dots', None, _('use dots for runs')),
2056 2056 ('s', 'spaces', None, _('separate elements by spaces'))],
2057 2057 _('[OPTION]... [FILE [REV]...]'),
2058 2058 optionalrepo=True)
2059 2059 def debugdag(ui, repo, file_=None, *revs, **opts):
2060 2060 """format the changelog or an index DAG as a concise textual description
2061 2061
2062 2062 If you pass a revlog index, the revlog's DAG is emitted. If you list
2063 2063 revision numbers, they get labeled in the output as rN.
2064 2064
2065 2065 Otherwise, the changelog DAG of the current repo is emitted.
2066 2066 """
2067 2067 spaces = opts.get('spaces')
2068 2068 dots = opts.get('dots')
2069 2069 if file_:
2070 2070 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2071 2071 revs = set((int(r) for r in revs))
2072 2072 def events():
2073 2073 for r in rlog:
2074 2074 yield 'n', (r, list(p for p in rlog.parentrevs(r)
2075 2075 if p != -1))
2076 2076 if r in revs:
2077 2077 yield 'l', (r, "r%i" % r)
2078 2078 elif repo:
2079 2079 cl = repo.changelog
2080 2080 tags = opts.get('tags')
2081 2081 branches = opts.get('branches')
2082 2082 if tags:
2083 2083 labels = {}
2084 2084 for l, n in repo.tags().items():
2085 2085 labels.setdefault(cl.rev(n), []).append(l)
2086 2086 def events():
2087 2087 b = "default"
2088 2088 for r in cl:
2089 2089 if branches:
2090 2090 newb = cl.read(cl.node(r))[5]['branch']
2091 2091 if newb != b:
2092 2092 yield 'a', newb
2093 2093 b = newb
2094 2094 yield 'n', (r, list(p for p in cl.parentrevs(r)
2095 2095 if p != -1))
2096 2096 if tags:
2097 2097 ls = labels.get(r)
2098 2098 if ls:
2099 2099 for l in ls:
2100 2100 yield 'l', (r, l)
2101 2101 else:
2102 2102 raise error.Abort(_('need repo for changelog dag'))
2103 2103
2104 2104 for line in dagparser.dagtextlines(events(),
2105 2105 addspaces=spaces,
2106 2106 wraplabels=True,
2107 2107 wrapannotations=True,
2108 2108 wrapnonlinear=dots,
2109 2109 usedots=dots,
2110 2110 maxlinewidth=70):
2111 2111 ui.write(line)
2112 2112 ui.write("\n")
2113 2113
2114 2114 @command('debugdata',
2115 2115 [('c', 'changelog', False, _('open changelog')),
2116 2116 ('m', 'manifest', False, _('open manifest')),
2117 2117 ('', 'dir', False, _('open directory manifest'))],
2118 2118 _('-c|-m|FILE REV'))
2119 2119 def debugdata(ui, repo, file_, rev=None, **opts):
2120 2120 """dump the contents of a data file revision"""
2121 2121 if opts.get('changelog') or opts.get('manifest'):
2122 2122 file_, rev = None, file_
2123 2123 elif rev is None:
2124 2124 raise error.CommandError('debugdata', _('invalid arguments'))
2125 2125 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
2126 2126 try:
2127 2127 ui.write(r.revision(r.lookup(rev)))
2128 2128 except KeyError:
2129 2129 raise error.Abort(_('invalid revision identifier %s') % rev)
2130 2130
2131 2131 @command('debugdate',
2132 2132 [('e', 'extended', None, _('try extended date formats'))],
2133 2133 _('[-e] DATE [RANGE]'),
2134 2134 norepo=True, optionalrepo=True)
2135 2135 def debugdate(ui, date, range=None, **opts):
2136 2136 """parse and display a date"""
2137 2137 if opts["extended"]:
2138 2138 d = util.parsedate(date, util.extendeddateformats)
2139 2139 else:
2140 2140 d = util.parsedate(date)
2141 2141 ui.write(("internal: %s %s\n") % d)
2142 2142 ui.write(("standard: %s\n") % util.datestr(d))
2143 2143 if range:
2144 2144 m = util.matchdate(range)
2145 2145 ui.write(("match: %s\n") % m(d[0]))
2146 2146
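# Illustrative sketch (editor's addition): debugdate above combines three util
# helpers -- parsedate() turns a date string into an (unixtime, offset) pair,
# datestr() renders that pair back to text, and matchdate() compiles a range
# such as '>2015-01-01' into a predicate over unix timestamps.  The helper
# below strings them together; it is a sketch built only from the calls used
# in debugdate.
def _sketch_date_roundtrip(date, daterange=None):
    """Return (internal_pair, rendered_string, in_range_or_None) for date."""
    d = util.parsedate(date)
    rendered = util.datestr(d)
    inrange = None
    if daterange:
        inrange = util.matchdate(daterange)(d[0])
    return d, rendered, inrange
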
2147 2147 @command('debugdiscovery',
2148 2148 [('', 'old', None, _('use old-style discovery')),
2149 2149 ('', 'nonheads', None,
2150 2150 _('use old-style discovery with non-heads included')),
2151 2151 ] + remoteopts,
2152 2152 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
2153 2153 def debugdiscovery(ui, repo, remoteurl="default", **opts):
2154 2154 """runs the changeset discovery protocol in isolation"""
2155 2155 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
2156 2156 opts.get('branch'))
2157 2157 remote = hg.peer(repo, opts, remoteurl)
2158 2158 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
2159 2159
2160 2160 # make sure tests are repeatable
2161 2161 random.seed(12323)
2162 2162
2163 2163 def doit(localheads, remoteheads, remote=remote):
2164 2164 if opts.get('old'):
2165 2165 if localheads:
2166 2166 raise error.Abort('cannot use localheads with old style '
2167 2167 'discovery')
2168 2168 if not util.safehasattr(remote, 'branches'):
2169 2169 # enable in-client legacy support
2170 2170 remote = localrepo.locallegacypeer(remote.local())
2171 2171 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
2172 2172 force=True)
2173 2173 common = set(common)
2174 2174 if not opts.get('nonheads'):
2175 2175 ui.write(("unpruned common: %s\n") %
2176 2176 " ".join(sorted(short(n) for n in common)))
2177 2177 dag = dagutil.revlogdag(repo.changelog)
2178 2178 all = dag.ancestorset(dag.internalizeall(common))
2179 2179 common = dag.externalizeall(dag.headsetofconnecteds(all))
2180 2180 else:
2181 2181 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
2182 2182 common = set(common)
2183 2183 rheads = set(hds)
2184 2184 lheads = set(repo.heads())
2185 2185 ui.write(("common heads: %s\n") %
2186 2186 " ".join(sorted(short(n) for n in common)))
2187 2187 if lheads <= common:
2188 2188 ui.write(("local is subset\n"))
2189 2189 elif rheads <= common:
2190 2190 ui.write(("remote is subset\n"))
2191 2191
2192 2192 serverlogs = opts.get('serverlog')
2193 2193 if serverlogs:
2194 2194 for filename in serverlogs:
2195 2195 logfile = open(filename, 'r')
2196 2196 try:
2197 2197 line = logfile.readline()
2198 2198 while line:
2199 2199 parts = line.strip().split(';')
2200 2200 op = parts[1]
2201 2201 if op == 'cg':
2202 2202 pass
2203 2203 elif op == 'cgss':
2204 2204 doit(parts[2].split(' '), parts[3].split(' '))
2205 2205 elif op == 'unb':
2206 2206 doit(parts[3].split(' '), parts[2].split(' '))
2207 2207 line = logfile.readline()
2208 2208 finally:
2209 2209 logfile.close()
2210 2210
2211 2211 else:
2212 2212 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
2213 2213 opts.get('remote_head'))
2214 2214 localrevs = opts.get('local_head')
2215 2215 doit(localrevs, remoterevs)
2216 2216
2217 2217 @command('debugextensions', formatteropts, [], norepo=True)
2218 2218 def debugextensions(ui, **opts):
2219 2219 '''show information about active extensions'''
2220 2220 exts = extensions.extensions(ui)
2221 2221 fm = ui.formatter('debugextensions', opts)
2222 2222 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
2223 2223 extsource = extmod.__file__
2224 2224 exttestedwith = getattr(extmod, 'testedwith', None)
2225 2225 if exttestedwith is not None:
2226 2226 exttestedwith = exttestedwith.split()
2227 2227 extbuglink = getattr(extmod, 'buglink', None)
2228 2228
2229 2229 fm.startitem()
2230 2230
2231 2231 if ui.quiet or ui.verbose:
2232 2232 fm.write('name', '%s\n', extname)
2233 2233 else:
2234 2234 fm.write('name', '%s', extname)
2235 2235 if not exttestedwith:
2236 2236 fm.plain(_(' (untested!)\n'))
2237 2237 else:
2238 2238 if exttestedwith == ['internal'] or \
2239 2239 util.version() in exttestedwith:
2240 2240 fm.plain('\n')
2241 2241 else:
2242 2242 lasttestedversion = exttestedwith[-1]
2243 2243 fm.plain(' (%s!)\n' % lasttestedversion)
2244 2244
2245 2245 fm.condwrite(ui.verbose and extsource, 'source',
2246 2246 _(' location: %s\n'), extsource or "")
2247 2247
2248 2248 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
2249 2249 _(' tested with: %s\n'), ' '.join(exttestedwith or []))
2250 2250
2251 2251 fm.condwrite(ui.verbose and extbuglink, 'buglink',
2252 2252 _(' bug reporting: %s\n'), extbuglink or "")
2253 2253
2254 2254 fm.end()
2255 2255
2256 2256 @command('debugfileset',
2257 2257 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
2258 2258 _('[-r REV] FILESPEC'))
2259 2259 def debugfileset(ui, repo, expr, **opts):
2260 2260 '''parse and apply a fileset specification'''
2261 2261 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
2262 2262 if ui.verbose:
2263 2263 tree = fileset.parse(expr)
2264 2264 ui.note(fileset.prettyformat(tree), "\n")
2265 2265
2266 2266 for f in ctx.getfileset(expr):
2267 2267 ui.write("%s\n" % f)
2268 2268
2269 2269 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
2270 2270 def debugfsinfo(ui, path="."):
2271 2271 """show information detected about current filesystem"""
2272 2272 util.writefile('.debugfsinfo', '')
2273 2273 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
2274 2274 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
2275 2275 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
2276 2276 ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
2277 2277 and 'yes' or 'no'))
2278 2278 os.unlink('.debugfsinfo')
2279 2279
2280 2280 @command('debuggetbundle',
2281 2281 [('H', 'head', [], _('id of head node'), _('ID')),
2282 2282 ('C', 'common', [], _('id of common node'), _('ID')),
2283 2283 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
2284 2284 _('REPO FILE [-H|-C ID]...'),
2285 2285 norepo=True)
2286 2286 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
2287 2287 """retrieves a bundle from a repo
2288 2288
2289 2289 Every ID must be a full-length hex node id string. Saves the bundle to the
2290 2290 given file.
2291 2291 """
2292 2292 repo = hg.peer(ui, opts, repopath)
2293 2293 if not repo.capable('getbundle'):
2294 2294 raise error.Abort("getbundle() not supported by target repository")
2295 2295 args = {}
2296 2296 if common:
2297 2297 args['common'] = [bin(s) for s in common]
2298 2298 if head:
2299 2299 args['heads'] = [bin(s) for s in head]
2300 2300 # TODO: get desired bundlecaps from command line.
2301 2301 args['bundlecaps'] = None
2302 2302 bundle = repo.getbundle('debug', **args)
2303 2303
2304 2304 bundletype = opts.get('type', 'bzip2').lower()
2305 2305 btypes = {'none': 'HG10UN',
2306 2306 'bzip2': 'HG10BZ',
2307 2307 'gzip': 'HG10GZ',
2308 2308 'bundle2': 'HG20'}
2309 2309 bundletype = btypes.get(bundletype)
2310 2310 if bundletype not in changegroup.bundletypes:
2311 2311 raise error.Abort(_('unknown bundle type specified with --type'))
2312 2312 changegroup.writebundle(ui, bundle, bundlepath, bundletype)
2313 2313
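# Illustrative sketch (editor's addition): debuggetbundle maps the user-facing
# --type names onto the internal changegroup bundle identifiers before calling
# changegroup.writebundle().  The helper below isolates that translation step;
# the accepted names simply mirror the btypes table above.
def _sketch_bundletype(name):
    """Translate a --type value into an internal bundle type, or abort."""
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(name.lower())
    if bundletype not in changegroup.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    return bundletype
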
2314 2314 @command('debugignore', [], '')
2315 2315 def debugignore(ui, repo, *values, **opts):
2316 2316 """display the combined ignore pattern"""
2317 2317 ignore = repo.dirstate._ignore
2318 2318 includepat = getattr(ignore, 'includepat', None)
2319 2319 if includepat is not None:
2320 2320 ui.write("%s\n" % includepat)
2321 2321 else:
2322 2322 raise error.Abort(_("no ignore patterns found"))
2323 2323
2324 2324 @command('debugindex',
2325 2325 [('c', 'changelog', False, _('open changelog')),
2326 2326 ('m', 'manifest', False, _('open manifest')),
2327 2327 ('', 'dir', False, _('open directory manifest')),
2328 2328 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2329 2329 _('[-f FORMAT] -c|-m|FILE'),
2330 2330 optionalrepo=True)
2331 2331 def debugindex(ui, repo, file_=None, **opts):
2332 2332 """dump the contents of an index file"""
2333 2333 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
2334 2334 format = opts.get('format', 0)
2335 2335 if format not in (0, 1):
2336 2336 raise error.Abort(_("unknown format %d") % format)
2337 2337
2338 2338 generaldelta = r.version & revlog.REVLOGGENERALDELTA
2339 2339 if generaldelta:
2340 2340 basehdr = ' delta'
2341 2341 else:
2342 2342 basehdr = ' base'
2343 2343
2344 2344 if ui.debugflag:
2345 2345 shortfn = hex
2346 2346 else:
2347 2347 shortfn = short
2348 2348
2349 2349 # There might not be anything in r, so have a sane default
2350 2350 idlen = 12
2351 2351 for i in r:
2352 2352 idlen = len(shortfn(r.node(i)))
2353 2353 break
2354 2354
2355 2355 if format == 0:
2356 2356 ui.write(" rev offset length " + basehdr + " linkrev"
2357 2357 " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
2358 2358 elif format == 1:
2359 2359 ui.write(" rev flag offset length"
2360 2360 " size " + basehdr + " link p1 p2"
2361 2361 " %s\n" % "nodeid".rjust(idlen))
2362 2362
2363 2363 for i in r:
2364 2364 node = r.node(i)
2365 2365 if generaldelta:
2366 2366 base = r.deltaparent(i)
2367 2367 else:
2368 2368 base = r.chainbase(i)
2369 2369 if format == 0:
2370 2370 try:
2371 2371 pp = r.parents(node)
2372 2372 except Exception:
2373 2373 pp = [nullid, nullid]
2374 2374 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
2375 2375 i, r.start(i), r.length(i), base, r.linkrev(i),
2376 2376 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2377 2377 elif format == 1:
2378 2378 pr = r.parentrevs(i)
2379 2379 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
2380 2380 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2381 2381 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
2382 2382
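# Illustrative sketch (editor's addition): several debug commands above open a
# revlog directly from a file name via scmutil.opener() when no repository is
# available.  The generator below shows that standalone access pattern and
# pulls a few per-revision fields the way debugindex does; it is a sketch for
# reading the code, not a replacement for debugindex.
def _sketch_revlog_entries(indexfile):
    """Yield (rev, start, length, linkrev, hexnode) for a standalone revlog."""
    r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), indexfile)
    for i in r:
        yield i, r.start(i), r.length(i), r.linkrev(i), hex(r.node(i))
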
2383 2383 @command('debugindexdot', [], _('FILE'), optionalrepo=True)
2384 2384 def debugindexdot(ui, repo, file_):
2385 2385 """dump an index DAG as a graphviz dot file"""
2386 2386 r = None
2387 2387 if repo:
2388 2388 filelog = repo.file(file_)
2389 2389 if len(filelog):
2390 2390 r = filelog
2391 2391 if not r:
2392 2392 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
2393 2393 ui.write(("digraph G {\n"))
2394 2394 for i in r:
2395 2395 node = r.node(i)
2396 2396 pp = r.parents(node)
2397 2397 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
2398 2398 if pp[1] != nullid:
2399 2399 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
2400 2400 ui.write("}\n")
2401 2401
2402 2402 @command('debuginstall', [], '', norepo=True)
2403 2403 def debuginstall(ui):
2404 2404 '''test Mercurial installation
2405 2405
2406 2406 Returns 0 on success.
2407 2407 '''
2408 2408
2409 2409 def writetemp(contents):
2410 2410 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
2411 2411 f = os.fdopen(fd, "wb")
2412 2412 f.write(contents)
2413 2413 f.close()
2414 2414 return name
2415 2415
2416 2416 problems = 0
2417 2417
2418 2418 # encoding
2419 2419 ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
2420 2420 try:
2421 2421 encoding.fromlocal("test")
2422 2422 except error.Abort as inst:
2423 2423 ui.write(" %s\n" % inst)
2424 2424 ui.write(_(" (check that your locale is properly set)\n"))
2425 2425 problems += 1
2426 2426
2427 2427 # Python
2428 2428 ui.status(_("checking Python executable (%s)\n") % sys.executable)
2429 2429 ui.status(_("checking Python version (%s)\n")
2430 2430 % ("%s.%s.%s" % sys.version_info[:3]))
2431 2431 ui.status(_("checking Python lib (%s)...\n")
2432 2432 % os.path.dirname(os.__file__))
2433 2433
2434 2434 # compiled modules
2435 2435 ui.status(_("checking installed modules (%s)...\n")
2436 2436 % os.path.dirname(__file__))
2437 2437 try:
2438 2438 import bdiff, mpatch, base85, osutil
2439 2439 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
2440 2440 except Exception as inst:
2441 2441 ui.write(" %s\n" % inst)
2442 2442 ui.write(_(" One or more extensions could not be found"))
2443 2443 ui.write(_(" (check that you compiled the extensions)\n"))
2444 2444 problems += 1
2445 2445
2446 2446 # templates
2447 2447 import templater
2448 2448 p = templater.templatepaths()
2449 2449 ui.status(_("checking templates (%s)...\n") % ' '.join(p))
2450 2450 if p:
2451 2451 m = templater.templatepath("map-cmdline.default")
2452 2452 if m:
2453 2453 # template found, check if it is working
2454 2454 try:
2455 2455 templater.templater(m)
2456 2456 except Exception as inst:
2457 2457 ui.write(" %s\n" % inst)
2458 2458 p = None
2459 2459 else:
2460 2460 ui.write(_(" template 'default' not found\n"))
2461 2461 p = None
2462 2462 else:
2463 2463 ui.write(_(" no template directories found\n"))
2464 2464 if not p:
2465 2465 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
2466 2466 problems += 1
2467 2467
2468 2468 # editor
2469 2469 ui.status(_("checking commit editor...\n"))
2470 2470 editor = ui.geteditor()
2471 2471 editor = util.expandpath(editor)
2472 2472 cmdpath = util.findexe(shlex.split(editor)[0])
2473 2473 if not cmdpath:
2474 2474 if editor == 'vi':
2475 2475 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
2476 2476 ui.write(_(" (specify a commit editor in your configuration"
2477 2477 " file)\n"))
2478 2478 else:
2479 2479 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
2480 2480 ui.write(_(" (specify a commit editor in your configuration"
2481 2481 " file)\n"))
2482 2482 problems += 1
2483 2483
2484 2484 # check username
2485 2485 ui.status(_("checking username...\n"))
2486 2486 try:
2487 2487 ui.username()
2488 2488 except error.Abort as e:
2489 2489 ui.write(" %s\n" % e)
2490 2490 ui.write(_(" (specify a username in your configuration file)\n"))
2491 2491 problems += 1
2492 2492
2493 2493 if not problems:
2494 2494 ui.status(_("no problems detected\n"))
2495 2495 else:
2496 2496 ui.write(_("%s problems detected,"
2497 2497 " please check your install!\n") % problems)
2498 2498
2499 2499 return problems
2500 2500
2501 2501 @command('debugknown', [], _('REPO ID...'), norepo=True)
2502 2502 def debugknown(ui, repopath, *ids, **opts):
2503 2503 """test whether node ids are known to a repo
2504 2504
2505 2505 Every ID must be a full-length hex node id string. Returns a list of 0s
2506 2506 and 1s indicating unknown/known.
2507 2507 """
2508 2508 repo = hg.peer(ui, opts, repopath)
2509 2509 if not repo.capable('known'):
2510 2510 raise error.Abort("known() not supported by target repository")
2511 2511 flags = repo.known([bin(s) for s in ids])
2512 2512 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2513 2513
2514 2514 @command('debuglabelcomplete', [], _('LABEL...'))
2515 2515 def debuglabelcomplete(ui, repo, *args):
2516 2516 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2517 2517 debugnamecomplete(ui, repo, *args)
2518 2518
2519 2519 @command('debugmergestate', [], '')
2520 2520 def debugmergestate(ui, repo, *args):
2521 2521 """print merge state
2522 2522
2523 2523 Use --verbose to print out information about whether v1 or v2 merge state
2524 2524 was chosen."""
2525 2525 def printrecords(version):
2526 2526 ui.write(('* version %s records\n') % version)
2527 2527 if version == 1:
2528 2528 records = v1records
2529 2529 else:
2530 2530 records = v2records
2531 2531
2532 2532 for rtype, record in records:
2533 2533 # pretty print some record types
2534 2534 if rtype == 'L':
2535 2535 ui.write(('local: %s\n') % record)
2536 2536 elif rtype == 'O':
2537 2537 ui.write(('other: %s\n') % record)
2538 2538 elif rtype == 'm':
2539 2539 driver, mdstate = record.split('\0', 1)
2540 2540 ui.write(('merge driver: %s (state "%s")\n')
2541 2541 % (driver, mdstate))
2542 2542 elif rtype in 'FD':
2543 2543 r = record.split('\0')
2544 2544 f, state, hash, lfile, afile, anode, ofile = r[0:7]
2545 2545 if version == 1:
2546 2546 onode = 'not stored in v1 format'
2547 2547 flags = r[7]
2548 2548 else:
2549 2549 onode, flags = r[7:9]
2550 2550 ui.write(('file: %s (state "%s", hash %s)\n')
2551 2551 % (f, state, hash))
2552 2552 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
2553 2553 ui.write((' ancestor path: %s (node %s)\n') % (afile, anode))
2554 2554 ui.write((' other path: %s (node %s)\n') % (ofile, onode))
2555 2555 else:
2556 2556 ui.write(('unrecognized entry: %s\t%s\n')
2557 2557 % (rtype, record.replace('\0', '\t')))
2558 2558
2559 2559 ms = mergemod.mergestate(repo)
2560 2560
2561 2561 # sort so that reasonable information is on top
2562 2562 v1records = ms._readrecordsv1()
2563 2563 v2records = ms._readrecordsv2()
2564 2564 order = 'LOm'
2565 2565 def key(r):
2566 2566 idx = order.find(r[0])
2567 2567 if idx == -1:
2568 2568 return (1, r[1])
2569 2569 else:
2570 2570 return (0, idx)
2571 2571 v1records.sort(key=key)
2572 2572 v2records.sort(key=key)
2573 2573
2574 2574 if not v1records and not v2records:
2575 2575 ui.write(('no merge state found\n'))
2576 2576 elif not v2records:
2577 2577 ui.note(('no version 2 merge state\n'))
2578 2578 printrecords(1)
2579 2579 elif ms._v1v2match(v1records, v2records):
2580 2580 ui.note(('v1 and v2 states match: using v2\n'))
2581 2581 printrecords(2)
2582 2582 else:
2583 2583 ui.note(('v1 and v2 states mismatch: using v1\n'))
2584 2584 printrecords(1)
2585 2585 if ui.verbose:
2586 2586 printrecords(2)
2587 2587
2588 2588 @command('debugnamecomplete', [], _('NAME...'))
2589 2589 def debugnamecomplete(ui, repo, *args):
2590 2590 '''complete "names" - tags, open branch names, bookmark names'''
2591 2591
2592 2592 names = set()
2593 2593 # since we previously only listed open branches, we will handle that
2594 2594 # specially (after this for loop)
2595 2595 for name, ns in repo.names.iteritems():
2596 2596 if name != 'branches':
2597 2597 names.update(ns.listnames(repo))
2598 2598 names.update(tag for (tag, heads, tip, closed)
2599 2599 in repo.branchmap().iterbranches() if not closed)
2600 2600 completions = set()
2601 2601 if not args:
2602 2602 args = ['']
2603 2603 for a in args:
2604 2604 completions.update(n for n in names if n.startswith(a))
2605 2605 ui.write('\n'.join(sorted(completions)))
2606 2606 ui.write('\n')
2607 2607
2608 2608 @command('debuglocks',
2609 2609 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2610 2610 ('W', 'force-wlock', None,
2611 2611 _('free the working state lock (DANGEROUS)'))],
2612 2612 _('[OPTION]...'))
2613 2613 def debuglocks(ui, repo, **opts):
2614 2614 """show or modify state of locks
2615 2615
2616 2616 By default, this command will show which locks are held. This
2617 2617 includes the user and process holding the lock, the amount of time
2618 2618 the lock has been held, and the machine name where the process is
2619 2619 running if it's not local.
2620 2620
2621 2621 Locks protect the integrity of Mercurial's data, so should be
2622 2622 treated with care. System crashes or other interruptions may cause
2623 2623 locks to not be properly released, though Mercurial will usually
2624 2624 detect and remove such stale locks automatically.
2625 2625
2626 2626 However, detecting stale locks may not always be possible (for
2627 2627 instance, on a shared filesystem). Removing locks may also be
2628 2628 blocked by filesystem permissions.
2629 2629
2630 2630 Returns 0 if no locks are held.
2631 2631
2632 2632 """
2633 2633
2634 2634 if opts.get('force_lock'):
2635 2635 repo.svfs.unlink('lock')
2636 2636 if opts.get('force_wlock'):
2637 2637 repo.vfs.unlink('wlock')
2638 2638     if opts.get('force_lock') or opts.get('force_wlock'):
2639 2639 return 0
2640 2640
2641 2641 now = time.time()
2642 2642 held = 0
2643 2643
2644 2644 def report(vfs, name, method):
2645 2645 # this causes stale locks to get reaped for more accurate reporting
2646 2646 try:
2647 2647 l = method(False)
2648 2648 except error.LockHeld:
2649 2649 l = None
2650 2650
2651 2651 if l:
2652 2652 l.release()
2653 2653 else:
2654 2654 try:
2655 2655 stat = vfs.lstat(name)
2656 2656 age = now - stat.st_mtime
2657 2657 user = util.username(stat.st_uid)
2658 2658 locker = vfs.readlock(name)
2659 2659 if ":" in locker:
2660 2660 host, pid = locker.split(':')
2661 2661 if host == socket.gethostname():
2662 2662 locker = 'user %s, process %s' % (user, pid)
2663 2663 else:
2664 2664 locker = 'user %s, process %s, host %s' \
2665 2665 % (user, pid, host)
2666 2666 ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
2667 2667 return 1
2668 2668 except OSError as e:
2669 2669 if e.errno != errno.ENOENT:
2670 2670 raise
2671 2671
2672 2672 ui.write("%-6s free\n" % (name + ":"))
2673 2673 return 0
2674 2674
2675 2675 held += report(repo.svfs, "lock", repo.lock)
2676 2676 held += report(repo.vfs, "wlock", repo.wlock)
2677 2677
2678 2678 return held
2679 2679
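# Illustrative sketch (editor's addition): the report() closure above describes
# a lock holder by splitting the stored locker string on ':' into host and pid
# and comparing the host against the local hostname.  The helper below repeats
# that interpretation on a plain string so the stored format is easy to see.
def _sketch_describe_locker(locker, user):
    """Render a lock file's 'host:pid' payload the way debuglocks does."""
    if ":" not in locker:
        return locker
    host, pid = locker.split(':', 1)
    if host == socket.gethostname():
        return 'user %s, process %s' % (user, pid)
    return 'user %s, process %s, host %s' % (user, pid, host)
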
2680 2680 @command('debugobsolete',
2681 2681 [('', 'flags', 0, _('markers flag')),
2682 2682 ('', 'record-parents', False,
2683 2683 _('record parent information for the precursor')),
2684 2684 ('r', 'rev', [], _('display markers relevant to REV')),
2685 2685 ] + commitopts2,
2686 2686 _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
2687 2687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2688 2688 """create arbitrary obsolete marker
2689 2689
2690 2690 With no arguments, displays the list of obsolescence markers."""
2691 2691
2692 2692 def parsenodeid(s):
2693 2693 try:
2694 2694 # We do not use revsingle/revrange functions here to accept
2695 2695 # arbitrary node identifiers, possibly not present in the
2696 2696 # local repository.
2697 2697 n = bin(s)
2698 2698 if len(n) != len(nullid):
2699 2699 raise TypeError()
2700 2700 return n
2701 2701 except TypeError:
2702 2702 raise error.Abort('changeset references must be full hexadecimal '
2703 2703 'node identifiers')
2704 2704
2705 2705 if precursor is not None:
2706 2706 if opts['rev']:
2707 2707 raise error.Abort('cannot select revision when creating marker')
2708 2708 metadata = {}
2709 2709 metadata['user'] = opts['user'] or ui.username()
2710 2710 succs = tuple(parsenodeid(succ) for succ in successors)
2711 2711 l = repo.lock()
2712 2712 try:
2713 2713 tr = repo.transaction('debugobsolete')
2714 2714 try:
2715 2715 date = opts.get('date')
2716 2716 if date:
2717 2717 date = util.parsedate(date)
2718 2718 else:
2719 2719 date = None
2720 2720 prec = parsenodeid(precursor)
2721 2721 parents = None
2722 2722 if opts['record_parents']:
2723 2723 if prec not in repo.unfiltered():
2724 2724                         raise error.Abort('cannot use --record-parents on '
2725 2725 'unknown changesets')
2726 2726 parents = repo.unfiltered()[prec].parents()
2727 2727 parents = tuple(p.node() for p in parents)
2728 2728 repo.obsstore.create(tr, prec, succs, opts['flags'],
2729 2729 parents=parents, date=date,
2730 2730 metadata=metadata)
2731 2731 tr.close()
2732 2732 except ValueError as exc:
2733 2733 raise error.Abort(_('bad obsmarker input: %s') % exc)
2734 2734 finally:
2735 2735 tr.release()
2736 2736 finally:
2737 2737 l.release()
2738 2738 else:
2739 2739 if opts['rev']:
2740 2740 revs = scmutil.revrange(repo, opts['rev'])
2741 2741 nodes = [repo[r].node() for r in revs]
2742 2742 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2743 2743 markers.sort(key=lambda x: x._data)
2744 2744 else:
2745 2745 markers = obsolete.getmarkers(repo)
2746 2746
2747 2747 for m in markers:
2748 2748 cmdutil.showmarker(ui, m)
2749 2749
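# Illustrative sketch (editor's addition): parsenodeid() inside debugobsolete
# accepts only full-length hexadecimal node ids, by converting with bin() and
# comparing the result against the length of nullid.  The helper below is the
# same check extracted as a boolean predicate, for illustration only.
def _sketch_isfullnodeid(s):
    """Return True if s is a full-length hexadecimal node identifier."""
    try:
        return len(bin(s)) == len(nullid)
    except TypeError:
        return False
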
2750 2750 @command('debugpathcomplete',
2751 2751 [('f', 'full', None, _('complete an entire path')),
2752 2752 ('n', 'normal', None, _('show only normal files')),
2753 2753 ('a', 'added', None, _('show only added files')),
2754 2754 ('r', 'removed', None, _('show only removed files'))],
2755 2755 _('FILESPEC...'))
2756 2756 def debugpathcomplete(ui, repo, *specs, **opts):
2757 2757 '''complete part or all of a tracked path
2758 2758
2759 2759 This command supports shells that offer path name completion. It
2760 2760 currently completes only files already known to the dirstate.
2761 2761
2762 2762 Completion extends only to the next path segment unless
2763 2763 --full is specified, in which case entire paths are used.'''
2764 2764
2765 2765 def complete(path, acceptable):
2766 2766 dirstate = repo.dirstate
2767 2767 spec = os.path.normpath(os.path.join(os.getcwd(), path))
2768 2768 rootdir = repo.root + os.sep
2769 2769 if spec != repo.root and not spec.startswith(rootdir):
2770 2770 return [], []
2771 2771 if os.path.isdir(spec):
2772 2772 spec += '/'
2773 2773 spec = spec[len(rootdir):]
2774 2774 fixpaths = os.sep != '/'
2775 2775 if fixpaths:
2776 2776 spec = spec.replace(os.sep, '/')
2777 2777 speclen = len(spec)
2778 2778 fullpaths = opts['full']
2779 2779 files, dirs = set(), set()
2780 2780 adddir, addfile = dirs.add, files.add
2781 2781 for f, st in dirstate.iteritems():
2782 2782 if f.startswith(spec) and st[0] in acceptable:
2783 2783 if fixpaths:
2784 2784 f = f.replace('/', os.sep)
2785 2785 if fullpaths:
2786 2786 addfile(f)
2787 2787 continue
2788 2788 s = f.find(os.sep, speclen)
2789 2789 if s >= 0:
2790 2790 adddir(f[:s])
2791 2791 else:
2792 2792 addfile(f)
2793 2793 return files, dirs
2794 2794
2795 2795 acceptable = ''
2796 2796 if opts['normal']:
2797 2797 acceptable += 'nm'
2798 2798 if opts['added']:
2799 2799 acceptable += 'a'
2800 2800 if opts['removed']:
2801 2801 acceptable += 'r'
2802 2802 cwd = repo.getcwd()
2803 2803 if not specs:
2804 2804 specs = ['.']
2805 2805
2806 2806 files, dirs = set(), set()
2807 2807 for spec in specs:
2808 2808 f, d = complete(spec, acceptable or 'nmar')
2809 2809 files.update(f)
2810 2810 dirs.update(d)
2811 2811 files.update(dirs)
2812 2812 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2813 2813 ui.write('\n')
2814 2814
2815 2815 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2816 2816 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2817 2817 '''access the pushkey key/value protocol
2818 2818
2819 2819 With two args, list the keys in the given namespace.
2820 2820
2821 2821 With five args, set a key to new if it currently is set to old.
2822 2822 Reports success or failure.
2823 2823 '''
2824 2824
2825 2825 target = hg.peer(ui, {}, repopath)
2826 2826 if keyinfo:
2827 2827 key, old, new = keyinfo
2828 2828 r = target.pushkey(namespace, key, old, new)
2829 2829 ui.status(str(r) + '\n')
2830 2830 return not r
2831 2831 else:
2832 2832 for k, v in sorted(target.listkeys(namespace).iteritems()):
2833 2833 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2834 2834 v.encode('string-escape')))
2835 2835
2836 2836 @command('debugpvec', [], _('A B'))
2837 2837 def debugpvec(ui, repo, a, b=None):
2838 2838 ca = scmutil.revsingle(repo, a)
2839 2839 cb = scmutil.revsingle(repo, b)
2840 2840 pa = pvec.ctxpvec(ca)
2841 2841 pb = pvec.ctxpvec(cb)
2842 2842 if pa == pb:
2843 2843 rel = "="
2844 2844 elif pa > pb:
2845 2845 rel = ">"
2846 2846 elif pa < pb:
2847 2847 rel = "<"
2848 2848 elif pa | pb:
2849 2849 rel = "|"
2850 2850 ui.write(_("a: %s\n") % pa)
2851 2851 ui.write(_("b: %s\n") % pb)
2852 2852 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2853 2853 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2854 2854 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2855 2855 pa.distance(pb), rel))
2856 2856
2857 2857 @command('debugrebuilddirstate|debugrebuildstate',
2858 2858 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2859 2859 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2860 2860 'the working copy parent')),
2861 2861 ],
2862 2862 _('[-r REV]'))
2863 2863 def debugrebuilddirstate(ui, repo, rev, **opts):
2864 2864     """rebuild the dirstate as it would look for the given revision
2865 2865
2866 2866     If no revision is specified, the working directory's first parent is used.
2867 2867
2868 2868 The dirstate will be set to the files of the given revision.
2869 2869 The actual working directory content or existing dirstate
2870 2870 information such as adds or removes is not considered.
2871 2871
2872 2872 ``minimal`` will only rebuild the dirstate status for files that claim to be
2873 2873 tracked but are not in the parent manifest, or that exist in the parent
2874 2874 manifest but are not in the dirstate. It will not change adds, removes, or
2875 2875 modified files that are in the working copy parent.
2876 2876
2877 2877 One use of this command is to make the next :hg:`status` invocation
2878 2878 check the actual file content.
2879 2879 """
2880 2880 ctx = scmutil.revsingle(repo, rev)
2881 2881 wlock = repo.wlock()
2882 2882 try:
2883 2883 dirstate = repo.dirstate
2884 2884
2885 2885 # See command doc for what minimal does.
2886 2886 if opts.get('minimal'):
2887 2887 dirstatefiles = set(dirstate)
2888 2888 ctxfiles = set(ctx.manifest().keys())
2889 2889 for file in (dirstatefiles | ctxfiles):
2890 2890 indirstate = file in dirstatefiles
2891 2891 inctx = file in ctxfiles
2892 2892
2893 2893 if indirstate and not inctx and dirstate[file] != 'a':
2894 2894 dirstate.drop(file)
2895 2895 elif inctx and not indirstate:
2896 2896 dirstate.normallookup(file)
2897 2897 else:
2898 2898 dirstate.rebuild(ctx.node(), ctx.manifest())
2899 2899 finally:
2900 2900 wlock.release()
2901 2901
2902 2902 @command('debugrebuildfncache', [], '')
2903 2903 def debugrebuildfncache(ui, repo):
2904 2904 """rebuild the fncache file"""
2905 2905 repair.rebuildfncache(ui, repo)
2906 2906
2907 2907 @command('debugrename',
2908 2908 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2909 2909 _('[-r REV] FILE'))
2910 2910 def debugrename(ui, repo, file1, *pats, **opts):
2911 2911 """dump rename information"""
2912 2912
2913 2913 ctx = scmutil.revsingle(repo, opts.get('rev'))
2914 2914 m = scmutil.match(ctx, (file1,) + pats, opts)
2915 2915 for abs in ctx.walk(m):
2916 2916 fctx = ctx[abs]
2917 2917 o = fctx.filelog().renamed(fctx.filenode())
2918 2918 rel = m.rel(abs)
2919 2919 if o:
2920 2920 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2921 2921 else:
2922 2922 ui.write(_("%s not renamed\n") % rel)
2923 2923
2924 2924 @command('debugrevlog',
2925 2925 [('c', 'changelog', False, _('open changelog')),
2926 2926 ('m', 'manifest', False, _('open manifest')),
2927 2927 ('', 'dir', False, _('open directory manifest')),
2928 2928 ('d', 'dump', False, _('dump index data'))],
2929 2929 _('-c|-m|FILE'),
2930 2930 optionalrepo=True)
2931 2931 def debugrevlog(ui, repo, file_=None, **opts):
2932 2932 """show data and statistics about a revlog"""
2933 2933 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2934 2934
2935 2935 if opts.get("dump"):
2936 2936 numrevs = len(r)
2937 2937 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
2938 2938 " rawsize totalsize compression heads chainlen\n")
2939 2939 ts = 0
2940 2940 heads = set()
2941 2941
2942 2942 for rev in xrange(numrevs):
2943 2943 dbase = r.deltaparent(rev)
2944 2944 if dbase == -1:
2945 2945 dbase = rev
2946 2946 cbase = r.chainbase(rev)
2947 2947 clen = r.chainlen(rev)
2948 2948 p1, p2 = r.parentrevs(rev)
2949 2949 rs = r.rawsize(rev)
2950 2950 ts = ts + rs
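            # a revision stops being a head once it appears as a parent, so
            # 'heads' always holds the current heads of revisions 0..rev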
2951 2951 heads -= set(r.parentrevs(rev))
2952 2952 heads.add(rev)
2953 2953 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2954 2954 "%11d %5d %8d\n" %
2955 2955 (rev, p1, p2, r.start(rev), r.end(rev),
2956 2956 r.start(dbase), r.start(cbase),
2957 2957 r.start(p1), r.start(p2),
2958 2958 rs, ts, ts / r.end(rev), len(heads), clen))
2959 2959 return 0
2960 2960
2961 2961 v = r.version
2962 2962 format = v & 0xFFFF
2963 2963 flags = []
2964 2964 gdelta = False
2965 2965 if v & revlog.REVLOGNGINLINEDATA:
2966 2966 flags.append('inline')
2967 2967 if v & revlog.REVLOGGENERALDELTA:
2968 2968 gdelta = True
2969 2969 flags.append('generaldelta')
2970 2970 if not flags:
2971 2971 flags = ['(none)']
2972 2972
2973 2973 nummerges = 0
2974 2974 numfull = 0
2975 2975 numprev = 0
2976 2976 nump1 = 0
2977 2977 nump2 = 0
2978 2978 numother = 0
2979 2979 nump1prev = 0
2980 2980 nump2prev = 0
2981 2981 chainlengths = []
2982 2982
2983 2983 datasize = [None, 0, 0L]
2984 2984 fullsize = [None, 0, 0L]
2985 2985 deltasize = [None, 0, 0L]
2986 2986
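    # addsize keeps a running [min, max, total] triple for a series of sizes;
    # e.g. starting from l = [None, 0, 0], addsize(5, l) then addsize(3, l)
    # leaves l == [3, 5, 8]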
2987 2987 def addsize(size, l):
2988 2988 if l[0] is None or size < l[0]:
2989 2989 l[0] = size
2990 2990 if size > l[1]:
2991 2991 l[1] = size
2992 2992 l[2] += size
2993 2993
2994 2994 numrevs = len(r)
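    # classify every revision: full snapshot vs. delta, and for deltas note
    # whether the delta base is the previous revision, p1, p2 or another rev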
2995 2995 for rev in xrange(numrevs):
2996 2996 p1, p2 = r.parentrevs(rev)
2997 2997 delta = r.deltaparent(rev)
2998 2998 if format > 0:
2999 2999 addsize(r.rawsize(rev), datasize)
3000 3000 if p2 != nullrev:
3001 3001 nummerges += 1
3002 3002 size = r.length(rev)
3003 3003 if delta == nullrev:
3004 3004 chainlengths.append(0)
3005 3005 numfull += 1
3006 3006 addsize(size, fullsize)
3007 3007 else:
3008 3008 chainlengths.append(chainlengths[delta] + 1)
3009 3009 addsize(size, deltasize)
3010 3010 if delta == rev - 1:
3011 3011 numprev += 1
3012 3012 if delta == p1:
3013 3013 nump1prev += 1
3014 3014 elif delta == p2:
3015 3015 nump2prev += 1
3016 3016 elif delta == p1:
3017 3017 nump1 += 1
3018 3018 elif delta == p2:
3019 3019 nump2 += 1
3020 3020 elif delta != nullrev:
3021 3021 numother += 1
3022 3022
3023 3023 # Adjust size min value for empty cases
3024 3024 for size in (datasize, fullsize, deltasize):
3025 3025 if size[0] is None:
3026 3026 size[0] = 0
3027 3027
3028 3028 numdeltas = numrevs - numfull
3029 3029 numoprev = numprev - nump1prev - nump2prev
3030 3030 totalrawsize = datasize[2]
3031 3031 datasize[2] /= numrevs
3032 3032 fulltotal = fullsize[2]
3033 3033 fullsize[2] /= numfull
3034 3034 deltatotal = deltasize[2]
3035 3035 if numrevs - numfull > 0:
3036 3036 deltasize[2] /= numrevs - numfull
3037 3037 totalsize = fulltotal + deltatotal
3038 3038 avgchainlen = sum(chainlengths) / numrevs
3039 3039 maxchainlen = max(chainlengths)
3040 3040 compratio = totalrawsize / totalsize
3041 3041
3042 3042 basedfmtstr = '%%%dd\n'
3043 3043 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
3044 3044
3045 3045 def dfmtstr(max):
3046 3046 return basedfmtstr % len(str(max))
3047 3047 def pcfmtstr(max, padding=0):
3048 3048 return basepcfmtstr % (len(str(max)), ' ' * padding)
3049 3049
3050 3050 def pcfmt(value, total):
3051 3051 return (value, 100 * float(value) / total)
3052 3052
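    # the helpers above build fixed-width report lines; for example
    # pcfmt(25, 200) == (25, 12.5), which pcfmtstr renders roughly as
    # '   25 (12.50%)' (the field width tracks the largest value printed)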
3053 3053 ui.write(('format : %d\n') % format)
3054 3054 ui.write(('flags : %s\n') % ', '.join(flags))
3055 3055
3056 3056 ui.write('\n')
3057 3057 fmt = pcfmtstr(totalsize)
3058 3058 fmt2 = dfmtstr(totalsize)
3059 3059 ui.write(('revisions : ') + fmt2 % numrevs)
3060 3060 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
3061 3061 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
3062 3062 ui.write(('revisions : ') + fmt2 % numrevs)
3063 3063 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
3064 3064 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
3065 3065 ui.write(('revision size : ') + fmt2 % totalsize)
3066 3066 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
3067 3067 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
3068 3068
3069 3069 ui.write('\n')
3070 3070 fmt = dfmtstr(max(avgchainlen, compratio))
3071 3071 ui.write(('avg chain length : ') + fmt % avgchainlen)
3072 3072 ui.write(('max chain length : ') + fmt % maxchainlen)
3073 3073 ui.write(('compression ratio : ') + fmt % compratio)
3074 3074
3075 3075 if format > 0:
3076 3076 ui.write('\n')
3077 3077 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
3078 3078 % tuple(datasize))
3079 3079 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
3080 3080 % tuple(fullsize))
3081 3081 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
3082 3082 % tuple(deltasize))
3083 3083
3084 3084 if numdeltas > 0:
3085 3085 ui.write('\n')
3086 3086 fmt = pcfmtstr(numdeltas)
3087 3087 fmt2 = pcfmtstr(numdeltas, 4)
3088 3088 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
3089 3089 if numprev > 0:
3090 3090 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
3091 3091 numprev))
3092 3092 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
3093 3093 numprev))
3094 3094 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
3095 3095 numprev))
3096 3096 if gdelta:
3097 3097 ui.write(('deltas against p1 : ')
3098 3098 + fmt % pcfmt(nump1, numdeltas))
3099 3099 ui.write(('deltas against p2 : ')
3100 3100 + fmt % pcfmt(nump2, numdeltas))
3101 3101 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
3102 3102 numdeltas))
3103 3103
3104 3104 @command('debugrevspec',
3105 3105 [('', 'optimize', None, _('print parsed tree after optimizing'))],
3106 3106 ('REVSPEC'))
3107 3107 def debugrevspec(ui, repo, expr, **opts):
3108 3108 """parse and apply a revision specification
3109 3109
3110 3110 Use --verbose to print the parsed tree before and after alias
3111 3111 expansion.
3112 3112 """
3113 3113 if ui.verbose:
3114 3114 tree = revset.parse(expr, lookup=repo.__contains__)
3115 3115 ui.note(revset.prettyformat(tree), "\n")
3116 3116 newtree = revset.findaliases(ui, tree)
3117 3117 if newtree != tree:
3118 3118 ui.note(revset.prettyformat(newtree), "\n")
3119 3119 tree = newtree
3120 3120 newtree = revset.foldconcat(tree)
3121 3121 if newtree != tree:
3122 3122 ui.note(revset.prettyformat(newtree), "\n")
3123 3123 if opts["optimize"]:
3124 3124 weight, optimizedtree = revset.optimize(newtree, True)
3125 3125 ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n")
3126 3126 func = revset.match(ui, expr, repo)
3127 3127 revs = func(repo)
3128 3128 if ui.verbose:
3129 3129 ui.note("* set:\n", revset.prettyformatset(revs), "\n")
3130 3130 for c in revs:
3131 3131 ui.write("%s\n" % c)
3132 3132
3133 3133 @command('debugsetparents', [], _('REV1 [REV2]'))
3134 3134 def debugsetparents(ui, repo, rev1, rev2=None):
3135 3135 """manually set the parents of the current working directory
3136 3136
3137 3137 This is useful for writing repository conversion tools, but should
3138 3138 be used with care. For example, neither the working directory nor the
3139 3139 dirstate is updated, so file status may be incorrect after running this
3140 3140 command.
3141 3141
3142 3142 Returns 0 on success.
3143 3143 """
3144 3144
3145 3145 r1 = scmutil.revsingle(repo, rev1).node()
3146 3146 r2 = scmutil.revsingle(repo, rev2, 'null').node()
3147 3147
3148 3148 wlock = repo.wlock()
3149 3149 try:
3150 3150 repo.dirstate.beginparentchange()
3151 3151 repo.setparents(r1, r2)
3152 3152 repo.dirstate.endparentchange()
3153 3153 finally:
3154 3154 wlock.release()
3155 3155
3156 3156 @command('debugdirstate|debugstate',
3157 3157 [('', 'nodates', None, _('do not display the saved mtime')),
3158 3158 ('', 'datesort', None, _('sort by saved mtime'))],
3159 3159 _('[OPTION]...'))
3160 3160 def debugstate(ui, repo, nodates=None, datesort=None):
3161 3161 """show the contents of the current dirstate"""
3162 3162 timestr = ""
3163 3163 if datesort:
3164 3164 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
3165 3165 else:
3166 3166 keyfunc = None # sort by filename
3167 3167 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
3168 3168 if ent[3] == -1:
3169 3169 timestr = 'unset '
3170 3170 elif nodates:
3171 3171 timestr = 'set '
3172 3172 else:
3173 3173 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
3174 3174 time.localtime(ent[3]))
3175 3175 if ent[1] & 0o20000:
3176 3176 mode = 'lnk'
3177 3177 else:
3178 3178 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
3179 3179 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
3180 3180 for f in repo.dirstate.copies():
3181 3181 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
3182 3182
3183 3183 @command('debugsub',
3184 3184 [('r', 'rev', '',
3185 3185 _('revision to check'), _('REV'))],
3186 3186 _('[-r REV] [REV]'))
3187 3187 def debugsub(ui, repo, rev=None):
3188 3188 ctx = scmutil.revsingle(repo, rev, None)
3189 3189 for k, v in sorted(ctx.substate.items()):
3190 3190 ui.write(('path %s\n') % k)
3191 3191 ui.write((' source %s\n') % v[0])
3192 3192 ui.write((' revision %s\n') % v[1])
3193 3193
3194 3194 @command('debugsuccessorssets',
3195 3195 [],
3196 3196 _('[REV]'))
3197 3197 def debugsuccessorssets(ui, repo, *revs):
3198 3198 """show set of successors for revision
3199 3199
3200 3200 A successors set of changeset A is a consistent group of revisions that
3201 3201 succeed A. It contains non-obsolete changesets only.
3202 3202
3203 3203 In most cases a changeset A has a single successors set containing a single
3204 3204 successor (changeset A replaced by A').
3205 3205
3206 3206 A changeset that is made obsolete with no successors is called "pruned".
3207 3207 Such changesets have no successors sets at all.
3208 3208
3209 3209 A changeset that has been "split" will have a successors set containing
3210 3210 more than one successor.
3211 3211
3212 3212 A changeset that has been rewritten in multiple different ways is called
3213 3213 "divergent". Such changesets have multiple successors sets (each of which
3214 3214 may also be split, i.e. have multiple successors).
3215 3215
3216 3216 Results are displayed as follows::
3217 3217
3218 3218 <rev1>
3219 3219 <successors-1A>
3220 3220 <rev2>
3221 3221 <successors-2A>
3222 3222 <successors-2B1> <successors-2B2> <successors-2B3>
3223 3223
3224 3224 Here rev2 has two possible (i.e. divergent) successors sets. The first
3225 3225 holds one element, whereas the second holds three (i.e. the changeset has
3226 3226 been split).
3227 3227 """
3228 3228 # passed to successorssets caching computation from one call to another
3229 3229 cache = {}
3230 3230 ctx2str = str
3231 3231 node2str = short
3232 3232 if ui.debug():
3233 3233 def ctx2str(ctx):
3234 3234 return ctx.hex()
3235 3235 node2str = hex
3236 3236 for rev in scmutil.revrange(repo, revs):
3237 3237 ctx = repo[rev]
3238 3238 ui.write('%s\n'% ctx2str(ctx))
3239 3239 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
3240 3240 if succsset:
3241 3241 ui.write(' ')
3242 3242 ui.write(node2str(succsset[0]))
3243 3243 for node in succsset[1:]:
3244 3244 ui.write(' ')
3245 3245 ui.write(node2str(node))
3246 3246 ui.write('\n')
3247 3247
3248 3248 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
3249 3249 def debugwalk(ui, repo, *pats, **opts):
3250 3250 """show how files match on given patterns"""
3251 3251 m = scmutil.match(repo[None], pats, opts)
3252 3252 items = list(repo.walk(m))
3253 3253 if not items:
3254 3254 return
3255 3255 f = lambda fn: fn
3256 3256 if ui.configbool('ui', 'slash') and os.sep != '/':
3257 3257 f = lambda fn: util.normpath(fn)
3258 3258 fmt = 'f %%-%ds %%-%ds %%s' % (
3259 3259 max([len(abs) for abs in items]),
3260 3260 max([len(m.rel(abs)) for abs in items]))
3261 3261 for abs in items:
3262 3262 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
3263 3263 ui.write("%s\n" % line.rstrip())
3264 3264
3265 3265 @command('debugwireargs',
3266 3266 [('', 'three', '', 'three'),
3267 3267 ('', 'four', '', 'four'),
3268 3268 ('', 'five', '', 'five'),
3269 3269 ] + remoteopts,
3270 3270 _('REPO [OPTIONS]... [ONE [TWO]]'),
3271 3271 norepo=True)
3272 3272 def debugwireargs(ui, repopath, *vals, **opts):
3273 3273 repo = hg.peer(ui, opts, repopath)
3274 3274 for opt in remoteopts:
3275 3275 del opts[opt[1]]
3276 3276 args = {}
3277 3277 for k, v in opts.iteritems():
3278 3278 if v:
3279 3279 args[k] = v
3280 3280 # run twice to check that we don't mess up the stream for the next command
3281 3281 res1 = repo.debugwireargs(*vals, **args)
3282 3282 res2 = repo.debugwireargs(*vals, **args)
3283 3283 ui.write("%s\n" % res1)
3284 3284 if res1 != res2:
3285 3285 ui.warn("%s\n" % res2)
3286 3286
3287 3287 @command('^diff',
3288 3288 [('r', 'rev', [], _('revision'), _('REV')),
3289 3289 ('c', 'change', '', _('change made by revision'), _('REV'))
3290 3290 ] + diffopts + diffopts2 + walkopts + subrepoopts,
3291 3291 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
3292 3292 inferrepo=True)
3293 3293 def diff(ui, repo, *pats, **opts):
3294 3294 """diff repository (or selected files)
3295 3295
3296 3296 Show differences between revisions for the specified files.
3297 3297
3298 3298 Differences between files are shown using the unified diff format.
3299 3299
3300 3300 .. note::
3301 3301
3302 3302 diff may generate unexpected results for merges, as it will
3303 3303 default to comparing against the working directory's first
3304 3304 parent changeset if no revisions are specified.
3305 3305
3306 3306 When two revision arguments are given, changes are shown
3307 3307 between those revisions. If only one revision is specified then
3308 3308 that revision is compared to the working directory, and, when no
3309 3309 revisions are specified, the working directory files are compared
3310 3310 to its parent.
3311 3311
3312 3312 Alternatively you can specify -c/--change with a revision to see
3313 3313 the changes in that changeset relative to its first parent.
3314 3314
3315 3315 Without the -a/--text option, diff will avoid generating diffs of
3316 3316 files it detects as binary. With -a, diff will generate a diff
3317 3317 anyway, probably with undesirable results.
3318 3318
3319 3319 Use the -g/--git option to generate diffs in the git extended diff
3320 3320 format. For more information, read :hg:`help diffs`.
3321 3321
3322 3322 .. container:: verbose
3323 3323
3324 3324 Examples:
3325 3325
3326 3326 - compare a file in the current working directory to its parent::
3327 3327
3328 3328 hg diff foo.c
3329 3329
3330 3330 - compare two historical versions of a directory, with rename info::
3331 3331
3332 3332 hg diff --git -r 1.0:1.2 lib/
3333 3333
3334 3334 - get change stats relative to the last change on some date::
3335 3335
3336 3336 hg diff --stat -r "date('may 2')"
3337 3337
3338 3338 - diff all newly-added files that contain a keyword::
3339 3339
3340 3340 hg diff "set:added() and grep(GNU)"
3341 3341
3342 3342 - compare a revision and its parents::
3343 3343
3344 3344 hg diff -c 9353 # compare against first parent
3345 3345 hg diff -r 9353^:9353 # same using revset syntax
3346 3346 hg diff -r 9353^2:9353 # compare against the second parent
3347 3347
3348 3348 Returns 0 on success.
3349 3349 """
3350 3350
3351 3351 revs = opts.get('rev')
3352 3352 change = opts.get('change')
3353 3353 stat = opts.get('stat')
3354 3354 reverse = opts.get('reverse')
3355 3355
3356 3356 if revs and change:
3357 3357 msg = _('cannot specify --rev and --change at the same time')
3358 3358 raise error.Abort(msg)
3359 3359 elif change:
3360 3360 node2 = scmutil.revsingle(repo, change, None).node()
3361 3361 node1 = repo[node2].p1().node()
3362 3362 else:
3363 3363 node1, node2 = scmutil.revpair(repo, revs)
3364 3364
3365 3365 if reverse:
3366 3366 node1, node2 = node2, node1
3367 3367
3368 3368 diffopts = patch.diffallopts(ui, opts)
3369 3369 m = scmutil.match(repo[node2], pats, opts)
3370 3370 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
3371 3371 listsubrepos=opts.get('subrepos'),
3372 3372 root=opts.get('root'))
3373 3373
3374 3374 @command('^export',
3375 3375 [('o', 'output', '',
3376 3376 _('print output to file with formatted name'), _('FORMAT')),
3377 3377 ('', 'switch-parent', None, _('diff against the second parent')),
3378 3378 ('r', 'rev', [], _('revisions to export'), _('REV')),
3379 3379 ] + diffopts,
3380 3380 _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
3381 3381 def export(ui, repo, *changesets, **opts):
3382 3382 """dump the header and diffs for one or more changesets
3383 3383
3384 3384 Print the changeset header and diffs for one or more revisions.
3385 3385 If no revision is given, the parent of the working directory is used.
3386 3386
3387 3387 The information shown in the changeset header is: author, date,
3388 3388 branch name (if non-default), changeset hash, parent(s) and commit
3389 3389 comment.
3390 3390
3391 3391 .. note::
3392 3392
3393 3393 export may generate unexpected diff output for merge
3394 3394 changesets, as it will compare the merge changeset against its
3395 3395 first parent only.
3396 3396
3397 3397 Output may be to a file, in which case the name of the file is
3398 3398 given using a format string. The formatting rules are as follows:
3399 3399
3400 3400 :``%%``: literal "%" character
3401 3401 :``%H``: changeset hash (40 hexadecimal digits)
3402 3402 :``%N``: number of patches being generated
3403 3403 :``%R``: changeset revision number
3404 3404 :``%b``: basename of the exporting repository
3405 3405 :``%h``: short-form changeset hash (12 hexadecimal digits)
3406 3406 :``%m``: first line of the commit message (only alphanumeric characters)
3407 3407 :``%n``: zero-padded sequence number, starting at 1
3408 3408 :``%r``: zero-padded changeset revision number
3409 3409
3410 3410 Without the -a/--text option, export will avoid generating diffs
3411 3411 of files it detects as binary. With -a, export will generate a
3412 3412 diff anyway, probably with undesirable results.
3413 3413
3414 3414 Use the -g/--git option to generate diffs in the git extended diff
3415 3415 format. See :hg:`help diffs` for more information.
3416 3416
3417 3417 With the --switch-parent option, the diff will be against the
3418 3418 second parent. This can be useful for reviewing a merge.
3419 3419
3420 3420 .. container:: verbose
3421 3421
3422 3422 Examples:
3423 3423
3424 3424 - use export and import to transplant a bugfix to the current
3425 3425 branch::
3426 3426
3427 3427 hg export -r 9353 | hg import -
3428 3428
3429 3429 - export all the changesets between two revisions to a file with
3430 3430 rename information::
3431 3431
3432 3432 hg export --git -r 123:150 > changes.txt
3433 3433
3434 3434 - split outgoing changes into a series of patches with
3435 3435 descriptive names::
3436 3436
3437 3437 hg export -r "outgoing()" -o "%n-%m.patch"
3438 3438
3439 3439 Returns 0 on success.
3440 3440 """
3441 3441 changesets += tuple(opts.get('rev', []))
3442 3442 if not changesets:
3443 3443 changesets = ['.']
3444 3444 revs = scmutil.revrange(repo, changesets)
3445 3445 if not revs:
3446 3446 raise error.Abort(_("export requires at least one changeset"))
3447 3447 if len(revs) > 1:
3448 3448 ui.note(_('exporting patches:\n'))
3449 3449 else:
3450 3450 ui.note(_('exporting patch:\n'))
3451 3451 cmdutil.export(repo, revs, template=opts.get('output'),
3452 3452 switch_parent=opts.get('switch_parent'),
3453 3453 opts=patch.diffallopts(ui, opts))
3454 3454
3455 3455 @command('files',
3456 3456 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3457 3457 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3458 3458 ] + walkopts + formatteropts + subrepoopts,
3459 3459 _('[OPTION]... [PATTERN]...'))
3460 3460 def files(ui, repo, *pats, **opts):
3461 3461 """list tracked files
3462 3462
3463 3463 Print files under Mercurial control in the working directory or
3464 3464 specified revision whose names match the given patterns (excluding
3465 3465 removed files).
3466 3466
3467 3467 If no patterns are given to match, this command prints the names
3468 3468 of all files under Mercurial control in the working directory.
3469 3469
3470 3470 .. container:: verbose
3471 3471
3472 3472 Examples:
3473 3473
3474 3474 - list all files under the current directory::
3475 3475
3476 3476 hg files .
3477 3477
3478 3478 - show sizes and flags for the current revision::
3479 3479
3480 3480 hg files -vr .
3481 3481
3482 3482 - list all files named README::
3483 3483
3484 3484 hg files -I "**/README"
3485 3485
3486 3486 - list all binary files::
3487 3487
3488 3488 hg files "set:binary()"
3489 3489
3490 3490 - find files containing a regular expression::
3491 3491
3492 3492 hg files "set:grep('bob')"
3493 3493
3494 3494 - search tracked file contents with xargs and grep::
3495 3495
3496 3496 hg files -0 | xargs -0 grep foo
3497 3497
3498 3498 See :hg:`help patterns` and :hg:`help filesets` for more information
3499 3499 on specifying file patterns.
3500 3500
3501 3501 Returns 0 if a match is found, 1 otherwise.
3502 3502
3503 3503 """
3504 3504 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
3505 3505
3506 3506 end = '\n'
3507 3507 if opts.get('print0'):
3508 3508 end = '\0'
3509 3509 fm = ui.formatter('files', opts)
3510 3510 fmt = '%s' + end
3511 3511
3512 3512 m = scmutil.match(ctx, pats, opts)
3513 3513 ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
3514 3514
3515 3515 fm.end()
3516 3516
3517 3517 return ret
3518 3518
3519 3519 @command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
3520 3520 def forget(ui, repo, *pats, **opts):
3521 3521 """forget the specified files on the next commit
3522 3522
3523 3523 Mark the specified files so they will no longer be tracked
3524 3524 after the next commit.
3525 3525
3526 3526 This only removes files from the current branch, not from the
3527 3527 entire project history, and it does not delete them from the
3528 3528 working directory.
3529 3529
3530 3530 To delete the file from the working directory, see :hg:`remove`.
3531 3531
3532 3532 To undo a forget before the next commit, see :hg:`add`.
3533 3533
3534 3534 .. container:: verbose
3535 3535
3536 3536 Examples:
3537 3537
3538 3538 - forget newly-added binary files::
3539 3539
3540 3540 hg forget "set:added() and binary()"
3541 3541
3542 3542 - forget files that would be excluded by .hgignore::
3543 3543
3544 3544 hg forget "set:hgignore()"
3545 3545
3546 3546 Returns 0 on success.
3547 3547 """
3548 3548
3549 3549 if not pats:
3550 3550 raise error.Abort(_('no files specified'))
3551 3551
3552 3552 m = scmutil.match(repo[None], pats, opts)
3553 3553 rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
3554 3554 return rejected and 1 or 0
3555 3555
3556 3556 @command(
3557 3557 'graft',
3558 3558 [('r', 'rev', [], _('revisions to graft'), _('REV')),
3559 3559 ('c', 'continue', False, _('resume interrupted graft')),
3560 3560 ('e', 'edit', False, _('invoke editor on commit messages')),
3561 3561 ('', 'log', None, _('append graft info to log message')),
3562 3562 ('f', 'force', False, _('force graft')),
3563 3563 ('D', 'currentdate', False,
3564 3564 _('record the current date as commit date')),
3565 3565 ('U', 'currentuser', False,
3566 3566 _('record the current user as committer'))]
3567 3567 + commitopts2 + mergetoolopts + dryrunopts,
3568 3568 _('[OPTION]... [-r] REV...'))
3569 3569 def graft(ui, repo, *revs, **opts):
3570 3570 '''copy changes from other branches onto the current branch
3571 3571
3572 3572 This command uses Mercurial's merge logic to copy individual
3573 3573 changes from other branches without merging branches in the
3574 3574 history graph. This is sometimes known as 'backporting' or
3575 3575 'cherry-picking'. By default, graft will copy user, date, and
3576 3576 description from the source changesets.
3577 3577
3578 3578 Changesets that are ancestors of the current revision, that have
3579 3579 already been grafted, or that are merges will be skipped.
3580 3580
3581 3581 If --log is specified, log messages will have a comment appended
3582 3582 of the form::
3583 3583
3584 3584 (grafted from CHANGESETHASH)
3585 3585
3586 3586 If --force is specified, revisions will be grafted even if they
3587 3587 are already ancestors of or have been grafted to the destination.
3588 3588 This is useful when the revisions have since been backed out.
3589 3589
3590 3590 If a graft merge results in conflicts, the graft process is
3591 3591 interrupted so that the current merge can be manually resolved.
3592 3592 Once all conflicts are addressed, the graft process can be
3593 3593 continued with the -c/--continue option.
3594 3594
3595 3595 .. note::
3596 3596
3597 3597 The -c/--continue option does not reapply earlier options, except
3598 3598 for --force.
3599 3599
3600 3600 .. container:: verbose
3601 3601
3602 3602 Examples:
3603 3603
3604 3604 - copy a single change to the stable branch and edit its description::
3605 3605
3606 3606 hg update stable
3607 3607 hg graft --edit 9393
3608 3608
3609 3609 - graft a range of changesets with one exception, updating dates::
3610 3610
3611 3611 hg graft -D "2085::2093 and not 2091"
3612 3612
3613 3613 - continue a graft after resolving conflicts::
3614 3614
3615 3615 hg graft -c
3616 3616
3617 3617 - show the source of a grafted changeset::
3618 3618
3619 3619 hg log --debug -r .
3620 3620
3621 3621 See :hg:`help revisions` and :hg:`help revsets` for more about
3622 3622 specifying revisions.
3623 3623
3624 3624 Returns 0 on successful completion.
3625 3625 '''
3626 3626
3627 3627 revs = list(revs)
3628 3628 revs.extend(opts['rev'])
3629 3629
3630 3630 if not opts.get('user') and opts.get('currentuser'):
3631 3631 opts['user'] = ui.username()
3632 3632 if not opts.get('date') and opts.get('currentdate'):
3633 3633 opts['date'] = "%d %d" % util.makedate()
3634 3634
3635 3635 editor = cmdutil.getcommiteditor(editform='graft', **opts)
3636 3636
3637 3637 cont = False
3638 3638 if opts['continue']:
3639 3639 cont = True
3640 3640 if revs:
3641 3641 raise error.Abort(_("can't specify --continue and revisions"))
3642 3642 # read in unfinished revisions
3643 3643 try:
3644 3644 nodes = repo.vfs.read('graftstate').splitlines()
3645 3645 revs = [repo[node].rev() for node in nodes]
3646 3646 except IOError as inst:
3647 3647 if inst.errno != errno.ENOENT:
3648 3648 raise
3649 3649 raise error.Abort(_("no graft state found, can't continue"))
3650 3650 else:
3651 3651 cmdutil.checkunfinished(repo)
3652 3652 cmdutil.bailifchanged(repo)
3653 3653 if not revs:
3654 3654 raise error.Abort(_('no revisions specified'))
3655 3655 revs = scmutil.revrange(repo, revs)
3656 3656
3657 3657 skipped = set()
3658 3658 # check for merges
3659 3659 for rev in repo.revs('%ld and merge()', revs):
3660 3660 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
3661 3661 skipped.add(rev)
3662 3662 revs = [r for r in revs if r not in skipped]
3663 3663 if not revs:
3664 3664 return -1
3665 3665
3666 3666 # Don't check in the --continue case, in effect retaining --force across
3667 3667 # --continues. That's because without --force, any revisions we decided to
3668 3668 # skip would have been filtered out here, so they wouldn't have made their
3669 3669 # way to the graftstate. With --force, any revisions we would have otherwise
3670 3670 # skipped would not have been filtered out, and if they hadn't been applied
3671 3671 # already, they'd have been in the graftstate.
3672 3672 if not (cont or opts.get('force')):
3673 3673 # check for ancestors of dest branch
3674 3674 crev = repo['.'].rev()
3675 3675 ancestors = repo.changelog.ancestors([crev], inclusive=True)
3676 3676 # Cannot use x.remove(y) on smart set, this has to be a list.
3677 3677 # XXX make this lazy in the future
3678 3678 revs = list(revs)
3679 3679 # don't mutate while iterating, create a copy
3680 3680 for rev in list(revs):
3681 3681 if rev in ancestors:
3682 3682 ui.warn(_('skipping ancestor revision %d:%s\n') %
3683 3683 (rev, repo[rev]))
3684 3684 # XXX remove on list is slow
3685 3685 revs.remove(rev)
3686 3686 if not revs:
3687 3687 return -1
3688 3688
3689 3689 # analyze revs for earlier grafts
3690 3690 ids = {}
3691 3691 for ctx in repo.set("%ld", revs):
3692 3692 ids[ctx.hex()] = ctx.rev()
3693 3693 n = ctx.extra().get('source')
3694 3694 if n:
3695 3695 ids[n] = ctx.rev()
3696 3696
3697 3697 # check ancestors for earlier grafts
3698 3698 ui.debug('scanning for duplicate grafts\n')
3699 3699
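        # walk the ancestors of the destination that are not ancestors of the
        # grafted set; drop any revision that was already grafted there or
        # that shares a graft source with one of them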
3700 3700 for rev in repo.changelog.findmissingrevs(revs, [crev]):
3701 3701 ctx = repo[rev]
3702 3702 n = ctx.extra().get('source')
3703 3703 if n in ids:
3704 3704 try:
3705 3705 r = repo[n].rev()
3706 3706 except error.RepoLookupError:
3707 3707 r = None
3708 3708 if r in revs:
3709 3709 ui.warn(_('skipping revision %d:%s '
3710 3710 '(already grafted to %d:%s)\n')
3711 3711 % (r, repo[r], rev, ctx))
3712 3712 revs.remove(r)
3713 3713 elif ids[n] in revs:
3714 3714 if r is None:
3715 3715 ui.warn(_('skipping already grafted revision %d:%s '
3716 3716 '(%d:%s also has unknown origin %s)\n')
3717 3717 % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
3718 3718 else:
3719 3719 ui.warn(_('skipping already grafted revision %d:%s '
3720 3720 '(%d:%s also has origin %d:%s)\n')
3721 3721 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
3722 3722 revs.remove(ids[n])
3723 3723 elif ctx.hex() in ids:
3724 3724 r = ids[ctx.hex()]
3725 3725 ui.warn(_('skipping already grafted revision %d:%s '
3726 3726 '(was grafted from %d:%s)\n') %
3727 3727 (r, repo[r], rev, ctx))
3728 3728 revs.remove(r)
3729 3729 if not revs:
3730 3730 return -1
3731 3731
3732 3732 wlock = repo.wlock()
3733 3733 try:
3734 3734 for pos, ctx in enumerate(repo.set("%ld", revs)):
3735 3735 desc = '%d:%s "%s"' % (ctx.rev(), ctx,
3736 3736 ctx.description().split('\n', 1)[0])
3737 3737 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3738 3738 if names:
3739 3739 desc += ' (%s)' % ' '.join(names)
3740 3740 ui.status(_('grafting %s\n') % desc)
3741 3741 if opts.get('dry_run'):
3742 3742 continue
3743 3743
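            # record graft provenance in the changeset's extra: keep the
            # original 'source' when grafting a graft (noting this changeset
            # as an intermediate source), otherwise point at this changeset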
3744 3744 source = ctx.extra().get('source')
3745 3745 extra = {}
3746 3746 if source:
3747 3747 extra['source'] = source
3748 3748 extra['intermediate-source'] = ctx.hex()
3749 3749 else:
3750 3750 extra['source'] = ctx.hex()
3751 3751 user = ctx.user()
3752 3752 if opts.get('user'):
3753 3753 user = opts['user']
3754 3754 date = ctx.date()
3755 3755 if opts.get('date'):
3756 3756 date = opts['date']
3757 3757 message = ctx.description()
3758 3758 if opts.get('log'):
3759 3759 message += '\n(grafted from %s)' % ctx.hex()
3760 3760
3761 3761 # we don't merge the first commit when continuing
3762 3762 if not cont:
3763 3763 # perform the graft merge with p1(rev) as 'ancestor'
3764 3764 try:
3765 3765 # ui.forcemerge is an internal variable, do not document
3766 3766 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
3767 3767 'graft')
3768 3768 stats = mergemod.graft(repo, ctx, ctx.p1(),
3769 3769 ['local', 'graft'])
3770 3770 finally:
3771 3771 repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
3772 3772 # report any conflicts
3773 3773 if stats and stats[3] > 0:
3774 3774 # write out state for --continue
3775 3775 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
3776 3776 repo.vfs.write('graftstate', ''.join(nodelines))
3777 3777 raise error.Abort(
3778 3778 _("unresolved conflicts, can't continue"),
3779 3779 hint=_('use hg resolve and hg graft --continue'))
3780 3780 else:
3781 3781 cont = False
3782 3782
3783 3783 # commit
3784 3784 node = repo.commit(text=message, user=user,
3785 3785 date=date, extra=extra, editor=editor)
3786 3786 if node is None:
3787 3787 ui.warn(
3788 3788 _('note: graft of %d:%s created no changes to commit\n') %
3789 3789 (ctx.rev(), ctx))
3790 3790 finally:
3791 3791 wlock.release()
3792 3792
3793 3793 # remove state when we complete successfully
3794 3794 if not opts.get('dry_run'):
3795 3795 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
3796 3796
3797 3797 return 0
3798 3798
3799 3799 @command('grep',
3800 3800 [('0', 'print0', None, _('end fields with NUL')),
3801 3801 ('', 'all', None, _('print all revisions that match')),
3802 3802 ('a', 'text', None, _('treat all files as text')),
3803 3803 ('f', 'follow', None,
3804 3804 _('follow changeset history,'
3805 3805 ' or file history across copies and renames')),
3806 3806 ('i', 'ignore-case', None, _('ignore case when matching')),
3807 3807 ('l', 'files-with-matches', None,
3808 3808 _('print only filenames and revisions that match')),
3809 3809 ('n', 'line-number', None, _('print matching line numbers')),
3810 3810 ('r', 'rev', [],
3811 3811 _('only search files changed within revision range'), _('REV')),
3812 3812 ('u', 'user', None, _('list the author (long with -v)')),
3813 3813 ('d', 'date', None, _('list the date (short with -q)')),
3814 3814 ] + walkopts,
3815 3815 _('[OPTION]... PATTERN [FILE]...'),
3816 3816 inferrepo=True)
3817 3817 def grep(ui, repo, pattern, *pats, **opts):
3818 3818 """search for a pattern in specified files and revisions
3819 3819
3820 3820 Search revisions of files for a regular expression.
3821 3821
3822 3822 This command behaves differently than Unix grep. It only accepts
3823 3823 Python/Perl regexps. It searches repository history, not the
3824 3824 working directory. It always prints the revision number in which a
3825 3825 match appears.
3826 3826
3827 3827 By default, grep only prints output for the first revision of a
3828 3828 file in which it finds a match. To get it to print every revision
3829 3829 that contains a change in match status ("-" for a match that
3830 3830 becomes a non-match, or "+" for a non-match that becomes a match),
3831 3831 use the --all flag.
3832 3832
3833 3833 Returns 0 if a match is found, 1 otherwise.
3834 3834 """
3835 3835 reflags = re.M
3836 3836 if opts.get('ignore_case'):
3837 3837 reflags |= re.I
3838 3838 try:
3839 3839 regexp = util.re.compile(pattern, reflags)
3840 3840 except re.error as inst:
3841 3841 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
3842 3842 return 1
3843 3843 sep, eol = ':', '\n'
3844 3844 if opts.get('print0'):
3845 3845 sep = eol = '\0'
3846 3846
3847 3847 getfile = util.lrucachefunc(repo.file)
3848 3848
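    # matchlines yields (linenum, colstart, colend, line) for every regexp
    # match in body; e.g. with body 'foo\nbar foo\n' and pattern 'foo' it
    # yields (1, 0, 3, 'foo') and then (2, 4, 7, 'bar foo')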
3849 3849 def matchlines(body):
3850 3850 begin = 0
3851 3851 linenum = 0
3852 3852 while begin < len(body):
3853 3853 match = regexp.search(body, begin)
3854 3854 if not match:
3855 3855 break
3856 3856 mstart, mend = match.span()
3857 3857 linenum += body.count('\n', begin, mstart) + 1
3858 3858 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3859 3859 begin = body.find('\n', mend) + 1 or len(body) + 1
3860 3860 lend = begin - 1
3861 3861 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3862 3862
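    # linestate records one matching line plus the column span of the first
    # match; equality is based on the line text so difflib can compare the
    # sets of matching lines between a revision and its parent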
3863 3863 class linestate(object):
3864 3864 def __init__(self, line, linenum, colstart, colend):
3865 3865 self.line = line
3866 3866 self.linenum = linenum
3867 3867 self.colstart = colstart
3868 3868 self.colend = colend
3869 3869
3870 3870 def __hash__(self):
3871 3871 return hash((self.linenum, self.line))
3872 3872
3873 3873 def __eq__(self, other):
3874 3874 return self.line == other.line
3875 3875
3876 3876 def __iter__(self):
3877 3877 yield (self.line[:self.colstart], '')
3878 3878 yield (self.line[self.colstart:self.colend], 'grep.match')
3879 3879 rest = self.line[self.colend:]
3880 3880 while rest != '':
3881 3881 match = regexp.search(rest)
3882 3882 if not match:
3883 3883 yield (rest, '')
3884 3884 break
3885 3885 mstart, mend = match.span()
3886 3886 yield (rest[:mstart], '')
3887 3887 yield (rest[mstart:mend], 'grep.match')
3888 3888 rest = rest[mend:]
3889 3889
3890 3890 matches = {}
3891 3891 copies = {}
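    # matches[rev][fn] collects the linestates found in fn at rev;
    # copies[rev][fn] records fn's rename/copy source when --follow is used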
3892 3892 def grepbody(fn, rev, body):
3893 3893 matches[rev].setdefault(fn, [])
3894 3894 m = matches[rev][fn]
3895 3895 for lnum, cstart, cend, line in matchlines(body):
3896 3896 s = linestate(line, lnum, cstart, cend)
3897 3897 m.append(s)
3898 3898
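    # difflinestates diffs two lists of linestates and yields ('-', state) or
    # ('+', state) pairs for lines whose match status changed between the
    # parent and the child revision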
3899 3899 def difflinestates(a, b):
3900 3900 sm = difflib.SequenceMatcher(None, a, b)
3901 3901 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3902 3902 if tag == 'insert':
3903 3903 for i in xrange(blo, bhi):
3904 3904 yield ('+', b[i])
3905 3905 elif tag == 'delete':
3906 3906 for i in xrange(alo, ahi):
3907 3907 yield ('-', a[i])
3908 3908 elif tag == 'replace':
3909 3909 for i in xrange(alo, ahi):
3910 3910 yield ('-', a[i])
3911 3911 for i in xrange(blo, bhi):
3912 3912 yield ('+', b[i])
3913 3913
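    # display prints the matches recorded for one file at one revision, with
    # the optional columns (line number, change, user, date) selected by the
    # command-line flags; it returns True if anything was written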
3914 3914 def display(fn, ctx, pstates, states):
3915 3915 rev = ctx.rev()
3916 3916 if ui.quiet:
3917 3917 datefunc = util.shortdate
3918 3918 else:
3919 3919 datefunc = util.datestr
3920 3920 found = False
3921 3921 @util.cachefunc
3922 3922 def binary():
3923 3923 flog = getfile(fn)
3924 3924 return util.binary(flog.read(ctx.filenode(fn)))
3925 3925
3926 3926 if opts.get('all'):
3927 3927 iter = difflinestates(pstates, states)
3928 3928 else:
3929 3929 iter = [('', l) for l in states]
3930 3930 for change, l in iter:
3931 3931 cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]
3932 3932
3933 3933 if opts.get('line_number'):
3934 3934 cols.append((str(l.linenum), 'grep.linenumber'))
3935 3935 if opts.get('all'):
3936 3936 cols.append((change, 'grep.change'))
3937 3937 if opts.get('user'):
3938 3938 cols.append((ui.shortuser(ctx.user()), 'grep.user'))
3939 3939 if opts.get('date'):
3940 3940 cols.append((datefunc(ctx.date()), 'grep.date'))
3941 3941 for col, label in cols[:-1]:
3942 3942 ui.write(col, label=label)
3943 3943 ui.write(sep, label='grep.sep')
3944 3944 ui.write(cols[-1][0], label=cols[-1][1])
3945 3945 if not opts.get('files_with_matches'):
3946 3946 ui.write(sep, label='grep.sep')
3947 3947 if not opts.get('text') and binary():
3948 3948 ui.write(" Binary file matches")
3949 3949 else:
3950 3950 for s, label in l:
3951 3951 ui.write(s, label=label)
3952 3952 ui.write(eol)
3953 3953 found = True
3954 3954 if opts.get('files_with_matches'):
3955 3955 break
3956 3956 return found
3957 3957
3958 3958 skip = {}
3959 3959 revfiles = {}
3960 3960 matchfn = scmutil.match(repo[None], pats, opts)
3961 3961 found = False
3962 3962 follow = opts.get('follow')
3963 3963
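    # prep is the walkchangerevs() preparation hook: for each file touched in
    # a revision it records the matching lines for that revision and for its
    # first parent, following renames/copies when --follow is in effect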
3964 3964 def prep(ctx, fns):
3965 3965 rev = ctx.rev()
3966 3966 pctx = ctx.p1()
3967 3967 parent = pctx.rev()
3968 3968 matches.setdefault(rev, {})
3969 3969 matches.setdefault(parent, {})
3970 3970 files = revfiles.setdefault(rev, [])
3971 3971 for fn in fns:
3972 3972 flog = getfile(fn)
3973 3973 try:
3974 3974 fnode = ctx.filenode(fn)
3975 3975 except error.LookupError:
3976 3976 continue
3977 3977
3978 3978 copied = flog.renamed(fnode)
3979 3979 copy = follow and copied and copied[0]
3980 3980 if copy:
3981 3981 copies.setdefault(rev, {})[fn] = copy
3982 3982 if fn in skip:
3983 3983 if copy:
3984 3984 skip[copy] = True
3985 3985 continue
3986 3986 files.append(fn)
3987 3987
3988 3988 if fn not in matches[rev]:
3989 3989 grepbody(fn, rev, flog.read(fnode))
3990 3990
3991 3991 pfn = copy or fn
3992 3992 if pfn not in matches[parent]:
3993 3993 try:
3994 3994 fnode = pctx.filenode(pfn)
3995 3995 grepbody(pfn, parent, flog.read(fnode))
3996 3996 except error.LookupError:
3997 3997 pass
3998 3998
3999 3999 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
4000 4000 rev = ctx.rev()
4001 4001 parent = ctx.p1().rev()
4002 4002 for fn in sorted(revfiles.get(rev, [])):
4003 4003 states = matches[rev][fn]
4004 4004 copy = copies.get(rev, {}).get(fn)
4005 4005 if fn in skip:
4006 4006 if copy:
4007 4007 skip[copy] = True
4008 4008 continue
4009 4009 pstates = matches.get(parent, {}).get(copy or fn, [])
4010 4010 if pstates or states:
4011 4011 r = display(fn, ctx, pstates, states)
4012 4012 found = found or r
4013 4013 if r and not opts.get('all'):
4014 4014 skip[fn] = True
4015 4015 if copy:
4016 4016 skip[copy] = True
4017 4017 del matches[rev]
4018 4018 del revfiles[rev]
4019 4019
4020 4020 return not found
4021 4021
4022 4022 @command('heads',
4023 4023 [('r', 'rev', '',
4024 4024 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
4025 4025 ('t', 'topo', False, _('show topological heads only')),
4026 4026 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
4027 4027 ('c', 'closed', False, _('show normal and closed branch heads')),
4028 4028 ] + templateopts,
4029 4029 _('[-ct] [-r STARTREV] [REV]...'))
4030 4030 def heads(ui, repo, *branchrevs, **opts):
4031 4031 """show branch heads
4032 4032
4033 4033 With no arguments, show all open branch heads in the repository.
4034 4034 Branch heads are changesets that have no descendants on the
4035 4035 same branch. They are where development generally takes place and
4036 4036 are the usual targets for update and merge operations.
4037 4037
4038 4038 If one or more REVs are given, only open branch heads on the
4039 4039 branches associated with the specified changesets are shown. This
4040 4040 means that you can use :hg:`heads .` to see the heads on the
4041 4041 currently checked-out branch.
4042 4042
4043 4043 If -c/--closed is specified, also show branch heads marked closed
4044 4044 (see :hg:`commit --close-branch`).
4045 4045
4046 4046 If STARTREV is specified, only those heads that are descendants of
4047 4047 STARTREV will be displayed.
4048 4048
4049 4049 If -t/--topo is specified, named branch mechanics will be ignored and only
4050 4050 topological heads (changesets with no children) will be shown.
4051 4051
4052 4052 Returns 0 if matching heads are found, 1 if not.
4053 4053 """
4054 4054
4055 4055 start = None
4056 4056 if 'rev' in opts:
4057 4057 start = scmutil.revsingle(repo, opts['rev'], None).node()
4058 4058
4059 4059 if opts.get('topo'):
4060 4060 heads = [repo[h] for h in repo.heads(start)]
4061 4061 else:
4062 4062 heads = []
4063 4063 for branch in repo.branchmap():
4064 4064 heads += repo.branchheads(branch, start, opts.get('closed'))
4065 4065 heads = [repo[h] for h in heads]
4066 4066
4067 4067 if branchrevs:
4068 4068 branches = set(repo[br].branch() for br in branchrevs)
4069 4069 heads = [h for h in heads if h.branch() in branches]
4070 4070
4071 4071 if opts.get('active') and branchrevs:
4072 4072 dagheads = repo.heads(start)
4073 4073 heads = [h for h in heads if h.node() in dagheads]
4074 4074
4075 4075 if branchrevs:
4076 4076 haveheads = set(h.branch() for h in heads)
4077 4077 if branches - haveheads:
4078 4078 headless = ', '.join(b for b in branches - haveheads)
4079 4079 msg = _('no open branch heads found on branches %s')
4080 4080 if opts.get('rev'):
4081 4081 msg += _(' (started at %s)') % opts['rev']
4082 4082 ui.warn((msg + '\n') % headless)
4083 4083
4084 4084 if not heads:
4085 4085 return 1
4086 4086
4087 4087 heads = sorted(heads, key=lambda x: -x.rev())
4088 4088 displayer = cmdutil.show_changeset(ui, repo, opts)
4089 4089 for ctx in heads:
4090 4090 displayer.show(ctx)
4091 4091 displayer.close()
4092 4092
4093 4093 @command('help',
4094 4094 [('e', 'extension', None, _('show only help for extensions')),
4095 4095 ('c', 'command', None, _('show only help for commands')),
4096 4096 ('k', 'keyword', None, _('show topics matching keyword')),
4097 4097 ],
4098 4098 _('[-eck] [TOPIC]'),
4099 4099 norepo=True)
4100 4100 def help_(ui, name=None, **opts):
4101 4101 """show help for a given topic or a help overview
4102 4102
4103 4103 With no arguments, print a list of commands with short help messages.
4104 4104
4105 4105 Given a topic, extension, or command name, print help for that
4106 4106 topic.
4107 4107
4108 4108 Returns 0 if successful.
4109 4109 """
4110 4110
4111 4111 textwidth = min(ui.termwidth(), 80) - 2
4112 4112
4113 4113 keep = []
4114 4114 if ui.verbose:
4115 4115 keep.append('verbose')
4116 4116 if sys.platform.startswith('win'):
4117 4117 keep.append('windows')
4118 4118 elif sys.platform == 'OpenVMS':
4119 4119 keep.append('vms')
4120 4120 elif sys.platform == 'plan9':
4121 4121 keep.append('plan9')
4122 4122 else:
4123 4123 keep.append('unix')
4124 4124 keep.append(sys.platform.lower())
4125 4125
4126 4126 section = None
4127 4127 if name and '.' in name:
4128 4128 name, section = name.split('.', 1)
4129 4129 section = section.lower()
4130 4130
4131 4131 text = help.help_(ui, name, **opts)
4132 4132
4133 4133 formatted, pruned = minirst.format(text, textwidth, keep=keep,
4134 4134 section=section)
4135 4135
4136 4136 # We could have been given a weird ".foo" section without a name
4137 4137 # to look for, or we could have simply failed to find "foo.bar"
4138 4138 # because bar isn't a section of foo
4139 4139 if section and not (formatted and name):
4140 4140 raise error.Abort(_("help section not found"))
4141 4141
4142 4142 if 'verbose' in pruned:
4143 4143 keep.append('omitted')
4144 4144 else:
4145 4145 keep.append('notomitted')
4146 4146 formatted, pruned = minirst.format(text, textwidth, keep=keep,
4147 4147 section=section)
4148 4148 ui.write(formatted)
4149 4149
4150 4150
4151 4151 @command('identify|id',
4152 4152 [('r', 'rev', '',
4153 4153 _('identify the specified revision'), _('REV')),
4154 4154 ('n', 'num', None, _('show local revision number')),
4155 4155 ('i', 'id', None, _('show global revision id')),
4156 4156 ('b', 'branch', None, _('show branch')),
4157 4157 ('t', 'tags', None, _('show tags')),
4158 4158 ('B', 'bookmarks', None, _('show bookmarks')),
4159 4159 ] + remoteopts,
4160 4160 _('[-nibtB] [-r REV] [SOURCE]'),
4161 4161 optionalrepo=True)
4162 4162 def identify(ui, repo, source=None, rev=None,
4163 4163 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
4164 4164 """identify the working directory or specified revision
4165 4165
4166 4166 Print a summary identifying the repository state at REV using one or
4167 4167 two parent hash identifiers, followed by a "+" if the working
4168 4168 directory has uncommitted changes, the branch name (if not default),
4169 4169 a list of tags, and a list of bookmarks.
4170 4170
4171 4171 When REV is not given, print a summary of the current state of the
4172 4172 repository.
4173 4173
4174 4174 Specifying a path to a repository root or Mercurial bundle will
4175 4175 cause lookup to operate on that repository/bundle.
4176 4176
4177 4177 .. container:: verbose
4178 4178
4179 4179 Examples:
4180 4180
4181 4181 - generate a build identifier for the working directory::
4182 4182
4183 4183 hg id --id > build-id.dat
4184 4184
4185 4185 - find the revision corresponding to a tag::
4186 4186
4187 4187 hg id -n -r 1.3
4188 4188
4189 4189 - check the most recent revision of a remote repository::
4190 4190
4191 4191 hg id -r tip http://selenic.com/hg/
4192 4192
4193 4193 Returns 0 if successful.
4194 4194 """
4195 4195
4196 4196 if not repo and not source:
4197 4197 raise error.Abort(_("there is no Mercurial repository here "
4198 4198 "(.hg not found)"))
4199 4199
4200 4200 if ui.debugflag:
4201 4201 hexfunc = hex
4202 4202 else:
4203 4203 hexfunc = short
4204 4204 default = not (num or id or branch or tags or bookmarks)
4205 4205 output = []
4206 4206 revs = []
4207 4207
4208 4208 if source:
4209 4209 source, branches = hg.parseurl(ui.expandpath(source))
4210 4210 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4211 4211 repo = peer.local()
4212 4212 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4213 4213
4214 4214 if not repo:
4215 4215 if num or branch or tags:
4216 4216 raise error.Abort(
4217 4217 _("can't query remote revision number, branch, or tags"))
4218 4218 if not rev and revs:
4219 4219 rev = revs[0]
4220 4220 if not rev:
4221 4221 rev = "tip"
4222 4222
4223 4223 remoterev = peer.lookup(rev)
4224 4224 if default or id:
4225 4225 output = [hexfunc(remoterev)]
4226 4226
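        # getbms returns the remote bookmarks that point at the revision we
        # just looked up on the peer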
4227 4227 def getbms():
4228 4228 bms = []
4229 4229
4230 4230 if 'bookmarks' in peer.listkeys('namespaces'):
4231 4231 hexremoterev = hex(remoterev)
4232 4232 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4233 4233 if bmr == hexremoterev]
4234 4234
4235 4235 return sorted(bms)
4236 4236
4237 4237 if bookmarks:
4238 4238 output.extend(getbms())
4239 4239 elif default and not ui.quiet:
4240 4240 # multiple bookmarks for a single parent separated by '/'
4241 4241 bm = '/'.join(getbms())
4242 4242 if bm:
4243 4243 output.append(bm)
4244 4244 else:
4245 4245 ctx = scmutil.revsingle(repo, rev, None)
4246 4246
4247 4247 if ctx.rev() is None:
4248 4248 ctx = repo[None]
4249 4249 parents = ctx.parents()
4250 4250 taglist = []
4251 4251 for p in parents:
4252 4252 taglist.extend(p.tags())
4253 4253
4254 4254 changed = ""
4255 4255 if default or id or num:
4256 4256 if (any(repo.status())
4257 4257 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4258 4258 changed = '+'
4259 4259 if default or id:
4260 4260 output = ["%s%s" %
4261 4261 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4262 4262 if num:
4263 4263 output.append("%s%s" %
4264 4264 ('+'.join([str(p.rev()) for p in parents]), changed))
4265 4265 else:
4266 4266 if default or id:
4267 4267 output = [hexfunc(ctx.node())]
4268 4268 if num:
4269 4269 output.append(str(ctx.rev()))
4270 4270 taglist = ctx.tags()
4271 4271
4272 4272 if default and not ui.quiet:
4273 4273 b = ctx.branch()
4274 4274 if b != 'default':
4275 4275 output.append("(%s)" % b)
4276 4276
4277 4277 # multiple tags for a single parent separated by '/'
4278 4278 t = '/'.join(taglist)
4279 4279 if t:
4280 4280 output.append(t)
4281 4281
4282 4282 # multiple bookmarks for a single parent separated by '/'
4283 4283 bm = '/'.join(ctx.bookmarks())
4284 4284 if bm:
4285 4285 output.append(bm)
4286 4286 else:
4287 4287 if branch:
4288 4288 output.append(ctx.branch())
4289 4289
4290 4290 if tags:
4291 4291 output.extend(taglist)
4292 4292
4293 4293 if bookmarks:
4294 4294 output.extend(ctx.bookmarks())
4295 4295
4296 4296 ui.write("%s\n" % ' '.join(output))
4297 4297
4298 4298 @command('import|patch',
4299 4299 [('p', 'strip', 1,
4300 4300 _('directory strip option for patch. This has the same '
4301 4301 'meaning as the corresponding patch option'), _('NUM')),
4302 4302 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4303 4303 ('e', 'edit', False, _('invoke editor on commit messages')),
4304 4304 ('f', 'force', None,
4305 4305 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4306 4306 ('', 'no-commit', None,
4307 4307 _("don't commit, just update the working directory")),
4308 4308 ('', 'bypass', None,
4309 4309 _("apply patch without touching the working directory")),
4310 4310 ('', 'partial', None,
4311 4311 _('commit even if some hunks fail')),
4312 4312 ('', 'exact', None,
4313 4313 _('apply patch to the nodes from which it was generated')),
4314 4314 ('', 'prefix', '',
4315 4315 _('apply patch to subdirectory'), _('DIR')),
4316 4316 ('', 'import-branch', None,
4317 4317 _('use any branch information in patch (implied by --exact)'))] +
4318 4318 commitopts + commitopts2 + similarityopts,
4319 4319 _('[OPTION]... PATCH...'))
4320 4320 def import_(ui, repo, patch1=None, *patches, **opts):
4321 4321 """import an ordered set of patches
4322 4322
4323 4323 Import a list of patches and commit them individually (unless
4324 4324 --no-commit is specified).
4325 4325
4326 4326 Because import first applies changes to the working directory,
4327 4327 import will abort if there are outstanding changes.
4328 4328
4329 4329 You can import a patch straight from a mail message. Even patches
4330 4330 as attachments work (to use the body part, it must have type
4331 4331 text/plain or text/x-patch). The From and Subject headers of the
4332 4332 email message are used as the default committer and commit message.
4333 4333 All text/plain body parts before the first diff are added to the
4334 4334 commit message.
4335 4335
4336 4336 If the imported patch was generated by :hg:`export`, user and
4337 4337 description from patch override values from message headers and
4338 4338 body. Values given on command line with -m/--message and -u/--user
4339 4339 override these.
4340 4340
4341 4341 If --exact is specified, import will set the working directory to
4342 4342 the parent of each patch before applying it, and will abort if the
4343 4343 resulting changeset has a different ID than the one recorded in
4344 4344 the patch. This may happen due to character set problems or other
4345 4345 deficiencies in the text patch format.
4346 4346
4347 4347 Use --bypass to apply and commit patches directly to the
4348 4348 repository, not touching the working directory. Without --exact,
4349 4349 patches will be applied on top of the working directory parent
4350 4350 revision.
4351 4351
4352 4352 With -s/--similarity, hg will attempt to discover renames and
4353 4353 copies in the patch in the same way as :hg:`addremove`.
4354 4354
4355 4355 Use --partial to ensure a changeset will be created from the patch
4356 4356 even if some hunks fail to apply. Hunks that fail to apply will be
4357 4357 written to a <target-file>.rej file. Conflicts can then be resolved
4358 4358 by hand before :hg:`commit --amend` is run to update the created
4359 4359 changeset. This flag exists to let people import patches that
4360 4360 partially apply without losing the associated metadata (author,
4361 4361 date, description, ...). Note that when none of the hunk applies
4362 4362 date, description, ...). Note that when none of the hunks apply
4363 4363 importing only the patch metadata.
4364 4364
4365 4365 It is possible to use external patch programs to perform the patch
4366 4366 by setting the ``ui.patch`` configuration option. For the default
4367 4367 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4368 4368 See :hg:`help config` for more information about configuration
4369 4369 files and how to use these options.
4370 4370
4371 4371 To read a patch from standard input, use "-" as the patch name. If
4372 4372 a URL is specified, the patch will be downloaded from it.
4373 4373 See :hg:`help dates` for a list of formats valid for -d/--date.
4374 4374
4375 4375 .. container:: verbose
4376 4376
4377 4377 Examples:
4378 4378
4379 4379 - import a traditional patch from a website and detect renames::
4380 4380
4381 4381 hg import -s 80 http://example.com/bugfix.patch
4382 4382
4383 4383 - import a changeset from an hgweb server::
4384 4384
4385 4385 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
4386 4386
4387 4387 - import all the patches in a Unix-style mbox::
4388 4388
4389 4389 hg import incoming-patches.mbox
4390 4390
4391 4391 - attempt to exactly restore an exported changeset (not always
4392 4392 possible)::
4393 4393
4394 4394 hg import --exact proposed-fix.patch
4395 4395
4396 4396 - use an external tool to apply a patch which is too fuzzy for
4397 4397 the default internal tool::
4398 4398
4399 4399 hg import --config ui.patch="patch --merge" fuzzy.patch
4400 4400
4401 4401 - change the default fuzzing from 2 to a less strict 7::
4402 4402
4403 4403 hg import --config patch.fuzz=7 fuzz.patch
4404 4404
4405 4405 Returns 0 on success, 1 on partial success (see --partial).
4406 4406 """
4407 4407
4408 4408 if not patch1:
4409 4409 raise error.Abort(_('need at least one patch to import'))
4410 4410
4411 4411 patches = (patch1,) + patches
4412 4412
4413 4413 date = opts.get('date')
4414 4414 if date:
4415 4415 opts['date'] = util.parsedate(date)
4416 4416
4417 4417 update = not opts.get('bypass')
4418 4418 if not update and opts.get('no_commit'):
4419 4419 raise error.Abort(_('cannot use --no-commit with --bypass'))
4420 4420 try:
4421 4421 sim = float(opts.get('similarity') or 0)
4422 4422 except ValueError:
4423 4423 raise error.Abort(_('similarity must be a number'))
4424 4424 if sim < 0 or sim > 100:
4425 4425 raise error.Abort(_('similarity must be between 0 and 100'))
4426 4426 if sim and not update:
4427 4427 raise error.Abort(_('cannot use --similarity with --bypass'))
4428 4428 if opts.get('exact') and opts.get('edit'):
4429 4429 raise error.Abort(_('cannot use --exact with --edit'))
4430 4430 if opts.get('exact') and opts.get('prefix'):
4431 4431 raise error.Abort(_('cannot use --exact with --prefix'))
4432 4432
4433 4433 if update:
4434 4434 cmdutil.checkunfinished(repo)
4435 4435 if (opts.get('exact') or not opts.get('force')) and update:
4436 4436 cmdutil.bailifchanged(repo)
4437 4437
4438 4438 base = opts["base"]
4439 4439 wlock = dsguard = lock = tr = None
4440 4440 msgs = []
4441 4441 ret = 0
4442 4442
4443 4443
4444 4444 try:
4445 4445 try:
4446 4446 wlock = repo.wlock()
4447 4447 if not opts.get('no_commit'):
4448 4448 lock = repo.lock()
4449 4449 tr = repo.transaction('import')
4450 4450 else:
4451 4451 dsguard = cmdutil.dirstateguard(repo, 'import')
4452 4452 parents = repo.parents()
4453 4453 for patchurl in patches:
4454 4454 if patchurl == '-':
4455 4455 ui.status(_('applying patch from stdin\n'))
4456 4456 patchfile = ui.fin
4457 4457 patchurl = 'stdin' # for error message
4458 4458 else:
4459 4459 patchurl = os.path.join(base, patchurl)
4460 4460 ui.status(_('applying %s\n') % patchurl)
4461 4461 patchfile = hg.openpath(ui, patchurl)
4462 4462
4463 4463 haspatch = False
4464 4464 for hunk in patch.split(patchfile):
4465 4465 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4466 4466 parents, opts,
4467 4467 msgs, hg.clean)
4468 4468 if msg:
4469 4469 haspatch = True
4470 4470 ui.note(msg + '\n')
4471 4471 if update or opts.get('exact'):
4472 4472 parents = repo.parents()
4473 4473 else:
4474 4474 parents = [repo[node]]
4475 4475 if rej:
4476 4476 ui.write_err(_("patch applied partially\n"))
4477 4477 ui.write_err(_("(fix the .rej files and run "
4478 4478 "`hg commit --amend`)\n"))
4479 4479 ret = 1
4480 4480 break
4481 4481
4482 4482 if not haspatch:
4483 4483 raise error.Abort(_('%s: no diffs found') % patchurl)
4484 4484
4485 4485 if tr:
4486 4486 tr.close()
4487 4487 if msgs:
4488 4488 repo.savecommitmessage('\n* * *\n'.join(msgs))
4489 4489 if dsguard:
4490 4490 dsguard.close()
4491 4491 return ret
4492 4492 finally:
4493 4493 # TODO: get rid of this meaningless try/finally enclosing.
4494 4494 # this is kept only to reduce changes in a patch.
4495 4495 pass
4496 4496 finally:
4497 4497 if tr:
4498 4498 tr.release()
4499 4499 release(lock, dsguard, wlock)
4500 4500
4501 4501 @command('incoming|in',
4502 4502 [('f', 'force', None,
4503 4503 _('run even if remote repository is unrelated')),
4504 4504 ('n', 'newest-first', None, _('show newest record first')),
4505 4505 ('', 'bundle', '',
4506 4506 _('file to store the bundles into'), _('FILE')),
4507 4507 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4508 4508 ('B', 'bookmarks', False, _("compare bookmarks")),
4509 4509 ('b', 'branch', [],
4510 4510 _('a specific branch you would like to pull'), _('BRANCH')),
4511 4511 ] + logopts + remoteopts + subrepoopts,
4512 4512 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4513 4513 def incoming(ui, repo, source="default", **opts):
4514 4514 """show new changesets found in source
4515 4515
4516 4516 Show new changesets found in the specified path/URL or the default
4517 4517 pull location. These are the changesets that would have been pulled
4518 4518 if a pull was requested at the time you issued this command.
4519 4519
4520 4520 See pull for valid source format details.
4521 4521
4522 4522 .. container:: verbose
4523 4523
4524 4524 With -B/--bookmarks, the result of bookmark comparison between
4525 4525 local and remote repositories is displayed. With -v/--verbose,
4526 4526 status is also displayed for each bookmark like below::
4527 4527
4528 4528 BM1 01234567890a added
4529 4529 BM2 1234567890ab advanced
4530 4530 BM3 234567890abc diverged
4531 4531 BM4 34567890abcd changed
4532 4532
4533 4533 The action taken locally when pulling depends on the
4534 4534 status of each bookmark:
4535 4535
4536 4536 :``added``: pull will create it
4537 4537 :``advanced``: pull will update it
4538 4538 :``diverged``: pull will create a divergent bookmark
4539 4539 :``changed``: result depends on remote changesets
4540 4540
4541 4541 From the point of view of pulling behavior, bookmarks
4542 4542 existing only in the remote repository are treated as ``added``,
4543 4543 even if they are in fact locally deleted.
4544 4544
4545 4545 .. container:: verbose
4546 4546
4547 4547 For a remote repository, using --bundle avoids downloading the
4548 4548 changesets twice if the incoming is followed by a pull.
4549 4549
4550 4550 Examples:
4551 4551
4552 4552 - show incoming changes with patches and full description::
4553 4553
4554 4554 hg incoming -vp
4555 4555
4556 4556 - show incoming changes excluding merges, store a bundle::
4557 4557
4558 4558 hg in -vpM --bundle incoming.hg
4559 4559 hg pull incoming.hg
4560 4560
4561 4561 - briefly list changes inside a bundle::
4562 4562
4563 4563 hg in changes.hg -T "{desc|firstline}\\n"
4564 4564
4565 4565 Returns 0 if there are incoming changes, 1 otherwise.
4566 4566 """
4567 4567 if opts.get('graph'):
4568 4568 cmdutil.checkunsupportedgraphflags([], opts)
4569 4569 def display(other, chlist, displayer):
4570 4570 revdag = cmdutil.graphrevs(other, chlist, opts)
4571 4571 showparents = [ctx.node() for ctx in repo[None].parents()]
4572 4572 cmdutil.displaygraph(ui, revdag, displayer, showparents,
4573 4573 graphmod.asciiedges)
4574 4574
4575 4575 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4576 4576 return 0
4577 4577
4578 4578 if opts.get('bundle') and opts.get('subrepos'):
4579 4579 raise error.Abort(_('cannot combine --bundle and --subrepos'))
4580 4580
4581 4581 if opts.get('bookmarks'):
4582 4582 source, branches = hg.parseurl(ui.expandpath(source),
4583 4583 opts.get('branch'))
4584 4584 other = hg.peer(repo, opts, source)
4585 4585 if 'bookmarks' not in other.listkeys('namespaces'):
4586 4586 ui.warn(_("remote doesn't support bookmarks\n"))
4587 4587 return 0
4588 4588 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4589 4589 return bookmarks.incoming(ui, repo, other)
4590 4590
4591 4591 repo._subtoppath = ui.expandpath(source)
4592 4592 try:
4593 4593 return hg.incoming(ui, repo, source, opts)
4594 4594 finally:
4595 4595 del repo._subtoppath
4596 4596
4597 4597
4598 4598 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4599 4599 norepo=True)
4600 4600 def init(ui, dest=".", **opts):
4601 4601 """create a new repository in the given directory
4602 4602
4603 4603 Initialize a new repository in the given directory. If the given
4604 4604 directory does not exist, it will be created.
4605 4605
4606 4606 If no directory is given, the current directory is used.
4607 4607
4608 4608 It is possible to specify an ``ssh://`` URL as the destination.
4609 4609 See :hg:`help urls` for more information.
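
For example, to create a new repository on a remote host over ssh (the
user, host and path below are illustrative)::

hg init ssh://user@example.com/path/to/repo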
4610 4610
4611 4611 Returns 0 on success.
4612 4612 """
4613 4613 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4614 4614
4615 4615 @command('locate',
4616 4616 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4617 4617 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4618 4618 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4619 4619 ] + walkopts,
4620 4620 _('[OPTION]... [PATTERN]...'))
4621 4621 def locate(ui, repo, *pats, **opts):
4622 4622 """locate files matching specific patterns (DEPRECATED)
4623 4623
4624 4624 Print files under Mercurial control in the working directory whose
4625 4625 names match the given patterns.
4626 4626
4627 4627 By default, this command searches all directories in the working
4628 4628 directory. To search just the current directory and its
4629 4629 subdirectories, use "--include .".
4630 4630
4631 4631 If no patterns are given to match, this command prints the names
4632 4632 of all files under Mercurial control in the working directory.
4633 4633
4634 4634 If you want to feed the output of this command into the "xargs"
4635 4635 command, use the -0 option to both this command and "xargs". This
4636 4636 will avoid the problem of "xargs" treating single filenames that
4637 4637 contain whitespace as multiple filenames.
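
For instance, a null-separated listing can be piped to "xargs" like this
(the grep invocation is only an illustration)::

hg locate -0 | xargs -0 grep -l TODO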
4638 4638
4639 4639 See :hg:`help files` for a more versatile command.
4640 4640
4641 4641 Returns 0 if a match is found, 1 otherwise.
4642 4642 """
4643 4643 if opts.get('print0'):
4644 4644 end = '\0'
4645 4645 else:
4646 4646 end = '\n'
4647 4647 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4648 4648
4649 4649 ret = 1
4650 4650 ctx = repo[rev]
4651 4651 m = scmutil.match(ctx, pats, opts, default='relglob',
4652 4652 badfn=lambda x, y: False)
4653 4653
4654 4654 for abs in ctx.matches(m):
4655 4655 if opts.get('fullpath'):
4656 4656 ui.write(repo.wjoin(abs), end)
4657 4657 else:
4658 4658 ui.write(((pats and m.rel(abs)) or abs), end)
4659 4659 ret = 0
4660 4660
4661 4661 return ret
4662 4662
4663 4663 @command('^log|history',
4664 4664 [('f', 'follow', None,
4665 4665 _('follow changeset history, or file history across copies and renames')),
4666 4666 ('', 'follow-first', None,
4667 4667 _('only follow the first parent of merge changesets (DEPRECATED)')),
4668 4668 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
4669 4669 ('C', 'copies', None, _('show copied files')),
4670 4670 ('k', 'keyword', [],
4671 4671 _('do case-insensitive search for a given text'), _('TEXT')),
4672 4672 ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
4673 4673 ('', 'removed', None, _('include revisions where files were removed')),
4674 4674 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
4675 4675 ('u', 'user', [], _('revisions committed by user'), _('USER')),
4676 4676 ('', 'only-branch', [],
4677 4677 _('show only changesets within the given named branch (DEPRECATED)'),
4678 4678 _('BRANCH')),
4679 4679 ('b', 'branch', [],
4680 4680 _('show changesets within the given named branch'), _('BRANCH')),
4681 4681 ('P', 'prune', [],
4682 4682 _('do not display revision or any of its ancestors'), _('REV')),
4683 4683 ] + logopts + walkopts,
4684 4684 _('[OPTION]... [FILE]'),
4685 4685 inferrepo=True)
4686 4686 def log(ui, repo, *pats, **opts):
4687 4687 """show revision history of entire repository or files
4688 4688
4689 4689 Print the revision history of the specified files or the entire
4690 4690 project.
4691 4691
4692 4692 If no revision range is specified, the default is ``tip:0`` unless
4693 4693 --follow is set, in which case the working directory parent is
4694 4694 used as the starting revision.
4695 4695
4696 4696 File history is shown without following rename or copy history of
4697 4697 files. Use -f/--follow with a filename to follow history across
4698 4698 renames and copies. --follow without a filename will only show
4699 4699 ancestors or descendants of the starting revision.
4700 4700
4701 4701 By default this command prints revision number and changeset id,
4702 4702 tags, non-trivial parents, user, date and time, and a summary for
4703 4703 each commit. When the -v/--verbose switch is used, the list of
4704 4704 changed files and full commit message are shown.
4705 4705
4706 4706 With --graph the revisions are shown as an ASCII art DAG with the most
4707 4707 recent changeset at the top.
4708 4708 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
4709 4709 and '+' represents a fork where the changeset from the lines below is a
4710 4710 parent of the 'o' merge on the same line.
4711 4711
4712 4712 .. note::
4713 4713
4714 4714 log -p/--patch may generate unexpected diff output for merge
4715 4715 changesets, as it will only compare the merge changeset against
4716 4716 its first parent. Also, only files different from BOTH parents
4717 4717 will appear in files:.
4718 4718
4719 4719 .. note::
4720 4720
4721 4721 for performance reasons, log FILE may omit duplicate changes
4722 4722 made on branches and will not show removals or mode changes. To
4723 4723 see all such changes, use the --removed switch.
4724 4724
4725 4725 .. container:: verbose
4726 4726
4727 4727 Some examples:
4728 4728
4729 4729 - changesets with full descriptions and file lists::
4730 4730
4731 4731 hg log -v
4732 4732
4733 4733 - changesets ancestral to the working directory::
4734 4734
4735 4735 hg log -f
4736 4736
4737 4737 - last 10 commits on the current branch::
4738 4738
4739 4739 hg log -l 10 -b .
4740 4740
4741 4741 - changesets showing all modifications of a file, including removals::
4742 4742
4743 4743 hg log --removed file.c
4744 4744
4745 4745 - all changesets that touch a directory, with diffs, excluding merges::
4746 4746
4747 4747 hg log -Mp lib/
4748 4748
4749 4749 - all revision numbers that match a keyword::
4750 4750
4751 4751 hg log -k bug --template "{rev}\\n"
4752 4752
4753 4753 - list available log templates::
4754 4754
4755 4755 hg log -T list
4756 4756
4757 4757 - check if a given changeset is included in a tagged release::
4758 4758
4759 4759 hg log -r "a21ccf and ancestor(1.9)"
4760 4760
4761 4761 - find all changesets by some user in a date range::
4762 4762
4763 4763 hg log -k alice -d "may 2008 to jul 2008"
4764 4764
4765 4765 - summary of all changesets after the last tag::
4766 4766
4767 4767 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
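
- show the last 10 changesets as an ASCII art DAG (a small variation on
the examples above)::

hg log --graph -l 10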
4768 4768
4769 4769 See :hg:`help dates` for a list of formats valid for -d/--date.
4770 4770
4771 4771 See :hg:`help revisions` and :hg:`help revsets` for more about
4772 4772 specifying revisions.
4773 4773
4774 4774 See :hg:`help templates` for more about pre-packaged styles and
4775 4775 specifying custom templates.
4776 4776
4777 4777 Returns 0 on success.
4778 4778
4779 4779 """
4780 4780 if opts.get('follow') and opts.get('rev'):
4781 4781 opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
4782 4782 del opts['follow']
4783 4783
4784 4784 if opts.get('graph'):
4785 4785 return cmdutil.graphlog(ui, repo, *pats, **opts)
4786 4786
4787 4787 revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
4788 4788 limit = cmdutil.loglimit(opts)
4789 4789 count = 0
4790 4790
4791 4791 getrenamed = None
4792 4792 if opts.get('copies'):
4793 4793 endrev = None
4794 4794 if opts.get('rev'):
4795 4795 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
4796 4796 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
4797 4797
4798 4798 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
4799 4799 for rev in revs:
4800 4800 if count == limit:
4801 4801 break
4802 4802 ctx = repo[rev]
4803 4803 copies = None
4804 4804 if getrenamed is not None and rev:
4805 4805 copies = []
4806 4806 for fn in ctx.files():
4807 4807 rename = getrenamed(fn, rev)
4808 4808 if rename:
4809 4809 copies.append((fn, rename[0]))
4810 4810 if filematcher:
4811 4811 revmatchfn = filematcher(ctx.rev())
4812 4812 else:
4813 4813 revmatchfn = None
4814 4814 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
4815 4815 if displayer.flush(ctx):
4816 4816 count += 1
4817 4817
4818 4818 displayer.close()
4819 4819
4820 4820 @command('manifest',
4821 4821 [('r', 'rev', '', _('revision to display'), _('REV')),
4822 4822 ('', 'all', False, _("list files from all revisions"))]
4823 4823 + formatteropts,
4824 4824 _('[-r REV]'))
4825 4825 def manifest(ui, repo, node=None, rev=None, **opts):
4826 4826 """output the current or given revision of the project manifest
4827 4827
4828 4828 Print a list of version controlled files for the given revision.
4829 4829 If no revision is given, the first parent of the working directory
4830 4830 is used, or the null revision if no revision is checked out.
4831 4831
4832 4832 With -v, print file permissions, symlink and executable bits.
4833 4833 With --debug, print file revision hashes.
4834 4834
4835 4835 If option --all is specified, the list of all files from all revisions
4836 4836 is printed. This includes deleted and renamed files.
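
For example (the tag name is illustrative)::

hg manifest -r 1.0
hg manifest --all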
4837 4837
4838 4838 Returns 0 on success.
4839 4839 """
4840 4840
4841 4841 fm = ui.formatter('manifest', opts)
4842 4842
4843 4843 if opts.get('all'):
4844 4844 if rev or node:
4845 4845 raise error.Abort(_("can't specify a revision with --all"))
4846 4846
4847 4847 res = []
4848 4848 prefix = "data/"
4849 4849 suffix = ".i"
4850 4850 plen = len(prefix)
4851 4851 slen = len(suffix)
4852 4852 lock = repo.lock()
4853 4853 try:
4854 4854 for fn, b, size in repo.store.datafiles():
4855 4855 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
4856 4856 res.append(fn[plen:-slen])
4857 4857 finally:
4858 4858 lock.release()
4859 4859 for f in res:
4860 4860 fm.startitem()
4861 4861 fm.write("path", '%s\n', f)
4862 4862 fm.end()
4863 4863 return
4864 4864
4865 4865 if rev and node:
4866 4866 raise error.Abort(_("please specify just one revision"))
4867 4867
4868 4868 if not node:
4869 4869 node = rev
4870 4870
4871 4871 char = {'l': '@', 'x': '*', '': ''}
4872 4872 mode = {'l': '644', 'x': '755', '': '644'}
4873 4873 ctx = scmutil.revsingle(repo, node)
4874 4874 mf = ctx.manifest()
4875 4875 for f in ctx:
4876 4876 fm.startitem()
4877 4877 fl = ctx[f].flags()
4878 4878 fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
4879 4879 fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
4880 4880 fm.write('path', '%s\n', f)
4881 4881 fm.end()
4882 4882
4883 4883 @command('^merge',
4884 4884 [('f', 'force', None,
4885 4885 _('force a merge including outstanding changes (DEPRECATED)')),
4886 4886 ('r', 'rev', '', _('revision to merge'), _('REV')),
4887 4887 ('P', 'preview', None,
4888 4888 _('review revisions to merge (no merge is performed)'))
4889 4889 ] + mergetoolopts,
4890 4890 _('[-P] [-f] [[-r] REV]'))
4891 4891 def merge(ui, repo, node=None, **opts):
4892 4892 """merge another revision into working directory
4893 4893
4894 4894 The current working directory is updated with all changes made in
4895 4895 the requested revision since the last common predecessor revision.
4896 4896
4897 4897 Files that changed between either parent are marked as changed for
4898 4898 the next commit and a commit must be performed before any further
4899 4899 updates to the repository are allowed. The next commit will have
4900 4900 two parents.
4901 4901
4902 4902 ``--tool`` can be used to specify the merge tool used for file
4903 4903 merges. It overrides the HGMERGE environment variable and your
4904 4904 configuration files. See :hg:`help merge-tools` for options.
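
For example, to preview the changesets that would be merged and then
merge using a specific tool (the revision and tool names below are
illustrative)::

hg merge -P stable
hg merge --tool kdiff3 stable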
4905 4905
4906 4906 If no revision is specified, the working directory's parent is a
4907 4907 head revision, and the current branch contains exactly one other
4908 4908 head, the other head is merged with by default. Otherwise, an
4909 4909 explicit revision with which to merge must be provided.
4910 4910
4911 4911 :hg:`resolve` must be used to resolve unresolved files.
4912 4912
4913 4913 To undo an uncommitted merge, use :hg:`update --clean .` which
4914 4914 will check out a clean copy of the original merge parent, losing
4915 4915 all changes.
4916 4916
4917 4917 Returns 0 on success, 1 if there are unresolved files.
4918 4918 """
4919 4919
4920 4920 if opts.get('rev') and node:
4921 4921 raise error.Abort(_("please specify just one revision"))
4922 4922 if not node:
4923 4923 node = opts.get('rev')
4924 4924
4925 4925 if node:
4926 4926 node = scmutil.revsingle(repo, node).node()
4927 4927
4928 4928 if not node:
4929 4929 node = repo[destutil.destmerge(repo)].node()
4930 4930
4931 4931 if opts.get('preview'):
4932 4932 # find nodes that are ancestors of p2 but not of p1
4933 4933 p1 = repo.lookup('.')
4934 4934 p2 = repo.lookup(node)
4935 4935 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4936 4936
4937 4937 displayer = cmdutil.show_changeset(ui, repo, opts)
4938 4938 for node in nodes:
4939 4939 displayer.show(repo[node])
4940 4940 displayer.close()
4941 4941 return 0
4942 4942
4943 4943 try:
4944 4944 # ui.forcemerge is an internal variable, do not document
4945 4945 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
4946 4946 return hg.merge(repo, node, force=opts.get('force'))
4947 4947 finally:
4948 4948 ui.setconfig('ui', 'forcemerge', '', 'merge')
4949 4949
4950 4950 @command('outgoing|out',
4951 4951 [('f', 'force', None, _('run even when the destination is unrelated')),
4952 4952 ('r', 'rev', [],
4953 4953 _('a changeset intended to be included in the destination'), _('REV')),
4954 4954 ('n', 'newest-first', None, _('show newest record first')),
4955 4955 ('B', 'bookmarks', False, _('compare bookmarks')),
4956 4956 ('b', 'branch', [], _('a specific branch you would like to push'),
4957 4957 _('BRANCH')),
4958 4958 ] + logopts + remoteopts + subrepoopts,
4959 4959 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4960 4960 def outgoing(ui, repo, dest=None, **opts):
4961 4961 """show changesets not found in the destination
4962 4962
4963 4963 Show changesets not found in the specified destination repository
4964 4964 or the default push location. These are the changesets that would
4965 4965 be pushed if a push was requested.
4966 4966
4967 4967 See pull for details of valid destination formats.
4968 4968
4969 4969 .. container:: verbose
4970 4970
4971 4971 With -B/--bookmarks, the result of bookmark comparison between
4972 4972 local and remote repositories is displayed. With -v/--verbose,
4973 4973 status is also displayed for each bookmark like below::
4974 4974
4975 4975 BM1 01234567890a added
4976 4976 BM2 deleted
4977 4977 BM3 234567890abc advanced
4978 4978 BM4 34567890abcd diverged
4979 4979 BM5 4567890abcde changed
4980 4980
4981 4981 The action taken when pushing depends on the
4982 4982 status of each bookmark:
4983 4983
4984 4984 :``added``: push with ``-B`` will create it
4985 4985 :``deleted``: push with ``-B`` will delete it
4986 4986 :``advanced``: push will update it
4987 4987 :``diverged``: push with ``-B`` will update it
4988 4988 :``changed``: push with ``-B`` will update it
4989 4989
4990 4990 From the point of view of pushing behavior, bookmarks
4991 4991 existing only in the remote repository are treated as
4992 4992 ``deleted``, even if they are in fact added remotely.
4993 4993
4994 4994 Returns 0 if there are outgoing changes, 1 otherwise.
4995 4995 """
4996 4996 if opts.get('graph'):
4997 4997 cmdutil.checkunsupportedgraphflags([], opts)
4998 4998 o, other = hg._outgoing(ui, repo, dest, opts)
4999 4999 if not o:
5000 5000 cmdutil.outgoinghooks(ui, repo, other, opts, o)
5001 5001 return
5002 5002
5003 5003 revdag = cmdutil.graphrevs(repo, o, opts)
5004 5004 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
5005 5005 showparents = [ctx.node() for ctx in repo[None].parents()]
5006 5006 cmdutil.displaygraph(ui, revdag, displayer, showparents,
5007 5007 graphmod.asciiedges)
5008 5008 cmdutil.outgoinghooks(ui, repo, other, opts, o)
5009 5009 return 0
5010 5010
5011 5011 if opts.get('bookmarks'):
5012 5012 dest = ui.expandpath(dest or 'default-push', dest or 'default')
5013 5013 dest, branches = hg.parseurl(dest, opts.get('branch'))
5014 5014 other = hg.peer(repo, opts, dest)
5015 5015 if 'bookmarks' not in other.listkeys('namespaces'):
5016 5016 ui.warn(_("remote doesn't support bookmarks\n"))
5017 5017 return 0
5018 5018 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
5019 5019 return bookmarks.outgoing(ui, repo, other)
5020 5020
5021 5021 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
5022 5022 try:
5023 5023 return hg.outgoing(ui, repo, dest, opts)
5024 5024 finally:
5025 5025 del repo._subtoppath
5026 5026
5027 5027 @command('parents',
5028 5028 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
5029 5029 ] + templateopts,
5030 5030 _('[-r REV] [FILE]'),
5031 5031 inferrepo=True)
5032 5032 def parents(ui, repo, file_=None, **opts):
5033 5033 """show the parents of the working directory or revision (DEPRECATED)
5034 5034
5035 5035 Print the working directory's parent revisions. If a revision is
5036 5036 given via -r/--rev, the parent of that revision will be printed.
5037 5037 If a file argument is given, the revision in which the file was
5038 5038 last changed (before the working directory revision or the
5039 5039 argument to --rev if given) is printed.
5040 5040
5041 5041 See :hg:`summary` and :hg:`help revsets` for related information.
5042 5042
5043 5043 Returns 0 on success.
5044 5044 """
5045 5045
5046 5046 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
5047 5047
5048 5048 if file_:
5049 5049 m = scmutil.match(ctx, (file_,), opts)
5050 5050 if m.anypats() or len(m.files()) != 1:
5051 5051 raise error.Abort(_('can only specify an explicit filename'))
5052 5052 file_ = m.files()[0]
5053 5053 filenodes = []
5054 5054 for cp in ctx.parents():
5055 5055 if not cp:
5056 5056 continue
5057 5057 try:
5058 5058 filenodes.append(cp.filenode(file_))
5059 5059 except error.LookupError:
5060 5060 pass
5061 5061 if not filenodes:
5062 5062 raise error.Abort(_("'%s' not found in manifest!") % file_)
5063 5063 p = []
5064 5064 for fn in filenodes:
5065 5065 fctx = repo.filectx(file_, fileid=fn)
5066 5066 p.append(fctx.node())
5067 5067 else:
5068 5068 p = [cp.node() for cp in ctx.parents()]
5069 5069
5070 5070 displayer = cmdutil.show_changeset(ui, repo, opts)
5071 5071 for n in p:
5072 5072 if n != nullid:
5073 5073 displayer.show(repo[n])
5074 5074 displayer.close()
5075 5075
5076 5076 @command('paths', [], _('[NAME]'), optionalrepo=True)
5077 5077 def paths(ui, repo, search=None):
5078 5078 """show aliases for remote repositories
5079 5079
5080 5080 Show definition of symbolic path name NAME. If no name is given,
5081 5081 show definition of all available names.
5082 5082
5083 5083 Option -q/--quiet suppresses all output when searching for NAME
5084 5084 and shows only the path names when listing all definitions.
5085 5085
5086 5086 Path names are defined in the [paths] section of your
5087 5087 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
5088 5088 repository, ``.hg/hgrc`` is used, too.
5089 5089
5090 5090 The path names ``default`` and ``default-push`` have a special
5091 5091 meaning. When performing a push or pull operation, they are used
5092 5092 as fallbacks if no location is specified on the command-line.
5093 5093 When ``default-push`` is set, it will be used for push and
5094 5094 ``default`` will be used for pull; otherwise ``default`` is used
5095 5095 as the fallback for both. When cloning a repository, the clone
5096 5096 source is written as ``default`` in ``.hg/hgrc``. Note that
5097 5097 ``default`` and ``default-push`` apply to all inbound (e.g.
5098 5098 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
5099 5099 :hg:`bundle`) operations.
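
A minimal ``[paths]`` section might look like this (the URLs are only an
illustration)::

[paths]
# replace these with the locations of your own repositories
default = https://hg.example.com/project
default-push = ssh://hg@example.com/project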
5100 5100
5101 5101 See :hg:`help urls` for more information.
5102 5102
5103 5103 Returns 0 on success.
5104 5104 """
5105 5105 if search:
5106 5106 for name, path in sorted(ui.paths.iteritems()):
5107 5107 if name == search:
5108 5108 ui.status("%s\n" % util.hidepassword(path.loc))
5109 5109 return
5110 5110 if not ui.quiet:
5111 5111 ui.warn(_("not found!\n"))
5112 5112 return 1
5113 5113 else:
5114 5114 for name, path in sorted(ui.paths.iteritems()):
5115 5115 if ui.quiet:
5116 5116 ui.write("%s\n" % name)
5117 5117 else:
5118 5118 ui.write("%s = %s\n" % (name,
5119 5119 util.hidepassword(path.loc)))
5120 5120
5121 5121 @command('phase',
5122 5122 [('p', 'public', False, _('set changeset phase to public')),
5123 5123 ('d', 'draft', False, _('set changeset phase to draft')),
5124 5124 ('s', 'secret', False, _('set changeset phase to secret')),
5125 5125 ('f', 'force', False, _('allow to move boundary backward')),
5126 5126 ('r', 'rev', [], _('target revision'), _('REV')),
5127 5127 ],
5128 5128 _('[-p|-d|-s] [-f] [-r] [REV...]'))
5129 5129 def phase(ui, repo, *revs, **opts):
5130 5130 """set or show the current phase name
5131 5131
5132 5132 With no argument, show the phase name of the current revision(s).
5133 5133
5134 5134 With one of -p/--public, -d/--draft or -s/--secret, change the
5135 5135 phase value of the specified revisions.
5136 5136
5137 5137 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
5138 5138 lower phase to a higher phase. Phases are ordered as follows::
5139 5139
5140 5140 public < draft < secret
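
For example, to publish every ancestor of the working directory parent,
or to force a changeset back to draft (the revision arguments are
illustrative)::

hg phase --public -r "::."
hg phase --force --draft -r tip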
5141 5141
5142 5142 Returns 0 on success, 1 if some phases could not be changed.
5143 5143
5144 5144 (For more information about the phases concept, see :hg:`help phases`.)
5145 5145 """
5146 5146 # search for a unique phase argument
5147 5147 targetphase = None
5148 5148 for idx, name in enumerate(phases.phasenames):
5149 5149 if opts[name]:
5150 5150 if targetphase is not None:
5151 5151 raise error.Abort(_('only one phase can be specified'))
5152 5152 targetphase = idx
5153 5153
5154 5154 # look for specified revision
5155 5155 revs = list(revs)
5156 5156 revs.extend(opts['rev'])
5157 5157 if not revs:
5158 5158 # display both parents as the second parent phase can influence
5159 5159 # the phase of a merge commit
5160 5160 revs = [c.rev() for c in repo[None].parents()]
5161 5161
5162 5162 revs = scmutil.revrange(repo, revs)
5163 5163
5164 5164 lock = None
5165 5165 ret = 0
5166 5166 if targetphase is None:
5167 5167 # display
5168 5168 for r in revs:
5169 5169 ctx = repo[r]
5170 5170 ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5171 5171 else:
5172 5172 tr = None
5173 5173 lock = repo.lock()
5174 5174 try:
5175 5175 tr = repo.transaction("phase")
5176 5176 # set phase
5177 5177 if not revs:
5178 5178 raise error.Abort(_('empty revision set'))
5179 5179 nodes = [repo[r].node() for r in revs]
5180 5180 # moving revision from public to draft may hide them
5181 5181 # We have to check result on an unfiltered repository
5182 5182 unfi = repo.unfiltered()
5183 5183 getphase = unfi._phasecache.phase
5184 5184 olddata = [getphase(unfi, r) for r in unfi]
5185 5185 phases.advanceboundary(repo, tr, targetphase, nodes)
5186 5186 if opts['force']:
5187 5187 phases.retractboundary(repo, tr, targetphase, nodes)
5188 5188 tr.close()
5189 5189 finally:
5190 5190 if tr is not None:
5191 5191 tr.release()
5192 5192 lock.release()
5193 5193 getphase = unfi._phasecache.phase
5194 5194 newdata = [getphase(unfi, r) for r in unfi]
5195 5195 changes = sum(newdata[r] != olddata[r] for r in unfi)
5196 5196 cl = unfi.changelog
5197 5197 rejected = [n for n in nodes
5198 5198 if newdata[cl.rev(n)] < targetphase]
5199 5199 if rejected:
5200 5200 ui.warn(_('cannot move %i changesets to a higher '
5201 5201 'phase, use --force\n') % len(rejected))
5202 5202 ret = 1
5203 5203 if changes:
5204 5204 msg = _('phase changed for %i changesets\n') % changes
5205 5205 if ret:
5206 5206 ui.status(msg)
5207 5207 else:
5208 5208 ui.note(msg)
5209 5209 else:
5210 5210 ui.warn(_('no phases changed\n'))
5211 5211 return ret
5212 5212
5213 5213 def postincoming(ui, repo, modheads, optupdate, checkout):
5214 5214 if modheads == 0:
5215 5215 return
5216 5216 if optupdate:
5217 5217 try:
5218 5218 brev = checkout
5219 5219 movemarkfrom = None
5220 5220 if not checkout:
5221 5221 updata = destutil.destupdate(repo)
5222 5222 checkout, movemarkfrom, brev = updata
5223 5223 ret = hg.update(repo, checkout)
5224 5224 except error.UpdateAbort as inst:
5225 5225 ui.warn(_("not updating: %s\n") % str(inst))
5226 5226 if inst.hint:
5227 5227 ui.warn(_("(%s)\n") % inst.hint)
5228 5228 return 0
5229 5229 if not ret and not checkout:
5230 5230 if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
5231 5231 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
5232 5232 return ret
5233 5233 if modheads > 1:
5234 5234 currentbranchheads = len(repo.branchheads())
5235 5235 if currentbranchheads == modheads:
5236 5236 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
5237 5237 elif currentbranchheads > 1:
5238 5238 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
5239 5239 "merge)\n"))
5240 5240 else:
5241 5241 ui.status(_("(run 'hg heads' to see heads)\n"))
5242 5242 else:
5243 5243 ui.status(_("(run 'hg update' to get a working copy)\n"))
5244 5244
5245 5245 @command('^pull',
5246 5246 [('u', 'update', None,
5247 5247 _('update to new branch head if changesets were pulled')),
5248 5248 ('f', 'force', None, _('run even when remote repository is unrelated')),
5249 5249 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
5250 5250 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
5251 5251 ('b', 'branch', [], _('a specific branch you would like to pull'),
5252 5252 _('BRANCH')),
5253 5253 ] + remoteopts,
5254 5254 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
5255 5255 def pull(ui, repo, source="default", **opts):
5256 5256 """pull changes from the specified source
5257 5257
5258 5258 Pull changes from a remote repository to a local one.
5259 5259
5260 5260 This finds all changes from the repository at the specified path
5261 5261 or URL and adds them to a local repository (the current one unless
5262 5262 -R is specified). By default, this does not update the copy of the
5263 5263 project in the working directory.
5264 5264
5265 5265 Use :hg:`incoming` if you want to see what would have been added
5266 5266 by a pull at the time you issued this command. If you then decide
5267 5267 to add those changes to the repository, you should use :hg:`pull
5268 5268 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
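
For example (the changeset hash and bookmark name below are
illustrative)::

hg pull -r 1234567890ab
hg pull -B feature-bookmark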
5269 5269
5270 5270 If SOURCE is omitted, the 'default' path will be used.
5271 5271 See :hg:`help urls` for more information.
5272 5272
5273 5273 Returns 0 on success, 1 if an update had unresolved files.
5274 5274 """
5275 5275 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
5276 5276 ui.status(_('pulling from %s\n') % util.hidepassword(source))
5277 5277 other = hg.peer(repo, opts, source)
5278 5278 try:
5279 5279 revs, checkout = hg.addbranchrevs(repo, other, branches,
5280 5280 opts.get('rev'))
5281 5281
5282 5282
5283 5283 pullopargs = {}
5284 5284 if opts.get('bookmark'):
5285 5285 if not revs:
5286 5286 revs = []
5287 5287 # The list of bookmark used here is not the one used to actually
5288 5288 # update the bookmark name. This can result in the revision pulled
5289 5289 # not ending up with the name of the bookmark because of a race
5290 5290 # condition on the server. (See issue 4689 for details)
5291 5291 remotebookmarks = other.listkeys('bookmarks')
5292 5292 pullopargs['remotebookmarks'] = remotebookmarks
5293 5293 for b in opts['bookmark']:
5294 5294 if b not in remotebookmarks:
5295 5295 raise error.Abort(_('remote bookmark %s not found!') % b)
5296 5296 revs.append(remotebookmarks[b])
5297 5297
5298 5298 if revs:
5299 5299 try:
5300 5300 # When 'rev' is a bookmark name, we cannot guarantee that it
5301 5301 # will be updated with that name because of a race condition
5302 5302 # server side. (See issue 4689 for details)
5303 5303 oldrevs = revs
5304 5304 revs = [] # actually, nodes
5305 5305 for r in oldrevs:
5306 5306 node = other.lookup(r)
5307 5307 revs.append(node)
5308 5308 if r == checkout:
5309 5309 checkout = node
5310 5310 except error.CapabilityError:
5311 5311 err = _("other repository doesn't support revision lookup, "
5312 5312 "so a rev cannot be specified.")
5313 5313 raise error.Abort(err)
5314 5314
5315 5315 modheads = exchange.pull(repo, other, heads=revs,
5316 5316 force=opts.get('force'),
5317 5317 bookmarks=opts.get('bookmark', ()),
5318 5318 opargs=pullopargs).cgresult
5319 5319 if checkout:
5320 5320 checkout = str(repo.changelog.rev(checkout))
5321 5321 repo._subtoppath = source
5322 5322 try:
5323 5323 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
5324 5324
5325 5325 finally:
5326 5326 del repo._subtoppath
5327 5327
5328 5328 finally:
5329 5329 other.close()
5330 5330 return ret
5331 5331
5332 5332 @command('^push',
5333 5333 [('f', 'force', None, _('force push')),
5334 5334 ('r', 'rev', [],
5335 5335 _('a changeset intended to be included in the destination'),
5336 5336 _('REV')),
5337 5337 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5338 5338 ('b', 'branch', [],
5339 5339 _('a specific branch you would like to push'), _('BRANCH')),
5340 5340 ('', 'new-branch', False, _('allow pushing a new branch')),
5341 5341 ] + remoteopts,
5342 5342 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5343 5343 def push(ui, repo, dest=None, **opts):
5344 5344 """push changes to the specified destination
5345 5345
5346 5346 Push changesets from the local repository to the specified
5347 5347 destination.
5348 5348
5349 5349 This operation is symmetrical to pull: it is identical to a pull
5350 5350 in the destination repository from the current one.
5351 5351
5352 5352 By default, push will not allow creation of new heads at the
5353 5353 destination, since multiple heads would make it unclear which head
5354 5354 to use. In this situation, it is recommended to pull and merge
5355 5355 before pushing.
5356 5356
5357 5357 Use --new-branch if you want to allow push to create a new named
5358 5358 branch that is not present at the destination. This allows you to
5359 5359 only create a new branch without forcing other changes.
5360 5360
5361 5361 .. note::
5362 5362
5363 5363 Extra care should be taken with the -f/--force option,
5364 5364 which will push all new heads on all branches, an action which will
5365 5365 almost always cause confusion for collaborators.
5366 5366
5367 5367 If -r/--rev is used, the specified revision and all its ancestors
5368 5368 will be pushed to the remote repository.
5369 5369
5370 5370 If -B/--bookmark is used, the specified bookmarked revision, its
5371 5371 ancestors, and the bookmark will be pushed to the remote
5372 5372 repository.
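
For example (the bookmark name below is illustrative)::

hg push -B feature-bookmark
hg push --new-branch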
5373 5373
5374 5374 Please see :hg:`help urls` for important details about ``ssh://``
5375 5375 URLs. If DESTINATION is omitted, a default path will be used.
5376 5376
5377 5377 Returns 0 if push was successful, 1 if nothing to push.
5378 5378 """
5379 5379
5380 5380 if opts.get('bookmark'):
5381 5381 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
5382 5382 for b in opts['bookmark']:
5383 5383 # translate -B options to -r so changesets get pushed
5384 5384 if b in repo._bookmarks:
5385 5385 opts.setdefault('rev', []).append(b)
5386 5386 else:
5387 5387 # if we try to push a deleted bookmark, translate it to null
5388 5388 # this lets simultaneous -r, -b options continue working
5389 5389 opts.setdefault('rev', []).append("null")
5390 5390
5391 5391 path = ui.paths.getpath(dest, default='default')
5392 5392 if not path:
5393 5393 raise error.Abort(_('default repository not configured!'),
5394 5394 hint=_('see the "path" section in "hg help config"'))
5395 5395 dest, branches = path.pushloc, (path.branch, opts.get('branch') or [])
5396 5396 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
5397 5397 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
5398 5398 other = hg.peer(repo, opts, dest)
5399 5399
5400 5400 if revs:
5401 5401 revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
5402 5402 if not revs:
5403 5403 raise error.Abort(_("specified revisions evaluate to an empty set"),
5404 5404 hint=_("use different revision arguments"))
5405 5405
5406 5406 repo._subtoppath = dest
5407 5407 try:
5408 5408 # push subrepos depth-first for coherent ordering
5409 5409 c = repo['']
5410 5410 subs = c.substate # only repos that are committed
5411 5411 for s in sorted(subs):
5412 5412 result = c.sub(s).push(opts)
5413 5413 if result == 0:
5414 5414 return not result
5415 5415 finally:
5416 5416 del repo._subtoppath
5417 5417 pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
5418 5418 newbranch=opts.get('new_branch'),
5419 5419 bookmarks=opts.get('bookmark', ()))
5420 5420
5421 5421 result = not pushop.cgresult
5422 5422
5423 5423 if pushop.bkresult is not None:
5424 5424 if pushop.bkresult == 2:
5425 5425 result = 2
5426 5426 elif not result and pushop.bkresult:
5427 5427 result = 2
5428 5428
5429 5429 return result
5430 5430
5431 5431 @command('recover', [])
5432 5432 def recover(ui, repo):
5433 5433 """roll back an interrupted transaction
5434 5434
5435 5435 Recover from an interrupted commit or pull.
5436 5436
5437 5437 This command tries to fix the repository status after an
5438 5438 interrupted operation. It should only be necessary when Mercurial
5439 5439 suggests it.
5440 5440
5441 5441 Returns 0 if successful, 1 if nothing to recover or verify fails.
5442 5442 """
5443 5443 if repo.recover():
5444 5444 return hg.verify(repo)
5445 5445 return 1
5446 5446
5447 5447 @command('^remove|rm',
5448 5448 [('A', 'after', None, _('record delete for missing files')),
5449 5449 ('f', 'force', None,
5450 5450 _('remove (and delete) file even if added or modified')),
5451 5451 ] + subrepoopts + walkopts,
5452 5452 _('[OPTION]... FILE...'),
5453 5453 inferrepo=True)
5454 5454 def remove(ui, repo, *pats, **opts):
5455 5455 """remove the specified files on the next commit
5456 5456
5457 5457 Schedule the indicated files for removal from the current branch.
5458 5458
5459 5459 This command schedules the files to be removed at the next commit.
5460 5460 To undo a remove before that, see :hg:`revert`. To undo added
5461 5461 files, see :hg:`forget`.
5462 5462
5463 5463 .. container:: verbose
5464 5464
5465 5465 -A/--after can be used to remove only files that have already
5466 5466 been deleted, -f/--force can be used to force deletion, and -Af
5467 5467 can be used to remove files from the next revision without
5468 5468 deleting them from the working directory.
5469 5469
5470 5470 The following table details the behavior of remove for different
5471 5471 file states (columns) and option combinations (rows). The file
5472 5472 states are Added [A], Clean [C], Modified [M] and Missing [!]
5473 5473 (as reported by :hg:`status`). The actions are Warn, Remove
5474 5474 (from branch) and Delete (from disk):
5475 5475
5476 5476 ========= == == == ==
5477 5477 opt/state A C M !
5478 5478 ========= == == == ==
5479 5479 none W RD W R
5480 5480 -f R RD RD R
5481 5481 -A W W W R
5482 5482 -Af R R R R
5483 5483 ========= == == == ==
5484 5484
5485 5485 Note that remove never deletes files in Added [A] state from the
5486 5486 working directory, not even if option --force is specified.
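
For example, to stop tracking a file while keeping it in the working
directory (the file name is illustrative)::

hg remove -Af keep-this.c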
5487 5487
5488 5488 Returns 0 on success, 1 if any warnings encountered.
5489 5489 """
5490 5490
5491 5491 after, force = opts.get('after'), opts.get('force')
5492 5492 if not pats and not after:
5493 5493 raise error.Abort(_('no files specified'))
5494 5494
5495 5495 m = scmutil.match(repo[None], pats, opts)
5496 5496 subrepos = opts.get('subrepos')
5497 5497 return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
5498 5498
5499 5499 @command('rename|move|mv',
5500 5500 [('A', 'after', None, _('record a rename that has already occurred')),
5501 5501 ('f', 'force', None, _('forcibly copy over an existing managed file')),
5502 5502 ] + walkopts + dryrunopts,
5503 5503 _('[OPTION]... SOURCE... DEST'))
5504 5504 def rename(ui, repo, *pats, **opts):
5505 5505 """rename files; equivalent of copy + remove
5506 5506
5507 5507 Mark dest as copies of sources; mark sources for deletion. If dest
5508 5508 is a directory, copies are put in that directory. If dest is a
5509 5509 file, there can only be one source.
5510 5510
5511 5511 By default, this command copies the contents of files as they
5512 5512 exist in the working directory. If invoked with -A/--after, the
5513 5513 operation is recorded, but no copying is performed.
5514 5514
5515 5515 This command takes effect at the next commit. To undo a rename
5516 5516 before that, see :hg:`revert`.
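
For example, to record a rename that has already been done in the
filesystem (the file names are illustrative)::

hg rename --after old-name.c new-name.c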
5517 5517
5518 5518 Returns 0 on success, 1 if errors are encountered.
5519 5519 """
5520 5520 wlock = repo.wlock(False)
5521 5521 try:
5522 5522 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5523 5523 finally:
5524 5524 wlock.release()
5525 5525
5526 5526 @command('resolve',
5527 5527 [('a', 'all', None, _('select all unresolved files')),
5528 5528 ('l', 'list', None, _('list state of files needing merge')),
5529 5529 ('m', 'mark', None, _('mark files as resolved')),
5530 5530 ('u', 'unmark', None, _('mark files as unresolved')),
5531 5531 ('n', 'no-status', None, _('hide status prefix'))]
5532 5532 + mergetoolopts + walkopts + formatteropts,
5533 5533 _('[OPTION]... [FILE]...'),
5534 5534 inferrepo=True)
5535 5535 def resolve(ui, repo, *pats, **opts):
5536 5536 """redo merges or set/view the merge status of files
5537 5537
5538 5538 Merges with unresolved conflicts are often the result of
5539 5539 non-interactive merging using the ``internal:merge`` configuration
5540 5540 setting, or a command-line merge tool like ``diff3``. The resolve
5541 5541 command is used to manage the files involved in a merge, after
5542 5542 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5543 5543 working directory must have two parents). See :hg:`help
5544 5544 merge-tools` for information on configuring merge tools.
5545 5545
5546 5546 The resolve command can be used in the following ways:
5547 5547
5548 5548 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
5549 5549 files, discarding any previous merge attempts. Re-merging is not
5550 5550 performed for files already marked as resolved. Use ``--all/-a``
5551 5551 to select all unresolved files. ``--tool`` can be used to specify
5552 5552 the merge tool used for the given files. It overrides the HGMERGE
5553 5553 environment variable and your configuration files. Previous file
5554 5554 contents are saved with a ``.orig`` suffix.
5555 5555
5556 5556 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5557 5557 (e.g. after having manually fixed up the files). The default is
5558 5558 to mark all unresolved files.
5559 5559
5560 5560 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5561 5561 default is to mark all resolved files.
5562 5562
5563 5563 - :hg:`resolve -l`: list files which had or still have conflicts.
5564 5564 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5565 5565
5566 5566 Note that Mercurial will not let you commit files with unresolved
5567 5567 merge conflicts. You must use :hg:`resolve -m ...` before you can
5568 5568 commit after a conflicting merge.
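
A typical resolution sequence might look like this (the file name is
illustrative)::

hg resolve --list
hg resolve file.c
hg resolve --mark file.c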
5569 5569
5570 5570 Returns 0 on success, 1 if any files fail a resolve attempt.
5571 5571 """
5572 5572
5573 5573 all, mark, unmark, show, nostatus = \
5574 5574 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
5575 5575
5576 5576 if (show and (mark or unmark)) or (mark and unmark):
5577 5577 raise error.Abort(_("too many options specified"))
5578 5578 if pats and all:
5579 5579 raise error.Abort(_("can't specify --all and patterns"))
5580 5580 if not (all or pats or show or mark or unmark):
5581 5581 raise error.Abort(_('no files or directories specified'),
5582 5582 hint=('use --all to re-merge all unresolved files'))
5583 5583
5584 5584 if show:
5585 5585 fm = ui.formatter('resolve', opts)
5586 5586 ms = mergemod.mergestate(repo)
5587 5587 m = scmutil.match(repo[None], pats, opts)
5588 5588 for f in ms:
5589 5589 if not m(f):
5590 5590 continue
5591 5591 l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]
5592 5592 fm.startitem()
5593 5593 fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
5594 5594 fm.write('path', '%s\n', f, label=l)
5595 5595 fm.end()
5596 5596 return 0
5597 5597
5598 5598 wlock = repo.wlock()
5599 5599 try:
5600 5600 ms = mergemod.mergestate(repo)
5601 5601
5602 5602 if not (ms.active() or repo.dirstate.p2() != nullid):
5603 5603 raise error.Abort(
5604 5604 _('resolve command not applicable when not merging'))
5605 5605
5606 5606 m = scmutil.match(repo[None], pats, opts)
5607 5607 ret = 0
5608 5608 didwork = False
5609 5609
5610 5610 tocomplete = []
5611 5611 for f in ms:
5612 5612 if not m(f):
5613 5613 continue
5614 5614
5615 5615 didwork = True
5616 5616
5617 5617 if mark:
5618 5618 ms.mark(f, "r")
5619 5619 elif unmark:
5620 5620 ms.mark(f, "u")
5621 5621 else:
5622 5622 wctx = repo[None]
5623 5623
5624 5624 # backup pre-resolve (merge uses .orig for its own purposes)
5625 5625 a = repo.wjoin(f)
5626 5626 util.copyfile(a, a + ".resolve")
5627 5627
5628 5628 try:
5629 5629 # preresolve file
5630 5630 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5631 5631 'resolve')
5632 5632 complete, r = ms.preresolve(f, wctx)
5633 5633 if not complete:
5634 5634 tocomplete.append(f)
5635 5635 elif r:
5636 5636 ret = 1
5637 5637 finally:
5638 5638 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5639 5639 ms.commit()
5640 5640
5641 5641 # replace filemerge's .orig file with our resolve file
5642 5642 # for files in tocomplete, ms.resolve will not overwrite
5643 5643 # .orig -- only preresolve does
5644 5644 util.rename(a + ".resolve", a + ".orig")
5645 5645
5646 5646 for f in tocomplete:
5647 5647 try:
5648 5648 # resolve file
5649 5649 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
5650 5650 'resolve')
5651 5651 r = ms.resolve(f, wctx)
5652 5652 if r:
5653 5653 ret = 1
5654 5654 finally:
5655 5655 ui.setconfig('ui', 'forcemerge', '', 'resolve')
5656 5656 ms.commit()
5657 5657
5658 5658 ms.commit()
5659 5659
5660 5660 if not didwork and pats:
5661 5661 ui.warn(_("arguments do not match paths that need resolving\n"))
5662 5662
5663 5663 finally:
5664 5664 wlock.release()
5665 5665
5666 5666 # Nudge users into finishing an unfinished operation
5667 5667 if not list(ms.unresolved()):
5668 5668 ui.status(_('(no more unresolved files)\n'))
5669 5669
5670 5670 return ret
5671 5671
5672 5672 @command('revert',
5673 5673 [('a', 'all', None, _('revert all changes when no arguments given')),
5674 5674 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5675 5675 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
5676 5676 ('C', 'no-backup', None, _('do not save backup copies of files')),
5677 5677 ('i', 'interactive', None,
5678 5678 _('interactively select the changes (EXPERIMENTAL)')),
5679 5679 ] + walkopts + dryrunopts,
5680 5680 _('[OPTION]... [-r REV] [NAME]...'))
5681 5681 def revert(ui, repo, *pats, **opts):
5682 5682 """restore files to their checkout state
5683 5683
5684 5684 .. note::
5685 5685
5686 5686 To check out earlier revisions, you should use :hg:`update REV`.
5687 5687 To cancel an uncommitted merge (and lose your changes),
5688 5688 use :hg:`update --clean .`.
5689 5689
5690 5690 With no revision specified, revert the specified files or directories
5691 5691 to the contents they had in the parent of the working directory.
5692 5692 This restores the contents of files to an unmodified
5693 5693 state and unschedules adds, removes, copies, and renames. If the
5694 5694 working directory has two parents, you must explicitly specify a
5695 5695 revision.
5696 5696
5697 5697 Using the -r/--rev or -d/--date options, revert the given files or
5698 5698 directories to their states as of a specific revision. Because
5699 5699 revert does not change the working directory parents, this will
5700 5700 cause these files to appear modified. This can be helpful to "back
5701 5701 out" some or all of an earlier change. See :hg:`backout` for a
5702 5702 related method.
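
For example, to restore a single file to its contents as of an earlier
revision (the revision and file name are illustrative)::

hg revert -r 1.2 foo.c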
5703 5703
5704 5704 Modified files are saved with a .orig suffix before reverting.
5705 5705 To disable these backups, use --no-backup.
5706 5706
5707 5707 See :hg:`help dates` for a list of formats valid for -d/--date.
5708 5708
5709 5709 See :hg:`help backout` for a way to reverse the effect of an
5710 5710 earlier changeset.
5711 5711
5712 5712 Returns 0 on success.
5713 5713 """
5714 5714
5715 5715 if opts.get("date"):
5716 5716 if opts.get("rev"):
5717 5717 raise error.Abort(_("you can't specify a revision and a date"))
5718 5718 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
5719 5719
5720 5720 parent, p2 = repo.dirstate.parents()
5721 5721 if not opts.get('rev') and p2 != nullid:
5722 5722 # revert after merge is a trap for new users (issue2915)
5723 5723 raise error.Abort(_('uncommitted merge with no revision specified'),
5724 5724 hint=_('use "hg update" or see "hg help revert"'))
5725 5725
5726 5726 ctx = scmutil.revsingle(repo, opts.get('rev'))
5727 5727
5728 5728 if (not (pats or opts.get('include') or opts.get('exclude') or
5729 5729 opts.get('all') or opts.get('interactive'))):
5730 5730 msg = _("no files or directories specified")
5731 5731 if p2 != nullid:
5732 5732 hint = _("uncommitted merge, use --all to discard all changes,"
5733 5733 " or 'hg update -C .' to abort the merge")
5734 5734 raise error.Abort(msg, hint=hint)
5735 5735 dirty = any(repo.status())
5736 5736 node = ctx.node()
5737 5737 if node != parent:
5738 5738 if dirty:
5739 5739 hint = _("uncommitted changes, use --all to discard all"
5740 5740 " changes, or 'hg update %s' to update") % ctx.rev()
5741 5741 else:
5742 5742 hint = _("use --all to revert all files,"
5743 5743 " or 'hg update %s' to update") % ctx.rev()
5744 5744 elif dirty:
5745 5745 hint = _("uncommitted changes, use --all to discard all changes")
5746 5746 else:
5747 5747 hint = _("use --all to revert all files")
5748 5748 raise error.Abort(msg, hint=hint)
5749 5749
5750 5750 return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5751 5751
5752 5752 @command('rollback', dryrunopts +
5753 5753 [('f', 'force', False, _('ignore safety measures'))])
5754 5754 def rollback(ui, repo, **opts):
5755 5755 """roll back the last transaction (DANGEROUS) (DEPRECATED)
5756 5756
5757 5757 Please use :hg:`commit --amend` instead of rollback to correct
5758 5758 mistakes in the last commit.
5759 5759
5760 5760 This command should be used with care. There is only one level of
5761 5761 rollback, and there is no way to undo a rollback. It will also
5762 5762 restore the dirstate at the time of the last transaction, losing
5763 5763 any dirstate changes since that time. This command does not alter
5764 5764 the working directory.
5765 5765
5766 5766 Transactions are used to encapsulate the effects of all commands
5767 5767 that create new changesets or propagate existing changesets into a
5768 5768 repository.
5769 5769
5770 5770 .. container:: verbose
5771 5771
5772 5772 For example, the following commands are transactional, and their
5773 5773 effects can be rolled back:
5774 5774
5775 5775 - commit
5776 5776 - import
5777 5777 - pull
5778 5778 - push (with this repository as the destination)
5779 5779 - unbundle
5780 5780
5781 5781 To avoid permanent data loss, rollback will refuse to roll back a
5782 5782 commit transaction if it isn't checked out. Use --force to
5783 5783 override this protection.
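
For example, a dry run can be used to check whether rollback data is
available without changing anything::

hg rollback --dry-run
hg rollback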
5784 5784
5785 5785 This command is not intended for use on public repositories. Once
5786 5786 changes are visible for pull by other users, rolling a transaction
5787 5787 back locally is ineffective (someone else may already have pulled
5788 5788 the changes). Furthermore, a race is possible with readers of the
5789 5789 repository; for example an in-progress pull from the repository
5790 5790 may fail if a rollback is performed.
5791 5791
5792 5792 Returns 0 on success, 1 if no rollback data is available.
5793 5793 """
5794 5794 return repo.rollback(dryrun=opts.get('dry_run'),
5795 5795 force=opts.get('force'))
5796 5796
5797 5797 @command('root', [])
5798 5798 def root(ui, repo):
5799 5799 """print the root (top) of the current working directory
5800 5800
5801 5801 Print the root directory of the current repository.
5802 5802
5803 5803 Returns 0 on success.
5804 5804 """
5805 5805 ui.write(repo.root + "\n")
5806 5806
5807 5807 @command('^serve',
5808 5808 [('A', 'accesslog', '', _('name of access log file to write to'),
5809 5809 _('FILE')),
5810 5810 ('d', 'daemon', None, _('run server in background')),
5811 5811 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')),
5812 5812 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
5813 5813 # use string type, then we can check if something was passed
5814 5814 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
5815 5815 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
5816 5816 _('ADDR')),
5817 5817 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
5818 5818 _('PREFIX')),
5819 5819 ('n', 'name', '',
5820 5820 _('name to show in web pages (default: working directory)'), _('NAME')),
5821 5821 ('', 'web-conf', '',
5822 5822 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
5823 5823 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
5824 5824 _('FILE')),
5825 5825 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
5826 5826 ('', 'stdio', None, _('for remote clients')),
5827 5827 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
5828 5828 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
5829 5829 ('', 'style', '', _('template style to use'), _('STYLE')),
5830 5830 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
5831 5831 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
5832 5832 _('[OPTION]...'),
5833 5833 optionalrepo=True)
5834 5834 def serve(ui, repo, **opts):
5835 5835 """start stand-alone webserver
5836 5836
5837 5837 Start a local HTTP repository browser and pull server. You can use
5838 5838 this for ad-hoc sharing and browsing of repositories. It is
5839 5839 recommended to use a real web server to serve a repository for
5840 5840 longer periods of time.
5841 5841
5842 5842 Please note that the server does not implement access control.
5843 5843 This means that, by default, anybody can read from the server and
5844 5844 nobody can write to it by default. Set the ``web.allow_push``
5845 5845 option to ``*`` to allow everybody to push to the server. You
5846 5846 should use a real web server if you need to authenticate users.
5847 5847
5848 5848 By default, the server logs accesses to stdout and errors to
5849 5849 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
5850 5850 files.
5851 5851
5852 5852 To have the server choose a free port number to listen on, specify
5853 5853 a port number of 0; in this case, the server will print the port
5854 5854 number it uses.
5855 5855
5856 5856 Returns 0 on success.
5857 5857 """
5858 5858
5859 5859 if opts["stdio"] and opts["cmdserver"]:
5860 5860 raise error.Abort(_("cannot use --stdio with --cmdserver"))
5861 5861
5862 5862 if opts["stdio"]:
5863 5863 if repo is None:
5864 5864 raise error.RepoError(_("there is no Mercurial repository here"
5865 5865 " (.hg not found)"))
5866 5866 s = sshserver.sshserver(ui, repo)
5867 5867 s.serve_forever()
5868 5868
5869 5869 if opts["cmdserver"]:
5870 5870 import commandserver
5871 5871 service = commandserver.createservice(ui, repo, opts)
5872 5872 return cmdutil.service(opts, initfn=service.init, runfn=service.run)
5873 5873
5874 5874 # this way we can check if something was given in the command-line
5875 5875 if opts.get('port'):
5876 5876 opts['port'] = util.getport(opts.get('port'))
5877 5877
5878 5878 if repo:
5879 5879 baseui = repo.baseui
5880 5880 else:
5881 5881 baseui = ui
5882 5882 optlist = ("name templates style address port prefix ipv6"
5883 5883 " accesslog errorlog certificate encoding")
5884 5884 for o in optlist.split():
5885 5885 val = opts.get(o, '')
5886 5886 if val in (None, ''): # should check against default options instead
5887 5887 continue
5888 5888 baseui.setconfig("web", o, val, 'serve')
5889 5889 if repo and repo.ui != baseui:
5890 5890 repo.ui.setconfig("web", o, val, 'serve')
5891 5891
5892 5892 o = opts.get('web_conf') or opts.get('webdir_conf')
5893 5893 if not o:
5894 5894 if not repo:
5895 5895 raise error.RepoError(_("there is no Mercurial repository"
5896 5896 " here (.hg not found)"))
5897 5897 o = repo
5898 5898
5899 5899 app = hgweb.hgweb(o, baseui=baseui)
5900 5900 service = httpservice(ui, app, opts)
5901 5901 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5902 5902
5903 5903 class httpservice(object):
5904 5904 def __init__(self, ui, app, opts):
5905 5905 self.ui = ui
5906 5906 self.app = app
5907 5907 self.opts = opts
5908 5908
5909 5909 def init(self):
5910 5910 util.setsignalhandler()
5911 5911 self.httpd = hgweb_server.create_server(self.ui, self.app)
5912 5912
5913 5913 if self.opts['port'] and not self.ui.verbose:
5914 5914 return
5915 5915
5916 5916 if self.httpd.prefix:
5917 5917 prefix = self.httpd.prefix.strip('/') + '/'
5918 5918 else:
5919 5919 prefix = ''
5920 5920
5921 5921 port = ':%d' % self.httpd.port
5922 5922 if port == ':80':
5923 5923 port = ''
5924 5924
5925 5925 bindaddr = self.httpd.addr
5926 5926 if bindaddr == '0.0.0.0':
5927 5927 bindaddr = '*'
5928 5928 elif ':' in bindaddr: # IPv6
5929 5929 bindaddr = '[%s]' % bindaddr
5930 5930
5931 5931 fqaddr = self.httpd.fqaddr
5932 5932 if ':' in fqaddr:
5933 5933 fqaddr = '[%s]' % fqaddr
5934 5934 if self.opts['port']:
5935 5935 write = self.ui.status
5936 5936 else:
5937 5937 write = self.ui.write
5938 5938 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5939 5939 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5940 5940 self.ui.flush() # avoid buffering of status message
5941 5941
5942 5942 def run(self):
5943 5943 self.httpd.serve_forever()
5944 5944
5945 5945
5946 5946 @command('^status|st',
5947 5947 [('A', 'all', None, _('show status of all files')),
5948 5948 ('m', 'modified', None, _('show only modified files')),
5949 5949 ('a', 'added', None, _('show only added files')),
5950 5950 ('r', 'removed', None, _('show only removed files')),
5951 5951 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5952 5952 ('c', 'clean', None, _('show only files without changes')),
5953 5953 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5954 5954 ('i', 'ignored', None, _('show only ignored files')),
5955 5955 ('n', 'no-status', None, _('hide status prefix')),
5956 5956 ('C', 'copies', None, _('show source of copied files')),
5957 5957 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5958 5958 ('', 'rev', [], _('show difference from revision'), _('REV')),
5959 5959 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5960 5960 ] + walkopts + subrepoopts + formatteropts,
5961 5961 _('[OPTION]... [FILE]...'),
5962 5962 inferrepo=True)
5963 5963 def status(ui, repo, *pats, **opts):
5964 5964 """show changed files in the working directory
5965 5965
5966 5966 Show status of files in the repository. If names are given, only
5967 5967 files that match are shown. Files that are clean or ignored or
5968 5968 the source of a copy/move operation are not listed unless
5969 5969 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5970 5970 Unless options described with "show only ..." are given, the
5971 5971 options -mardu are used.
5972 5972
5973 5973 Option -q/--quiet hides untracked (unknown and ignored) files
5974 5974 unless explicitly requested with -u/--unknown or -i/--ignored.
5975 5975
5976 5976 .. note::
5977 5977
5978 5978 status may appear to disagree with diff if permissions have
5979 5979 changed or a merge has occurred. The standard diff format does
5980 5980 not report permission changes and diff only reports changes
5981 5981 relative to one merge parent.
5982 5982
5983 5983 If one revision is given, it is used as the base revision.
5984 5984 If two revisions are given, the differences between them are
5985 5985 shown. The --change option can also be used as a shortcut to list
5986 5986 the changed files of a revision from its first parent.
5987 5987
5988 5988 The codes used to show the status of files are::
5989 5989
5990 5990 M = modified
5991 5991 A = added
5992 5992 R = removed
5993 5993 C = clean
5994 5994 ! = missing (deleted by non-hg command, but still tracked)
5995 5995 ? = not tracked
5996 5996 I = ignored
5997 5997 = origin of the previous file (with --copies)
5998 5998
5999 5999 .. container:: verbose
6000 6000
6001 6001 Examples:
6002 6002
6003 6003 - show changes in the working directory relative to a
6004 6004 changeset::
6005 6005
6006 6006 hg status --rev 9353
6007 6007
6008 6008 - show changes in the working directory relative to the
6009 6009 current directory (see :hg:`help patterns` for more information)::
6010 6010
6011 6011 hg status re:
6012 6012
6013 6013 - show all changes including copies in an existing changeset::
6014 6014
6015 6015 hg status --copies --change 9353
6016 6016
6017 6017 - get a NUL separated list of added files, suitable for xargs::
6018 6018
6019 6019 hg status -an0
6020 6020
6021 6021 Returns 0 on success.
6022 6022 """
6023 6023
6024 6024 revs = opts.get('rev')
6025 6025 change = opts.get('change')
6026 6026
6027 6027 if revs and change:
6028 6028 msg = _('cannot specify --rev and --change at the same time')
6029 6029 raise error.Abort(msg)
6030 6030 elif change:
6031 6031 node2 = scmutil.revsingle(repo, change, None).node()
6032 6032 node1 = repo[node2].p1().node()
6033 6033 else:
6034 6034 node1, node2 = scmutil.revpair(repo, revs)
6035 6035
6036 6036 if pats:
6037 6037 cwd = repo.getcwd()
6038 6038 else:
6039 6039 cwd = ''
6040 6040
6041 6041 if opts.get('print0'):
6042 6042 end = '\0'
6043 6043 else:
6044 6044 end = '\n'
6045 6045 copy = {}
6046 6046 states = 'modified added removed deleted unknown ignored clean'.split()
6047 6047 show = [k for k in states if opts.get(k)]
6048 6048 if opts.get('all'):
6049 6049 show += ui.quiet and (states[:4] + ['clean']) or states
6050 6050 if not show:
6051 6051 if ui.quiet:
6052 6052 show = states[:4]
6053 6053 else:
6054 6054 show = states[:5]
6055 6055
6056 6056 m = scmutil.match(repo[node2], pats, opts)
6057 6057 stat = repo.status(node1, node2, m,
6058 6058 'ignored' in show, 'clean' in show, 'unknown' in show,
6059 6059 opts.get('subrepos'))
6060 6060 changestates = zip(states, 'MAR!?IC', stat)
6061 6061
6062 6062 if (opts.get('all') or opts.get('copies')
6063 6063 or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
6064 6064 copy = copies.pathcopies(repo[node1], repo[node2], m)
6065 6065
6066 6066 fm = ui.formatter('status', opts)
6067 6067 fmt = '%s' + end
6068 6068 showchar = not opts.get('no_status')
6069 6069
6070 6070 for state, char, files in changestates:
6071 6071 if state in show:
6072 6072 label = 'status.' + state
6073 6073 for f in files:
6074 6074 fm.startitem()
6075 6075 fm.condwrite(showchar, 'status', '%s ', char, label=label)
6076 6076 fm.write('path', fmt, repo.pathto(f, cwd), label=label)
6077 6077 if f in copy:
6078 6078 fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
6079 6079 label='status.copied')
6080 6080 fm.end()
6081 6081
6082 6082 @command('^summary|sum',
6083 6083 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
6084 6084 def summary(ui, repo, **opts):
6085 6085 """summarize working directory state
6086 6086
6087 6087 This generates a brief summary of the working directory state,
6088 6088 including parents, branch, commit status, phase and available updates.
6089 6089
6090 6090 With the --remote option, this will check the default paths for
6091 6091 incoming and outgoing changes. This can be time-consuming.
6092 6092
6093 6093 Returns 0 on success.
6094 6094 """
6095 6095
6096 6096 ctx = repo[None]
6097 6097 parents = ctx.parents()
6098 6098 pnode = parents[0].node()
6099 6099 marks = []
6100 6100
6101 6101 for p in parents:
6102 6102 # label with log.changeset (instead of log.parent) since this
6103 6103 # shows a working directory parent *changeset*:
6104 6104 # i18n: column positioning for "hg summary"
6105 6105 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
6106 6106 label='log.changeset changeset.%s' % p.phasestr())
6107 6107 ui.write(' '.join(p.tags()), label='log.tag')
6108 6108 if p.bookmarks():
6109 6109 marks.extend(p.bookmarks())
6110 6110 if p.rev() == -1:
6111 6111 if not len(repo):
6112 6112 ui.write(_(' (empty repository)'))
6113 6113 else:
6114 6114 ui.write(_(' (no revision checked out)'))
6115 6115 ui.write('\n')
6116 6116 if p.description():
6117 6117 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
6118 6118 label='log.summary')
6119 6119
6120 6120 branch = ctx.branch()
6121 6121 bheads = repo.branchheads(branch)
6122 6122 # i18n: column positioning for "hg summary"
6123 6123 m = _('branch: %s\n') % branch
6124 6124 if branch != 'default':
6125 6125 ui.write(m, label='log.branch')
6126 6126 else:
6127 6127 ui.status(m, label='log.branch')
6128 6128
6129 6129 if marks:
6130 6130 active = repo._activebookmark
6131 6131 # i18n: column positioning for "hg summary"
6132 6132 ui.write(_('bookmarks:'), label='log.bookmark')
6133 6133 if active is not None:
6134 6134 if active in marks:
6135 6135 ui.write(' *' + active, label=activebookmarklabel)
6136 6136 marks.remove(active)
6137 6137 else:
6138 6138 ui.write(' [%s]' % active, label=activebookmarklabel)
6139 6139 for m in marks:
6140 6140 ui.write(' ' + m, label='log.bookmark')
6141 6141 ui.write('\n', label='log.bookmark')
6142 6142
6143 6143 status = repo.status(unknown=True)
6144 6144
6145 6145 c = repo.dirstate.copies()
6146 6146 copied, renamed = [], []
6147 6147 for d, s in c.iteritems():
6148 6148 if s in status.removed:
6149 6149 status.removed.remove(s)
6150 6150 renamed.append(d)
6151 6151 else:
6152 6152 copied.append(d)
6153 6153 if d in status.added:
6154 6154 status.added.remove(d)
6155 6155
6156 6156 ms = mergemod.mergestate(repo)
6157 6157 unresolved = [f for f in ms if ms[f] == 'u']
6158 6158
6159 6159 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6160 6160
6161 6161 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6162 6162 (ui.label(_('%d added'), 'status.added'), status.added),
6163 6163 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6164 6164 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6165 6165 (ui.label(_('%d copied'), 'status.copied'), copied),
6166 6166 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6167 6167 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6168 6168 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6169 6169 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6170 6170 t = []
6171 6171 for l, s in labels:
6172 6172 if s:
6173 6173 t.append(l % len(s))
6174 6174
6175 6175 t = ', '.join(t)
6176 6176 cleanworkdir = False
6177 6177
6178 6178 if repo.vfs.exists('updatestate'):
6179 6179 t += _(' (interrupted update)')
6180 6180 elif len(parents) > 1:
6181 6181 t += _(' (merge)')
6182 6182 elif branch != parents[0].branch():
6183 6183 t += _(' (new branch)')
6184 6184 elif (parents[0].closesbranch() and
6185 6185 pnode in repo.branchheads(branch, closed=True)):
6186 6186 t += _(' (head closed)')
6187 6187 elif not (status.modified or status.added or status.removed or renamed or
6188 6188 copied or subs):
6189 6189 t += _(' (clean)')
6190 6190 cleanworkdir = True
6191 6191 elif pnode not in bheads:
6192 6192 t += _(' (new branch head)')
6193 6193
6194 6194 if parents:
6195 6195 pendingphase = max(p.phase() for p in parents)
6196 6196 else:
6197 6197 pendingphase = phases.public
6198 6198
6199 6199 if pendingphase > phases.newcommitphase(ui):
6200 6200 t += ' (%s)' % phases.phasenames[pendingphase]
6201 6201
6202 6202 if cleanworkdir:
6203 6203 # i18n: column positioning for "hg summary"
6204 6204 ui.status(_('commit: %s\n') % t.strip())
6205 6205 else:
6206 6206 # i18n: column positioning for "hg summary"
6207 6207 ui.write(_('commit: %s\n') % t.strip())
6208 6208
6209 6209 # all ancestors of branch heads - all ancestors of parent = new csets
6210 6210 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6211 6211 bheads))
6212 6212
6213 6213 if new == 0:
6214 6214 # i18n: column positioning for "hg summary"
6215 6215 ui.status(_('update: (current)\n'))
6216 6216 elif pnode not in bheads:
6217 6217 # i18n: column positioning for "hg summary"
6218 6218 ui.write(_('update: %d new changesets (update)\n') % new)
6219 6219 else:
6220 6220 # i18n: column positioning for "hg summary"
6221 6221 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6222 6222 (new, len(bheads)))
6223 6223
6224 6224 t = []
6225 6225 draft = len(repo.revs('draft()'))
6226 6226 if draft:
6227 6227 t.append(_('%d draft') % draft)
6228 6228 secret = len(repo.revs('secret()'))
6229 6229 if secret:
6230 6230 t.append(_('%d secret') % secret)
6231 6231
6232 6232 if draft or secret:
6233 6233 ui.status(_('phases: %s\n') % ', '.join(t))
6234 6234
6235 6235 cmdutil.summaryhooks(ui, repo)
6236 6236
6237 6237 if opts.get('remote'):
6238 6238 needsincoming, needsoutgoing = True, True
6239 6239 else:
6240 6240 needsincoming, needsoutgoing = False, False
6241 6241 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6242 6242 if i:
6243 6243 needsincoming = True
6244 6244 if o:
6245 6245 needsoutgoing = True
6246 6246 if not needsincoming and not needsoutgoing:
6247 6247 return
6248 6248
6249 6249 def getincoming():
6250 6250 source, branches = hg.parseurl(ui.expandpath('default'))
6251 6251 sbranch = branches[0]
6252 6252 try:
6253 6253 other = hg.peer(repo, {}, source)
6254 6254 except error.RepoError:
6255 6255 if opts.get('remote'):
6256 6256 raise
6257 6257 return source, sbranch, None, None, None
6258 6258 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6259 6259 if revs:
6260 6260 revs = [other.lookup(rev) for rev in revs]
6261 6261 ui.debug('comparing with %s\n' % util.hidepassword(source))
6262 6262 repo.ui.pushbuffer()
6263 6263 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6264 6264 repo.ui.popbuffer()
6265 6265 return source, sbranch, other, commoninc, commoninc[1]
6266 6266
6267 6267 if needsincoming:
6268 6268 source, sbranch, sother, commoninc, incoming = getincoming()
6269 6269 else:
6270 6270 source = sbranch = sother = commoninc = incoming = None
6271 6271
6272 6272 def getoutgoing():
6273 6273 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6274 6274 dbranch = branches[0]
6275 6275 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6276 6276 if source != dest:
6277 6277 try:
6278 6278 dother = hg.peer(repo, {}, dest)
6279 6279 except error.RepoError:
6280 6280 if opts.get('remote'):
6281 6281 raise
6282 6282 return dest, dbranch, None, None
6283 6283 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6284 6284 elif sother is None:
6285 6285 # there is no explicit destination peer, but the source one is invalid
6286 6286 return dest, dbranch, None, None
6287 6287 else:
6288 6288 dother = sother
6289 6289 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6290 6290 common = None
6291 6291 else:
6292 6292 common = commoninc
6293 6293 if revs:
6294 6294 revs = [repo.lookup(rev) for rev in revs]
6295 6295 repo.ui.pushbuffer()
6296 6296 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6297 6297 commoninc=common)
6298 6298 repo.ui.popbuffer()
6299 6299 return dest, dbranch, dother, outgoing
6300 6300
6301 6301 if needsoutgoing:
6302 6302 dest, dbranch, dother, outgoing = getoutgoing()
6303 6303 else:
6304 6304 dest = dbranch = dother = outgoing = None
6305 6305
6306 6306 if opts.get('remote'):
6307 6307 t = []
6308 6308 if incoming:
6309 6309 t.append(_('1 or more incoming'))
6310 6310 o = outgoing.missing
6311 6311 if o:
6312 6312 t.append(_('%d outgoing') % len(o))
6313 6313 other = dother or sother
6314 6314 if 'bookmarks' in other.listkeys('namespaces'):
6315 6315 counts = bookmarks.summary(repo, other)
6316 6316 if counts[0] > 0:
6317 6317 t.append(_('%d incoming bookmarks') % counts[0])
6318 6318 if counts[1] > 0:
6319 6319 t.append(_('%d outgoing bookmarks') % counts[1])
6320 6320
6321 6321 if t:
6322 6322 # i18n: column positioning for "hg summary"
6323 6323 ui.write(_('remote: %s\n') % (', '.join(t)))
6324 6324 else:
6325 6325 # i18n: column positioning for "hg summary"
6326 6326 ui.status(_('remote: (synced)\n'))
6327 6327
6328 6328 cmdutil.summaryremotehooks(ui, repo, opts,
6329 6329 ((source, sbranch, sother, commoninc),
6330 6330 (dest, dbranch, dother, outgoing)))
6331 6331
6332 6332 @command('tag',
6333 6333 [('f', 'force', None, _('force tag')),
6334 6334 ('l', 'local', None, _('make the tag local')),
6335 6335 ('r', 'rev', '', _('revision to tag'), _('REV')),
6336 6336 ('', 'remove', None, _('remove a tag')),
6337 6337 # -l/--local is already there, commitopts cannot be used
6338 6338 ('e', 'edit', None, _('invoke editor on commit messages')),
6339 6339 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6340 6340 ] + commitopts2,
6341 6341 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6342 6342 def tag(ui, repo, name1, *names, **opts):
6343 6343 """add one or more tags for the current or given revision
6344 6344
6345 6345 Name a particular revision using <name>.
6346 6346
6347 6347 Tags are used to name particular revisions of the repository and are
6348 6348 very useful to compare different revisions, to go back to significant
6349 6349 earlier versions or to mark branch points as releases, etc. Changing
6350 6350 an existing tag is normally disallowed; use -f/--force to override.
6351 6351
6352 6352 If no revision is given, the parent of the working directory is
6353 6353 used.
6354 6354
6355 6355 To facilitate version control, distribution, and merging of tags,
6356 6356 they are stored as a file named ".hgtags" which is managed similarly
6357 6357 to other project files and can be hand-edited if necessary. This
6358 6358 also means that tagging creates a new commit. The file
6359 6359 ".hg/localtags" is used for local tags (not shared among
6360 6360 repositories).
6361 6361
6362 6362 Tag commits are usually made at the head of a branch. If the parent
6363 6363 of the working directory is not a branch head, :hg:`tag` aborts; use
6364 6364 -f/--force to force the tag commit to be based on a non-head
6365 6365 changeset.
6366 6366
6367 6367 See :hg:`help dates` for a list of formats valid for -d/--date.
6368 6368
6369 6369 Since tag names have priority over branch names during revision
6370 6370 lookup, using an existing branch name as a tag name is discouraged.
6371 6371
6372 6372 Returns 0 on success.
6373 6373 """
6374 6374 wlock = lock = None
6375 6375 try:
6376 6376 wlock = repo.wlock()
6377 6377 lock = repo.lock()
6378 6378 rev_ = "."
6379 6379 names = [t.strip() for t in (name1,) + names]
6380 6380 if len(names) != len(set(names)):
6381 6381 raise error.Abort(_('tag names must be unique'))
6382 6382 for n in names:
6383 6383 scmutil.checknewlabel(repo, n, 'tag')
6384 6384 if not n:
6385 6385 raise error.Abort(_('tag names cannot consist entirely of '
6386 6386 'whitespace'))
6387 6387 if opts.get('rev') and opts.get('remove'):
6388 6388 raise error.Abort(_("--rev and --remove are incompatible"))
6389 6389 if opts.get('rev'):
6390 6390 rev_ = opts['rev']
6391 6391 message = opts.get('message')
6392 6392 if opts.get('remove'):
6393 6393 if opts.get('local'):
6394 6394 expectedtype = 'local'
6395 6395 else:
6396 6396 expectedtype = 'global'
6397 6397
6398 6398 for n in names:
6399 6399 if not repo.tagtype(n):
6400 6400 raise error.Abort(_("tag '%s' does not exist") % n)
6401 6401 if repo.tagtype(n) != expectedtype:
6402 6402 if expectedtype == 'global':
6403 6403 raise error.Abort(_("tag '%s' is not a global tag") % n)
6404 6404 else:
6405 6405 raise error.Abort(_("tag '%s' is not a local tag") % n)
6406 6406 rev_ = 'null'
6407 6407 if not message:
6408 6408 # we don't translate commit messages
6409 6409 message = 'Removed tag %s' % ', '.join(names)
6410 6410 elif not opts.get('force'):
6411 6411 for n in names:
6412 6412 if n in repo.tags():
6413 6413 raise error.Abort(_("tag '%s' already exists "
6414 6414 "(use -f to force)") % n)
6415 6415 if not opts.get('local'):
6416 6416 p1, p2 = repo.dirstate.parents()
6417 6417 if p2 != nullid:
6418 6418 raise error.Abort(_('uncommitted merge'))
6419 6419 bheads = repo.branchheads()
6420 6420 if not opts.get('force') and bheads and p1 not in bheads:
6421 6421 raise error.Abort(_('not at a branch head (use -f to force)'))
6422 6422 r = scmutil.revsingle(repo, rev_).node()
6423 6423
6424 6424 if not message:
6425 6425 # we don't translate commit messages
6426 6426 message = ('Added tag %s for changeset %s' %
6427 6427 (', '.join(names), short(r)))
6428 6428
6429 6429 date = opts.get('date')
6430 6430 if date:
6431 6431 date = util.parsedate(date)
6432 6432
6433 6433 if opts.get('remove'):
6434 6434 editform = 'tag.remove'
6435 6435 else:
6436 6436 editform = 'tag.add'
6437 6437 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6438 6438
6439 6439 # don't allow tagging the null rev
6440 6440 if (not opts.get('remove') and
6441 6441 scmutil.revsingle(repo, rev_).rev() == nullrev):
6442 6442 raise error.Abort(_("cannot tag null revision"))
6443 6443
6444 6444 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6445 6445 editor=editor)
6446 6446 finally:
6447 6447 release(lock, wlock)
6448 6448
6449 6449 @command('tags', formatteropts, '')
6450 6450 def tags(ui, repo, **opts):
6451 6451 """list repository tags
6452 6452
6453 6453 This lists both regular and local tags. When the -v/--verbose
6454 6454 switch is used, a third column "local" is printed for local tags.
6455 6455
6456 6456 Returns 0 on success.
6457 6457 """
6458 6458
6459 6459 fm = ui.formatter('tags', opts)
6460 6460 hexfunc = fm.hexfunc
6461 6461 tagtype = ""
6462 6462
6463 6463 for t, n in reversed(repo.tagslist()):
6464 6464 hn = hexfunc(n)
6465 6465 label = 'tags.normal'
6466 6466 tagtype = ''
6467 6467 if repo.tagtype(t) == 'local':
6468 6468 label = 'tags.local'
6469 6469 tagtype = 'local'
6470 6470
6471 6471 fm.startitem()
6472 6472 fm.write('tag', '%s', t, label=label)
6473 6473 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6474 6474 fm.condwrite(not ui.quiet, 'rev node', fmt,
6475 6475 repo.changelog.rev(n), hn, label=label)
6476 6476 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6477 6477 tagtype, label=label)
6478 6478 fm.plain('\n')
6479 6479 fm.end()
6480 6480
6481 6481 @command('tip',
6482 6482 [('p', 'patch', None, _('show patch')),
6483 6483 ('g', 'git', None, _('use git extended diff format')),
6484 6484 ] + templateopts,
6485 6485 _('[-p] [-g]'))
6486 6486 def tip(ui, repo, **opts):
6487 6487 """show the tip revision (DEPRECATED)
6488 6488
6489 6489 The tip revision (usually just called the tip) is the changeset
6490 6490 most recently added to the repository (and therefore the most
6491 6491 recently changed head).
6492 6492
6493 6493 If you have just made a commit, that commit will be the tip. If
6494 6494 you have just pulled changes from another repository, the tip of
6495 6495 that repository becomes the current tip. The "tip" tag is special
6496 6496 and cannot be renamed or assigned to a different changeset.
6497 6497
6498 6498 This command is deprecated, please use :hg:`heads` instead.
6499 6499
6500 6500 Returns 0 on success.
6501 6501 """
6502 6502 displayer = cmdutil.show_changeset(ui, repo, opts)
6503 6503 displayer.show(repo['tip'])
6504 6504 displayer.close()
6505 6505
6506 6506 @command('unbundle',
6507 6507 [('u', 'update', None,
6508 6508 _('update to new branch head if changesets were unbundled'))],
6509 6509 _('[-u] FILE...'))
6510 6510 def unbundle(ui, repo, fname1, *fnames, **opts):
6511 6511 """apply one or more changegroup files
6512 6512
6513 6513 Apply one or more compressed changegroup files generated by the
6514 6514 bundle command.
6515 6515
6516 6516 Returns 0 on success, 1 if an update has unresolved files.
6517 6517 """
6518 6518 fnames = (fname1,) + fnames
6519 6519
6520 6520 lock = repo.lock()
6521 6521 try:
6522 6522 for fname in fnames:
6523 6523 f = hg.openpath(ui, fname)
6524 6524 gen = exchange.readbundle(ui, f, fname)
6525 6525 if isinstance(gen, bundle2.unbundle20):
6526 6526 tr = repo.transaction('unbundle')
6527 6527 try:
6528 6528 op = bundle2.processbundle(repo, gen, lambda: tr)
6529 6529 tr.close()
6530 6530 except error.BundleUnknownFeatureError as exc:
6531 6531 raise error.Abort(_('%s: unknown bundle feature, %s')
6532 6532 % (fname, exc),
6533 6533 hint=_("see https://mercurial-scm.org/"
6534 6534 "wiki/BundleFeature for more "
6535 6535 "information"))
6536 6536 finally:
6537 6537 if tr:
6538 6538 tr.release()
6539 6539 changes = [r.get('return', 0)
6540 6540 for r in op.records['changegroup']]
6541 6541 modheads = changegroup.combineresults(changes)
6542 6542 elif isinstance(gen, streamclone.streamcloneapplier):
6543 6543 raise error.Abort(
6544 6544 _('packed bundles cannot be applied with '
6545 6545 '"hg unbundle"'),
6546 6546 hint=_('use "hg debugapplystreamclonebundle"'))
6547 6547 else:
6548 6548 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
6549 6549 finally:
6550 6550 lock.release()
6551 6551
6552 6552 return postincoming(ui, repo, modheads, opts.get('update'), None)
6553 6553
6554 6554 @command('^update|up|checkout|co',
6555 6555 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6556 6556 ('c', 'check', None,
6557 6557 _('update across branches if no uncommitted changes')),
6558 6558 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6559 6559 ('r', 'rev', '', _('revision'), _('REV'))
6560 6560 ] + mergetoolopts,
6561 6561 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6562 6562 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6563 6563 tool=None):
6564 6564 """update working directory (or switch revisions)
6565 6565
6566 6566 Update the repository's working directory to the specified
6567 6567 changeset. If no changeset is specified, update to the tip of the
6568 6568 current named branch and move the active bookmark (see :hg:`help
6569 6569 bookmarks`).
6570 6570
6571 6571 Update sets the working directory's parent revision to the specified
6572 6572 changeset (see :hg:`help parents`).
6573 6573
6574 6574 If the changeset is not a descendant or ancestor of the working
6575 6575 directory's parent, the update is aborted. With the -c/--check
6576 6576 option, the working directory is checked for uncommitted changes; if
6577 6577 none are found, the working directory is updated to the specified
6578 6578 changeset.
6579 6579
6580 6580 .. container:: verbose
6581 6581
6582 6582 The following rules apply when the working directory contains
6583 6583 uncommitted changes:
6584 6584
6585 6585 1. If neither -c/--check nor -C/--clean is specified, and if
6586 6586 the requested changeset is an ancestor or descendant of
6587 6587 the working directory's parent, the uncommitted changes
6588 6588 are merged into the requested changeset and the merged
6589 6589 result is left uncommitted. If the requested changeset is
6590 6590 not an ancestor or descendant (that is, it is on another
6591 6591 branch), the update is aborted and the uncommitted changes
6592 6592 are preserved.
6593 6593
6594 6594 2. With the -c/--check option, the update is aborted and the
6595 6595 uncommitted changes are preserved.
6596 6596
6597 6597 3. With the -C/--clean option, uncommitted changes are discarded and
6598 6598 the working directory is updated to the requested changeset.
6599 6599
6600 6600 To cancel an uncommitted merge (and lose your changes), use
6601 6601 :hg:`update --clean .`.
6602 6602
6603 6603 Use null as the changeset to remove the working directory (like
6604 6604 :hg:`clone -U`).
6605 6605
6606 6606 If you want to revert just one file to an older revision, use
6607 6607 :hg:`revert [-r REV] NAME`.
6608 6608
6609 6609 See :hg:`help dates` for a list of formats valid for -d/--date.
6610 6610
6611 6611 Returns 0 on success, 1 if there are unresolved files.
6612 6612 """
6613 6613 movemarkfrom = None
6614 6614 if rev and node:
6615 6615 raise error.Abort(_("please specify just one revision"))
6616 6616
6617 6617 if rev is None or rev == '':
6618 6618 rev = node
6619 6619
6620 6620 wlock = repo.wlock()
6621 6621 try:
6622 6622 cmdutil.clearunfinished(repo)
6623 6623
6624 6624 if date:
6625 6625 if rev is not None:
6626 6626 raise error.Abort(_("you can't specify a revision and a date"))
6627 6627 rev = cmdutil.finddate(ui, repo, date)
6628 6628
6629 6629 # if we defined a bookmark, we have to remember the original name
6630 6630 brev = rev
6631 6631 rev = scmutil.revsingle(repo, rev, rev).rev()
6632 6632
6633 6633 if check and clean:
6634 6634 raise error.Abort(_("cannot specify both -c/--check and -C/--clean")
6635 6635 )
6636 6636
6637 6637 if check:
6638 6638 cmdutil.bailifchanged(repo, merge=False)
6639 6639 if rev is None:
6640 6640 updata = destutil.destupdate(repo, clean=clean, check=check)
6641 6641 rev, movemarkfrom, brev = updata
6642 6642
6643 6643 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6644 6644
6645 6645 if clean:
6646 6646 ret = hg.clean(repo, rev)
6647 6647 else:
6648 6648 ret = hg.update(repo, rev)
6649 6649
6650 6650 if not ret and movemarkfrom:
6651 6651 if movemarkfrom == repo['.'].node():
6652 6652 pass # no-op update
6653 6653 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
6654 6654 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
6655 6655 else:
6656 6656 # this can happen with a non-linear update
6657 6657 ui.status(_("(leaving bookmark %s)\n") %
6658 6658 repo._activebookmark)
6659 6659 bookmarks.deactivate(repo)
6660 6660 elif brev in repo._bookmarks:
6661 6661 bookmarks.activate(repo, brev)
6662 6662 ui.status(_("(activating bookmark %s)\n") % brev)
6663 6663 elif brev:
6664 6664 if repo._activebookmark:
6665 6665 ui.status(_("(leaving bookmark %s)\n") %
6666 6666 repo._activebookmark)
6667 6667 bookmarks.deactivate(repo)
6668 6668 finally:
6669 6669 wlock.release()
6670 6670
6671 6671 return ret
6672 6672
6673 6673 @command('verify', [])
6674 6674 def verify(ui, repo):
6675 6675 """verify the integrity of the repository
6676 6676
6677 6677 Verify the integrity of the current repository.
6678 6678
6679 6679 This will perform an extensive check of the repository's
6680 6680 integrity, validating the hashes and checksums of each entry in
6681 6681 the changelog, manifest, and tracked files, as well as the
6682 6682 integrity of their crosslinks and indices.
6683 6683
6684 6684 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
6685 6685 for more information about recovery from corruption of the
6686 6686 repository.
6687 6687
6688 6688 Returns 0 on success, 1 if errors are encountered.
6689 6689 """
6690 6690 return hg.verify(repo)
6691 6691
6692 6692 @command('version', [], norepo=True)
6693 6693 def version_(ui):
6694 6694 """output version and copyright information"""
6695 6695 ui.write(_("Mercurial Distributed SCM (version %s)\n")
6696 6696 % util.version())
6697 6697 ui.status(_(
6698 6698 "(see https://mercurial-scm.org for more information)\n"
6699 6699 "\nCopyright (C) 2005-2015 Matt Mackall and others\n"
6700 6700 "This is free software; see the source for copying conditions. "
6701 6701 "There is NO\nwarranty; "
6702 6702 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6703 6703 ))
6704 6704
6705 6705 ui.note(_("\nEnabled extensions:\n\n"))
6706 6706 if ui.verbose:
6707 6707 # format names and versions into columns
6708 6708 names = []
6709 6709 vers = []
6710 6710 for name, module in extensions.extensions():
6711 6711 names.append(name)
6712 6712 vers.append(extensions.moduleversion(module))
6713 6713 if names:
6714 6714 maxnamelen = max(len(n) for n in names)
6715 6715 for i, name in enumerate(names):
6716 6716 ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i]))
@@ -1,1810 +1,1837
1 1 # exchange.py - utility to exchange data between repos.
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex, nullid
10 10 import errno, urllib, urllib2
11 11 import util, scmutil, changegroup, base85, error
12 12 import discovery, phases, obsolete, bookmarks as bookmod, bundle2, pushkey
13 13 import lock as lockmod
14 14 import streamclone
15 15 import sslutil
16 16 import tags
17 17 import url as urlmod
18 18
19 19 # Maps bundle compression human names to internal representation.
20 20 _bundlespeccompressions = {'none': None,
21 21 'bzip2': 'BZ',
22 22 'gzip': 'GZ',
23 23 }
24 24
25 25 # Maps bundle version human names to changegroup versions.
26 26 _bundlespeccgversions = {'v1': '01',
27 27 'v2': '02',
28 28 'packed1': 's1',
29 29 'bundle2': '02', #legacy
30 30 }
31 31
32 32 def parsebundlespec(repo, spec, strict=True, externalnames=False):
33 33 """Parse a bundle string specification into parts.
34 34
35 35 Bundle specifications denote a well-defined bundle/exchange format.
36 36 The content of a given specification should not change over time in
37 37 order to ensure that bundles produced by a newer version of Mercurial are
38 38 readable from an older version.
39 39
40 40 The string currently has the form:
41 41
42 <compression>-<type>
42 <compression>-<type>[;<parameter0>[;<parameter1>]]
43 43
44 44 Where <compression> is one of the supported compression formats
45 and <type> is (currently) a version string.
45 and <type> is (currently) a version string. A ";" can follow the type and
46 all text afterwards is interpreted as URI encoded, ";" delimited key=value
47 pairs.
46 48
47 49 If ``strict`` is True (the default) <compression> is required. Otherwise,
48 50 it is optional.
49 51
50 52 If ``externalnames`` is False (the default), the human-centric names will
51 53 be converted to their internal representation.
52 54
53 Returns a 2-tuple of (compression, version). Compression will be ``None``
54 if not in strict mode and a compression isn't defined.
55 Returns a 3-tuple of (compression, version, parameters). Compression will
56 be ``None`` if not in strict mode and a compression isn't defined.
55 57
56 58 An ``InvalidBundleSpecification`` is raised when the specification is
57 59 not syntactically well formed.
58 60
59 61 An ``UnsupportedBundleSpecification`` is raised when the compression or
60 62 bundle type/version is not recognized.
61 63
62 64 Note: this function will likely eventually return a more complex data
63 65 structure, including bundle2 part information.
64 66 """
67 def parseparams(s):
68 if ';' not in s:
69 return s, {}
70
71 params = {}
72 version, paramstr = s.split(';', 1)
73
74 for p in paramstr.split(';'):
75 if '=' not in p:
76 raise error.InvalidBundleSpecification(
77 _('invalid bundle specification: '
78 'missing "=" in parameter: %s') % p)
79
80 key, value = p.split('=', 1)
81 key = urllib.unquote(key)
82 value = urllib.unquote(value)
83 params[key] = value
84
85 return version, params
86
87
65 88 if strict and '-' not in spec:
66 89 raise error.InvalidBundleSpecification(
67 90 _('invalid bundle specification; '
68 91 'must be prefixed with compression: %s') % spec)
69 92
70 93 if '-' in spec:
71 94 compression, version = spec.split('-', 1)
72 95
73 96 if compression not in _bundlespeccompressions:
74 97 raise error.UnsupportedBundleSpecification(
75 98 _('%s compression is not supported') % compression)
76 99
100 version, params = parseparams(version)
101
77 102 if version not in _bundlespeccgversions:
78 103 raise error.UnsupportedBundleSpecification(
79 104 _('%s is not a recognized bundle version') % version)
80 105 else:
81 106 # Value could be just the compression or just the version, in which
82 107 # case some defaults are assumed (but only when not in strict mode).
83 108 assert not strict
84 109
110 spec, params = parseparams(spec)
111
85 112 if spec in _bundlespeccompressions:
86 113 compression = spec
87 114 version = 'v1'
88 115 if 'generaldelta' in repo.requirements:
89 116 version = 'v2'
90 117 elif spec in _bundlespeccgversions:
91 118 if spec == 'packed1':
92 119 compression = 'none'
93 120 else:
94 121 compression = 'bzip2'
95 122 version = spec
96 123 else:
97 124 raise error.UnsupportedBundleSpecification(
98 125 _('%s is not a recognized bundle specification') % spec)
99 126
100 127 if not externalnames:
101 128 compression = _bundlespeccompressions[compression]
102 129 version = _bundlespeccgversions[version]
103 return compression, version
130 return compression, version, params
104 131
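For reference, a minimal sketch (not part of this changeset) of what the new parameter syntax yields when run through parsebundlespec() above; the repo object is assumed to already exist and 'mykey' is an arbitrary, URI-encoded parameter name:

    # illustration only: a spec carrying one parameter
    comp, version, params = parsebundlespec(repo, 'gzip-v2;mykey=some%20value',
                                            externalnames=True)
    # comp == 'gzip', version == 'v2', params == {'mykey': 'some value'}
    # with externalnames=False the human names map through the tables above,
    # giving ('GZ', '02', {'mykey': 'some value'})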
105 132 def readbundle(ui, fh, fname, vfs=None):
106 133 header = changegroup.readexactly(fh, 4)
107 134
108 135 alg = None
109 136 if not fname:
110 137 fname = "stream"
111 138 if not header.startswith('HG') and header.startswith('\0'):
112 139 fh = changegroup.headerlessfixup(fh, header)
113 140 header = "HG10"
114 141 alg = 'UN'
115 142 elif vfs:
116 143 fname = vfs.join(fname)
117 144
118 145 magic, version = header[0:2], header[2:4]
119 146
120 147 if magic != 'HG':
121 148 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
122 149 if version == '10':
123 150 if alg is None:
124 151 alg = changegroup.readexactly(fh, 2)
125 152 return changegroup.cg1unpacker(fh, alg)
126 153 elif version.startswith('2'):
127 154 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
128 155 elif version == 'S1':
129 156 return streamclone.streamcloneapplier(fh)
130 157 else:
131 158 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
132 159
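As a reading aid (not from the changeset itself), the version dispatch in readbundle() above can be summarized by this small, self-contained sketch that works on a plain header string:

    # illustration only: which unpacker each header version selects
    def _whichunpacker(header):
        magic, version = header[0:2], header[2:4]
        assert magic == 'HG'
        if version == '10':
            return 'changegroup.cg1unpacker'  # compression alg follows the header
        elif version.startswith('2'):
            return 'bundle2.getunbundler'
        elif version == 'S1':
            return 'streamclone.streamcloneapplier'
        raise ValueError('unknown bundle version %s' % version)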
133 160 def buildobsmarkerspart(bundler, markers):
134 161 """add an obsmarker part to the bundler with <markers>
135 162
136 163 No part is created if markers is empty.
137 164 Raises ValueError if the bundler doesn't support any known obsmarker format.
138 165 """
139 166 if markers:
140 167 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
141 168 version = obsolete.commonversion(remoteversions)
142 169 if version is None:
143 170 raise ValueError('bundler does not support common obsmarker format')
144 171 stream = obsolete.encodemarkers(markers, True, version=version)
145 172 return bundler.newpart('obsmarkers', data=stream)
146 173 return None
147 174
148 175 def _canusebundle2(op):
149 176 """return true if a pull/push can use bundle2
150 177
151 178 Feel free to nuke this function when we drop the experimental option"""
152 179 return (op.repo.ui.configbool('experimental', 'bundle2-exp', True)
153 180 and op.remote.capable('bundle2'))
154 181
155 182
156 183 class pushoperation(object):
157 184 """A object that represent a single push operation
158 185
159 186 Its purpose is to carry push related state and very common operations.
160 187
161 188 A new one should be created at the beginning of each push and discarded
162 189 afterward.
163 190 """
164 191
165 192 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
166 193 bookmarks=()):
167 194 # repo we push from
168 195 self.repo = repo
169 196 self.ui = repo.ui
170 197 # repo we push to
171 198 self.remote = remote
172 199 # force option provided
173 200 self.force = force
174 201 # revs to be pushed (None is "all")
175 202 self.revs = revs
176 203 # bookmark explicitly pushed
177 204 self.bookmarks = bookmarks
178 205 # allow push of new branch
179 206 self.newbranch = newbranch
180 207 # did a local lock get acquired?
181 208 self.locallocked = None
182 209 # step already performed
183 210 # (used to check what steps have been already performed through bundle2)
184 211 self.stepsdone = set()
185 212 # Integer version of the changegroup push result
186 213 # - None means nothing to push
187 214 # - 0 means HTTP error
188 215 # - 1 means we pushed and remote head count is unchanged *or*
189 216 # we have outgoing changesets but refused to push
190 217 # - other values as described by addchangegroup()
191 218 self.cgresult = None
192 219 # Boolean value for the bookmark push
193 220 self.bkresult = None
194 221 # discover.outgoing object (contains common and outgoing data)
195 222 self.outgoing = None
196 223 # all remote heads before the push
197 224 self.remoteheads = None
198 225 # testable as a boolean indicating if any nodes are missing locally.
199 226 self.incoming = None
200 227 # phases changes that must be pushed along side the changesets
201 228 self.outdatedphases = None
202 229 # phases changes that must be pushed if changeset push fails
203 230 self.fallbackoutdatedphases = None
204 231 # outgoing obsmarkers
205 232 self.outobsmarkers = set()
206 233 # outgoing bookmarks
207 234 self.outbookmarks = []
208 235 # transaction manager
209 236 self.trmanager = None
210 237 # map { pushkey partid -> callback handling failure}
211 238 # used to handle exception from mandatory pushkey part failure
212 239 self.pkfailcb = {}
213 240
214 241 @util.propertycache
215 242 def futureheads(self):
216 243 """future remote heads if the changeset push succeeds"""
217 244 return self.outgoing.missingheads
218 245
219 246 @util.propertycache
220 247 def fallbackheads(self):
221 248 """future remote heads if the changeset push fails"""
222 249 if self.revs is None:
223 250 # no target to push, all common are relevant
224 251 return self.outgoing.commonheads
225 252 unfi = self.repo.unfiltered()
226 253 # I want cheads = heads(::missingheads and ::commonheads)
227 254 # (missingheads is revs with secret changeset filtered out)
228 255 #
229 256 # This can be expressed as:
230 257 # cheads = ( (missingheads and ::commonheads)
231 258 # + (commonheads and ::missingheads))"
232 259 # )
233 260 #
234 261 # while trying to push we already computed the following:
235 262 # common = (::commonheads)
236 263 # missing = ((commonheads::missingheads) - commonheads)
237 264 #
238 265 # We can pick:
239 266 # * missingheads part of common (::commonheads)
240 267 common = self.outgoing.common
241 268 nm = self.repo.changelog.nodemap
242 269 cheads = [node for node in self.revs if nm[node] in common]
243 270 # and
244 271 # * commonheads parents on missing
245 272 revset = unfi.set('%ln and parents(roots(%ln))',
246 273 self.outgoing.commonheads,
247 274 self.outgoing.missing)
248 275 cheads.extend(c.node() for c in revset)
249 276 return cheads
250 277
251 278 @property
252 279 def commonheads(self):
253 280 """set of all common heads after changeset bundle push"""
254 281 if self.cgresult:
255 282 return self.futureheads
256 283 else:
257 284 return self.fallbackheads
258 285
259 286 # mapping of message used when pushing bookmark
260 287 bookmsgmap = {'update': (_("updating bookmark %s\n"),
261 288 _('updating bookmark %s failed!\n')),
262 289 'export': (_("exporting bookmark %s\n"),
263 290 _('exporting bookmark %s failed!\n')),
264 291 'delete': (_("deleting remote bookmark %s\n"),
265 292 _('deleting remote bookmark %s failed!\n')),
266 293 }
267 294
268 295
269 296 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
270 297 opargs=None):
271 298 '''Push outgoing changesets (limited by revs) from a local
272 299 repository to remote. Return an integer:
273 300 - None means nothing to push
274 301 - 0 means HTTP error
275 302 - 1 means we pushed and remote head count is unchanged *or*
276 303 we have outgoing changesets but refused to push
277 304 - other values as described by addchangegroup()
278 305 '''
279 306 if opargs is None:
280 307 opargs = {}
281 308 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
282 309 **opargs)
283 310 if pushop.remote.local():
284 311 missing = (set(pushop.repo.requirements)
285 312 - pushop.remote.local().supported)
286 313 if missing:
287 314 msg = _("required features are not"
288 315 " supported in the destination:"
289 316 " %s") % (', '.join(sorted(missing)))
290 317 raise error.Abort(msg)
291 318
292 319 # there are two ways to push to remote repo:
293 320 #
294 321 # addchangegroup assumes local user can lock remote
295 322 # repo (local filesystem, old ssh servers).
296 323 #
297 324 # unbundle assumes local user cannot lock remote repo (new ssh
298 325 # servers, http servers).
299 326
300 327 if not pushop.remote.canpush():
301 328 raise error.Abort(_("destination does not support push"))
302 329 # get local lock as we might write phase data
303 330 localwlock = locallock = None
304 331 try:
305 332 # bundle2 push may receive a reply bundle touching bookmarks or other
306 333 # things requiring the wlock. Take it now to ensure proper ordering.
307 334 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
308 335 if _canusebundle2(pushop) and maypushback:
309 336 localwlock = pushop.repo.wlock()
310 337 locallock = pushop.repo.lock()
311 338 pushop.locallocked = True
312 339 except IOError as err:
313 340 pushop.locallocked = False
314 341 if err.errno != errno.EACCES:
315 342 raise
316 343 # source repo cannot be locked.
317 344 # We do not abort the push, but just disable the local phase
318 345 # synchronisation.
319 346 msg = 'cannot lock source repository: %s\n' % err
320 347 pushop.ui.debug(msg)
321 348 try:
322 349 if pushop.locallocked:
323 350 pushop.trmanager = transactionmanager(pushop.repo,
324 351 'push-response',
325 352 pushop.remote.url())
326 353 pushop.repo.checkpush(pushop)
327 354 lock = None
328 355 unbundle = pushop.remote.capable('unbundle')
329 356 if not unbundle:
330 357 lock = pushop.remote.lock()
331 358 try:
332 359 _pushdiscovery(pushop)
333 360 if _canusebundle2(pushop):
334 361 _pushbundle2(pushop)
335 362 _pushchangeset(pushop)
336 363 _pushsyncphase(pushop)
337 364 _pushobsolete(pushop)
338 365 _pushbookmark(pushop)
339 366 finally:
340 367 if lock is not None:
341 368 lock.release()
342 369 if pushop.trmanager:
343 370 pushop.trmanager.close()
344 371 finally:
345 372 if pushop.trmanager:
346 373 pushop.trmanager.release()
347 374 if locallock is not None:
348 375 locallock.release()
349 376 if localwlock is not None:
350 377 localwlock.release()
351 378
352 379 return pushop
353 380
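A minimal sketch (not from this file) of how a caller might drive push() and read the result; repo is assumed to be a localrepository and remote a peer obtained elsewhere, for example via hg.peer():

    # illustration only: interpreting the returned pushoperation
    pushop = push(repo, remote, force=False, revs=None, newbranch=False)
    if pushop.cgresult is None:
        repo.ui.status('nothing to push\n')
    elif pushop.cgresult == 0:
        repo.ui.warn('push failed (HTTP error)\n')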
354 381 # list of steps to perform discovery before push
355 382 pushdiscoveryorder = []
356 383
357 384 # Mapping between step name and function
358 385 #
359 386 # This exists to help extensions wrap steps if necessary
360 387 pushdiscoverymapping = {}
361 388
362 389 def pushdiscovery(stepname):
363 390 """decorator for function performing discovery before push
364 391
365 392 The function is added to the step -> function mapping and appended to the
366 393 list of steps. Beware that decorated functions will be added in order (this
367 394 may matter).
368 395
369 396 You can only use this decorator for a new step, if you want to wrap a step
370 397 from an extension, change the pushdiscoverymapping dictionary directly."""
371 398 def dec(func):
372 399 assert stepname not in pushdiscoverymapping
373 400 pushdiscoverymapping[stepname] = func
374 401 pushdiscoveryorder.append(stepname)
375 402 return func
376 403 return dec
377 404
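As the docstring above suggests, an extension wraps an existing discovery step by replacing its entry in pushdiscoverymapping rather than reusing the decorator; a hedged sketch (the wrapped step name and debug message are only illustrative):

    # illustration only: wrapping the 'changeset' discovery step
    origstep = pushdiscoverymapping['changeset']

    def _wrappedchangesetdiscovery(pushop):
        pushop.ui.debug('running wrapped changeset discovery\n')
        return origstep(pushop)

    pushdiscoverymapping['changeset'] = _wrappedchangesetdiscovery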
378 405 def _pushdiscovery(pushop):
379 406 """Run all discovery steps"""
380 407 for stepname in pushdiscoveryorder:
381 408 step = pushdiscoverymapping[stepname]
382 409 step(pushop)
383 410
384 411 @pushdiscovery('changeset')
385 412 def _pushdiscoverychangeset(pushop):
386 413 """discover the changeset that need to be pushed"""
387 414 fci = discovery.findcommonincoming
388 415 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
389 416 common, inc, remoteheads = commoninc
390 417 fco = discovery.findcommonoutgoing
391 418 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
392 419 commoninc=commoninc, force=pushop.force)
393 420 pushop.outgoing = outgoing
394 421 pushop.remoteheads = remoteheads
395 422 pushop.incoming = inc
396 423
397 424 @pushdiscovery('phase')
398 425 def _pushdiscoveryphase(pushop):
399 426 """discover the phase that needs to be pushed
400 427
401 428 (computed for both success and failure case for changesets push)"""
402 429 outgoing = pushop.outgoing
403 430 unfi = pushop.repo.unfiltered()
404 431 remotephases = pushop.remote.listkeys('phases')
405 432 publishing = remotephases.get('publishing', False)
406 433 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
407 434 and remotephases # server supports phases
408 435 and not pushop.outgoing.missing # no changesets to be pushed
409 436 and publishing):
410 437 # When:
411 438 # - this is a subrepo push
412 439 # - and remote support phase
413 440 # - and no changeset are to be pushed
414 441 # - and remote is publishing
415 442 # We may be in issue 3871 case!
416 443 # We drop the possible phase synchronisation done by
417 444 # courtesy to publish changesets possibly locally draft
418 445 # on the remote.
419 446 remotephases = {'publishing': 'True'}
420 447 ana = phases.analyzeremotephases(pushop.repo,
421 448 pushop.fallbackheads,
422 449 remotephases)
423 450 pheads, droots = ana
424 451 extracond = ''
425 452 if not publishing:
426 453 extracond = ' and public()'
427 454 revset = 'heads((%%ln::%%ln) %s)' % extracond
428 455 # Get the list of all revs draft on remote by public here.
429 456 # XXX Beware that the revset breaks if droots is not strictly
430 457 # XXX roots; we may want to ensure it is, but that is costly
431 458 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
432 459 if not outgoing.missing:
433 460 future = fallback
434 461 else:
435 462 # adds changeset we are going to push as draft
436 463 #
437 464 # should not be necessary for publishing server, but because of an
438 465 # issue fixed in xxxxx we have to do it anyway.
439 466 fdroots = list(unfi.set('roots(%ln + %ln::)',
440 467 outgoing.missing, droots))
441 468 fdroots = [f.node() for f in fdroots]
442 469 future = list(unfi.set(revset, fdroots, pushop.futureheads))
443 470 pushop.outdatedphases = future
444 471 pushop.fallbackoutdatedphases = fallback
445 472
446 473 @pushdiscovery('obsmarker')
447 474 def _pushdiscoveryobsmarkers(pushop):
448 475 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
449 476 and pushop.repo.obsstore
450 477 and 'obsolete' in pushop.remote.listkeys('namespaces')):
451 478 repo = pushop.repo
452 479 # very naive computation that can be quite expensive on big repos.
453 480 # However: evolution is currently slow on them anyway.
454 481 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
455 482 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
456 483
457 484 @pushdiscovery('bookmarks')
458 485 def _pushdiscoverybookmarks(pushop):
459 486 ui = pushop.ui
460 487 repo = pushop.repo.unfiltered()
461 488 remote = pushop.remote
462 489 ui.debug("checking for updated bookmarks\n")
463 490 ancestors = ()
464 491 if pushop.revs:
465 492 revnums = map(repo.changelog.rev, pushop.revs)
466 493 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
467 494 remotebookmark = remote.listkeys('bookmarks')
468 495
469 496 explicit = set(pushop.bookmarks)
470 497
471 498 comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
472 499 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
473 500 for b, scid, dcid in advsrc:
474 501 if b in explicit:
475 502 explicit.remove(b)
476 503 if not ancestors or repo[scid].rev() in ancestors:
477 504 pushop.outbookmarks.append((b, dcid, scid))
478 505 # search added bookmark
479 506 for b, scid, dcid in addsrc:
480 507 if b in explicit:
481 508 explicit.remove(b)
482 509 pushop.outbookmarks.append((b, '', scid))
483 510 # search for overwritten bookmark
484 511 for b, scid, dcid in advdst + diverge + differ:
485 512 if b in explicit:
486 513 explicit.remove(b)
487 514 pushop.outbookmarks.append((b, dcid, scid))
488 515 # search for bookmark to delete
489 516 for b, scid, dcid in adddst:
490 517 if b in explicit:
491 518 explicit.remove(b)
492 519 # treat as "deleted locally"
493 520 pushop.outbookmarks.append((b, dcid, ''))
494 521 # identical bookmarks shouldn't get reported
495 522 for b, scid, dcid in same:
496 523 if b in explicit:
497 524 explicit.remove(b)
498 525
499 526 if explicit:
500 527 explicit = sorted(explicit)
501 528 # we should probably list all of them
502 529 ui.warn(_('bookmark %s does not exist on the local '
503 530 'or remote repository!\n') % explicit[0])
504 531 pushop.bkresult = 2
505 532
506 533 pushop.outbookmarks.sort()
507 534
508 535 def _pushcheckoutgoing(pushop):
509 536 outgoing = pushop.outgoing
510 537 unfi = pushop.repo.unfiltered()
511 538 if not outgoing.missing:
512 539 # nothing to push
513 540 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
514 541 return False
515 542 # something to push
516 543 if not pushop.force:
517 544 # if repo.obsstore == False --> no obsolete
518 545 # then, save the iteration
519 546 if unfi.obsstore:
520 547 # these messages are here for 80 char limit reasons
521 548 mso = _("push includes obsolete changeset: %s!")
522 549 mst = {"unstable": _("push includes unstable changeset: %s!"),
523 550 "bumped": _("push includes bumped changeset: %s!"),
524 551 "divergent": _("push includes divergent changeset: %s!")}
525 552 # If we are to push and there is at least one
526 553 # obsolete or unstable changeset in missing, at
527 554 # least one of the missing heads will be obsolete or
528 555 # unstable. So checking heads only is ok
529 556 for node in outgoing.missingheads:
530 557 ctx = unfi[node]
531 558 if ctx.obsolete():
532 559 raise error.Abort(mso % ctx)
533 560 elif ctx.troubled():
534 561 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
535 562
536 563 # internal config: bookmarks.pushing
537 564 newbm = pushop.ui.configlist('bookmarks', 'pushing')
538 565 discovery.checkheads(unfi, pushop.remote, outgoing,
539 566 pushop.remoteheads,
540 567 pushop.newbranch,
541 568 bool(pushop.incoming),
542 569 newbm)
543 570 return True
544 571
545 572 # List of names of steps to perform for an outgoing bundle2, order matters.
546 573 b2partsgenorder = []
547 574
548 575 # Mapping between step name and function
549 576 #
550 577 # This exists to help extensions wrap steps if necessary
551 578 b2partsgenmapping = {}
552 579
553 580 def b2partsgenerator(stepname, idx=None):
554 581 """decorator for function generating bundle2 part
555 582
556 583 The function is added to the step -> function mapping and appended to the
557 584 list of steps. Beware that decorated functions will be added in order
558 585 (this may matter).
559 586
560 587 You can only use this decorator for new steps; if you want to wrap a step
561 588 from an extension, modify the b2partsgenmapping dictionary directly.
562 589 def dec(func):
563 590 assert stepname not in b2partsgenmapping
564 591 b2partsgenmapping[stepname] = func
565 592 if idx is None:
566 593 b2partsgenorder.append(stepname)
567 594 else:
568 595 b2partsgenorder.insert(idx, stepname)
569 596 return func
570 597 return dec
571 598
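# Illustrative sketch (not from exchange.py): how an extension could register an
# extra part generator with the decorator above. The step name, part type and
# payload are made up for this example.
from mercurial import exchange

@exchange.b2partsgenerator('example-data')
def _pushb2exampledata(pushop, bundler):
    # skip if another code path already handled this step
    if 'example-data' in pushop.stepsdone:
        return
    pushop.stepsdone.add('example-data')
    # advisory part: servers that do not understand it can safely ignore it
    bundler.newpart('example-data', data='payload', mandatory=False)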
572 599 def _pushb2ctxcheckheads(pushop, bundler):
573 600 """Generate race condition checking parts
574 601
575 602 Exists as an independent function to aid extensions
576 603 """
577 604 if not pushop.force:
578 605 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
579 606
580 607 @b2partsgenerator('changeset')
581 608 def _pushb2ctx(pushop, bundler):
582 609 """handle changegroup push through bundle2
583 610
584 611 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
585 612 """
586 613 if 'changesets' in pushop.stepsdone:
587 614 return
588 615 pushop.stepsdone.add('changesets')
589 616 # Send known heads to the server for race detection.
590 617 if not _pushcheckoutgoing(pushop):
591 618 return
592 619 pushop.repo.prepushoutgoinghooks(pushop.repo,
593 620 pushop.remote,
594 621 pushop.outgoing)
595 622
596 623 _pushb2ctxcheckheads(pushop, bundler)
597 624
598 625 b2caps = bundle2.bundle2caps(pushop.remote)
599 626 version = None
600 627 cgversions = b2caps.get('changegroup')
601 628 if not cgversions: # 3.1 and 3.2 ship with an empty value
602 629 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
603 630 pushop.outgoing)
604 631 else:
605 632 cgversions = [v for v in cgversions if v in changegroup.packermap]
606 633 if not cgversions:
607 634 raise ValueError(_('no common changegroup version'))
608 635 version = max(cgversions)
609 636 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
610 637 pushop.outgoing,
611 638 version=version)
612 639 cgpart = bundler.newpart('changegroup', data=cg)
613 640 if version is not None:
614 641 cgpart.addparam('version', version)
615 642 def handlereply(op):
616 643 """extract addchangegroup returns from server reply"""
617 644 cgreplies = op.records.getreplies(cgpart.id)
618 645 assert len(cgreplies['changegroup']) == 1
619 646 pushop.cgresult = cgreplies['changegroup'][0]['return']
620 647 return handlereply
621 648
622 649 @b2partsgenerator('phase')
623 650 def _pushb2phases(pushop, bundler):
624 651 """handle phase push through bundle2"""
625 652 if 'phases' in pushop.stepsdone:
626 653 return
627 654 b2caps = bundle2.bundle2caps(pushop.remote)
628 655 if not 'pushkey' in b2caps:
629 656 return
630 657 pushop.stepsdone.add('phases')
631 658 part2node = []
632 659
633 660 def handlefailure(pushop, exc):
634 661 targetid = int(exc.partid)
635 662 for partid, node in part2node:
636 663 if partid == targetid:
637 664 raise error.Abort(_('updating %s to public failed') % node)
638 665
639 666 enc = pushkey.encode
640 667 for newremotehead in pushop.outdatedphases:
641 668 part = bundler.newpart('pushkey')
642 669 part.addparam('namespace', enc('phases'))
643 670 part.addparam('key', enc(newremotehead.hex()))
644 671 part.addparam('old', enc(str(phases.draft)))
645 672 part.addparam('new', enc(str(phases.public)))
646 673 part2node.append((part.id, newremotehead))
647 674 pushop.pkfailcb[part.id] = handlefailure
648 675
649 676 def handlereply(op):
650 677 for partid, node in part2node:
651 678 partrep = op.records.getreplies(partid)
652 679 results = partrep['pushkey']
653 680 assert len(results) <= 1
654 681 msg = None
655 682 if not results:
656 683 msg = _('server ignored update of %s to public!\n') % node
657 684 elif not int(results[0]['return']):
658 685 msg = _('updating %s to public failed!\n') % node
659 686 if msg is not None:
660 687 pushop.ui.warn(msg)
661 688 return handlereply
662 689
663 690 @b2partsgenerator('obsmarkers')
664 691 def _pushb2obsmarkers(pushop, bundler):
665 692 if 'obsmarkers' in pushop.stepsdone:
666 693 return
667 694 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
668 695 if obsolete.commonversion(remoteversions) is None:
669 696 return
670 697 pushop.stepsdone.add('obsmarkers')
671 698 if pushop.outobsmarkers:
672 699 markers = sorted(pushop.outobsmarkers)
673 700 buildobsmarkerspart(bundler, markers)
674 701
675 702 @b2partsgenerator('bookmarks')
676 703 def _pushb2bookmarks(pushop, bundler):
677 704 """handle bookmark push through bundle2"""
678 705 if 'bookmarks' in pushop.stepsdone:
679 706 return
680 707 b2caps = bundle2.bundle2caps(pushop.remote)
681 708 if 'pushkey' not in b2caps:
682 709 return
683 710 pushop.stepsdone.add('bookmarks')
684 711 part2book = []
685 712 enc = pushkey.encode
686 713
687 714 def handlefailure(pushop, exc):
688 715 targetid = int(exc.partid)
689 716 for partid, book, action in part2book:
690 717 if partid == targetid:
691 718 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
692 719 # we should not be called for parts we did not generate
693 720 assert False
694 721
695 722 for book, old, new in pushop.outbookmarks:
696 723 part = bundler.newpart('pushkey')
697 724 part.addparam('namespace', enc('bookmarks'))
698 725 part.addparam('key', enc(book))
699 726 part.addparam('old', enc(old))
700 727 part.addparam('new', enc(new))
701 728 action = 'update'
702 729 if not old:
703 730 action = 'export'
704 731 elif not new:
705 732 action = 'delete'
706 733 part2book.append((part.id, book, action))
707 734 pushop.pkfailcb[part.id] = handlefailure
708 735
709 736 def handlereply(op):
710 737 ui = pushop.ui
711 738 for partid, book, action in part2book:
712 739 partrep = op.records.getreplies(partid)
713 740 results = partrep['pushkey']
714 741 assert len(results) <= 1
715 742 if not results:
716 743 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
717 744 else:
718 745 ret = int(results[0]['return'])
719 746 if ret:
720 747 ui.status(bookmsgmap[action][0] % book)
721 748 else:
722 749 ui.warn(bookmsgmap[action][1] % book)
723 750 if pushop.bkresult is not None:
724 751 pushop.bkresult = 1
725 752 return handlereply
726 753
727 754
728 755 def _pushbundle2(pushop):
729 756 """push data to the remote using bundle2
730 757
731 758 The only currently supported type of data is changegroup but this will
732 759 evolve in the future."""
733 760 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
734 761 pushback = (pushop.trmanager
735 762 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
736 763
737 764 # create reply capability
738 765 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
739 766 allowpushback=pushback))
740 767 bundler.newpart('replycaps', data=capsblob)
741 768 replyhandlers = []
742 769 for partgenname in b2partsgenorder:
743 770 partgen = b2partsgenmapping[partgenname]
744 771 ret = partgen(pushop, bundler)
745 772 if callable(ret):
746 773 replyhandlers.append(ret)
747 774 # do not push if nothing to push
748 775 if bundler.nbparts <= 1:
749 776 return
750 777 stream = util.chunkbuffer(bundler.getchunks())
751 778 try:
752 779 try:
753 780 reply = pushop.remote.unbundle(stream, ['force'], 'push')
754 781 except error.BundleValueError as exc:
755 782 raise error.Abort('missing support for %s' % exc)
756 783 try:
757 784 trgetter = None
758 785 if pushback:
759 786 trgetter = pushop.trmanager.transaction
760 787 op = bundle2.processbundle(pushop.repo, reply, trgetter)
761 788 except error.BundleValueError as exc:
762 789 raise error.Abort('missing support for %s' % exc)
763 790 except error.PushkeyFailed as exc:
764 791 partid = int(exc.partid)
765 792 if partid not in pushop.pkfailcb:
766 793 raise
767 794 pushop.pkfailcb[partid](pushop, exc)
768 795 for rephand in replyhandlers:
769 796 rephand(op)
770 797
771 798 def _pushchangeset(pushop):
772 799 """Make the actual push of changeset bundle to remote repo"""
773 800 if 'changesets' in pushop.stepsdone:
774 801 return
775 802 pushop.stepsdone.add('changesets')
776 803 if not _pushcheckoutgoing(pushop):
777 804 return
778 805 pushop.repo.prepushoutgoinghooks(pushop.repo,
779 806 pushop.remote,
780 807 pushop.outgoing)
781 808 outgoing = pushop.outgoing
782 809 unbundle = pushop.remote.capable('unbundle')
783 810 # TODO: get bundlecaps from remote
784 811 bundlecaps = None
785 812 # create a changegroup from local
786 813 if pushop.revs is None and not (outgoing.excluded
787 814 or pushop.repo.changelog.filteredrevs):
788 815 # push everything,
789 816 # use the fast path, no race possible on push
790 817 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
791 818 cg = changegroup.getsubset(pushop.repo,
792 819 outgoing,
793 820 bundler,
794 821 'push',
795 822 fastpath=True)
796 823 else:
797 824 cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing,
798 825 bundlecaps)
799 826
800 827 # apply changegroup to remote
801 828 if unbundle:
802 829 # local repo finds heads on server, finds out what
803 830 # revs it must push. once revs transferred, if server
804 831 # finds it has different heads (someone else won
805 832 # commit/push race), server aborts.
806 833 if pushop.force:
807 834 remoteheads = ['force']
808 835 else:
809 836 remoteheads = pushop.remoteheads
810 837 # ssh: return remote's addchangegroup()
811 838 # http: return remote's addchangegroup() or 0 for error
812 839 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
813 840 pushop.repo.url())
814 841 else:
815 842 # we return an integer indicating remote head count
816 843 # change
817 844 pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
818 845 pushop.repo.url())
819 846
820 847 def _pushsyncphase(pushop):
821 848 """synchronise phase information locally and remotely"""
822 849 cheads = pushop.commonheads
823 850 # even when we don't push, exchanging phase data is useful
824 851 remotephases = pushop.remote.listkeys('phases')
825 852 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
826 853 and remotephases # server supports phases
827 854 and pushop.cgresult is None # nothing was pushed
828 855 and remotephases.get('publishing', False)):
829 856 # When:
830 857 # - this is a subrepo push
831 858 # - and remote support phase
832 859 # - and no changeset was pushed
833 860 # - and remote is publishing
834 861 # We may be in issue 3871 case!
835 862 # We drop the possible phase synchronisation done by
836 863 # courtesy to publish changesets possibly locally draft
837 864 # on the remote.
838 865 remotephases = {'publishing': 'True'}
839 866 if not remotephases: # old server or public only reply from non-publishing
840 867 _localphasemove(pushop, cheads)
841 868 # don't push any phase data as there is nothing to push
842 869 else:
843 870 ana = phases.analyzeremotephases(pushop.repo, cheads,
844 871 remotephases)
845 872 pheads, droots = ana
846 873 ### Apply remote phase on local
847 874 if remotephases.get('publishing', False):
848 875 _localphasemove(pushop, cheads)
849 876 else: # publish = False
850 877 _localphasemove(pushop, pheads)
851 878 _localphasemove(pushop, cheads, phases.draft)
852 879 ### Apply local phase on remote
853 880
854 881 if pushop.cgresult:
855 882 if 'phases' in pushop.stepsdone:
856 883 # phases already pushed through bundle2
857 884 return
858 885 outdated = pushop.outdatedphases
859 886 else:
860 887 outdated = pushop.fallbackoutdatedphases
861 888
862 889 pushop.stepsdone.add('phases')
863 890
864 891 # filter heads already turned public by the push
865 892 outdated = [c for c in outdated if c.node() not in pheads]
866 893 # fallback to independent pushkey command
867 894 for newremotehead in outdated:
868 895 r = pushop.remote.pushkey('phases',
869 896 newremotehead.hex(),
870 897 str(phases.draft),
871 898 str(phases.public))
872 899 if not r:
873 900 pushop.ui.warn(_('updating %s to public failed!\n')
874 901 % newremotehead)
875 902
876 903 def _localphasemove(pushop, nodes, phase=phases.public):
877 904 """move <nodes> to <phase> in the local source repo"""
878 905 if pushop.trmanager:
879 906 phases.advanceboundary(pushop.repo,
880 907 pushop.trmanager.transaction(),
881 908 phase,
882 909 nodes)
883 910 else:
884 911 # repo is not locked, do not change any phases!
885 912 # Informs the user that phases should have been moved when
886 913 # applicable.
887 914 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
888 915 phasestr = phases.phasenames[phase]
889 916 if actualmoves:
890 917 pushop.ui.status(_('cannot lock source repo, skipping '
891 918 'local %s phase update\n') % phasestr)
892 919
893 920 def _pushobsolete(pushop):
894 921 """utility function to push obsolete markers to a remote"""
895 922 if 'obsmarkers' in pushop.stepsdone:
896 923 return
897 924 repo = pushop.repo
898 925 remote = pushop.remote
899 926 pushop.stepsdone.add('obsmarkers')
900 927 if pushop.outobsmarkers:
901 928 pushop.ui.debug('try to push obsolete markers to remote\n')
902 929 rslts = []
903 930 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
904 931 for key in sorted(remotedata, reverse=True):
905 932 # reverse sort to ensure we end with dump0
906 933 data = remotedata[key]
907 934 rslts.append(remote.pushkey('obsolete', key, '', data))
908 935 if [r for r in rslts if not r]:
909 936 msg = _('failed to push some obsolete markers!\n')
910 937 repo.ui.warn(msg)
911 938
912 939 def _pushbookmark(pushop):
913 940 """Update bookmark position on remote"""
914 941 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
915 942 return
916 943 pushop.stepsdone.add('bookmarks')
917 944 ui = pushop.ui
918 945 remote = pushop.remote
919 946
920 947 for b, old, new in pushop.outbookmarks:
921 948 action = 'update'
922 949 if not old:
923 950 action = 'export'
924 951 elif not new:
925 952 action = 'delete'
926 953 if remote.pushkey('bookmarks', b, old, new):
927 954 ui.status(bookmsgmap[action][0] % b)
928 955 else:
929 956 ui.warn(bookmsgmap[action][1] % b)
930 957 # discovery can have set the value from an invalid entry
931 958 if pushop.bkresult is not None:
932 959 pushop.bkresult = 1
933 960
934 961 class pulloperation(object):
935 962 """An object that represents a single pull operation
936 963
937 964 Its purpose is to carry pull-related state and very common operations.
938 965
939 966 A new one should be created at the beginning of each pull and discarded
940 967 afterward.
941 968 """
942 969
943 970 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
944 971 remotebookmarks=None, streamclonerequested=None):
945 972 # repo we pull into
946 973 self.repo = repo
947 974 # repo we pull from
948 975 self.remote = remote
949 976 # revision we try to pull (None is "all")
950 977 self.heads = heads
951 978 # bookmark pulled explicitly
952 979 self.explicitbookmarks = bookmarks
953 980 # do we force pull?
954 981 self.force = force
955 982 # whether a streaming clone was requested
956 983 self.streamclonerequested = streamclonerequested
957 984 # transaction manager
958 985 self.trmanager = None
959 986 # set of common changeset between local and remote before pull
960 987 self.common = None
961 988 # set of pulled head
962 989 self.rheads = None
963 990 # list of missing changeset to fetch remotely
964 991 self.fetch = None
965 992 # remote bookmarks data
966 993 self.remotebookmarks = remotebookmarks
967 994 # result of changegroup pulling (used as return code by pull)
968 995 self.cgresult = None
969 996 # list of step already done
970 997 self.stepsdone = set()
971 998 # Whether we attempted a clone from pre-generated bundles.
972 999 self.clonebundleattempted = False
973 1000
974 1001 @util.propertycache
975 1002 def pulledsubset(self):
976 1003 """heads of the set of changeset target by the pull"""
977 1004 # compute target subset
978 1005 if self.heads is None:
979 1006 # We pulled everything possible
980 1007 # sync on everything common
981 1008 c = set(self.common)
982 1009 ret = list(self.common)
983 1010 for n in self.rheads:
984 1011 if n not in c:
985 1012 ret.append(n)
986 1013 return ret
987 1014 else:
988 1015 # We pulled a specific subset
989 1016 # sync on this subset
990 1017 return self.heads
991 1018
992 1019 @util.propertycache
993 1020 def canusebundle2(self):
994 1021 return _canusebundle2(self)
995 1022
996 1023 @util.propertycache
997 1024 def remotebundle2caps(self):
998 1025 return bundle2.bundle2caps(self.remote)
999 1026
1000 1027 def gettransaction(self):
1001 1028 # deprecated; talk to trmanager directly
1002 1029 return self.trmanager.transaction()
1003 1030
1004 1031 class transactionmanager(object):
1005 1032 """An object to manage the life cycle of a transaction
1006 1033
1007 1034 It creates the transaction on demand and calls the appropriate hooks when
1008 1035 closing the transaction."""
1009 1036 def __init__(self, repo, source, url):
1010 1037 self.repo = repo
1011 1038 self.source = source
1012 1039 self.url = url
1013 1040 self._tr = None
1014 1041
1015 1042 def transaction(self):
1016 1043 """Return an open transaction object, constructing if necessary"""
1017 1044 if not self._tr:
1018 1045 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1019 1046 self._tr = self.repo.transaction(trname)
1020 1047 self._tr.hookargs['source'] = self.source
1021 1048 self._tr.hookargs['url'] = self.url
1022 1049 return self._tr
1023 1050
1024 1051 def close(self):
1025 1052 """close transaction if created"""
1026 1053 if self._tr is not None:
1027 1054 self._tr.close()
1028 1055
1029 1056 def release(self):
1030 1057 """release transaction if created"""
1031 1058 if self._tr is not None:
1032 1059 self._tr.release()
1033 1060
1034 1061 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1035 1062 streamclonerequested=None):
1036 1063 """Fetch repository data from a remote.
1037 1064
1038 1065 This is the main function used to retrieve data from a remote repository.
1039 1066
1040 1067 ``repo`` is the local repository to clone into.
1041 1068 ``remote`` is a peer instance.
1042 1069 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1043 1070 default) means to pull everything from the remote.
1044 1071 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1045 1072 default, all remote bookmarks are pulled.
1046 1073 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1047 1074 initialization.
1048 1075 ``streamclonerequested`` is a boolean indicating whether a "streaming
1049 1076 clone" is requested. A "streaming clone" is essentially a raw file copy
1050 1077 of revlogs from the server. This only works when the local repository is
1051 1078 empty. The default value of ``None`` means to respect the server
1052 1079 configuration for preferring stream clones.
1053 1080
1054 1081 Returns the ``pulloperation`` created for this pull.
1055 1082 """
1056 1083 if opargs is None:
1057 1084 opargs = {}
1058 1085 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1059 1086 streamclonerequested=streamclonerequested, **opargs)
1060 1087 if pullop.remote.local():
1061 1088 missing = set(pullop.remote.requirements) - pullop.repo.supported
1062 1089 if missing:
1063 1090 msg = _("required features are not"
1064 1091 " supported in the destination:"
1065 1092 " %s") % (', '.join(sorted(missing)))
1066 1093 raise error.Abort(msg)
1067 1094
1068 1095 lock = pullop.repo.lock()
1069 1096 try:
1070 1097 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1071 1098 streamclone.maybeperformlegacystreamclone(pullop)
1072 1099 # This should ideally be in _pullbundle2(). However, it needs to run
1073 1100 # before discovery to avoid extra work.
1074 1101 _maybeapplyclonebundle(pullop)
1075 1102 _pulldiscovery(pullop)
1076 1103 if pullop.canusebundle2:
1077 1104 _pullbundle2(pullop)
1078 1105 _pullchangeset(pullop)
1079 1106 _pullphase(pullop)
1080 1107 _pullbookmarks(pullop)
1081 1108 _pullobsolete(pullop)
1082 1109 pullop.trmanager.close()
1083 1110 finally:
1084 1111 pullop.trmanager.release()
1085 1112 lock.release()
1086 1113
1087 1114 return pullop
1088 1115
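# Illustrative sketch (not from exchange.py): a minimal caller of pull(). The
# URL is made up; hg.peer() is the usual way to obtain a peer object for it.
from mercurial import hg, exchange

def examplepull(repo):
    other = hg.peer(repo, {}, 'https://hg.example.com/repo')
    pullop = exchange.pull(repo, other)  # all heads, default (non-forced) pull
    return pullop.cgresult               # changegroup result recorded on the op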
1089 1116 # list of steps to perform discovery before pull
1090 1117 pulldiscoveryorder = []
1091 1118
1092 1119 # Mapping between step name and function
1093 1120 #
1094 1121 # This exists to help extensions wrap steps if necessary
1095 1122 pulldiscoverymapping = {}
1096 1123
1097 1124 def pulldiscovery(stepname):
1098 1125 """decorator for function performing discovery before pull
1099 1126
1100 1127 The function is added to the step -> function mapping and appended to the
1101 1128 list of steps. Beware that decorated function will be added in order (this
1102 1129 may matter).
1103 1130
1104 1131 You can only use this decorator for a new step; if you want to wrap a step
1105 1132 from an extension, change the pulldiscoverymapping dictionary directly.
1106 1133 def dec(func):
1107 1134 assert stepname not in pulldiscoverymapping
1108 1135 pulldiscoverymapping[stepname] = func
1109 1136 pulldiscoveryorder.append(stepname)
1110 1137 return func
1111 1138 return dec
1112 1139
1113 1140 def _pulldiscovery(pullop):
1114 1141 """Run all discovery steps"""
1115 1142 for stepname in pulldiscoveryorder:
1116 1143 step = pulldiscoverymapping[stepname]
1117 1144 step(pullop)
1118 1145
1119 1146 @pulldiscovery('b1:bookmarks')
1120 1147 def _pullbookmarkbundle1(pullop):
1121 1148 """fetch bookmark data in bundle1 case
1122 1149
1123 1150 If not using bundle2, we have to fetch bookmarks before changeset
1124 1151 discovery to reduce the chance and impact of race conditions."""
1125 1152 if pullop.remotebookmarks is not None:
1126 1153 return
1127 1154 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1128 1155 # all known bundle2 servers now support listkeys, but let's be nice with
1129 1156 # new implementations.
1130 1157 return
1131 1158 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1132 1159
1133 1160
1134 1161 @pulldiscovery('changegroup')
1135 1162 def _pulldiscoverychangegroup(pullop):
1136 1163 """discovery phase for the pull
1137 1164
1138 1165 Currently handles changeset discovery only; will change to handle all
1139 1166 discovery at some point.
1140 1167 tmp = discovery.findcommonincoming(pullop.repo,
1141 1168 pullop.remote,
1142 1169 heads=pullop.heads,
1143 1170 force=pullop.force)
1144 1171 common, fetch, rheads = tmp
1145 1172 nm = pullop.repo.unfiltered().changelog.nodemap
1146 1173 if fetch and rheads:
1147 1174 # If a remote head is filtered locally, let's drop it from the unknown
1148 1175 # remote heads and put it back in common.
1149 1176 #
1150 1177 # This is a hackish solution to catch most of the "common but locally
1151 1178 # hidden" situations. We do not perform discovery on the unfiltered
1152 1179 # repository because it ends up doing a pathological number of round
1153 1180 # trips for a huge number of changesets we do not care about.
1154 1181 #
1155 1182 # If a set of such "common but filtered" changesets exists on the server
1156 1183 # but does not include a remote head, we will not be able to detect it.
1157 1184 scommon = set(common)
1158 1185 filteredrheads = []
1159 1186 for n in rheads:
1160 1187 if n in nm:
1161 1188 if n not in scommon:
1162 1189 common.append(n)
1163 1190 else:
1164 1191 filteredrheads.append(n)
1165 1192 if not filteredrheads:
1166 1193 fetch = []
1167 1194 rheads = filteredrheads
1168 1195 pullop.common = common
1169 1196 pullop.fetch = fetch
1170 1197 pullop.rheads = rheads
1171 1198
1172 1199 def _pullbundle2(pullop):
1173 1200 """pull data using bundle2
1174 1201
1175 1202 For now, the only supported data are changegroups."""
1176 1203 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1177 1204
1178 1205 streaming, streamreqs = streamclone.canperformstreamclone(pullop)
1179 1206
1180 1207 # pulling changegroup
1181 1208 pullop.stepsdone.add('changegroup')
1182 1209
1183 1210 kwargs['common'] = pullop.common
1184 1211 kwargs['heads'] = pullop.heads or pullop.rheads
1185 1212 kwargs['cg'] = pullop.fetch
1186 1213 if 'listkeys' in pullop.remotebundle2caps:
1187 1214 kwargs['listkeys'] = ['phase']
1188 1215 if pullop.remotebookmarks is None:
1189 1216 # make sure to always include bookmark data when migrating
1190 1217 # `hg incoming --bundle` to using this function.
1191 1218 kwargs['listkeys'].append('bookmarks')
1192 1219
1193 1220 # If this is a full pull / clone and the server supports the clone bundles
1194 1221 # feature, tell the server whether we attempted a clone bundle. The
1195 1222 # presence of this flag indicates the client supports clone bundles. This
1196 1223 # will enable the server to treat clients that support clone bundles
1197 1224 # differently from those that don't.
1198 1225 if (pullop.remote.capable('clonebundles')
1199 1226 and pullop.heads is None and list(pullop.common) == [nullid]):
1200 1227 kwargs['cbattempted'] = pullop.clonebundleattempted
1201 1228
1202 1229 if streaming:
1203 1230 pullop.repo.ui.status(_('streaming all changes\n'))
1204 1231 elif not pullop.fetch:
1205 1232 pullop.repo.ui.status(_("no changes found\n"))
1206 1233 pullop.cgresult = 0
1207 1234 else:
1208 1235 if pullop.heads is None and list(pullop.common) == [nullid]:
1209 1236 pullop.repo.ui.status(_("requesting all changes\n"))
1210 1237 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1211 1238 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1212 1239 if obsolete.commonversion(remoteversions) is not None:
1213 1240 kwargs['obsmarkers'] = True
1214 1241 pullop.stepsdone.add('obsmarkers')
1215 1242 _pullbundle2extraprepare(pullop, kwargs)
1216 1243 bundle = pullop.remote.getbundle('pull', **kwargs)
1217 1244 try:
1218 1245 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1219 1246 except error.BundleValueError as exc:
1220 1247 raise error.Abort('missing support for %s' % exc)
1221 1248
1222 1249 if pullop.fetch:
1223 1250 results = [cg['return'] for cg in op.records['changegroup']]
1224 1251 pullop.cgresult = changegroup.combineresults(results)
1225 1252
1226 1253 # processing phases change
1227 1254 for namespace, value in op.records['listkeys']:
1228 1255 if namespace == 'phases':
1229 1256 _pullapplyphases(pullop, value)
1230 1257
1231 1258 # processing bookmark update
1232 1259 for namespace, value in op.records['listkeys']:
1233 1260 if namespace == 'bookmarks':
1234 1261 pullop.remotebookmarks = value
1235 1262
1236 1263 # bookmark data were either already there or pulled in the bundle
1237 1264 if pullop.remotebookmarks is not None:
1238 1265 _pullbookmarks(pullop)
1239 1266
1240 1267 def _pullbundle2extraprepare(pullop, kwargs):
1241 1268 """hook function so that extensions can extend the getbundle call"""
1242 1269 pass
1243 1270
1244 1271 def _pullchangeset(pullop):
1245 1272 """pull changeset from unbundle into the local repo"""
1246 1273 # We delay opening the transaction as late as possible so we
1247 1274 # don't open a transaction for nothing and don't break a future useful
1248 1275 # rollback call
1249 1276 if 'changegroup' in pullop.stepsdone:
1250 1277 return
1251 1278 pullop.stepsdone.add('changegroup')
1252 1279 if not pullop.fetch:
1253 1280 pullop.repo.ui.status(_("no changes found\n"))
1254 1281 pullop.cgresult = 0
1255 1282 return
1256 1283 pullop.gettransaction()
1257 1284 if pullop.heads is None and list(pullop.common) == [nullid]:
1258 1285 pullop.repo.ui.status(_("requesting all changes\n"))
1259 1286 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1260 1287 # issue1320, avoid a race if remote changed after discovery
1261 1288 pullop.heads = pullop.rheads
1262 1289
1263 1290 if pullop.remote.capable('getbundle'):
1264 1291 # TODO: get bundlecaps from remote
1265 1292 cg = pullop.remote.getbundle('pull', common=pullop.common,
1266 1293 heads=pullop.heads or pullop.rheads)
1267 1294 elif pullop.heads is None:
1268 1295 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1269 1296 elif not pullop.remote.capable('changegroupsubset'):
1270 1297 raise error.Abort(_("partial pull cannot be done because "
1271 1298 "other repository doesn't support "
1272 1299 "changegroupsubset."))
1273 1300 else:
1274 1301 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1275 1302 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1276 1303
1277 1304 def _pullphase(pullop):
1278 1305 # Get remote phases data from remote
1279 1306 if 'phases' in pullop.stepsdone:
1280 1307 return
1281 1308 remotephases = pullop.remote.listkeys('phases')
1282 1309 _pullapplyphases(pullop, remotephases)
1283 1310
1284 1311 def _pullapplyphases(pullop, remotephases):
1285 1312 """apply phase movement from observed remote state"""
1286 1313 if 'phases' in pullop.stepsdone:
1287 1314 return
1288 1315 pullop.stepsdone.add('phases')
1289 1316 publishing = bool(remotephases.get('publishing', False))
1290 1317 if remotephases and not publishing:
1291 1318 # remote is new and unpublishing
1292 1319 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1293 1320 pullop.pulledsubset,
1294 1321 remotephases)
1295 1322 dheads = pullop.pulledsubset
1296 1323 else:
1297 1324 # Remote is old or publishing; all common changesets
1298 1325 # should be seen as public
1299 1326 pheads = pullop.pulledsubset
1300 1327 dheads = []
1301 1328 unfi = pullop.repo.unfiltered()
1302 1329 phase = unfi._phasecache.phase
1303 1330 rev = unfi.changelog.nodemap.get
1304 1331 public = phases.public
1305 1332 draft = phases.draft
1306 1333
1307 1334 # exclude changesets already public locally and update the others
1308 1335 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1309 1336 if pheads:
1310 1337 tr = pullop.gettransaction()
1311 1338 phases.advanceboundary(pullop.repo, tr, public, pheads)
1312 1339
1313 1340 # exclude changesets already draft locally and update the others
1314 1341 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1315 1342 if dheads:
1316 1343 tr = pullop.gettransaction()
1317 1344 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1318 1345
1319 1346 def _pullbookmarks(pullop):
1320 1347 """process the remote bookmark information to update the local one"""
1321 1348 if 'bookmarks' in pullop.stepsdone:
1322 1349 return
1323 1350 pullop.stepsdone.add('bookmarks')
1324 1351 repo = pullop.repo
1325 1352 remotebookmarks = pullop.remotebookmarks
1326 1353 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1327 1354 pullop.remote.url(),
1328 1355 pullop.gettransaction,
1329 1356 explicit=pullop.explicitbookmarks)
1330 1357
1331 1358 def _pullobsolete(pullop):
1332 1359 """utility function to pull obsolete markers from a remote
1333 1360
1334 1361 `gettransaction` is a function that returns the pull transaction, creating
1335 1362 one if necessary. We return the transaction to inform the calling code that
1336 1363 a new transaction has been created (when applicable).
1337 1364
1338 1365 Exists mostly to allow overriding for experimentation purposes"""
1339 1366 if 'obsmarkers' in pullop.stepsdone:
1340 1367 return
1341 1368 pullop.stepsdone.add('obsmarkers')
1342 1369 tr = None
1343 1370 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1344 1371 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1345 1372 remoteobs = pullop.remote.listkeys('obsolete')
1346 1373 if 'dump0' in remoteobs:
1347 1374 tr = pullop.gettransaction()
1348 1375 for key in sorted(remoteobs, reverse=True):
1349 1376 if key.startswith('dump'):
1350 1377 data = base85.b85decode(remoteobs[key])
1351 1378 pullop.repo.obsstore.mergemarkers(tr, data)
1352 1379 pullop.repo.invalidatevolatilesets()
1353 1380 return tr
1354 1381
1355 1382 def caps20to10(repo):
1356 1383 """return a set with appropriate options to use bundle20 during getbundle"""
1357 1384 caps = set(['HG20'])
1358 1385 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1359 1386 caps.add('bundle2=' + urllib.quote(capsblob))
1360 1387 return caps
1361 1388
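# Illustrative note (not part of exchange.py): the returned set looks roughly
# like the following, with the capability blob URL-quoted:
#
#   set(['HG20', 'bundle2=<URL-quoted bundle2 capabilities blob>'])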
1362 1389 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1363 1390 getbundle2partsorder = []
1364 1391
1365 1392 # Mapping between step name and function
1366 1393 #
1367 1394 # This exists to help extensions wrap steps if necessary
1368 1395 getbundle2partsmapping = {}
1369 1396
1370 1397 def getbundle2partsgenerator(stepname, idx=None):
1371 1398 """decorator for function generating bundle2 part for getbundle
1372 1399
1373 1400 The function is added to the step -> function mapping and appended to the
1374 1401 list of steps. Beware that decorated functions will be added in order
1375 1402 (this may matter).
1376 1403
1377 1404 You can only use this decorator for new steps; if you want to wrap a step
1378 1405 from an extension, modify the getbundle2partsmapping dictionary directly.
1379 1406 def dec(func):
1380 1407 assert stepname not in getbundle2partsmapping
1381 1408 getbundle2partsmapping[stepname] = func
1382 1409 if idx is None:
1383 1410 getbundle2partsorder.append(stepname)
1384 1411 else:
1385 1412 getbundle2partsorder.insert(idx, stepname)
1386 1413 return func
1387 1414 return dec
1388 1415
1389 1416 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
1390 1417 **kwargs):
1391 1418 """return a full bundle (with potentially multiple kind of parts)
1392 1419
1393 1420 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1394 1421 passed. For now, the bundle can contain only a changegroup, but this will
1395 1422 change when more part types become available for bundle2.
1396 1423
1397 1424 This is different from changegroup.getchangegroup, which only returns an HG10
1398 1425 changegroup bundle. They may eventually get reunited in the future when we
1399 1426 have a clearer idea of the API we want for querying different data.
1400 1427
1401 1428 The implementation is at a very early stage and will get massive rework
1402 1429 when the API of bundle is refined.
1403 1430 """
1404 1431 # bundle10 case
1405 1432 usebundle2 = False
1406 1433 if bundlecaps is not None:
1407 1434 usebundle2 = any((cap.startswith('HG2') for cap in bundlecaps))
1408 1435 if not usebundle2:
1409 1436 if bundlecaps and not kwargs.get('cg', True):
1410 1437 raise ValueError(_('request for bundle10 must include changegroup'))
1411 1438
1412 1439 if kwargs:
1413 1440 raise ValueError(_('unsupported getbundle arguments: %s')
1414 1441 % ', '.join(sorted(kwargs.keys())))
1415 1442 return changegroup.getchangegroup(repo, source, heads=heads,
1416 1443 common=common, bundlecaps=bundlecaps)
1417 1444
1418 1445 # bundle20 case
1419 1446 b2caps = {}
1420 1447 for bcaps in bundlecaps:
1421 1448 if bcaps.startswith('bundle2='):
1422 1449 blob = urllib.unquote(bcaps[len('bundle2='):])
1423 1450 b2caps.update(bundle2.decodecaps(blob))
1424 1451 bundler = bundle2.bundle20(repo.ui, b2caps)
1425 1452
1426 1453 kwargs['heads'] = heads
1427 1454 kwargs['common'] = common
1428 1455
1429 1456 for name in getbundle2partsorder:
1430 1457 func = getbundle2partsmapping[name]
1431 1458 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1432 1459 **kwargs)
1433 1460
1434 1461 return util.chunkbuffer(bundler.getchunks())
1435 1462
1436 1463 @getbundle2partsgenerator('changegroup')
1437 1464 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1438 1465 b2caps=None, heads=None, common=None, **kwargs):
1439 1466 """add a changegroup part to the requested bundle"""
1440 1467 cg = None
1441 1468 if kwargs.get('cg', True):
1442 1469 # build changegroup bundle here.
1443 1470 version = None
1444 1471 cgversions = b2caps.get('changegroup')
1445 1472 getcgkwargs = {}
1446 1473 if cgversions: # 3.1 and 3.2 ship with an empty value
1447 1474 cgversions = [v for v in cgversions if v in changegroup.packermap]
1448 1475 if not cgversions:
1449 1476 raise ValueError(_('no common changegroup version'))
1450 1477 version = getcgkwargs['version'] = max(cgversions)
1451 1478 outgoing = changegroup.computeoutgoing(repo, heads, common)
1452 1479 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1453 1480 bundlecaps=bundlecaps,
1454 1481 **getcgkwargs)
1455 1482
1456 1483 if cg:
1457 1484 part = bundler.newpart('changegroup', data=cg)
1458 1485 if version is not None:
1459 1486 part.addparam('version', version)
1460 1487 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1461 1488
1462 1489 @getbundle2partsgenerator('listkeys')
1463 1490 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1464 1491 b2caps=None, **kwargs):
1465 1492 """add parts containing listkeys namespaces to the requested bundle"""
1466 1493 listkeys = kwargs.get('listkeys', ())
1467 1494 for namespace in listkeys:
1468 1495 part = bundler.newpart('listkeys')
1469 1496 part.addparam('namespace', namespace)
1470 1497 keys = repo.listkeys(namespace).items()
1471 1498 part.data = pushkey.encodekeys(keys)
1472 1499
1473 1500 @getbundle2partsgenerator('obsmarkers')
1474 1501 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1475 1502 b2caps=None, heads=None, **kwargs):
1476 1503 """add an obsolescence markers part to the requested bundle"""
1477 1504 if kwargs.get('obsmarkers', False):
1478 1505 if heads is None:
1479 1506 heads = repo.heads()
1480 1507 subset = [c.node() for c in repo.set('::%ln', heads)]
1481 1508 markers = repo.obsstore.relevantmarkers(subset)
1482 1509 markers = sorted(markers)
1483 1510 buildobsmarkerspart(bundler, markers)
1484 1511
1485 1512 @getbundle2partsgenerator('hgtagsfnodes')
1486 1513 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1487 1514 b2caps=None, heads=None, common=None,
1488 1515 **kwargs):
1489 1516 """Transfer the .hgtags filenodes mapping.
1490 1517
1491 1518 Only values for heads in this bundle will be transferred.
1492 1519
1493 1520 The part data consists of pairs of 20 byte changeset node and .hgtags
1494 1521 filenodes raw values.
1495 1522 """
1496 1523 # Don't send unless:
1497 1524 # - changesets are being exchanged,
1498 1525 # - the client supports it.
1499 1526 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1500 1527 return
1501 1528
1502 1529 outgoing = changegroup.computeoutgoing(repo, heads, common)
1503 1530
1504 1531 if not outgoing.missingheads:
1505 1532 return
1506 1533
1507 1534 cache = tags.hgtagsfnodescache(repo.unfiltered())
1508 1535 chunks = []
1509 1536
1510 1537 # .hgtags fnodes are only relevant for head changesets. While we could
1511 1538 # transfer values for all known nodes, there will likely be little to
1512 1539 # no benefit.
1513 1540 #
1514 1541 # We don't bother using a generator to produce output data because
1515 1542 # a) we only have 40 bytes per head and even esoteric numbers of heads
1516 1543 # consume little memory (1M heads is 40MB) b) we don't want to send the
1517 1544 # part if we don't have entries and knowing if we have entries requires
1518 1545 # cache lookups.
1519 1546 for node in outgoing.missingheads:
1520 1547 # Don't compute missing, as this may slow down serving.
1521 1548 fnode = cache.getfnode(node, computemissing=False)
1522 1549 if fnode is not None:
1523 1550 chunks.extend([node, fnode])
1524 1551
1525 1552 if chunks:
1526 1553 bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1527 1554
1528 1555 def check_heads(repo, their_heads, context):
1529 1556 """check if the heads of a repo have been modified
1530 1557
1531 1558 Used by peer for unbundling.
1532 1559 """
1533 1560 heads = repo.heads()
1534 1561 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1535 1562 if not (their_heads == ['force'] or their_heads == heads or
1536 1563 their_heads == ['hashed', heads_hash]):
1537 1564 # someone else committed/pushed/unbundled while we
1538 1565 # were transferring data
1539 1566 raise error.PushRaced('repository changed while %s - '
1540 1567 'please try again' % context)
1541 1568
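# Illustrative sketch (not from exchange.py): how a caller could build the
# 'hashed' form accepted above instead of shipping the full head list.
from mercurial import util, exchange

def expectedheads(repo):
    # mirrors the hashing performed inside check_heads()
    return ['hashed', util.sha1(''.join(sorted(repo.heads()))).digest()]

# exchange.check_heads(repo, expectedheads(repo), 'uploading changes') raises
# error.PushRaced if the repository gained or lost heads in the meantime.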
1542 1569 def unbundle(repo, cg, heads, source, url):
1543 1570 """Apply a bundle to a repo.
1544 1571
1545 1572 This function makes sure the repo is locked during the application and has a
1546 1573 mechanism to check that no push race occurred between the creation of the
1547 1574 bundle and its application.
1548 1575
1549 1576 If the push was raced, a PushRaced exception is raised."""
1550 1577 r = 0
1551 1578 # need a transaction when processing a bundle2 stream
1552 1579 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1553 1580 lockandtr = [None, None, None]
1554 1581 recordout = None
1555 1582 # quick fix for output mismatch with bundle2 in 3.4
1556 1583 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1557 1584 False)
1558 1585 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1559 1586 captureoutput = True
1560 1587 try:
1561 1588 check_heads(repo, heads, 'uploading changes')
1562 1589 # push can proceed
1563 1590 if util.safehasattr(cg, 'params'):
1564 1591 r = None
1565 1592 try:
1566 1593 def gettransaction():
1567 1594 if not lockandtr[2]:
1568 1595 lockandtr[0] = repo.wlock()
1569 1596 lockandtr[1] = repo.lock()
1570 1597 lockandtr[2] = repo.transaction(source)
1571 1598 lockandtr[2].hookargs['source'] = source
1572 1599 lockandtr[2].hookargs['url'] = url
1573 1600 lockandtr[2].hookargs['bundle2'] = '1'
1574 1601 return lockandtr[2]
1575 1602
1576 1603 # Do greedy locking by default until we're satisfied with lazy
1577 1604 # locking.
1578 1605 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1579 1606 gettransaction()
1580 1607
1581 1608 op = bundle2.bundleoperation(repo, gettransaction,
1582 1609 captureoutput=captureoutput)
1583 1610 try:
1584 1611 op = bundle2.processbundle(repo, cg, op=op)
1585 1612 finally:
1586 1613 r = op.reply
1587 1614 if captureoutput and r is not None:
1588 1615 repo.ui.pushbuffer(error=True, subproc=True)
1589 1616 def recordout(output):
1590 1617 r.newpart('output', data=output, mandatory=False)
1591 1618 if lockandtr[2] is not None:
1592 1619 lockandtr[2].close()
1593 1620 except BaseException as exc:
1594 1621 exc.duringunbundle2 = True
1595 1622 if captureoutput and r is not None:
1596 1623 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1597 1624 def recordout(output):
1598 1625 part = bundle2.bundlepart('output', data=output,
1599 1626 mandatory=False)
1600 1627 parts.append(part)
1601 1628 raise
1602 1629 else:
1603 1630 lockandtr[1] = repo.lock()
1604 1631 r = cg.apply(repo, source, url)
1605 1632 finally:
1606 1633 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1607 1634 if recordout is not None:
1608 1635 recordout(repo.ui.popbuffer())
1609 1636 return r
1610 1637
1611 1638 def _maybeapplyclonebundle(pullop):
1612 1639 """Apply a clone bundle from a remote, if possible."""
1613 1640
1614 1641 repo = pullop.repo
1615 1642 remote = pullop.remote
1616 1643
1617 1644 if not repo.ui.configbool('experimental', 'clonebundles', False):
1618 1645 return
1619 1646
1620 1647 if pullop.heads:
1621 1648 return
1622 1649
1623 1650 if not remote.capable('clonebundles'):
1624 1651 return
1625 1652
1626 1653 res = remote._call('clonebundles')
1627 1654
1628 1655 # If we call the wire protocol command, that's good enough to record the
1629 1656 # attempt.
1630 1657 pullop.clonebundleattempted = True
1631 1658
1632 1659 entries = parseclonebundlesmanifest(repo, res)
1633 1660 if not entries:
1634 1661 repo.ui.note(_('no clone bundles available on remote; '
1635 1662 'falling back to regular clone\n'))
1636 1663 return
1637 1664
1638 1665 entries = filterclonebundleentries(repo, entries)
1639 1666 if not entries:
1640 1667 # There is a thundering herd concern here. However, if a server
1641 1668 # operator doesn't advertise bundles appropriate for its clients,
1642 1669 # they deserve what's coming. Furthermore, from a client's
1643 1670 # perspective, no automatic fallback would mean not being able to
1644 1671 # clone!
1645 1672 repo.ui.warn(_('no compatible clone bundles available on server; '
1646 1673 'falling back to regular clone\n'))
1647 1674 repo.ui.warn(_('(you may want to report this to the server '
1648 1675 'operator)\n'))
1649 1676 return
1650 1677
1651 1678 entries = sortclonebundleentries(repo.ui, entries)
1652 1679
1653 1680 url = entries[0]['URL']
1654 1681 repo.ui.status(_('applying clone bundle from %s\n') % url)
1655 1682 if trypullbundlefromurl(repo.ui, repo, url):
1656 1683 repo.ui.status(_('finished applying clone bundle\n'))
1657 1684 # Bundle failed.
1658 1685 #
1659 1686 # We abort by default to avoid the thundering herd of
1660 1687 # clients flooding a server that was expecting expensive
1661 1688 # clone load to be offloaded.
1662 1689 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1663 1690 repo.ui.warn(_('falling back to normal clone\n'))
1664 1691 else:
1665 1692 raise error.Abort(_('error applying bundle'),
1666 1693 hint=_('if this error persists, consider contacting '
1667 1694 'the server operator or disable clone '
1668 1695 'bundles via '
1669 1696 '"--config experimental.clonebundles=false"'))
1670 1697
1671 1698 def parseclonebundlesmanifest(repo, s):
1672 1699 """Parses the raw text of a clone bundles manifest.
1673 1700
1674 1701 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1675 1702 to the URL and other keys are the attributes for the entry.
1676 1703 """
1677 1704 m = []
1678 1705 for line in s.splitlines():
1679 1706 fields = line.split()
1680 1707 if not fields:
1681 1708 continue
1682 1709 attrs = {'URL': fields[0]}
1683 1710 for rawattr in fields[1:]:
1684 1711 key, value = rawattr.split('=', 1)
1685 1712 key = urllib.unquote(key)
1686 1713 value = urllib.unquote(value)
1687 1714 attrs[key] = value
1688 1715
1689 1716 # Parse BUNDLESPEC into components. This makes client-side
1690 1717 # preferences easier to specify since you can prefer a single
1691 1718 # component of the BUNDLESPEC.
1692 1719 if key == 'BUNDLESPEC':
1693 1720 try:
1694 comp, version = parsebundlespec(repo, value,
1695 externalnames=True)
1721 comp, version, params = parsebundlespec(repo, value,
1722 externalnames=True)
1696 1723 attrs['COMPRESSION'] = comp
1697 1724 attrs['VERSION'] = version
1698 1725 except error.InvalidBundleSpecification:
1699 1726 pass
1700 1727 except error.UnsupportedBundleSpecification:
1701 1728 pass
1702 1729
1703 1730 m.append(attrs)
1704 1731
1705 1732 return m
1706 1733
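# Illustrative sketch (not from exchange.py): a one-line manifest and the entry
# this parser produces from it. The URL is made up; with this change a
# BUNDLESPEC value may also carry ';'-separated parameters, which
# parsebundlespec() now returns as a third value (unused here).
from mercurial import exchange

def examplemanifest(repo):
    manifest = 'https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true\n'
    return exchange.parseclonebundlesmanifest(repo, manifest)

# -> [{'URL': 'https://example.com/full.hg', 'BUNDLESPEC': 'gzip-v2',
#      'COMPRESSION': 'gzip', 'VERSION': 'v2', 'REQUIRESNI': 'true'}]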
1707 1734 def filterclonebundleentries(repo, entries):
1708 1735 """Remove incompatible clone bundle manifest entries.
1709 1736
1710 1737 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1711 1738 and returns a new list consisting of only the entries that this client
1712 1739 should be able to apply.
1713 1740
1714 1741 There is no guarantee we'll be able to apply all returned entries because
1715 1742 the metadata we use to filter on may be missing or wrong.
1716 1743 """
1717 1744 newentries = []
1718 1745 for entry in entries:
1719 1746 spec = entry.get('BUNDLESPEC')
1720 1747 if spec:
1721 1748 try:
1722 1749 parsebundlespec(repo, spec, strict=True)
1723 1750 except error.InvalidBundleSpecification as e:
1724 1751 repo.ui.debug(str(e) + '\n')
1725 1752 continue
1726 1753 except error.UnsupportedBundleSpecification as e:
1727 1754 repo.ui.debug('filtering %s because unsupported bundle '
1728 1755 'spec: %s\n' % (entry['URL'], str(e)))
1729 1756 continue
1730 1757
1731 1758 if 'REQUIRESNI' in entry and not sslutil.hassni:
1732 1759 repo.ui.debug('filtering %s because SNI not supported\n' %
1733 1760 entry['URL'])
1734 1761 continue
1735 1762
1736 1763 newentries.append(entry)
1737 1764
1738 1765 return newentries
1739 1766
1740 1767 def sortclonebundleentries(ui, entries):
1741 1768 # experimental config: experimental.clonebundleprefers
1742 1769 prefers = ui.configlist('experimental', 'clonebundleprefers', default=[])
1743 1770 if not prefers:
1744 1771 return list(entries)
1745 1772
1746 1773 prefers = [p.split('=', 1) for p in prefers]
1747 1774
1748 1775 # Our sort function.
1749 1776 def compareentry(a, b):
1750 1777 for prefkey, prefvalue in prefers:
1751 1778 avalue = a.get(prefkey)
1752 1779 bvalue = b.get(prefkey)
1753 1780
1754 1781 # Special case for b missing attribute and a matches exactly.
1755 1782 if avalue is not None and bvalue is None and avalue == prefvalue:
1756 1783 return -1
1757 1784
1758 1785 # Special case for a missing attribute and b matches exactly.
1759 1786 if bvalue is not None and avalue is None and bvalue == prefvalue:
1760 1787 return 1
1761 1788
1762 1789 # We can't compare unless attribute present on both.
1763 1790 if avalue is None or bvalue is None:
1764 1791 continue
1765 1792
1766 1793 # Same values should fall back to next attribute.
1767 1794 if avalue == bvalue:
1768 1795 continue
1769 1796
1770 1797 # Exact matches come first.
1771 1798 if avalue == prefvalue:
1772 1799 return -1
1773 1800 if bvalue == prefvalue:
1774 1801 return 1
1775 1802
1776 1803 # Fall back to next attribute.
1777 1804 continue
1778 1805
1779 1806 # If we got here we couldn't sort by attributes and prefers. Fall
1780 1807 # back to index order.
1781 1808 return 0
1782 1809
1783 1810 return sorted(entries, cmp=compareentry)
1784 1811
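# Illustrative sketch (not from exchange.py): how a single preference reorders
# manifest entries. The URLs are made up; the config item is the one read above.
from mercurial import exchange

def examplesort(ui):
    entries = [{'URL': 'https://example.com/bz2.hg', 'COMPRESSION': 'bzip2'},
               {'URL': 'https://example.com/gz.hg', 'COMPRESSION': 'gzip'}]
    ui.setconfig('experimental', 'clonebundleprefers', 'COMPRESSION=gzip')
    return exchange.sortclonebundleentries(ui, entries)

# -> the gzip entry sorts first; entries lacking a preferred attribute keep
#    their original manifest order.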
1785 1812 def trypullbundlefromurl(ui, repo, url):
1786 1813 """Attempt to apply a bundle from a URL."""
1787 1814 lock = repo.lock()
1788 1815 try:
1789 1816 tr = repo.transaction('bundleurl')
1790 1817 try:
1791 1818 try:
1792 1819 fh = urlmod.open(ui, url)
1793 1820 cg = readbundle(ui, fh, 'stream')
1794 1821
1795 1822 if isinstance(cg, bundle2.unbundle20):
1796 1823 bundle2.processbundle(repo, cg, lambda: tr)
1797 1824 else:
1798 1825 cg.apply(repo, 'clonebundles', url)
1799 1826 tr.close()
1800 1827 return True
1801 1828 except urllib2.HTTPError as e:
1802 1829 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1803 1830 except urllib2.URLError as e:
1804 1831 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1805 1832
1806 1833 return False
1807 1834 finally:
1808 1835 tr.release()
1809 1836 finally:
1810 1837 lock.release()