addchangegroup: remove the lock argument on the addchangegroup methods...
Pierre-Yves David
r15585:a348739d default
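The changeset drops the explicit lock argument from the addchangegroup methods. As a rough, hypothetical sketch of that kind of interface change (the ExampleRepo class, the method names, and the internal-locking approach below are illustrative assumptions, not Mercurial's actual code and not necessarily how this changeset implements it), removing the parameter means call sites stop threading a lock object through:

    import threading

    class ExampleRepo(object):
        # Hypothetical stand-in used only to sketch the refactoring pattern.
        def __init__(self):
            self._lock = threading.Lock()

        # Before: the caller passes along the lock it already holds.
        def addchangegroup_with_lock_arg(self, source, srctype, url, lock):
            assert lock is not None, 'caller must supply the lock it holds'
            return self._apply(source, srctype, url)

        # After: the argument is gone; locking becomes an internal detail.
        def addchangegroup(self, source, srctype, url):
            with self._lock:
                return self._apply(source, srctype, url)

        def _apply(self, source, srctype, url):
            # stand-in for applying the incoming changegroup
            return 0

In terms of the sketch above, a call site goes from repo.addchangegroup(src, srctype, url, lock) to repo.addchangegroup(src, srctype, url), which is the shape of cleanup the commit message describes.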
@@ -1,5702 +1,5701 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, difflib, time, tempfile, errno
12 12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 13 import patch, help, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, hbisect
15 15 import sshserver, hgweb, hgweb.server, commandserver
16 16 import match as matchmod
17 17 import merge as mergemod
18 18 import minirst, revset, fileset
19 19 import dagparser, context, simplemerge
20 20 import random, setdiscovery, treediscovery, dagutil
21 21
22 22 table = {}
23 23
24 24 command = cmdutil.command(table)
25 25
26 26 # common command options
27 27
28 28 globalopts = [
29 29 ('R', 'repository', '',
30 30 _('repository root directory or name of overlay bundle file'),
31 31 _('REPO')),
32 32 ('', 'cwd', '',
33 33 _('change working directory'), _('DIR')),
34 34 ('y', 'noninteractive', None,
35 35 _('do not prompt, automatically pick the first choice for all prompts')),
36 36 ('q', 'quiet', None, _('suppress output')),
37 37 ('v', 'verbose', None, _('enable additional output')),
38 38 ('', 'config', [],
39 39 _('set/override config option (use \'section.name=value\')'),
40 40 _('CONFIG')),
41 41 ('', 'debug', None, _('enable debugging output')),
42 42 ('', 'debugger', None, _('start debugger')),
43 43 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
44 44 _('ENCODE')),
45 45 ('', 'encodingmode', encoding.encodingmode,
46 46 _('set the charset encoding mode'), _('MODE')),
47 47 ('', 'traceback', None, _('always print a traceback on exception')),
48 48 ('', 'time', None, _('time how long the command takes')),
49 49 ('', 'profile', None, _('print command execution profile')),
50 50 ('', 'version', None, _('output version information and exit')),
51 51 ('h', 'help', None, _('display help and exit')),
52 52 ]
53 53
54 54 dryrunopts = [('n', 'dry-run', None,
55 55 _('do not perform actions, just print output'))]
56 56
57 57 remoteopts = [
58 58 ('e', 'ssh', '',
59 59 _('specify ssh command to use'), _('CMD')),
60 60 ('', 'remotecmd', '',
61 61 _('specify hg command to run on the remote side'), _('CMD')),
62 62 ('', 'insecure', None,
63 63 _('do not verify server certificate (ignoring web.cacerts config)')),
64 64 ]
65 65
66 66 walkopts = [
67 67 ('I', 'include', [],
68 68 _('include names matching the given patterns'), _('PATTERN')),
69 69 ('X', 'exclude', [],
70 70 _('exclude names matching the given patterns'), _('PATTERN')),
71 71 ]
72 72
73 73 commitopts = [
74 74 ('m', 'message', '',
75 75 _('use text as commit message'), _('TEXT')),
76 76 ('l', 'logfile', '',
77 77 _('read commit message from file'), _('FILE')),
78 78 ]
79 79
80 80 commitopts2 = [
81 81 ('d', 'date', '',
82 82 _('record the specified date as commit date'), _('DATE')),
83 83 ('u', 'user', '',
84 84 _('record the specified user as committer'), _('USER')),
85 85 ]
86 86
87 87 templateopts = [
88 88 ('', 'style', '',
89 89 _('display using template map file'), _('STYLE')),
90 90 ('', 'template', '',
91 91 _('display with template'), _('TEMPLATE')),
92 92 ]
93 93
94 94 logopts = [
95 95 ('p', 'patch', None, _('show patch')),
96 96 ('g', 'git', None, _('use git extended diff format')),
97 97 ('l', 'limit', '',
98 98 _('limit number of changes displayed'), _('NUM')),
99 99 ('M', 'no-merges', None, _('do not show merges')),
100 100 ('', 'stat', None, _('output diffstat-style summary of changes')),
101 101 ] + templateopts
102 102
103 103 diffopts = [
104 104 ('a', 'text', None, _('treat all files as text')),
105 105 ('g', 'git', None, _('use git extended diff format')),
106 106 ('', 'nodates', None, _('omit dates from diff headers'))
107 107 ]
108 108
109 109 diffwsopts = [
110 110 ('w', 'ignore-all-space', None,
111 111 _('ignore white space when comparing lines')),
112 112 ('b', 'ignore-space-change', None,
113 113 _('ignore changes in the amount of white space')),
114 114 ('B', 'ignore-blank-lines', None,
115 115 _('ignore changes whose lines are all blank')),
116 116 ]
117 117
118 118 diffopts2 = [
119 119 ('p', 'show-function', None, _('show which function each change is in')),
120 120 ('', 'reverse', None, _('produce a diff that undoes the changes')),
121 121 ] + diffwsopts + [
122 122 ('U', 'unified', '',
123 123 _('number of lines of context to show'), _('NUM')),
124 124 ('', 'stat', None, _('output diffstat-style summary of changes')),
125 125 ]
126 126
127 127 mergetoolopts = [
128 128 ('t', 'tool', '', _('specify merge tool')),
129 129 ]
130 130
131 131 similarityopts = [
132 132 ('s', 'similarity', '',
133 133 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
134 134 ]
135 135
136 136 subrepoopts = [
137 137 ('S', 'subrepos', None,
138 138 _('recurse into subrepositories'))
139 139 ]
140 140
141 141 # Commands start here, listed alphabetically
142 142
143 143 @command('^add',
144 144 walkopts + subrepoopts + dryrunopts,
145 145 _('[OPTION]... [FILE]...'))
146 146 def add(ui, repo, *pats, **opts):
147 147 """add the specified files on the next commit
148 148
149 149 Schedule files to be version controlled and added to the
150 150 repository.
151 151
152 152 The files will be added to the repository at the next commit. To
153 153 undo an add before that, see :hg:`forget`.
154 154
155 155 If no names are given, add all files to the repository.
156 156
157 157 .. container:: verbose
158 158
159 159 An example showing how new (unknown) files are added
160 160 automatically by :hg:`add`::
161 161
162 162 $ ls
163 163 foo.c
164 164 $ hg status
165 165 ? foo.c
166 166 $ hg add
167 167 adding foo.c
168 168 $ hg status
169 169 A foo.c
170 170
171 171 Returns 0 if all files are successfully added.
172 172 """
173 173
174 174 m = scmutil.match(repo[None], pats, opts)
175 175 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
176 176 opts.get('subrepos'), prefix="")
177 177 return rejected and 1 or 0
178 178
179 179 @command('addremove',
180 180 similarityopts + walkopts + dryrunopts,
181 181 _('[OPTION]... [FILE]...'))
182 182 def addremove(ui, repo, *pats, **opts):
183 183 """add all new files, delete all missing files
184 184
185 185 Add all new files and remove all missing files from the
186 186 repository.
187 187
188 188 New files are ignored if they match any of the patterns in
189 189 ``.hgignore``. As with add, these changes take effect at the next
190 190 commit.
191 191
192 192 Use the -s/--similarity option to detect renamed files. With a
193 193 parameter greater than 0, this compares every removed file with
194 194 every added file and records those similar enough as renames. This
195 195 option takes a percentage between 0 (disabled) and 100 (files must
196 196 be identical) as its parameter. Detecting renamed files this way
197 197 can be expensive. After using this option, :hg:`status -C` can be
198 198 used to check which files were identified as moved or renamed.
199 199
200 200 Returns 0 if all files are successfully added.
201 201 """
202 202 try:
203 203 sim = float(opts.get('similarity') or 100)
204 204 except ValueError:
205 205 raise util.Abort(_('similarity must be a number'))
206 206 if sim < 0 or sim > 100:
207 207 raise util.Abort(_('similarity must be between 0 and 100'))
208 208 return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
209 209
210 210 @command('^annotate|blame',
211 211 [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
212 212 ('', 'follow', None,
213 213 _('follow copies/renames and list the filename (DEPRECATED)')),
214 214 ('', 'no-follow', None, _("don't follow copies and renames")),
215 215 ('a', 'text', None, _('treat all files as text')),
216 216 ('u', 'user', None, _('list the author (long with -v)')),
217 217 ('f', 'file', None, _('list the filename')),
218 218 ('d', 'date', None, _('list the date (short with -q)')),
219 219 ('n', 'number', None, _('list the revision number (default)')),
220 220 ('c', 'changeset', None, _('list the changeset')),
221 221 ('l', 'line-number', None, _('show line number at the first appearance'))
222 222 ] + diffwsopts + walkopts,
223 223 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
224 224 def annotate(ui, repo, *pats, **opts):
225 225 """show changeset information by line for each file
226 226
227 227 List changes in files, showing the revision id responsible for
228 228 each line
229 229
230 230 This command is useful for discovering when a change was made and
231 231 by whom.
232 232
233 233 Without the -a/--text option, annotate will avoid processing files
234 234 it detects as binary. With -a, annotate will annotate the file
235 235 anyway, although the results will probably be neither useful
236 236 nor desirable.
237 237
238 238 Returns 0 on success.
239 239 """
240 240 if opts.get('follow'):
241 241 # --follow is deprecated and now just an alias for -f/--file
242 242 # to mimic the behavior of Mercurial before version 1.5
243 243 opts['file'] = True
244 244
245 245 datefunc = ui.quiet and util.shortdate or util.datestr
246 246 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
247 247
248 248 if not pats:
249 249 raise util.Abort(_('at least one filename or pattern is required'))
250 250
251 251 opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
252 252 ('number', ' ', lambda x: str(x[0].rev())),
253 253 ('changeset', ' ', lambda x: short(x[0].node())),
254 254 ('date', ' ', getdate),
255 255 ('file', ' ', lambda x: x[0].path()),
256 256 ('line_number', ':', lambda x: str(x[1])),
257 257 ]
258 258
259 259 if (not opts.get('user') and not opts.get('changeset')
260 260 and not opts.get('date') and not opts.get('file')):
261 261 opts['number'] = True
262 262
263 263 linenumber = opts.get('line_number') is not None
264 264 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
265 265 raise util.Abort(_('at least one of -n/-c is required for -l'))
266 266
267 267 funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
268 268 funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
269 269
270 270 def bad(x, y):
271 271 raise util.Abort("%s: %s" % (x, y))
272 272
273 273 ctx = scmutil.revsingle(repo, opts.get('rev'))
274 274 m = scmutil.match(ctx, pats, opts)
275 275 m.bad = bad
276 276 follow = not opts.get('no_follow')
277 277 diffopts = patch.diffopts(ui, opts, section='annotate')
278 278 for abs in ctx.walk(m):
279 279 fctx = ctx[abs]
280 280 if not opts.get('text') and util.binary(fctx.data()):
281 281 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
282 282 continue
283 283
284 284 lines = fctx.annotate(follow=follow, linenumber=linenumber,
285 285 diffopts=diffopts)
286 286 pieces = []
287 287
288 288 for f, sep in funcmap:
289 289 l = [f(n) for n, dummy in lines]
290 290 if l:
291 291 sized = [(x, encoding.colwidth(x)) for x in l]
292 292 ml = max([w for x, w in sized])
293 293 pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
294 294 for x, w in sized])
295 295
296 296 if pieces:
297 297 for p, l in zip(zip(*pieces), lines):
298 298 ui.write("%s: %s" % ("".join(p), l[1]))
299 299
300 300 @command('archive',
301 301 [('', 'no-decode', None, _('do not pass files through decoders')),
302 302 ('p', 'prefix', '', _('directory prefix for files in archive'),
303 303 _('PREFIX')),
304 304 ('r', 'rev', '', _('revision to distribute'), _('REV')),
305 305 ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
306 306 ] + subrepoopts + walkopts,
307 307 _('[OPTION]... DEST'))
308 308 def archive(ui, repo, dest, **opts):
309 309 '''create an unversioned archive of a repository revision
310 310
311 311 By default, the revision used is the parent of the working
312 312 directory; use -r/--rev to specify a different revision.
313 313
314 314 The archive type is automatically detected based on file
315 315 extension (or override using -t/--type).
316 316
317 317 .. container:: verbose
318 318
319 319 Examples:
320 320
321 321 - create a zip file containing the 1.0 release::
322 322
323 323 hg archive -r 1.0 project-1.0.zip
324 324
325 325 - create a tarball excluding .hg files::
326 326
327 327 hg archive project.tar.gz -X ".hg*"
328 328
329 329 Valid types are:
330 330
331 331 :``files``: a directory full of files (default)
332 332 :``tar``: tar archive, uncompressed
333 333 :``tbz2``: tar archive, compressed using bzip2
334 334 :``tgz``: tar archive, compressed using gzip
335 335 :``uzip``: zip archive, uncompressed
336 336 :``zip``: zip archive, compressed using deflate
337 337
338 338 The exact name of the destination archive or directory is given
339 339 using a format string; see :hg:`help export` for details.
340 340
341 341 Each member added to an archive file has a directory prefix
342 342 prepended. Use -p/--prefix to specify a format string for the
343 343 prefix. The default is the basename of the archive, with suffixes
344 344 removed.
345 345
346 346 Returns 0 on success.
347 347 '''
348 348
349 349 ctx = scmutil.revsingle(repo, opts.get('rev'))
350 350 if not ctx:
351 351 raise util.Abort(_('no working directory: please specify a revision'))
352 352 node = ctx.node()
353 353 dest = cmdutil.makefilename(repo, dest, node)
354 354 if os.path.realpath(dest) == repo.root:
355 355 raise util.Abort(_('repository root cannot be destination'))
356 356
357 357 kind = opts.get('type') or archival.guesskind(dest) or 'files'
358 358 prefix = opts.get('prefix')
359 359
360 360 if dest == '-':
361 361 if kind == 'files':
362 362 raise util.Abort(_('cannot archive plain files to stdout'))
363 363 dest = cmdutil.makefileobj(repo, dest)
364 364 if not prefix:
365 365 prefix = os.path.basename(repo.root) + '-%h'
366 366
367 367 prefix = cmdutil.makefilename(repo, prefix, node)
368 368 matchfn = scmutil.match(ctx, [], opts)
369 369 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
370 370 matchfn, prefix, subrepos=opts.get('subrepos'))
371 371
372 372 @command('backout',
373 373 [('', 'merge', None, _('merge with old dirstate parent after backout')),
374 374 ('', 'parent', '',
375 375 _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
376 376 ('r', 'rev', '', _('revision to backout'), _('REV')),
377 377 ] + mergetoolopts + walkopts + commitopts + commitopts2,
378 378 _('[OPTION]... [-r] REV'))
379 379 def backout(ui, repo, node=None, rev=None, **opts):
380 380 '''reverse effect of earlier changeset
381 381
382 382 Prepare a new changeset with the effect of REV undone in the
383 383 current working directory.
384 384
385 385 If REV is the parent of the working directory, then this new changeset
386 386 is committed automatically. Otherwise, hg needs to merge the
387 387 changes and the merged result is left uncommitted.
388 388
389 389 .. note::
390 390 backout cannot be used to fix either an unwanted or
391 391 incorrect merge.
392 392
393 393 .. container:: verbose
394 394
395 395 By default, the pending changeset will have one parent,
396 396 maintaining a linear history. With --merge, the pending
397 397 changeset will instead have two parents: the old parent of the
398 398 working directory and a new child of REV that simply undoes REV.
399 399
400 400 Before version 1.7, the behavior without --merge was equivalent
401 401 to specifying --merge followed by :hg:`update --clean .` to
402 402 cancel the merge and leave the child of REV as a head to be
403 403 merged separately.
404 404
405 405 See :hg:`help dates` for a list of formats valid for -d/--date.
406 406
407 407 Returns 0 on success.
408 408 '''
409 409 if rev and node:
410 410 raise util.Abort(_("please specify just one revision"))
411 411
412 412 if not rev:
413 413 rev = node
414 414
415 415 if not rev:
416 416 raise util.Abort(_("please specify a revision to backout"))
417 417
418 418 date = opts.get('date')
419 419 if date:
420 420 opts['date'] = util.parsedate(date)
421 421
422 422 cmdutil.bailifchanged(repo)
423 423 node = scmutil.revsingle(repo, rev).node()
424 424
425 425 op1, op2 = repo.dirstate.parents()
426 426 a = repo.changelog.ancestor(op1, node)
427 427 if a != node:
428 428 raise util.Abort(_('cannot backout change on a different branch'))
429 429
430 430 p1, p2 = repo.changelog.parents(node)
431 431 if p1 == nullid:
432 432 raise util.Abort(_('cannot backout a change with no parents'))
433 433 if p2 != nullid:
434 434 if not opts.get('parent'):
435 435 raise util.Abort(_('cannot backout a merge changeset'))
436 436 p = repo.lookup(opts['parent'])
437 437 if p not in (p1, p2):
438 438 raise util.Abort(_('%s is not a parent of %s') %
439 439 (short(p), short(node)))
440 440 parent = p
441 441 else:
442 442 if opts.get('parent'):
443 443 raise util.Abort(_('cannot use --parent on non-merge changeset'))
444 444 parent = p1
445 445
446 446 # the backout should appear on the same branch
447 447 branch = repo.dirstate.branch()
448 448 hg.clean(repo, node, show_stats=False)
449 449 repo.dirstate.setbranch(branch)
450 450 revert_opts = opts.copy()
451 451 revert_opts['date'] = None
452 452 revert_opts['all'] = True
453 453 revert_opts['rev'] = hex(parent)
454 454 revert_opts['no_backup'] = None
455 455 revert(ui, repo, **revert_opts)
456 456 if not opts.get('merge') and op1 != node:
457 457 try:
458 458 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
459 459 return hg.update(repo, op1)
460 460 finally:
461 461 ui.setconfig('ui', 'forcemerge', '')
462 462
463 463 commit_opts = opts.copy()
464 464 commit_opts['addremove'] = False
465 465 if not commit_opts['message'] and not commit_opts['logfile']:
466 466 # we don't translate commit messages
467 467 commit_opts['message'] = "Backed out changeset %s" % short(node)
468 468 commit_opts['force_editor'] = True
469 469 commit(ui, repo, **commit_opts)
470 470 def nice(node):
471 471 return '%d:%s' % (repo.changelog.rev(node), short(node))
472 472 ui.status(_('changeset %s backs out changeset %s\n') %
473 473 (nice(repo.changelog.tip()), nice(node)))
474 474 if opts.get('merge') and op1 != node:
475 475 hg.clean(repo, op1, show_stats=False)
476 476 ui.status(_('merging with changeset %s\n')
477 477 % nice(repo.changelog.tip()))
478 478 try:
479 479 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
480 480 return hg.merge(repo, hex(repo.changelog.tip()))
481 481 finally:
482 482 ui.setconfig('ui', 'forcemerge', '')
483 483 return 0
484 484
485 485 @command('bisect',
486 486 [('r', 'reset', False, _('reset bisect state')),
487 487 ('g', 'good', False, _('mark changeset good')),
488 488 ('b', 'bad', False, _('mark changeset bad')),
489 489 ('s', 'skip', False, _('skip testing changeset')),
490 490 ('e', 'extend', False, _('extend the bisect range')),
491 491 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
492 492 ('U', 'noupdate', False, _('do not update to target'))],
493 493 _("[-gbsr] [-U] [-c CMD] [REV]"))
494 494 def bisect(ui, repo, rev=None, extra=None, command=None,
495 495 reset=None, good=None, bad=None, skip=None, extend=None,
496 496 noupdate=None):
497 497 """subdivision search of changesets
498 498
499 499 This command helps to find changesets which introduce problems. To
500 500 use, mark the earliest changeset you know exhibits the problem as
501 501 bad, then mark the latest changeset which is free from the problem
502 502 as good. Bisect will update your working directory to a revision
503 503 for testing (unless the -U/--noupdate option is specified). Once
504 504 you have performed tests, mark the working directory as good or
505 505 bad, and bisect will either update to another candidate changeset
506 506 or announce that it has found the bad revision.
507 507
508 508 As a shortcut, you can also use the revision argument to mark a
509 509 revision as good or bad without checking it out first.
510 510
511 511 If you supply a command, it will be used for automatic bisection.
512 512 Its exit status will be used to mark revisions as good or bad:
513 513 status 0 means good, 125 means to skip the revision, 127
514 514 (command not found) will abort the bisection, and any other
515 515 non-zero exit status means the revision is bad.
516 516
517 517 .. container:: verbose
518 518
519 519 Some examples:
520 520
521 521 - start a bisection with known bad revision 12, and good revision 34::
522 522
523 523 hg bisect --bad 34
524 524 hg bisect --good 12
525 525
526 526 - advance the current bisection by marking the current revision as good or
527 527 bad::
528 528
529 529 hg bisect --good
530 530 hg bisect --bad
531 531
532 532 - mark the current revision, or a known revision, to be skipped (e.g. if
533 533 that revision is not usable because of another issue)::
534 534
535 535 hg bisect --skip
536 536 hg bisect --skip 23
537 537
538 538 - forget the current bisection::
539 539
540 540 hg bisect --reset
541 541
542 542 - use 'make && make tests' to automatically find the first broken
543 543 revision::
544 544
545 545 hg bisect --reset
546 546 hg bisect --bad 34
547 547 hg bisect --good 12
548 548 hg bisect --command 'make && make tests'
549 549
550 550 - see all changesets whose states are already known in the current
551 551 bisection::
552 552
553 553 hg log -r "bisect(pruned)"
554 554
555 555 - see all changesets that took part in the current bisection::
556 556
557 557 hg log -r "bisect(range)"
558 558
559 559 - with the graphlog extension, you can even get a nice graph::
560 560
561 561 hg log --graph -r "bisect(range)"
562 562
563 563 See :hg:`help revsets` for more about the `bisect()` keyword.
564 564
565 565 Returns 0 on success.
566 566 """
567 567 def extendbisectrange(nodes, good):
568 568 # bisect is incomplete when it ends on a merge node and
569 569 # one of the parents was not checked.
570 570 parents = repo[nodes[0]].parents()
571 571 if len(parents) > 1:
572 572 side = good and state['bad'] or state['good']
573 573 num = len(set(i.node() for i in parents) & set(side))
574 574 if num == 1:
575 575 return parents[0].ancestor(parents[1])
576 576 return None
577 577
578 578 def print_result(nodes, good):
579 579 displayer = cmdutil.show_changeset(ui, repo, {})
580 580 if len(nodes) == 1:
581 581 # narrowed it down to a single revision
582 582 if good:
583 583 ui.write(_("The first good revision is:\n"))
584 584 else:
585 585 ui.write(_("The first bad revision is:\n"))
586 586 displayer.show(repo[nodes[0]])
587 587 extendnode = extendbisectrange(nodes, good)
588 588 if extendnode is not None:
589 589 ui.write(_('Not all ancestors of this changeset have been'
590 590 ' checked.\nUse bisect --extend to continue the '
591 591 'bisection from\nthe common ancestor, %s.\n')
592 592 % extendnode)
593 593 else:
594 594 # multiple possible revisions
595 595 if good:
596 596 ui.write(_("Due to skipped revisions, the first "
597 597 "good revision could be any of:\n"))
598 598 else:
599 599 ui.write(_("Due to skipped revisions, the first "
600 600 "bad revision could be any of:\n"))
601 601 for n in nodes:
602 602 displayer.show(repo[n])
603 603 displayer.close()
604 604
605 605 def check_state(state, interactive=True):
606 606 if not state['good'] or not state['bad']:
607 607 if (good or bad or skip or reset) and interactive:
608 608 return
609 609 if not state['good']:
610 610 raise util.Abort(_('cannot bisect (no known good revisions)'))
611 611 else:
612 612 raise util.Abort(_('cannot bisect (no known bad revisions)'))
613 613 return True
614 614
615 615 # backward compatibility
616 616 if rev in "good bad reset init".split():
617 617 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
618 618 cmd, rev, extra = rev, extra, None
619 619 if cmd == "good":
620 620 good = True
621 621 elif cmd == "bad":
622 622 bad = True
623 623 else:
624 624 reset = True
625 625 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
626 626 raise util.Abort(_('incompatible arguments'))
627 627
628 628 if reset:
629 629 p = repo.join("bisect.state")
630 630 if os.path.exists(p):
631 631 os.unlink(p)
632 632 return
633 633
634 634 state = hbisect.load_state(repo)
635 635
636 636 if command:
637 637 changesets = 1
638 638 try:
639 639 while changesets:
640 640 # update state
641 641 status = util.system(command, out=ui.fout)
642 642 if status == 125:
643 643 transition = "skip"
644 644 elif status == 0:
645 645 transition = "good"
646 646 # status < 0 means process was killed
647 647 elif status == 127:
648 648 raise util.Abort(_("failed to execute %s") % command)
649 649 elif status < 0:
650 650 raise util.Abort(_("%s killed") % command)
651 651 else:
652 652 transition = "bad"
653 653 ctx = scmutil.revsingle(repo, rev)
654 654 rev = None # clear for future iterations
655 655 state[transition].append(ctx.node())
656 656 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
657 657 check_state(state, interactive=False)
658 658 # bisect
659 659 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
660 660 # update to next check
661 661 cmdutil.bailifchanged(repo)
662 662 hg.clean(repo, nodes[0], show_stats=False)
663 663 finally:
664 664 hbisect.save_state(repo, state)
665 665 print_result(nodes, good)
666 666 return
667 667
668 668 # update state
669 669
670 670 if rev:
671 671 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
672 672 else:
673 673 nodes = [repo.lookup('.')]
674 674
675 675 if good or bad or skip:
676 676 if good:
677 677 state['good'] += nodes
678 678 elif bad:
679 679 state['bad'] += nodes
680 680 elif skip:
681 681 state['skip'] += nodes
682 682 hbisect.save_state(repo, state)
683 683
684 684 if not check_state(state):
685 685 return
686 686
687 687 # actually bisect
688 688 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
689 689 if extend:
690 690 if not changesets:
691 691 extendnode = extendbisectrange(nodes, good)
692 692 if extendnode is not None:
693 693 ui.write(_("Extending search to changeset %d:%s\n"
694 694 % (extendnode.rev(), extendnode)))
695 695 if noupdate:
696 696 return
697 697 cmdutil.bailifchanged(repo)
698 698 return hg.clean(repo, extendnode.node())
699 699 raise util.Abort(_("nothing to extend"))
700 700
701 701 if changesets == 0:
702 702 print_result(nodes, good)
703 703 else:
704 704 assert len(nodes) == 1 # only a single node can be tested next
705 705 node = nodes[0]
706 706 # compute the approximate number of remaining tests
707 707 tests, size = 0, 2
708 708 while size <= changesets:
709 709 tests, size = tests + 1, size * 2
710 710 rev = repo.changelog.rev(node)
711 711 ui.write(_("Testing changeset %d:%s "
712 712 "(%d changesets remaining, ~%d tests)\n")
713 713 % (rev, short(node), changesets, tests))
714 714 if not noupdate:
715 715 cmdutil.bailifchanged(repo)
716 716 return hg.clean(repo, node)
717 717
718 718 @command('bookmarks',
719 719 [('f', 'force', False, _('force')),
720 720 ('r', 'rev', '', _('revision'), _('REV')),
721 721 ('d', 'delete', False, _('delete a given bookmark')),
722 722 ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
723 723 ('i', 'inactive', False, _('do not mark a new bookmark active'))],
724 724 _('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
725 725 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
726 726 rename=None, inactive=False):
727 727 '''track a line of development with movable markers
728 728
729 729 Bookmarks are pointers to certain commits that move when
730 730 committing. Bookmarks are local. They can be renamed, copied and
731 731 deleted. It is possible to use bookmark names in :hg:`merge` and
732 732 :hg:`update` to merge and update respectively to a given bookmark.
733 733
734 734 You can use :hg:`bookmark NAME` to set a bookmark on the working
735 735 directory's parent revision with the given name. If you specify
736 736 a revision using -r REV (where REV may be an existing bookmark),
737 737 the bookmark is assigned to that revision.
738 738
739 739 Bookmarks can be pushed and pulled between repositories (see :hg:`help
740 740 push` and :hg:`help pull`). This requires both the local and remote
741 741 repositories to support bookmarks. For versions prior to 1.8, this means
742 742 the bookmarks extension must be enabled.
743 743 '''
744 744 hexfn = ui.debugflag and hex or short
745 745 marks = repo._bookmarks
746 746 cur = repo.changectx('.').node()
747 747
748 748 if delete:
749 749 if mark is None:
750 750 raise util.Abort(_("bookmark name required"))
751 751 if mark not in marks:
752 752 raise util.Abort(_("bookmark '%s' does not exist") % mark)
753 753 if mark == repo._bookmarkcurrent:
754 754 bookmarks.setcurrent(repo, None)
755 755 del marks[mark]
756 756 bookmarks.write(repo)
757 757 return
758 758
759 759 if rename:
760 760 if rename not in marks:
761 761 raise util.Abort(_("bookmark '%s' does not exist") % rename)
762 762 if mark in marks and not force:
763 763 raise util.Abort(_("bookmark '%s' already exists "
764 764 "(use -f to force)") % mark)
765 765 if mark is None:
766 766 raise util.Abort(_("new bookmark name required"))
767 767 marks[mark] = marks[rename]
768 768 if repo._bookmarkcurrent == rename and not inactive:
769 769 bookmarks.setcurrent(repo, mark)
770 770 del marks[rename]
771 771 bookmarks.write(repo)
772 772 return
773 773
774 774 if mark is not None:
775 775 if "\n" in mark:
776 776 raise util.Abort(_("bookmark name cannot contain newlines"))
777 777 mark = mark.strip()
778 778 if not mark:
779 779 raise util.Abort(_("bookmark names cannot consist entirely of "
780 780 "whitespace"))
781 781 if inactive and mark == repo._bookmarkcurrent:
782 782 bookmarks.setcurrent(repo, None)
783 783 return
784 784 if mark in marks and not force:
785 785 raise util.Abort(_("bookmark '%s' already exists "
786 786 "(use -f to force)") % mark)
787 787 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
788 788 and not force):
789 789 raise util.Abort(
790 790 _("a bookmark cannot have the name of an existing branch"))
791 791 if rev:
792 792 marks[mark] = repo.lookup(rev)
793 793 else:
794 794 marks[mark] = cur
795 795 if not inactive and cur == marks[mark]:
796 796 bookmarks.setcurrent(repo, mark)
797 797 bookmarks.write(repo)
798 798 return
799 799
800 800 if mark is None:
801 801 if rev:
802 802 raise util.Abort(_("bookmark name required"))
803 803 if len(marks) == 0:
804 804 ui.status(_("no bookmarks set\n"))
805 805 else:
806 806 for bmark, n in sorted(marks.iteritems()):
807 807 current = repo._bookmarkcurrent
808 808 if bmark == current and n == cur:
809 809 prefix, label = '*', 'bookmarks.current'
810 810 else:
811 811 prefix, label = ' ', ''
812 812
813 813 if ui.quiet:
814 814 ui.write("%s\n" % bmark, label=label)
815 815 else:
816 816 ui.write(" %s %-25s %d:%s\n" % (
817 817 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
818 818 label=label)
819 819 return
820 820
821 821 @command('branch',
822 822 [('f', 'force', None,
823 823 _('set branch name even if it shadows an existing branch')),
824 824 ('C', 'clean', None, _('reset branch name to parent branch name'))],
825 825 _('[-fC] [NAME]'))
826 826 def branch(ui, repo, label=None, **opts):
827 827 """set or show the current branch name
828 828
829 829 With no argument, show the current branch name. With one argument,
830 830 set the working directory branch name (the branch will not exist
831 831 in the repository until the next commit). Standard practice
832 832 recommends that primary development take place on the 'default'
833 833 branch.
834 834
835 835 Unless -f/--force is specified, branch will not let you set a
836 836 branch name that already exists, even if it's inactive.
837 837
838 838 Use -C/--clean to reset the working directory branch to that of
839 839 the parent of the working directory, negating a previous branch
840 840 change.
841 841
842 842 Use the command :hg:`update` to switch to an existing branch. Use
843 843 :hg:`commit --close-branch` to mark this branch as closed.
844 844
845 845 .. note::
846 846 Branch names are permanent. Use :hg:`bookmark` to create a
847 847 light-weight bookmark instead. See :hg:`help glossary` for more
848 848 information about named branches and bookmarks.
849 849
850 850 Returns 0 on success.
851 851 """
852 852
853 853 if opts.get('clean'):
854 854 label = repo[None].p1().branch()
855 855 repo.dirstate.setbranch(label)
856 856 ui.status(_('reset working directory to branch %s\n') % label)
857 857 elif label:
858 858 if not opts.get('force') and label in repo.branchtags():
859 859 if label not in [p.branch() for p in repo.parents()]:
860 860 raise util.Abort(_('a branch of the same name already exists'),
861 861 # i18n: "it" refers to an existing branch
862 862 hint=_("use 'hg update' to switch to it"))
863 863 repo.dirstate.setbranch(label)
864 864 ui.status(_('marked working directory as branch %s\n') % label)
865 865 else:
866 866 ui.write("%s\n" % repo.dirstate.branch())
867 867
868 868 @command('branches',
869 869 [('a', 'active', False, _('show only branches that have unmerged heads')),
870 870 ('c', 'closed', False, _('show normal and closed branches'))],
871 871 _('[-ac]'))
872 872 def branches(ui, repo, active=False, closed=False):
873 873 """list repository named branches
874 874
875 875 List the repository's named branches, indicating which ones are
876 876 inactive. If -c/--closed is specified, also list branches which have
877 877 been marked closed (see :hg:`commit --close-branch`).
878 878
879 879 If -a/--active is specified, only show active branches. A branch
880 880 is considered active if it contains repository heads.
881 881
882 882 Use the command :hg:`update` to switch to an existing branch.
883 883
884 884 Returns 0.
885 885 """
886 886
887 887 hexfunc = ui.debugflag and hex or short
888 888 activebranches = [repo[n].branch() for n in repo.heads()]
889 889 def testactive(tag, node):
890 890 realhead = tag in activebranches
891 891 open = node in repo.branchheads(tag, closed=False)
892 892 return realhead and open
893 893 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
894 894 for tag, node in repo.branchtags().items()],
895 895 reverse=True)
896 896
897 897 for isactive, node, tag in branches:
898 898 if (not active) or isactive:
899 899 if ui.quiet:
900 900 ui.write("%s\n" % tag)
901 901 else:
902 902 hn = repo.lookup(node)
903 903 if isactive:
904 904 label = 'branches.active'
905 905 notice = ''
906 906 elif hn not in repo.branchheads(tag, closed=False):
907 907 if not closed:
908 908 continue
909 909 label = 'branches.closed'
910 910 notice = _(' (closed)')
911 911 else:
912 912 label = 'branches.inactive'
913 913 notice = _(' (inactive)')
914 914 if tag == repo.dirstate.branch():
915 915 label = 'branches.current'
916 916 rev = str(node).rjust(31 - encoding.colwidth(tag))
917 917 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
918 918 tag = ui.label(tag, label)
919 919 ui.write("%s %s%s\n" % (tag, rev, notice))
920 920
921 921 @command('bundle',
922 922 [('f', 'force', None, _('run even when the destination is unrelated')),
923 923 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
924 924 _('REV')),
925 925 ('b', 'branch', [], _('a specific branch you would like to bundle'),
926 926 _('BRANCH')),
927 927 ('', 'base', [],
928 928 _('a base changeset assumed to be available at the destination'),
929 929 _('REV')),
930 930 ('a', 'all', None, _('bundle all changesets in the repository')),
931 931 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
932 932 ] + remoteopts,
933 933 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
934 934 def bundle(ui, repo, fname, dest=None, **opts):
935 935 """create a changegroup file
936 936
937 937 Generate a compressed changegroup file collecting changesets not
938 938 known to be in another repository.
939 939
940 940 If you omit the destination repository, then hg assumes the
941 941 destination will have all the nodes you specify with --base
942 942 parameters. To create a bundle containing all changesets, use
943 943 -a/--all (or --base null).
944 944
945 945 You can change compression method with the -t/--type option.
946 946 The available compression methods are: none, bzip2, and
947 947 gzip (by default, bundles are compressed using bzip2).
948 948
949 949 The bundle file can then be transferred using conventional means
950 950 and applied to another repository with the unbundle or pull
951 951 command. This is useful when direct push and pull are not
952 952 available or when exporting an entire repository is undesirable.
953 953
954 954 Applying bundles preserves all changeset contents including
955 955 permissions, copy/rename information, and revision history.
956 956
957 957 Returns 0 on success, 1 if no changes found.
958 958 """
959 959 revs = None
960 960 if 'rev' in opts:
961 961 revs = scmutil.revrange(repo, opts['rev'])
962 962
963 963 if opts.get('all'):
964 964 base = ['null']
965 965 else:
966 966 base = scmutil.revrange(repo, opts.get('base'))
967 967 if base:
968 968 if dest:
969 969 raise util.Abort(_("--base is incompatible with specifying "
970 970 "a destination"))
971 971 common = [repo.lookup(rev) for rev in base]
972 972 heads = revs and map(repo.lookup, revs) or revs
973 973 else:
974 974 dest = ui.expandpath(dest or 'default-push', dest or 'default')
975 975 dest, branches = hg.parseurl(dest, opts.get('branch'))
976 976 other = hg.peer(repo, opts, dest)
977 977 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
978 978 heads = revs and map(repo.lookup, revs) or revs
979 979 common, outheads = discovery.findcommonoutgoing(repo, other,
980 980 onlyheads=heads,
981 981 force=opts.get('force'))
982 982
983 983 cg = repo.getbundle('bundle', common=common, heads=heads)
984 984 if not cg:
985 985 ui.status(_("no changes found\n"))
986 986 return 1
987 987
988 988 bundletype = opts.get('type', 'bzip2').lower()
989 989 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
990 990 bundletype = btypes.get(bundletype)
991 991 if bundletype not in changegroup.bundletypes:
992 992 raise util.Abort(_('unknown bundle type specified with --type'))
993 993
994 994 changegroup.writebundle(cg, fname, bundletype)
995 995
996 996 @command('cat',
997 997 [('o', 'output', '',
998 998 _('print output to file with formatted name'), _('FORMAT')),
999 999 ('r', 'rev', '', _('print the given revision'), _('REV')),
1000 1000 ('', 'decode', None, _('apply any matching decode filter')),
1001 1001 ] + walkopts,
1002 1002 _('[OPTION]... FILE...'))
1003 1003 def cat(ui, repo, file1, *pats, **opts):
1004 1004 """output the current or given revision of files
1005 1005
1006 1006 Print the specified files as they were at the given revision. If
1007 1007 no revision is given, the parent of the working directory is used,
1008 1008 or tip if no revision is checked out.
1009 1009
1010 1010 Output may be to a file, in which case the name of the file is
1011 1011 given using a format string. The formatting rules are the same as
1012 1012 for the export command, with the following additions:
1013 1013
1014 1014 :``%s``: basename of file being printed
1015 1015 :``%d``: dirname of file being printed, or '.' if in repository root
1016 1016 :``%p``: root-relative path name of file being printed
1017 1017
1018 1018 Returns 0 on success.
1019 1019 """
1020 1020 ctx = scmutil.revsingle(repo, opts.get('rev'))
1021 1021 err = 1
1022 1022 m = scmutil.match(ctx, (file1,) + pats, opts)
1023 1023 for abs in ctx.walk(m):
1024 1024 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1025 1025 pathname=abs)
1026 1026 data = ctx[abs].data()
1027 1027 if opts.get('decode'):
1028 1028 data = repo.wwritedata(abs, data)
1029 1029 fp.write(data)
1030 1030 fp.close()
1031 1031 err = 0
1032 1032 return err
1033 1033
1034 1034 @command('^clone',
1035 1035 [('U', 'noupdate', None,
1036 1036 _('the clone will include an empty working copy (only a repository)')),
1037 1037 ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
1038 1038 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1039 1039 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1040 1040 ('', 'pull', None, _('use pull protocol to copy metadata')),
1041 1041 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1042 1042 ] + remoteopts,
1043 1043 _('[OPTION]... SOURCE [DEST]'))
1044 1044 def clone(ui, source, dest=None, **opts):
1045 1045 """make a copy of an existing repository
1046 1046
1047 1047 Create a copy of an existing repository in a new directory.
1048 1048
1049 1049 If no destination directory name is specified, it defaults to the
1050 1050 basename of the source.
1051 1051
1052 1052 The location of the source is added to the new repository's
1053 1053 ``.hg/hgrc`` file, as the default to be used for future pulls.
1054 1054
1055 1055 Only local paths and ``ssh://`` URLs are supported as
1056 1056 destinations. For ``ssh://`` destinations, no working directory or
1057 1057 ``.hg/hgrc`` will be created on the remote side.
1058 1058
1059 1059 To pull only a subset of changesets, specify one or more revisions
1060 1060 identifiers with -r/--rev or branches with -b/--branch. The
1061 1061 resulting clone will contain only the specified changesets and
1062 1062 their ancestors. These options (or 'clone src#rev dest') imply
1063 1063 --pull, even for local source repositories. Note that specifying a
1064 1064 tag will include the tagged changeset but not the changeset
1065 1065 containing the tag.
1066 1066
1067 1067 To check out a particular version, use -u/--update, or
1068 1068 -U/--noupdate to create a clone with no working directory.
1069 1069
1070 1070 .. container:: verbose
1071 1071
1072 1072 For efficiency, hardlinks are used for cloning whenever the
1073 1073 source and destination are on the same filesystem (note this
1074 1074 applies only to the repository data, not to the working
1075 1075 directory). Some filesystems, such as AFS, implement hardlinking
1076 1076 incorrectly, but do not report errors. In these cases, use the
1077 1077 --pull option to avoid hardlinking.
1078 1078
1079 1079 In some cases, you can clone repositories and the working
1080 1080 directory using full hardlinks with ::
1081 1081
1082 1082 $ cp -al REPO REPOCLONE
1083 1083
1084 1084 This is the fastest way to clone, but it is not always safe. The
1085 1085 operation is not atomic (making sure REPO is not modified during
1086 1086 the operation is up to you) and you have to make sure your
1087 1087 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1088 1088 so). Also, this is not compatible with certain extensions that
1089 1089 place their metadata under the .hg directory, such as mq.
1090 1090
1091 1091 Mercurial will update the working directory to the first applicable
1092 1092 revision from this list:
1093 1093
1094 1094 a) null if -U or the source repository has no changesets
1095 1095 b) if -u . and the source repository is local, the first parent of
1096 1096 the source repository's working directory
1097 1097 c) the changeset specified with -u (if a branch name, this means the
1098 1098 latest head of that branch)
1099 1099 d) the changeset specified with -r
1100 1100 e) the tipmost head specified with -b
1101 1101 f) the tipmost head specified with the url#branch source syntax
1102 1102 g) the tipmost head of the default branch
1103 1103 h) tip
1104 1104
1105 1105 Examples:
1106 1106
1107 1107 - clone a remote repository to a new directory named hg/::
1108 1108
1109 1109 hg clone http://selenic.com/hg
1110 1110
1111 1111 - create a lightweight local clone::
1112 1112
1113 1113 hg clone project/ project-feature/
1114 1114
1115 1115 - clone from an absolute path on an ssh server (note double-slash)::
1116 1116
1117 1117 hg clone ssh://user@server//home/projects/alpha/
1118 1118
1119 1119 - do a high-speed clone over a LAN while checking out a
1120 1120 specified version::
1121 1121
1122 1122 hg clone --uncompressed http://server/repo -u 1.5
1123 1123
1124 1124 - create a repository without changesets after a particular revision::
1125 1125
1126 1126 hg clone -r 04e544 experimental/ good/
1127 1127
1128 1128 - clone (and track) a particular named branch::
1129 1129
1130 1130 hg clone http://selenic.com/hg#stable
1131 1131
1132 1132 See :hg:`help urls` for details on specifying URLs.
1133 1133
1134 1134 Returns 0 on success.
1135 1135 """
1136 1136 if opts.get('noupdate') and opts.get('updaterev'):
1137 1137 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
1138 1138
1139 1139 r = hg.clone(ui, opts, source, dest,
1140 1140 pull=opts.get('pull'),
1141 1141 stream=opts.get('uncompressed'),
1142 1142 rev=opts.get('rev'),
1143 1143 update=opts.get('updaterev') or not opts.get('noupdate'),
1144 1144 branch=opts.get('branch'))
1145 1145
1146 1146 return r is None
1147 1147
1148 1148 @command('^commit|ci',
1149 1149 [('A', 'addremove', None,
1150 1150 _('mark new/missing files as added/removed before committing')),
1151 1151 ('', 'close-branch', None,
1152 1152 _('mark a branch as closed, hiding it from the branch list')),
1153 1153 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1154 1154 _('[OPTION]... [FILE]...'))
1155 1155 def commit(ui, repo, *pats, **opts):
1156 1156 """commit the specified files or all outstanding changes
1157 1157
1158 1158 Commit changes to the given files into the repository. Unlike a
1159 1159 centralized SCM, this operation is a local operation. See
1160 1160 :hg:`push` for a way to actively distribute your changes.
1161 1161
1162 1162 If a list of files is omitted, all changes reported by :hg:`status`
1163 1163 will be committed.
1164 1164
1165 1165 If you are committing the result of a merge, do not provide any
1166 1166 filenames or -I/-X filters.
1167 1167
1168 1168 If no commit message is specified, Mercurial starts your
1169 1169 configured editor where you can enter a message. In case your
1170 1170 commit fails, you will find a backup of your message in
1171 1171 ``.hg/last-message.txt``.
1172 1172
1173 1173 See :hg:`help dates` for a list of formats valid for -d/--date.
1174 1174
1175 1175 Returns 0 on success, 1 if nothing changed.
1176 1176 """
1177 1177 if opts.get('subrepos'):
1178 1178 # Let --subrepos on the command line override config setting.
1179 1179 ui.setconfig('ui', 'commitsubrepos', True)
1180 1180
1181 1181 extra = {}
1182 1182 if opts.get('close_branch'):
1183 1183 if repo['.'].node() not in repo.branchheads():
1184 1184 # The topo heads set is included in the branch heads set of the
1185 1185 # current branch, so it's sufficient to test branchheads
1186 1186 raise util.Abort(_('can only close branch heads'))
1187 1187 extra['close'] = 1
1188 1188 e = cmdutil.commiteditor
1189 1189 if opts.get('force_editor'):
1190 1190 e = cmdutil.commitforceeditor
1191 1191
1192 1192 def commitfunc(ui, repo, message, match, opts):
1193 1193 return repo.commit(message, opts.get('user'), opts.get('date'), match,
1194 1194 editor=e, extra=extra)
1195 1195
1196 1196 branch = repo[None].branch()
1197 1197 bheads = repo.branchheads(branch)
1198 1198
1199 1199 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1200 1200 if not node:
1201 1201 stat = repo.status(match=scmutil.match(repo[None], pats, opts))
1202 1202 if stat[3]:
1203 1203 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
1204 1204 % len(stat[3]))
1205 1205 else:
1206 1206 ui.status(_("nothing changed\n"))
1207 1207 return 1
1208 1208
1209 1209 ctx = repo[node]
1210 1210 parents = ctx.parents()
1211 1211
1212 1212 if (bheads and node not in bheads and not
1213 1213 [x for x in parents if x.node() in bheads and x.branch() == branch]):
1214 1214 ui.status(_('created new head\n'))
1215 1215 # The message is not printed for initial roots. For the other
1216 1216 # changesets, it is printed in the following situations:
1217 1217 #
1218 1218 # Par column: for the 2 parents with ...
1219 1219 # N: null or no parent
1220 1220 # B: parent is on another named branch
1221 1221 # C: parent is a regular non head changeset
1222 1222 # H: parent was a branch head of the current branch
1223 1223 # Msg column: whether we print "created new head" message
1224 1224 # In the following, it is assumed that there already exists some
1225 1225 # initial branch heads of the current branch, otherwise nothing is
1226 1226 # printed anyway.
1227 1227 #
1228 1228 # Par Msg Comment
1229 1229 # NN y additional topo root
1230 1230 #
1231 1231 # BN y additional branch root
1232 1232 # CN y additional topo head
1233 1233 # HN n usual case
1234 1234 #
1235 1235 # BB y weird additional branch root
1236 1236 # CB y branch merge
1237 1237 # HB n merge with named branch
1238 1238 #
1239 1239 # CC y additional head from merge
1240 1240 # CH n merge with a head
1241 1241 #
1242 1242 # HH n head merge: head count decreases
1243 1243
1244 1244 if not opts.get('close_branch'):
1245 1245 for r in parents:
1246 1246 if r.extra().get('close') and r.branch() == branch:
1247 1247 ui.status(_('reopening closed branch head %d\n') % r)
1248 1248
1249 1249 if ui.debugflag:
1250 1250 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
1251 1251 elif ui.verbose:
1252 1252 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
1253 1253
1254 1254 @command('copy|cp',
1255 1255 [('A', 'after', None, _('record a copy that has already occurred')),
1256 1256 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1257 1257 ] + walkopts + dryrunopts,
1258 1258 _('[OPTION]... [SOURCE]... DEST'))
1259 1259 def copy(ui, repo, *pats, **opts):
1260 1260 """mark files as copied for the next commit
1261 1261
1262 1262 Mark dest as having copies of source files. If dest is a
1263 1263 directory, copies are put in that directory. If dest is a file,
1264 1264 the source must be a single file.
1265 1265
1266 1266 By default, this command copies the contents of files as they
1267 1267 exist in the working directory. If invoked with -A/--after, the
1268 1268 operation is recorded, but no copying is performed.
1269 1269
1270 1270 This command takes effect with the next commit. To undo a copy
1271 1271 before that, see :hg:`revert`.
1272 1272
1273 1273 Returns 0 on success, 1 if errors are encountered.
1274 1274 """
1275 1275 wlock = repo.wlock(False)
1276 1276 try:
1277 1277 return cmdutil.copy(ui, repo, pats, opts)
1278 1278 finally:
1279 1279 wlock.release()
1280 1280
1281 1281 @command('debugancestor', [], _('[INDEX] REV1 REV2'))
1282 1282 def debugancestor(ui, repo, *args):
1283 1283 """find the ancestor revision of two revisions in a given index"""
1284 1284 if len(args) == 3:
1285 1285 index, rev1, rev2 = args
1286 1286 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
1287 1287 lookup = r.lookup
1288 1288 elif len(args) == 2:
1289 1289 if not repo:
1290 1290 raise util.Abort(_("there is no Mercurial repository here "
1291 1291 "(.hg not found)"))
1292 1292 rev1, rev2 = args
1293 1293 r = repo.changelog
1294 1294 lookup = repo.lookup
1295 1295 else:
1296 1296 raise util.Abort(_('either two or three arguments required'))
1297 1297 a = r.ancestor(lookup(rev1), lookup(rev2))
1298 1298 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1299 1299
1300 1300 @command('debugbuilddag',
1301 1301 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
1302 1302 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
1303 1303 ('n', 'new-file', None, _('add new file at each rev'))],
1304 1304 _('[OPTION]... [TEXT]'))
1305 1305 def debugbuilddag(ui, repo, text=None,
1306 1306 mergeable_file=False,
1307 1307 overwritten_file=False,
1308 1308 new_file=False):
1309 1309 """builds a repo with a given DAG from scratch in the current empty repo
1310 1310
1311 1311 The description of the DAG is read from stdin if not given on the
1312 1312 command line.
1313 1313
1314 1314 Elements:
1315 1315
1316 1316 - "+n" is a linear run of n nodes based on the current default parent
1317 1317 - "." is a single node based on the current default parent
1318 1318 - "$" resets the default parent to null (implied at the start);
1319 1319 otherwise the default parent is always the last node created
1320 1320 - "<p" sets the default parent to the backref p
1321 1321 - "*p" is a fork at parent p, which is a backref
1322 1322 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1323 1323 - "/p2" is a merge of the preceding node and p2
1324 1324 - ":tag" defines a local tag for the preceding node
1325 1325 - "@branch" sets the named branch for subsequent nodes
1326 1326 - "#...\\n" is a comment up to the end of the line
1327 1327
1328 1328 Whitespace between the above elements is ignored.
1329 1329
1330 1330 A backref is either
1331 1331
1332 1332 - a number n, which references the node curr-n, where curr is the current
1333 1333 node, or
1334 1334 - the name of a local tag you placed earlier using ":tag", or
1335 1335 - empty to denote the default parent.
1336 1336
1337 1337 All string-valued elements are either strictly alphanumeric, or must
1338 1338 be enclosed in double quotes ("..."), with "\\" as escape character.
1339 1339 """
1340 1340
1341 1341 if text is None:
1342 1342 ui.status(_("reading DAG from stdin\n"))
1343 1343 text = ui.fin.read()
1344 1344
1345 1345 cl = repo.changelog
1346 1346 if len(cl) > 0:
1347 1347 raise util.Abort(_('repository is not empty'))
1348 1348
1349 1349 # determine number of revs in DAG
1350 1350 total = 0
1351 1351 for type, data in dagparser.parsedag(text):
1352 1352 if type == 'n':
1353 1353 total += 1
1354 1354
1355 1355 if mergeable_file:
1356 1356 linesperrev = 2
1357 1357 # make a file with k lines per rev
1358 1358 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
1359 1359 initialmergedlines.append("")
1360 1360
1361 1361 tags = []
1362 1362
1363 1363 tr = repo.transaction("builddag")
1364 1364 try:
1365 1365
1366 1366 at = -1
1367 1367 atbranch = 'default'
1368 1368 nodeids = []
1369 1369 ui.progress(_('building'), 0, unit=_('revisions'), total=total)
1370 1370 for type, data in dagparser.parsedag(text):
1371 1371 if type == 'n':
1372 1372 ui.note('node %s\n' % str(data))
1373 1373 id, ps = data
1374 1374
1375 1375 files = []
1376 1376 fctxs = {}
1377 1377
1378 1378 p2 = None
1379 1379 if mergeable_file:
1380 1380 fn = "mf"
1381 1381 p1 = repo[ps[0]]
1382 1382 if len(ps) > 1:
1383 1383 p2 = repo[ps[1]]
1384 1384 pa = p1.ancestor(p2)
1385 1385 base, local, other = [x[fn].data() for x in pa, p1, p2]
1386 1386 m3 = simplemerge.Merge3Text(base, local, other)
1387 1387 ml = [l.strip() for l in m3.merge_lines()]
1388 1388 ml.append("")
1389 1389 elif at > 0:
1390 1390 ml = p1[fn].data().split("\n")
1391 1391 else:
1392 1392 ml = initialmergedlines
1393 1393 ml[id * linesperrev] += " r%i" % id
1394 1394 mergedtext = "\n".join(ml)
1395 1395 files.append(fn)
1396 1396 fctxs[fn] = context.memfilectx(fn, mergedtext)
1397 1397
1398 1398 if overwritten_file:
1399 1399 fn = "of"
1400 1400 files.append(fn)
1401 1401 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1402 1402
1403 1403 if new_file:
1404 1404 fn = "nf%i" % id
1405 1405 files.append(fn)
1406 1406 fctxs[fn] = context.memfilectx(fn, "r%i\n" % id)
1407 1407 if len(ps) > 1:
1408 1408 if not p2:
1409 1409 p2 = repo[ps[1]]
1410 1410 for fn in p2:
1411 1411 if fn.startswith("nf"):
1412 1412 files.append(fn)
1413 1413 fctxs[fn] = p2[fn]
1414 1414
1415 1415 def fctxfn(repo, cx, path):
1416 1416 return fctxs.get(path)
1417 1417
1418 1418 if len(ps) == 0 or ps[0] < 0:
1419 1419 pars = [None, None]
1420 1420 elif len(ps) == 1:
1421 1421 pars = [nodeids[ps[0]], None]
1422 1422 else:
1423 1423 pars = [nodeids[p] for p in ps]
1424 1424 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
1425 1425 date=(id, 0),
1426 1426 user="debugbuilddag",
1427 1427 extra={'branch': atbranch})
1428 1428 nodeid = repo.commitctx(cx)
1429 1429 nodeids.append(nodeid)
1430 1430 at = id
1431 1431 elif type == 'l':
1432 1432 id, name = data
1433 1433 ui.note('tag %s\n' % name)
1434 1434 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
1435 1435 elif type == 'a':
1436 1436 ui.note('branch %s\n' % data)
1437 1437 atbranch = data
1438 1438 ui.progress(_('building'), id, unit=_('revisions'), total=total)
1439 1439 tr.close()
1440 1440 finally:
1441 1441 ui.progress(_('building'), None)
1442 1442 tr.release()
1443 1443
1444 1444 if tags:
1445 1445 repo.opener.write("localtags", "".join(tags))
1446 1446
1447 1447 @command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
1448 1448 def debugbundle(ui, bundlepath, all=None, **opts):
1449 1449 """lists the contents of a bundle"""
1450 1450 f = url.open(ui, bundlepath)
1451 1451 try:
1452 1452 gen = changegroup.readbundle(f, bundlepath)
1453 1453 if all:
1454 1454 ui.write("format: id, p1, p2, cset, delta base, len(delta)\n")
1455 1455
1456 1456 def showchunks(named):
1457 1457 ui.write("\n%s\n" % named)
1458 1458 chain = None
1459 1459 while True:
1460 1460 chunkdata = gen.deltachunk(chain)
1461 1461 if not chunkdata:
1462 1462 break
1463 1463 node = chunkdata['node']
1464 1464 p1 = chunkdata['p1']
1465 1465 p2 = chunkdata['p2']
1466 1466 cs = chunkdata['cs']
1467 1467 deltabase = chunkdata['deltabase']
1468 1468 delta = chunkdata['delta']
1469 1469 ui.write("%s %s %s %s %s %s\n" %
1470 1470 (hex(node), hex(p1), hex(p2),
1471 1471 hex(cs), hex(deltabase), len(delta)))
1472 1472 chain = node
1473 1473
1474 1474 chunkdata = gen.changelogheader()
1475 1475 showchunks("changelog")
1476 1476 chunkdata = gen.manifestheader()
1477 1477 showchunks("manifest")
1478 1478 while True:
1479 1479 chunkdata = gen.filelogheader()
1480 1480 if not chunkdata:
1481 1481 break
1482 1482 fname = chunkdata['filename']
1483 1483 showchunks(fname)
1484 1484 else:
1485 1485 chunkdata = gen.changelogheader()
1486 1486 chain = None
1487 1487 while True:
1488 1488 chunkdata = gen.deltachunk(chain)
1489 1489 if not chunkdata:
1490 1490 break
1491 1491 node = chunkdata['node']
1492 1492 ui.write("%s\n" % hex(node))
1493 1493 chain = node
1494 1494 finally:
1495 1495 f.close()
1496 1496
1497 1497 @command('debugcheckstate', [], '')
1498 1498 def debugcheckstate(ui, repo):
1499 1499 """validate the correctness of the current dirstate"""
1500 1500 parent1, parent2 = repo.dirstate.parents()
1501 1501 m1 = repo[parent1].manifest()
1502 1502 m2 = repo[parent2].manifest()
1503 1503 errors = 0
1504 1504 for f in repo.dirstate:
1505 1505 state = repo.dirstate[f]
1506 1506 if state in "nr" and f not in m1:
1507 1507 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1508 1508 errors += 1
1509 1509 if state in "a" and f in m1:
1510 1510 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1511 1511 errors += 1
1512 1512 if state in "m" and f not in m1 and f not in m2:
1513 1513 ui.warn(_("%s in state %s, but not in either manifest\n") %
1514 1514 (f, state))
1515 1515 errors += 1
1516 1516 for f in m1:
1517 1517 state = repo.dirstate[f]
1518 1518 if state not in "nrm":
1519 1519 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1520 1520 errors += 1
1521 1521 if errors:
1522 1522 error = _(".hg/dirstate inconsistent with current parent's manifest")
1523 1523 raise util.Abort(error)
1524 1524
1525 1525 @command('debugcommands', [], _('[COMMAND]'))
1526 1526 def debugcommands(ui, cmd='', *args):
1527 1527 """list all available commands and options"""
1528 1528 for cmd, vals in sorted(table.iteritems()):
1529 1529 cmd = cmd.split('|')[0].strip('^')
1530 1530 opts = ', '.join([i[1] for i in vals[1]])
1531 1531 ui.write('%s: %s\n' % (cmd, opts))
1532 1532
1533 1533 @command('debugcomplete',
1534 1534 [('o', 'options', None, _('show the command options'))],
1535 1535 _('[-o] CMD'))
1536 1536 def debugcomplete(ui, cmd='', **opts):
1537 1537 """returns the completion list associated with the given command"""
1538 1538
1539 1539 if opts.get('options'):
1540 1540 options = []
1541 1541 otables = [globalopts]
1542 1542 if cmd:
1543 1543 aliases, entry = cmdutil.findcmd(cmd, table, False)
1544 1544 otables.append(entry[1])
1545 1545 for t in otables:
1546 1546 for o in t:
1547 1547 if "(DEPRECATED)" in o[3]:
1548 1548 continue
1549 1549 if o[0]:
1550 1550 options.append('-%s' % o[0])
1551 1551 options.append('--%s' % o[1])
1552 1552 ui.write("%s\n" % "\n".join(options))
1553 1553 return
1554 1554
1555 1555 cmdlist = cmdutil.findpossible(cmd, table)
1556 1556 if ui.verbose:
1557 1557 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1558 1558 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1559 1559
1560 1560 @command('debugdag',
1561 1561 [('t', 'tags', None, _('use tags as labels')),
1562 1562 ('b', 'branches', None, _('annotate with branch names')),
1563 1563 ('', 'dots', None, _('use dots for runs')),
1564 1564 ('s', 'spaces', None, _('separate elements by spaces'))],
1565 1565 _('[OPTION]... [FILE [REV]...]'))
1566 1566 def debugdag(ui, repo, file_=None, *revs, **opts):
1567 1567 """format the changelog or an index DAG as a concise textual description
1568 1568
1569 1569 If you pass a revlog index, the revlog's DAG is emitted. If you list
1570 1570 revision numbers, they get labelled in the output as rN.
1571 1571
1572 1572 Otherwise, the changelog DAG of the current repo is emitted.
1573 1573 """
1574 1574 spaces = opts.get('spaces')
1575 1575 dots = opts.get('dots')
1576 1576 if file_:
1577 1577 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1578 1578 revs = set((int(r) for r in revs))
1579 1579 def events():
1580 1580 for r in rlog:
1581 1581 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1582 1582 if r in revs:
1583 1583 yield 'l', (r, "r%i" % r)
1584 1584 elif repo:
1585 1585 cl = repo.changelog
1586 1586 tags = opts.get('tags')
1587 1587 branches = opts.get('branches')
1588 1588 if tags:
1589 1589 labels = {}
1590 1590 for l, n in repo.tags().items():
1591 1591 labels.setdefault(cl.rev(n), []).append(l)
1592 1592 def events():
1593 1593 b = "default"
1594 1594 for r in cl:
1595 1595 if branches:
1596 1596 newb = cl.read(cl.node(r))[5]['branch']
1597 1597 if newb != b:
1598 1598 yield 'a', newb
1599 1599 b = newb
1600 1600 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1601 1601 if tags:
1602 1602 ls = labels.get(r)
1603 1603 if ls:
1604 1604 for l in ls:
1605 1605 yield 'l', (r, l)
1606 1606 else:
1607 1607 raise util.Abort(_('need repo for changelog dag'))
1608 1608
1609 1609 for line in dagparser.dagtextlines(events(),
1610 1610 addspaces=spaces,
1611 1611 wraplabels=True,
1612 1612 wrapannotations=True,
1613 1613 wrapnonlinear=dots,
1614 1614 usedots=dots,
1615 1615 maxlinewidth=70):
1616 1616 ui.write(line)
1617 1617 ui.write("\n")
1618 1618
1619 1619 @command('debugdata',
1620 1620 [('c', 'changelog', False, _('open changelog')),
1621 1621 ('m', 'manifest', False, _('open manifest'))],
1622 1622 _('-c|-m|FILE REV'))
1623 1623 def debugdata(ui, repo, file_, rev = None, **opts):
1624 1624 """dump the contents of a data file revision"""
1625 1625 if opts.get('changelog') or opts.get('manifest'):
1626 1626 file_, rev = None, file_
1627 1627 elif rev is None:
1628 1628 raise error.CommandError('debugdata', _('invalid arguments'))
1629 1629 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
1630 1630 try:
1631 1631 ui.write(r.revision(r.lookup(rev)))
1632 1632 except KeyError:
1633 1633 raise util.Abort(_('invalid revision identifier %s') % rev)
1634 1634
1635 1635 @command('debugdate',
1636 1636 [('e', 'extended', None, _('try extended date formats'))],
1637 1637 _('[-e] DATE [RANGE]'))
1638 1638 def debugdate(ui, date, range=None, **opts):
1639 1639 """parse and display a date"""
1640 1640 if opts["extended"]:
1641 1641 d = util.parsedate(date, util.extendeddateformats)
1642 1642 else:
1643 1643 d = util.parsedate(date)
1644 1644 ui.write("internal: %s %s\n" % d)
1645 1645 ui.write("standard: %s\n" % util.datestr(d))
1646 1646 if range:
1647 1647 m = util.matchdate(range)
1648 1648 ui.write("match: %s\n" % m(d[0]))
1649 1649
1650 1650 @command('debugdiscovery',
1651 1651 [('', 'old', None, _('use old-style discovery')),
1652 1652 ('', 'nonheads', None,
1653 1653 _('use old-style discovery with non-heads included')),
1654 1654 ] + remoteopts,
1655 1655 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
1656 1656 def debugdiscovery(ui, repo, remoteurl="default", **opts):
1657 1657 """runs the changeset discovery protocol in isolation"""
1658 1658 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
1659 1659 remote = hg.peer(repo, opts, remoteurl)
1660 1660 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
1661 1661
1662 1662 # make sure tests are repeatable
1663 1663 random.seed(12323)
1664 1664
1665 1665 def doit(localheads, remoteheads):
1666 1666 if opts.get('old'):
1667 1667 if localheads:
1668 1668 raise util.Abort('cannot use localheads with old style discovery')
1669 1669 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
1670 1670 force=True)
1671 1671 common = set(common)
1672 1672 if not opts.get('nonheads'):
1673 1673 ui.write("unpruned common: %s\n" % " ".join([short(n)
1674 1674 for n in common]))
1675 1675 dag = dagutil.revlogdag(repo.changelog)
1676 1676 all = dag.ancestorset(dag.internalizeall(common))
1677 1677 common = dag.externalizeall(dag.headsetofconnecteds(all))
1678 1678 else:
1679 1679 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
1680 1680 common = set(common)
1681 1681 rheads = set(hds)
1682 1682 lheads = set(repo.heads())
1683 1683 ui.write("common heads: %s\n" % " ".join([short(n) for n in common]))
1684 1684 if lheads <= common:
1685 1685 ui.write("local is subset\n")
1686 1686 elif rheads <= common:
1687 1687 ui.write("remote is subset\n")
1688 1688
1689 1689 serverlogs = opts.get('serverlog')
1690 1690 if serverlogs:
1691 1691 for filename in serverlogs:
1692 1692 logfile = open(filename, 'r')
1693 1693 try:
1694 1694 line = logfile.readline()
1695 1695 while line:
1696 1696 parts = line.strip().split(';')
1697 1697 op = parts[1]
1698 1698 if op == 'cg':
1699 1699 pass
1700 1700 elif op == 'cgss':
1701 1701 doit(parts[2].split(' '), parts[3].split(' '))
1702 1702 elif op == 'unb':
1703 1703 doit(parts[3].split(' '), parts[2].split(' '))
1704 1704 line = logfile.readline()
1705 1705 finally:
1706 1706 logfile.close()
1707 1707
1708 1708 else:
1709 1709 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
1710 1710 opts.get('remote_head'))
1711 1711 localrevs = opts.get('local_head')
1712 1712 doit(localrevs, remoterevs)
1713 1713
1714 1714 @command('debugfileset', [], ('REVSPEC'))
1715 1715 def debugfileset(ui, repo, expr):
1716 1716 '''parse and apply a fileset specification'''
1717 1717 if ui.verbose:
1718 1718 tree = fileset.parse(expr)[0]
1719 1719 ui.note(tree, "\n")
1720 1720
1721 1721 for f in fileset.getfileset(repo[None], expr):
1722 1722 ui.write("%s\n" % f)
1723 1723
1724 1724 @command('debugfsinfo', [], _('[PATH]'))
1725 1725 def debugfsinfo(ui, path = "."):
1726 1726 """show information detected about current filesystem"""
1727 1727 util.writefile('.debugfsinfo', '')
1728 1728 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1729 1729 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1730 1730 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1731 1731 and 'yes' or 'no'))
1732 1732 os.unlink('.debugfsinfo')
1733 1733
1734 1734 @command('debuggetbundle',
1735 1735 [('H', 'head', [], _('id of head node'), _('ID')),
1736 1736 ('C', 'common', [], _('id of common node'), _('ID')),
1737 1737 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1738 1738 _('REPO FILE [-H|-C ID]...'))
1739 1739 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1740 1740 """retrieves a bundle from a repo
1741 1741
1742 1742 Every ID must be a full-length hex node id string. Saves the bundle to the
1743 1743 given file.
1744 1744 """
1745 1745 repo = hg.peer(ui, opts, repopath)
1746 1746 if not repo.capable('getbundle'):
1747 1747 raise util.Abort("getbundle() not supported by target repository")
1748 1748 args = {}
1749 1749 if common:
1750 1750 args['common'] = [bin(s) for s in common]
1751 1751 if head:
1752 1752 args['heads'] = [bin(s) for s in head]
1753 1753 bundle = repo.getbundle('debug', **args)
1754 1754
1755 1755 bundletype = opts.get('type', 'bzip2').lower()
1756 1756 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1757 1757 bundletype = btypes.get(bundletype)
1758 1758 if bundletype not in changegroup.bundletypes:
1759 1759 raise util.Abort(_('unknown bundle type specified with --type'))
1760 1760 changegroup.writebundle(bundle, bundlepath, bundletype)
1761 1761
1762 1762 @command('debugignore', [], '')
1763 1763 def debugignore(ui, repo, *values, **opts):
1764 1764 """display the combined ignore pattern"""
1765 1765 ignore = repo.dirstate._ignore
1766 1766 includepat = getattr(ignore, 'includepat', None)
1767 1767 if includepat is not None:
1768 1768 ui.write("%s\n" % includepat)
1769 1769 else:
1770 1770 raise util.Abort(_("no ignore patterns found"))
1771 1771
1772 1772 @command('debugindex',
1773 1773 [('c', 'changelog', False, _('open changelog')),
1774 1774 ('m', 'manifest', False, _('open manifest')),
1775 1775 ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1776 1776 _('[-f FORMAT] -c|-m|FILE'))
1777 1777 def debugindex(ui, repo, file_ = None, **opts):
1778 1778 """dump the contents of an index file"""
1779 1779 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1780 1780 format = opts.get('format', 0)
1781 1781 if format not in (0, 1):
1782 1782 raise util.Abort(_("unknown format %d") % format)
1783 1783
1784 1784 generaldelta = r.version & revlog.REVLOGGENERALDELTA
1785 1785 if generaldelta:
1786 1786 basehdr = ' delta'
1787 1787 else:
1788 1788 basehdr = ' base'
1789 1789
1790 1790 if format == 0:
1791 1791 ui.write(" rev offset length " + basehdr + " linkrev"
1792 1792 " nodeid p1 p2\n")
1793 1793 elif format == 1:
1794 1794 ui.write(" rev flag offset length"
1795 1795 " size " + basehdr + " link p1 p2 nodeid\n")
1796 1796
1797 1797 for i in r:
1798 1798 node = r.node(i)
1799 1799 if generaldelta:
1800 1800 base = r.deltaparent(i)
1801 1801 else:
1802 1802 base = r.chainbase(i)
1803 1803 if format == 0:
1804 1804 try:
1805 1805 pp = r.parents(node)
1806 1806 except:
1807 1807 pp = [nullid, nullid]
1808 1808 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1809 1809 i, r.start(i), r.length(i), base, r.linkrev(i),
1810 1810 short(node), short(pp[0]), short(pp[1])))
1811 1811 elif format == 1:
1812 1812 pr = r.parentrevs(i)
1813 1813 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1814 1814 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1815 1815 base, r.linkrev(i), pr[0], pr[1], short(node)))
1816 1816
1817 1817 @command('debugindexdot', [], _('FILE'))
1818 1818 def debugindexdot(ui, repo, file_):
1819 1819 """dump an index DAG as a graphviz dot file"""
1820 1820 r = None
1821 1821 if repo:
1822 1822 filelog = repo.file(file_)
1823 1823 if len(filelog):
1824 1824 r = filelog
1825 1825 if not r:
1826 1826 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1827 1827 ui.write("digraph G {\n")
1828 1828 for i in r:
1829 1829 node = r.node(i)
1830 1830 pp = r.parents(node)
1831 1831 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1832 1832 if pp[1] != nullid:
1833 1833 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1834 1834 ui.write("}\n")
1835 1835
1836 1836 @command('debuginstall', [], '')
1837 1837 def debuginstall(ui):
1838 1838 '''test Mercurial installation
1839 1839
1840 1840 Returns 0 on success.
1841 1841 '''
1842 1842
1843 1843 def writetemp(contents):
1844 1844 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1845 1845 f = os.fdopen(fd, "wb")
1846 1846 f.write(contents)
1847 1847 f.close()
1848 1848 return name
1849 1849
1850 1850 problems = 0
1851 1851
1852 1852 # encoding
1853 1853 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1854 1854 try:
1855 1855 encoding.fromlocal("test")
1856 1856 except util.Abort, inst:
1857 1857 ui.write(" %s\n" % inst)
1858 1858 ui.write(_(" (check that your locale is properly set)\n"))
1859 1859 problems += 1
1860 1860
1861 1861 # compiled modules
1862 1862 ui.status(_("Checking installed modules (%s)...\n")
1863 1863 % os.path.dirname(__file__))
1864 1864 try:
1865 1865 import bdiff, mpatch, base85, osutil
1866 1866 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1867 1867 except Exception, inst:
1868 1868 ui.write(" %s\n" % inst)
1869 1869 ui.write(_(" One or more extensions could not be found"))
1870 1870 ui.write(_(" (check that you compiled the extensions)\n"))
1871 1871 problems += 1
1872 1872
1873 1873 # templates
1874 1874 import templater
1875 1875 p = templater.templatepath()
1876 1876 ui.status(_("Checking templates (%s)...\n") % ' '.join(p))
1877 1877 try:
1878 1878 templater.templater(templater.templatepath("map-cmdline.default"))
1879 1879 except Exception, inst:
1880 1880 ui.write(" %s\n" % inst)
1881 1881 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1882 1882 problems += 1
1883 1883
1884 1884 # editor
1885 1885 ui.status(_("Checking commit editor...\n"))
1886 1886 editor = ui.geteditor()
1887 1887 cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
1888 1888 if not cmdpath:
1889 1889 if editor == 'vi':
1890 1890 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1891 1891 ui.write(_(" (specify a commit editor in your configuration"
1892 1892 " file)\n"))
1893 1893 else:
1894 1894 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1895 1895 ui.write(_(" (specify a commit editor in your configuration"
1896 1896 " file)\n"))
1897 1897 problems += 1
1898 1898
1899 1899 # check username
1900 1900 ui.status(_("Checking username...\n"))
1901 1901 try:
1902 1902 ui.username()
1903 1903 except util.Abort, e:
1904 1904 ui.write(" %s\n" % e)
1905 1905 ui.write(_(" (specify a username in your configuration file)\n"))
1906 1906 problems += 1
1907 1907
1908 1908 if not problems:
1909 1909 ui.status(_("No problems detected\n"))
1910 1910 else:
1911 1911 ui.write(_("%s problems detected,"
1912 1912 " please check your install!\n") % problems)
1913 1913
1914 1914 return problems
1915 1915
1916 1916 @command('debugknown', [], _('REPO ID...'))
1917 1917 def debugknown(ui, repopath, *ids, **opts):
1918 1918 """test whether node ids are known to a repo
1919 1919
1920 1920 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1921 1921 indicating unknown/known.
1922 1922 """
1923 1923 repo = hg.peer(ui, opts, repopath)
1924 1924 if not repo.capable('known'):
1925 1925 raise util.Abort("known() not supported by target repository")
1926 1926 flags = repo.known([bin(s) for s in ids])
1927 1927 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1928 1928
1929 1929 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
1930 1930 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1931 1931 '''access the pushkey key/value protocol
1932 1932
1933 1933 With two args, list the keys in the given namespace.
1934 1934
1935 1935 With five args, set a key to new if it currently is set to old.
1936 1936 Reports success or failure.
1937 1937 '''
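# Illustrative invocations (assuming the 'bookmarks' pushkey namespace):
#   hg debugpushkey path/to/repo bookmarks                  # list the keys
#   hg debugpushkey path/to/repo bookmarks mybook OLD NEW   # conditional set
# where OLD and NEW are placeholder values for the key's old and new contents.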
1938 1938
1939 1939 target = hg.peer(ui, {}, repopath)
1940 1940 if keyinfo:
1941 1941 key, old, new = keyinfo
1942 1942 r = target.pushkey(namespace, key, old, new)
1943 1943 ui.status(str(r) + '\n')
1944 1944 return not r
1945 1945 else:
1946 1946 for k, v in target.listkeys(namespace).iteritems():
1947 1947 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1948 1948 v.encode('string-escape')))
1949 1949
1950 1950 @command('debugrebuildstate',
1951 1951 [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
1952 1952 _('[-r REV] [REV]'))
1953 1953 def debugrebuildstate(ui, repo, rev="tip"):
1954 1954 """rebuild the dirstate as it would look like for the given revision"""
1955 1955 ctx = scmutil.revsingle(repo, rev)
1956 1956 wlock = repo.wlock()
1957 1957 try:
1958 1958 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1959 1959 finally:
1960 1960 wlock.release()
1961 1961
1962 1962 @command('debugrename',
1963 1963 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1964 1964 _('[-r REV] FILE'))
1965 1965 def debugrename(ui, repo, file1, *pats, **opts):
1966 1966 """dump rename information"""
1967 1967
1968 1968 ctx = scmutil.revsingle(repo, opts.get('rev'))
1969 1969 m = scmutil.match(ctx, (file1,) + pats, opts)
1970 1970 for abs in ctx.walk(m):
1971 1971 fctx = ctx[abs]
1972 1972 o = fctx.filelog().renamed(fctx.filenode())
1973 1973 rel = m.rel(abs)
1974 1974 if o:
1975 1975 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1976 1976 else:
1977 1977 ui.write(_("%s not renamed\n") % rel)
1978 1978
1979 1979 @command('debugrevlog',
1980 1980 [('c', 'changelog', False, _('open changelog')),
1981 1981 ('m', 'manifest', False, _('open manifest')),
1982 1982 ('d', 'dump', False, _('dump index data'))],
1983 1983 _('-c|-m|FILE'))
1984 1984 def debugrevlog(ui, repo, file_ = None, **opts):
1985 1985 """show data and statistics about a revlog"""
1986 1986 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1987 1987
1988 1988 if opts.get("dump"):
1989 1989 numrevs = len(r)
1990 1990 ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
1991 1991 " rawsize totalsize compression heads\n")
1992 1992 ts = 0
1993 1993 heads = set()
1994 1994 for rev in xrange(numrevs):
1995 1995 dbase = r.deltaparent(rev)
1996 1996 if dbase == -1:
1997 1997 dbase = rev
1998 1998 cbase = r.chainbase(rev)
1999 1999 p1, p2 = r.parentrevs(rev)
2000 2000 rs = r.rawsize(rev)
2001 2001 ts = ts + rs
2002 2002 heads -= set(r.parentrevs(rev))
2003 2003 heads.add(rev)
2004 2004 ui.write("%d %d %d %d %d %d %d %d %d %d %d %d %d\n" %
2005 2005 (rev, p1, p2, r.start(rev), r.end(rev),
2006 2006 r.start(dbase), r.start(cbase),
2007 2007 r.start(p1), r.start(p2),
2008 2008 rs, ts, ts / r.end(rev), len(heads)))
2009 2009 return 0
2010 2010
2011 2011 v = r.version
2012 2012 format = v & 0xFFFF
2013 2013 flags = []
2014 2014 gdelta = False
2015 2015 if v & revlog.REVLOGNGINLINEDATA:
2016 2016 flags.append('inline')
2017 2017 if v & revlog.REVLOGGENERALDELTA:
2018 2018 gdelta = True
2019 2019 flags.append('generaldelta')
2020 2020 if not flags:
2021 2021 flags = ['(none)']
2022 2022
2023 2023 nummerges = 0
2024 2024 numfull = 0
2025 2025 numprev = 0
2026 2026 nump1 = 0
2027 2027 nump2 = 0
2028 2028 numother = 0
2029 2029 nump1prev = 0
2030 2030 nump2prev = 0
2031 2031 chainlengths = []
2032 2032
2033 2033 datasize = [None, 0, 0L]
2034 2034 fullsize = [None, 0, 0L]
2035 2035 deltasize = [None, 0, 0L]
2036 2036
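# addsize maintains a running [min, max, total] triple; the three lists above
# collect these statistics for raw data, full-text revisions and deltas.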
2037 2037 def addsize(size, l):
2038 2038 if l[0] is None or size < l[0]:
2039 2039 l[0] = size
2040 2040 if size > l[1]:
2041 2041 l[1] = size
2042 2042 l[2] += size
2043 2043
2044 2044 numrevs = len(r)
2045 2045 for rev in xrange(numrevs):
2046 2046 p1, p2 = r.parentrevs(rev)
2047 2047 delta = r.deltaparent(rev)
2048 2048 if format > 0:
2049 2049 addsize(r.rawsize(rev), datasize)
2050 2050 if p2 != nullrev:
2051 2051 nummerges += 1
2052 2052 size = r.length(rev)
2053 2053 if delta == nullrev:
2054 2054 chainlengths.append(0)
2055 2055 numfull += 1
2056 2056 addsize(size, fullsize)
2057 2057 else:
2058 2058 chainlengths.append(chainlengths[delta] + 1)
2059 2059 addsize(size, deltasize)
2060 2060 if delta == rev - 1:
2061 2061 numprev += 1
2062 2062 if delta == p1:
2063 2063 nump1prev += 1
2064 2064 elif delta == p2:
2065 2065 nump2prev += 1
2066 2066 elif delta == p1:
2067 2067 nump1 += 1
2068 2068 elif delta == p2:
2069 2069 nump2 += 1
2070 2070 elif delta != nullrev:
2071 2071 numother += 1
2072 2072
2073 2073 numdeltas = numrevs - numfull
2074 2074 numoprev = numprev - nump1prev - nump2prev
2075 2075 totalrawsize = datasize[2]
2076 2076 datasize[2] /= numrevs
2077 2077 fulltotal = fullsize[2]
2078 2078 fullsize[2] /= numfull
2079 2079 deltatotal = deltasize[2]
2080 2080 deltasize[2] /= numrevs - numfull
2081 2081 totalsize = fulltotal + deltatotal
2082 2082 avgchainlen = sum(chainlengths) / numrevs
2083 2083 compratio = totalrawsize / totalsize
2084 2084
2085 2085 basedfmtstr = '%%%dd\n'
2086 2086 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2087 2087
2088 2088 def dfmtstr(max):
2089 2089 return basedfmtstr % len(str(max))
2090 2090 def pcfmtstr(max, padding=0):
2091 2091 return basepcfmtstr % (len(str(max)), ' ' * padding)
2092 2092
2093 2093 def pcfmt(value, total):
2094 2094 return (value, 100 * float(value) / total)
2095 2095
2096 2096 ui.write('format : %d\n' % format)
2097 2097 ui.write('flags : %s\n' % ', '.join(flags))
2098 2098
2099 2099 ui.write('\n')
2100 2100 fmt = pcfmtstr(totalsize)
2101 2101 fmt2 = dfmtstr(totalsize)
2102 2102 ui.write('revisions : ' + fmt2 % numrevs)
2103 2103 ui.write(' merges : ' + fmt % pcfmt(nummerges, numrevs))
2104 2104 ui.write(' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
2105 2105 ui.write('revisions : ' + fmt2 % numrevs)
2106 2106 ui.write(' full : ' + fmt % pcfmt(numfull, numrevs))
2107 2107 ui.write(' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2108 2108 ui.write('revision size : ' + fmt2 % totalsize)
2109 2109 ui.write(' full : ' + fmt % pcfmt(fulltotal, totalsize))
2110 2110 ui.write(' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2111 2111
2112 2112 ui.write('\n')
2113 2113 fmt = dfmtstr(max(avgchainlen, compratio))
2114 2114 ui.write('avg chain length : ' + fmt % avgchainlen)
2115 2115 ui.write('compression ratio : ' + fmt % compratio)
2116 2116
2117 2117 if format > 0:
2118 2118 ui.write('\n')
2119 2119 ui.write('uncompressed data size (min/max/avg) : %d / %d / %d\n'
2120 2120 % tuple(datasize))
2121 2121 ui.write('full revision size (min/max/avg) : %d / %d / %d\n'
2122 2122 % tuple(fullsize))
2123 2123 ui.write('delta size (min/max/avg) : %d / %d / %d\n'
2124 2124 % tuple(deltasize))
2125 2125
2126 2126 if numdeltas > 0:
2127 2127 ui.write('\n')
2128 2128 fmt = pcfmtstr(numdeltas)
2129 2129 fmt2 = pcfmtstr(numdeltas, 4)
2130 2130 ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
2131 2131 if numprev > 0:
2132 2132 ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
2133 2133 ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
2134 2134 ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
2135 2135 if gdelta:
2136 2136 ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
2137 2137 ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
2138 2138 ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
2139 2139
2140 2140 @command('debugrevspec', [], ('REVSPEC'))
2141 2141 def debugrevspec(ui, repo, expr):
2142 2142 '''parse and apply a revision specification'''
2143 2143 if ui.verbose:
2144 2144 tree = revset.parse(expr)[0]
2145 2145 ui.note(tree, "\n")
2146 2146 newtree = revset.findaliases(ui, tree)
2147 2147 if newtree != tree:
2148 2148 ui.note(newtree, "\n")
2149 2149 func = revset.match(ui, expr)
2150 2150 for c in func(repo, range(len(repo))):
2151 2151 ui.write("%s\n" % c)
2152 2152
2153 2153 @command('debugsetparents', [], _('REV1 [REV2]'))
2154 2154 def debugsetparents(ui, repo, rev1, rev2=None):
2155 2155 """manually set the parents of the current working directory
2156 2156
2157 2157 This is useful for writing repository conversion tools, but should
2158 2158 be used with care.
2159 2159
2160 2160 Returns 0 on success.
2161 2161 """
2162 2162
2163 2163 r1 = scmutil.revsingle(repo, rev1).node()
2164 2164 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2165 2165
2166 2166 wlock = repo.wlock()
2167 2167 try:
2168 2168 repo.dirstate.setparents(r1, r2)
2169 2169 finally:
2170 2170 wlock.release()
2171 2171
2172 2172 @command('debugstate',
2173 2173 [('', 'nodates', None, _('do not display the saved mtime')),
2174 2174 ('', 'datesort', None, _('sort by saved mtime'))],
2175 2175 _('[OPTION]...'))
2176 2176 def debugstate(ui, repo, nodates=None, datesort=None):
2177 2177 """show the contents of the current dirstate"""
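# Each dirstate entry is a (state, mode, size, mtime) tuple; the loop below
# prints the state character and mode, the size, and the mtime unless
# --nodates was given.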
2178 2178 timestr = ""
2179 2179 showdate = not nodates
2180 2180 if datesort:
2181 2181 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
2182 2182 else:
2183 2183 keyfunc = None # sort by filename
2184 2184 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
2185 2185 if showdate:
2186 2186 if ent[3] == -1:
2187 2187 # Pad or slice to locale representation
2188 2188 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
2189 2189 time.localtime(0)))
2190 2190 timestr = 'unset'
2191 2191 timestr = (timestr[:locale_len] +
2192 2192 ' ' * (locale_len - len(timestr)))
2193 2193 else:
2194 2194 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
2195 2195 time.localtime(ent[3]))
2196 2196 if ent[1] & 020000:
2197 2197 mode = 'lnk'
2198 2198 else:
2199 2199 mode = '%3o' % (ent[1] & 0777 & ~util.umask)
2200 2200 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
2201 2201 for f in repo.dirstate.copies():
2202 2202 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2203 2203
2204 2204 @command('debugsub',
2205 2205 [('r', 'rev', '',
2206 2206 _('revision to check'), _('REV'))],
2207 2207 _('[-r REV] [REV]'))
2208 2208 def debugsub(ui, repo, rev=None):
2209 2209 ctx = scmutil.revsingle(repo, rev, None)
2210 2210 for k, v in sorted(ctx.substate.items()):
2211 2211 ui.write('path %s\n' % k)
2212 2212 ui.write(' source %s\n' % v[0])
2213 2213 ui.write(' revision %s\n' % v[1])
2214 2214
2215 2215 @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'))
2216 2216 def debugwalk(ui, repo, *pats, **opts):
2217 2217 """show how files match on given patterns"""
2218 2218 m = scmutil.match(repo[None], pats, opts)
2219 2219 items = list(repo.walk(m))
2220 2220 if not items:
2221 2221 return
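# Build a row format wide enough for the longest absolute and relative paths
# so the output columns stay aligned.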
2222 2222 fmt = 'f %%-%ds %%-%ds %%s' % (
2223 2223 max([len(abs) for abs in items]),
2224 2224 max([len(m.rel(abs)) for abs in items]))
2225 2225 for abs in items:
2226 2226 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
2227 2227 ui.write("%s\n" % line.rstrip())
2228 2228
2229 2229 @command('debugwireargs',
2230 2230 [('', 'three', '', 'three'),
2231 2231 ('', 'four', '', 'four'),
2232 2232 ('', 'five', '', 'five'),
2233 2233 ] + remoteopts,
2234 2234 _('REPO [OPTIONS]... [ONE [TWO]]'))
2235 2235 def debugwireargs(ui, repopath, *vals, **opts):
2236 2236 repo = hg.peer(ui, opts, repopath)
2237 2237 for opt in remoteopts:
2238 2238 del opts[opt[1]]
2239 2239 args = {}
2240 2240 for k, v in opts.iteritems():
2241 2241 if v:
2242 2242 args[k] = v
2243 2243 # run twice to check that we don't mess up the stream for the next command
2244 2244 res1 = repo.debugwireargs(*vals, **args)
2245 2245 res2 = repo.debugwireargs(*vals, **args)
2246 2246 ui.write("%s\n" % res1)
2247 2247 if res1 != res2:
2248 2248 ui.warn("%s\n" % res2)
2249 2249
2250 2250 @command('^diff',
2251 2251 [('r', 'rev', [], _('revision'), _('REV')),
2252 2252 ('c', 'change', '', _('change made by revision'), _('REV'))
2253 2253 ] + diffopts + diffopts2 + walkopts + subrepoopts,
2254 2254 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'))
2255 2255 def diff(ui, repo, *pats, **opts):
2256 2256 """diff repository (or selected files)
2257 2257
2258 2258 Show differences between revisions for the specified files.
2259 2259
2260 2260 Differences between files are shown using the unified diff format.
2261 2261
2262 2262 .. note::
2263 2263 diff may generate unexpected results for merges, as it will
2264 2264 default to comparing against the working directory's first
2265 2265 parent changeset if no revisions are specified.
2266 2266
2267 2267 When two revision arguments are given, then changes are shown
2268 2268 between those revisions. If only one revision is specified then
2269 2269 that revision is compared to the working directory, and, when no
2270 2270 revisions are specified, the working directory files are compared
2271 2271 to its parent.
2272 2272
2273 2273 Alternatively you can specify -c/--change with a revision to see
2274 2274 the changes in that changeset relative to its first parent.
2275 2275
2276 2276 Without the -a/--text option, diff will avoid generating diffs of
2277 2277 files it detects as binary. With -a, diff will generate a diff
2278 2278 anyway, probably with undesirable results.
2279 2279
2280 2280 Use the -g/--git option to generate diffs in the git extended diff
2281 2281 format. For more information, read :hg:`help diffs`.
2282 2282
2283 2283 .. container:: verbose
2284 2284
2285 2285 Examples:
2286 2286
2287 2287 - compare a file in the current working directory to its parent::
2288 2288
2289 2289 hg diff foo.c
2290 2290
2291 2291 - compare two historical versions of a directory, with rename info::
2292 2292
2293 2293 hg diff --git -r 1.0:1.2 lib/
2294 2294
2295 2295 - get change stats relative to the last change on some date::
2296 2296
2297 2297 hg diff --stat -r "date('may 2')"
2298 2298
2299 2299 - diff all newly-added files that contain a keyword::
2300 2300
2301 2301 hg diff "set:added() and grep(GNU)"
2302 2302
2303 2303 - compare a revision and its parents::
2304 2304
2305 2305 hg diff -c 9353 # compare against first parent
2306 2306 hg diff -r 9353^:9353 # same using revset syntax
2307 2307 hg diff -r 9353^2:9353 # compare against the second parent
2308 2308
2309 2309 Returns 0 on success.
2310 2310 """
2311 2311
2312 2312 revs = opts.get('rev')
2313 2313 change = opts.get('change')
2314 2314 stat = opts.get('stat')
2315 2315 reverse = opts.get('reverse')
2316 2316
2317 2317 if revs and change:
2318 2318 msg = _('cannot specify --rev and --change at the same time')
2319 2319 raise util.Abort(msg)
2320 2320 elif change:
2321 2321 node2 = scmutil.revsingle(repo, change, None).node()
2322 2322 node1 = repo[node2].p1().node()
2323 2323 else:
2324 2324 node1, node2 = scmutil.revpair(repo, revs)
2325 2325
2326 2326 if reverse:
2327 2327 node1, node2 = node2, node1
2328 2328
2329 2329 diffopts = patch.diffopts(ui, opts)
2330 2330 m = scmutil.match(repo[node2], pats, opts)
2331 2331 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
2332 2332 listsubrepos=opts.get('subrepos'))
2333 2333
2334 2334 @command('^export',
2335 2335 [('o', 'output', '',
2336 2336 _('print output to file with formatted name'), _('FORMAT')),
2337 2337 ('', 'switch-parent', None, _('diff against the second parent')),
2338 2338 ('r', 'rev', [], _('revisions to export'), _('REV')),
2339 2339 ] + diffopts,
2340 2340 _('[OPTION]... [-o OUTFILESPEC] REV...'))
2341 2341 def export(ui, repo, *changesets, **opts):
2342 2342 """dump the header and diffs for one or more changesets
2343 2343
2344 2344 Print the changeset header and diffs for one or more revisions.
2345 2345
2346 2346 The information shown in the changeset header is: author, date,
2347 2347 branch name (if non-default), changeset hash, parent(s) and commit
2348 2348 comment.
2349 2349
2350 2350 .. note::
2351 2351 export may generate unexpected diff output for merge
2352 2352 changesets, as it will compare the merge changeset against its
2353 2353 first parent only.
2354 2354
2355 2355 Output may be to a file, in which case the name of the file is
2356 2356 given using a format string. The formatting rules are as follows:
2357 2357
2358 2358 :``%%``: literal "%" character
2359 2359 :``%H``: changeset hash (40 hexadecimal digits)
2360 2360 :``%N``: number of patches being generated
2361 2361 :``%R``: changeset revision number
2362 2362 :``%b``: basename of the exporting repository
2363 2363 :``%h``: short-form changeset hash (12 hexadecimal digits)
2364 2364 :``%m``: first line of the commit message (only alphanumeric characters)
2365 2365 :``%n``: zero-padded sequence number, starting at 1
2366 2366 :``%r``: zero-padded changeset revision number
2367 2367
2368 2368 Without the -a/--text option, export will avoid generating diffs
2369 2369 of files it detects as binary. With -a, export will generate a
2370 2370 diff anyway, probably with undesirable results.
2371 2371
2372 2372 Use the -g/--git option to generate diffs in the git extended diff
2373 2373 format. See :hg:`help diffs` for more information.
2374 2374
2375 2375 With the --switch-parent option, the diff will be against the
2376 2376 second parent. This can be useful when reviewing a merge.
2377 2377
2378 2378 .. container:: verbose
2379 2379
2380 2380 Examples:
2381 2381
2382 2382 - use export and import to transplant a bugfix to the current
2383 2383 branch::
2384 2384
2385 2385 hg export -r 9353 | hg import -
2386 2386
2387 2387 - export all the changesets between two revisions to a file with
2388 2388 rename information::
2389 2389
2390 2390 hg export --git -r 123:150 > changes.txt
2391 2391
2392 2392 - split outgoing changes into a series of patches with
2393 2393 descriptive names::
2394 2394
2395 2395 hg export -r "outgoing()" -o "%n-%m.patch"
2396 2396
2397 2397 Returns 0 on success.
2398 2398 """
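# For illustration of the format string rules above: "hg export -o
# '%n-%m.patch'" names each output file from the zero-padded sequence number
# and a sanitized first line of the commit message, e.g. something like
# "1-fix_the_build.patch" (the exact sanitization of %m is an implementation
# detail of cmdutil).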
2399 2399 changesets += tuple(opts.get('rev', []))
2400 2400 if not changesets:
2401 2401 raise util.Abort(_("export requires at least one changeset"))
2402 2402 revs = scmutil.revrange(repo, changesets)
2403 2403 if len(revs) > 1:
2404 2404 ui.note(_('exporting patches:\n'))
2405 2405 else:
2406 2406 ui.note(_('exporting patch:\n'))
2407 2407 cmdutil.export(repo, revs, template=opts.get('output'),
2408 2408 switch_parent=opts.get('switch_parent'),
2409 2409 opts=patch.diffopts(ui, opts))
2410 2410
2411 2411 @command('^forget', walkopts, _('[OPTION]... FILE...'))
2412 2412 def forget(ui, repo, *pats, **opts):
2413 2413 """forget the specified files on the next commit
2414 2414
2415 2415 Mark the specified files so they will no longer be tracked
2416 2416 after the next commit.
2417 2417
2418 2418 This only removes files from the current branch, not from the
2419 2419 entire project history, and it does not delete them from the
2420 2420 working directory.
2421 2421
2422 2422 To undo a forget before the next commit, see :hg:`add`.
2423 2423
2424 2424 .. container:: verbose
2425 2425
2426 2426 Examples:
2427 2427
2428 2428 - forget newly-added binary files::
2429 2429
2430 2430 hg forget "set:added() and binary()"
2431 2431
2432 2432 - forget files that would be excluded by .hgignore::
2433 2433
2434 2434 hg forget "set:hgignore()"
2435 2435
2436 2436 Returns 0 on success.
2437 2437 """
2438 2438
2439 2439 if not pats:
2440 2440 raise util.Abort(_('no files specified'))
2441 2441
2442 2442 wctx = repo[None]
2443 2443 m = scmutil.match(wctx, pats, opts)
2444 2444 s = repo.status(match=m, clean=True)
2445 2445 forget = sorted(s[0] + s[1] + s[3] + s[6])
2446 2446 subforget = {}
2447 2447 errs = 0
2448 2448
2449 2449 for subpath in wctx.substate:
2450 2450 sub = wctx.sub(subpath)
2451 2451 try:
2452 2452 submatch = matchmod.narrowmatcher(subpath, m)
2453 2453 for fsub in sub.walk(submatch):
2454 2454 if submatch.exact(fsub):
2455 2455 subforget[subpath + '/' + fsub] = (fsub, sub)
2456 2456 except error.LookupError:
2457 2457 ui.status(_("skipping missing subrepository: %s\n") % subpath)
2458 2458
2459 2459 for f in m.files():
2460 2460 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2461 2461 if f not in subforget:
2462 2462 if os.path.exists(m.rel(f)):
2463 2463 ui.warn(_('not removing %s: file is already untracked\n')
2464 2464 % m.rel(f))
2465 2465 errs = 1
2466 2466
2467 2467 for f in forget:
2468 2468 if ui.verbose or not m.exact(f):
2469 2469 ui.status(_('removing %s\n') % m.rel(f))
2470 2470
2471 2471 if ui.verbose:
2472 2472 for f in sorted(subforget.keys()):
2473 2473 ui.status(_('removing %s\n') % m.rel(f))
2474 2474
2475 2475 wctx.forget(forget)
2476 2476
2477 2477 for f in sorted(subforget.keys()):
2478 2478 fsub, sub = subforget[f]
2479 2479 sub.forget([fsub])
2480 2480
2481 2481 return errs
2482 2482
2483 2483 @command(
2484 2484 'graft',
2485 2485 [('c', 'continue', False, _('resume interrupted graft')),
2486 2486 ('e', 'edit', False, _('invoke editor on commit messages')),
2487 2487 ('D', 'currentdate', False,
2488 2488 _('record the current date as commit date')),
2489 2489 ('U', 'currentuser', False,
2490 2490 _('record the current user as committer'))]
2491 2491 + commitopts2 + mergetoolopts,
2492 2492 _('[OPTION]... REVISION...'))
2493 2493 def graft(ui, repo, *revs, **opts):
2494 2494 '''copy changes from other branches onto the current branch
2495 2495
2496 2496 This command uses Mercurial's merge logic to copy individual
2497 2497 changes from other branches without merging branches in the
2498 2498 history graph. This is sometimes known as 'backporting' or
2499 2499 'cherry-picking'. By default, graft will copy user, date, and
2500 2500 description from the source changesets.
2501 2501
2502 2502 Changesets that are ancestors of the current revision, that have
2503 2503 already been grafted, or that are merges will be skipped.
2504 2504
2505 2505 If a graft merge results in conflicts, the graft process is
2506 2506 aborted so that the current merge can be manually resolved. Once
2507 2507 all conflicts are addressed, the graft process can be continued
2508 2508 with the -c/--continue option.
2509 2509
2510 2510 .. note::
2511 2511 The -c/--continue option does not reapply earlier options.
2512 2512
2513 2513 .. container:: verbose
2514 2514
2515 2515 Examples:
2516 2516
2517 2517 - copy a single change to the stable branch and edit its description::
2518 2518
2519 2519 hg update stable
2520 2520 hg graft --edit 9393
2521 2521
2522 2522 - graft a range of changesets with one exception, updating dates::
2523 2523
2524 2524 hg graft -D "2085::2093 and not 2091"
2525 2525
2526 2526 - continue a graft after resolving conflicts::
2527 2527
2528 2528 hg graft -c
2529 2529
2530 2530 - show the source of a grafted changeset::
2531 2531
2532 2532 hg log --debug -r tip
2533 2533
2534 2534 Returns 0 on successful completion.
2535 2535 '''
2536 2536
2537 2537 if not opts.get('user') and opts.get('currentuser'):
2538 2538 opts['user'] = ui.username()
2539 2539 if not opts.get('date') and opts.get('currentdate'):
2540 2540 opts['date'] = "%d %d" % util.makedate()
2541 2541
2542 2542 editor = None
2543 2543 if opts.get('edit'):
2544 2544 editor = cmdutil.commitforceeditor
2545 2545
2546 2546 cont = False
2547 2547 if opts['continue']:
2548 2548 cont = True
2549 2549 if revs:
2550 2550 raise util.Abort(_("can't specify --continue and revisions"))
2551 2551 # read in unfinished revisions
2552 2552 try:
2553 2553 nodes = repo.opener.read('graftstate').splitlines()
2554 2554 revs = [repo[node].rev() for node in nodes]
2555 2555 except IOError, inst:
2556 2556 if inst.errno != errno.ENOENT:
2557 2557 raise
2558 2558 raise util.Abort(_("no graft state found, can't continue"))
2559 2559 else:
2560 2560 cmdutil.bailifchanged(repo)
2561 2561 if not revs:
2562 2562 raise util.Abort(_('no revisions specified'))
2563 2563 revs = scmutil.revrange(repo, revs)
2564 2564
2565 2565 # check for merges
2566 2566 for rev in repo.revs('%ld and merge()', revs):
2567 2567 ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
2568 2568 revs.remove(rev)
2569 2569 if not revs:
2570 2570 return -1
2571 2571
2572 2572 # check for ancestors of dest branch
2573 2573 for rev in repo.revs('::. and %ld', revs):
2574 2574 ui.warn(_('skipping ancestor revision %s\n') % rev)
2575 2575 revs.remove(rev)
2576 2576 if not revs:
2577 2577 return -1
2578 2578
2579 2579 # analyze revs for earlier grafts
2580 2580 ids = {}
2581 2581 for ctx in repo.set("%ld", revs):
2582 2582 ids[ctx.hex()] = ctx.rev()
2583 2583 n = ctx.extra().get('source')
2584 2584 if n:
2585 2585 ids[n] = ctx.rev()
2586 2586
2587 2587 # check ancestors for earlier grafts
2588 2588 ui.debug('scanning for duplicate grafts\n')
2589 2589 for ctx in repo.set("::. - ::%ld", revs):
2590 2590 n = ctx.extra().get('source')
2591 2591 if n in ids:
2592 2592 r = repo[n].rev()
2593 2593 if r in revs:
2594 2594 ui.warn(_('skipping already grafted revision %s\n') % r)
2595 2595 revs.remove(r)
2596 2596 elif ids[n] in revs:
2597 2597 ui.warn(_('skipping already grafted revision %s '
2598 2598 '(same origin %d)\n') % (ids[n], r))
2599 2599 revs.remove(ids[n])
2600 2600 elif ctx.hex() in ids:
2601 2601 r = ids[ctx.hex()]
2602 2602 ui.warn(_('skipping already grafted revision %s '
2603 2603 '(was grafted from %d)\n') % (r, ctx.rev()))
2604 2604 revs.remove(r)
2605 2605 if not revs:
2606 2606 return -1
2607 2607
2608 2608 for pos, ctx in enumerate(repo.set("%ld", revs)):
2609 2609 current = repo['.']
2610 2610 ui.status(_('grafting revision %s\n') % ctx.rev())
2611 2611
2612 2612 # we don't merge the first commit when continuing
2613 2613 if not cont:
2614 2614 # perform the graft merge with p1(rev) as 'ancestor'
2615 2615 try:
2616 2616 # ui.forcemerge is an internal variable, do not document
2617 2617 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2618 2618 stats = mergemod.update(repo, ctx.node(), True, True, False,
2619 2619 ctx.p1().node())
2620 2620 finally:
2621 2621 ui.setconfig('ui', 'forcemerge', '')
2622 2622 # drop the second merge parent
2623 2623 repo.dirstate.setparents(current.node(), nullid)
2624 2624 repo.dirstate.write()
2625 2625 # fix up dirstate for copies and renames
2626 2626 cmdutil.duplicatecopies(repo, ctx.rev(), current.node(), nullid)
2627 2627 # report any conflicts
2628 2628 if stats and stats[3] > 0:
2629 2629 # write out state for --continue
2630 2630 nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
2631 2631 repo.opener.write('graftstate', ''.join(nodelines))
2632 2632 raise util.Abort(
2633 2633 _("unresolved conflicts, can't continue"),
2634 2634 hint=_('use hg resolve and hg graft --continue'))
2635 2635 else:
2636 2636 cont = False
2637 2637
2638 2638 # commit
2639 2639 source = ctx.extra().get('source')
2640 2640 if not source:
2641 2641 source = ctx.hex()
2642 2642 extra = {'source': source}
2643 2643 user = ctx.user()
2644 2644 if opts.get('user'):
2645 2645 user = opts['user']
2646 2646 date = ctx.date()
2647 2647 if opts.get('date'):
2648 2648 date = opts['date']
2649 2649 repo.commit(text=ctx.description(), user=user,
2650 2650 date=date, extra=extra, editor=editor)
2651 2651
2652 2652 # remove state when we complete successfully
2653 2653 if os.path.exists(repo.join('graftstate')):
2654 2654 util.unlinkpath(repo.join('graftstate'))
2655 2655
2656 2656 return 0
2657 2657
2658 2658 @command('grep',
2659 2659 [('0', 'print0', None, _('end fields with NUL')),
2660 2660 ('', 'all', None, _('print all revisions that match')),
2661 2661 ('a', 'text', None, _('treat all files as text')),
2662 2662 ('f', 'follow', None,
2663 2663 _('follow changeset history,'
2664 2664 ' or file history across copies and renames')),
2665 2665 ('i', 'ignore-case', None, _('ignore case when matching')),
2666 2666 ('l', 'files-with-matches', None,
2667 2667 _('print only filenames and revisions that match')),
2668 2668 ('n', 'line-number', None, _('print matching line numbers')),
2669 2669 ('r', 'rev', [],
2670 2670 _('only search files changed within revision range'), _('REV')),
2671 2671 ('u', 'user', None, _('list the author (long with -v)')),
2672 2672 ('d', 'date', None, _('list the date (short with -q)')),
2673 2673 ] + walkopts,
2674 2674 _('[OPTION]... PATTERN [FILE]...'))
2675 2675 def grep(ui, repo, pattern, *pats, **opts):
2676 2676 """search for a pattern in specified files and revisions
2677 2677
2678 2678 Search revisions of files for a regular expression.
2679 2679
2680 2680 This command behaves differently from Unix grep. It only accepts
2681 2681 Python/Perl regexps. It searches repository history, not the
2682 2682 working directory. It always prints the revision number in which a
2683 2683 match appears.
2684 2684
2685 2685 By default, grep only prints output for the first revision of a
2686 2686 file in which it finds a match. To get it to print every revision
2687 2687 that contains a change in match status ("-" for a match that
2688 2688 becomes a non-match, or "+" for a non-match that becomes a match),
2689 2689 use the --all flag.
2690 2690
2691 2691 Returns 0 if a match is found, 1 otherwise.
2692 2692 """
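# With --all, each reported line carries the filename, the revision and a
# "+" or "-" change marker (see display() below), so a hit looks roughly like
# "foo.c:12:+:matching line"; without --all the change column is omitted.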
2693 2693 reflags = 0
2694 2694 if opts.get('ignore_case'):
2695 2695 reflags |= re.I
2696 2696 try:
2697 2697 regexp = re.compile(pattern, reflags)
2698 2698 except re.error, inst:
2699 2699 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
2700 2700 return 1
2701 2701 sep, eol = ':', '\n'
2702 2702 if opts.get('print0'):
2703 2703 sep = eol = '\0'
2704 2704
2705 2705 getfile = util.lrucachefunc(repo.file)
2706 2706
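# matchlines() yields (linenum, colstart, colend, line) for every regexp
# match in body, where colstart/colend are offsets of the match within its
# line.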
2707 2707 def matchlines(body):
2708 2708 begin = 0
2709 2709 linenum = 0
2710 2710 while True:
2711 2711 match = regexp.search(body, begin)
2712 2712 if not match:
2713 2713 break
2714 2714 mstart, mend = match.span()
2715 2715 linenum += body.count('\n', begin, mstart) + 1
2716 2716 lstart = body.rfind('\n', begin, mstart) + 1 or begin
2717 2717 begin = body.find('\n', mend) + 1 or len(body) + 1
2718 2718 lend = begin - 1
2719 2719 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
2720 2720
2721 2721 class linestate(object):
2722 2722 def __init__(self, line, linenum, colstart, colend):
2723 2723 self.line = line
2724 2724 self.linenum = linenum
2725 2725 self.colstart = colstart
2726 2726 self.colend = colend
2727 2727
2728 2728 def __hash__(self):
2729 2729 return hash((self.linenum, self.line))
2730 2730
2731 2731 def __eq__(self, other):
2732 2732 return self.line == other.line
2733 2733
2734 2734 matches = {}
2735 2735 copies = {}
2736 2736 def grepbody(fn, rev, body):
2737 2737 matches[rev].setdefault(fn, [])
2738 2738 m = matches[rev][fn]
2739 2739 for lnum, cstart, cend, line in matchlines(body):
2740 2740 s = linestate(line, lnum, cstart, cend)
2741 2741 m.append(s)
2742 2742
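# difflinestates() compares the parent's and the child's match states and
# yields ('+', state) for matches that appear and ('-', state) for matches
# that disappear, mirroring the markers described in the docstring.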
2743 2743 def difflinestates(a, b):
2744 2744 sm = difflib.SequenceMatcher(None, a, b)
2745 2745 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2746 2746 if tag == 'insert':
2747 2747 for i in xrange(blo, bhi):
2748 2748 yield ('+', b[i])
2749 2749 elif tag == 'delete':
2750 2750 for i in xrange(alo, ahi):
2751 2751 yield ('-', a[i])
2752 2752 elif tag == 'replace':
2753 2753 for i in xrange(alo, ahi):
2754 2754 yield ('-', a[i])
2755 2755 for i in xrange(blo, bhi):
2756 2756 yield ('+', b[i])
2757 2757
2758 2758 def display(fn, ctx, pstates, states):
2759 2759 rev = ctx.rev()
2760 2760 datefunc = ui.quiet and util.shortdate or util.datestr
2761 2761 found = False
2762 2762 filerevmatches = {}
2763 2763 def binary():
2764 2764 flog = getfile(fn)
2765 2765 return util.binary(flog.read(ctx.filenode(fn)))
2766 2766
2767 2767 if opts.get('all'):
2768 2768 iter = difflinestates(pstates, states)
2769 2769 else:
2770 2770 iter = [('', l) for l in states]
2771 2771 for change, l in iter:
2772 2772 cols = [fn, str(rev)]
2773 2773 before, match, after = None, None, None
2774 2774 if opts.get('line_number'):
2775 2775 cols.append(str(l.linenum))
2776 2776 if opts.get('all'):
2777 2777 cols.append(change)
2778 2778 if opts.get('user'):
2779 2779 cols.append(ui.shortuser(ctx.user()))
2780 2780 if opts.get('date'):
2781 2781 cols.append(datefunc(ctx.date()))
2782 2782 if opts.get('files_with_matches'):
2783 2783 c = (fn, rev)
2784 2784 if c in filerevmatches:
2785 2785 continue
2786 2786 filerevmatches[c] = 1
2787 2787 else:
2788 2788 before = l.line[:l.colstart]
2789 2789 match = l.line[l.colstart:l.colend]
2790 2790 after = l.line[l.colend:]
2791 2791 ui.write(sep.join(cols))
2792 2792 if before is not None:
2793 2793 if not opts.get('text') and binary():
2794 2794 ui.write(sep + " Binary file matches")
2795 2795 else:
2796 2796 ui.write(sep + before)
2797 2797 ui.write(match, label='grep.match')
2798 2798 ui.write(after)
2799 2799 ui.write(eol)
2800 2800 found = True
2801 2801 return found
2802 2802
2803 2803 skip = {}
2804 2804 revfiles = {}
2805 2805 matchfn = scmutil.match(repo[None], pats, opts)
2806 2806 found = False
2807 2807 follow = opts.get('follow')
2808 2808
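# prep() is the preparation callback passed to cmdutil.walkchangerevs below:
# for each candidate changeset it records the match states of the touched
# files for both the revision and its first parent, so display() can compare
# them.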
2809 2809 def prep(ctx, fns):
2810 2810 rev = ctx.rev()
2811 2811 pctx = ctx.p1()
2812 2812 parent = pctx.rev()
2813 2813 matches.setdefault(rev, {})
2814 2814 matches.setdefault(parent, {})
2815 2815 files = revfiles.setdefault(rev, [])
2816 2816 for fn in fns:
2817 2817 flog = getfile(fn)
2818 2818 try:
2819 2819 fnode = ctx.filenode(fn)
2820 2820 except error.LookupError:
2821 2821 continue
2822 2822
2823 2823 copied = flog.renamed(fnode)
2824 2824 copy = follow and copied and copied[0]
2825 2825 if copy:
2826 2826 copies.setdefault(rev, {})[fn] = copy
2827 2827 if fn in skip:
2828 2828 if copy:
2829 2829 skip[copy] = True
2830 2830 continue
2831 2831 files.append(fn)
2832 2832
2833 2833 if fn not in matches[rev]:
2834 2834 grepbody(fn, rev, flog.read(fnode))
2835 2835
2836 2836 pfn = copy or fn
2837 2837 if pfn not in matches[parent]:
2838 2838 try:
2839 2839 fnode = pctx.filenode(pfn)
2840 2840 grepbody(pfn, parent, flog.read(fnode))
2841 2841 except error.LookupError:
2842 2842 pass
2843 2843
2844 2844 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2845 2845 rev = ctx.rev()
2846 2846 parent = ctx.p1().rev()
2847 2847 for fn in sorted(revfiles.get(rev, [])):
2848 2848 states = matches[rev][fn]
2849 2849 copy = copies.get(rev, {}).get(fn)
2850 2850 if fn in skip:
2851 2851 if copy:
2852 2852 skip[copy] = True
2853 2853 continue
2854 2854 pstates = matches.get(parent, {}).get(copy or fn, [])
2855 2855 if pstates or states:
2856 2856 r = display(fn, ctx, pstates, states)
2857 2857 found = found or r
2858 2858 if r and not opts.get('all'):
2859 2859 skip[fn] = True
2860 2860 if copy:
2861 2861 skip[copy] = True
2862 2862 del matches[rev]
2863 2863 del revfiles[rev]
2864 2864
2865 2865 return not found
2866 2866
2867 2867 @command('heads',
2868 2868 [('r', 'rev', '',
2869 2869 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
2870 2870 ('t', 'topo', False, _('show topological heads only')),
2871 2871 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
2872 2872 ('c', 'closed', False, _('show normal and closed branch heads')),
2873 2873 ] + templateopts,
2874 2874 _('[-ac] [-r STARTREV] [REV]...'))
2875 2875 def heads(ui, repo, *branchrevs, **opts):
2876 2876 """show current repository heads or show branch heads
2877 2877
2878 2878 With no arguments, show all repository branch heads.
2879 2879
2880 2880 Repository "heads" are changesets with no child changesets. They are
2881 2881 where development generally takes place and are the usual targets
2882 2882 for update and merge operations. Branch heads are changesets that have
2883 2883 no child changeset on the same branch.
2884 2884
2885 2885 If one or more REVs are given, only branch heads on the branches
2886 2886 associated with the specified changesets are shown. This means
2887 2887 that you can use :hg:`heads foo` to see the heads on a branch
2888 2888 named ``foo``.
2889 2889
2890 2890 If -c/--closed is specified, also show branch heads marked closed
2891 2891 (see :hg:`commit --close-branch`).
2892 2892
2893 2893 If STARTREV is specified, only those heads that are descendants of
2894 2894 STARTREV will be displayed.
2895 2895
2896 2896 If -t/--topo is specified, named branch mechanics will be ignored and only
2897 2897 changesets without children will be shown.
2898 2898
2899 2899 Returns 0 if matching heads are found, 1 if not.
2900 2900 """
2901 2901
2902 2902 start = None
2903 2903 if 'rev' in opts:
2904 2904 start = scmutil.revsingle(repo, opts['rev'], None).node()
2905 2905
2906 2906 if opts.get('topo'):
2907 2907 heads = [repo[h] for h in repo.heads(start)]
2908 2908 else:
2909 2909 heads = []
2910 2910 for branch in repo.branchmap():
2911 2911 heads += repo.branchheads(branch, start, opts.get('closed'))
2912 2912 heads = [repo[h] for h in heads]
2913 2913
2914 2914 if branchrevs:
2915 2915 branches = set(repo[br].branch() for br in branchrevs)
2916 2916 heads = [h for h in heads if h.branch() in branches]
2917 2917
2918 2918 if opts.get('active') and branchrevs:
2919 2919 dagheads = repo.heads(start)
2920 2920 heads = [h for h in heads if h.node() in dagheads]
2921 2921
2922 2922 if branchrevs:
2923 2923 haveheads = set(h.branch() for h in heads)
2924 2924 if branches - haveheads:
2925 2925 headless = ', '.join(b for b in branches - haveheads)
2926 2926 msg = _('no open branch heads found on branches %s')
2927 2927 if opts.get('rev'):
2928 2928 msg += _(' (started at %s)') % opts['rev']
2929 2929 ui.warn((msg + '\n') % headless)
2930 2930
2931 2931 if not heads:
2932 2932 return 1
2933 2933
2934 2934 heads = sorted(heads, key=lambda x: -x.rev())
2935 2935 displayer = cmdutil.show_changeset(ui, repo, opts)
2936 2936 for ctx in heads:
2937 2937 displayer.show(ctx)
2938 2938 displayer.close()
2939 2939
2940 2940 @command('help',
2941 2941 [('e', 'extension', None, _('show only help for extensions')),
2942 2942 ('c', 'command', None, _('show only help for commands'))],
2943 2943 _('[-ec] [TOPIC]'))
2944 2944 def help_(ui, name=None, unknowncmd=False, full=True, **opts):
2945 2945 """show help for a given topic or a help overview
2946 2946
2947 2947 With no arguments, print a list of commands with short help messages.
2948 2948
2949 2949 Given a topic, extension, or command name, print help for that
2950 2950 topic.
2951 2951
2952 2952 Returns 0 if successful.
2953 2953 """
2954 2954
2955 2955 textwidth = min(ui.termwidth(), 80) - 2
2956 2956
2957 2957 def optrst(options):
2958 2958 data = []
2959 2959 multioccur = False
2960 2960 for option in options:
2961 2961 if len(option) == 5:
2962 2962 shortopt, longopt, default, desc, optlabel = option
2963 2963 else:
2964 2964 shortopt, longopt, default, desc = option
2965 2965 optlabel = _("VALUE") # default label
2966 2966
2967 2967 if _("DEPRECATED") in desc and not ui.verbose:
2968 2968 continue
2969 2969
2970 2970 so = ''
2971 2971 if shortopt:
2972 2972 so = '-' + shortopt
2973 2973 lo = '--' + longopt
2974 2974 if default:
2975 2975 desc += _(" (default: %s)") % default
2976 2976
2977 2977 if isinstance(default, list):
2978 2978 lo += " %s [+]" % optlabel
2979 2979 multioccur = True
2980 2980 elif (default is not None) and not isinstance(default, bool):
2981 2981 lo += " %s" % optlabel
2982 2982
2983 2983 data.append((so, lo, desc))
2984 2984
2985 2985 rst = minirst.maketable(data, 1)
2986 2986
2987 2987 if multioccur:
2988 2988 rst += _("\n[+] marked option can be specified multiple times\n")
2989 2989
2990 2990 return rst
2991 2991
2992 2992 # list all option lists
2993 2993 def opttext(optlist, width):
2994 2994 rst = ''
2995 2995 if not optlist:
2996 2996 return ''
2997 2997
2998 2998 for title, options in optlist:
2999 2999 rst += '\n%s\n' % title
3000 3000 if options:
3001 3001 rst += "\n"
3002 3002 rst += optrst(options)
3003 3003 rst += '\n'
3004 3004
3005 3005 return '\n' + minirst.format(rst, width)
3006 3006
3007 3007 def addglobalopts(optlist, aliases):
3008 3008 if ui.quiet:
3009 3009 return []
3010 3010
3011 3011 if ui.verbose:
3012 3012 optlist.append((_("global options:"), globalopts))
3013 3013 if name == 'shortlist':
3014 3014 optlist.append((_('use "hg help" for the full list '
3015 3015 'of commands'), ()))
3016 3016 else:
3017 3017 if name == 'shortlist':
3018 3018 msg = _('use "hg help" for the full list of commands '
3019 3019 'or "hg -v" for details')
3020 3020 elif name and not full:
3021 3021 msg = _('use "hg help %s" to show the full help text') % name
3022 3022 elif aliases:
3023 3023 msg = _('use "hg -v help%s" to show builtin aliases and '
3024 3024 'global options') % (name and " " + name or "")
3025 3025 else:
3026 3026 msg = _('use "hg -v help %s" to show more info') % name
3027 3027 optlist.append((msg, ()))
3028 3028
3029 3029 def helpcmd(name):
3030 3030 try:
3031 3031 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
3032 3032 except error.AmbiguousCommand, inst:
3033 3033 # py3k fix: except vars can't be used outside the scope of the
3034 3034 # except block, nor can be used inside a lambda. python issue4617
3035 3035 prefix = inst.args[0]
3036 3036 select = lambda c: c.lstrip('^').startswith(prefix)
3037 3037 helplist(select)
3038 3038 return
3039 3039
3040 3040 # check if it's an invalid alias and display its error if it is
3041 3041 if getattr(entry[0], 'badalias', False):
3042 3042 if not unknowncmd:
3043 3043 entry[0](ui)
3044 3044 return
3045 3045
3046 3046 rst = ""
3047 3047
3048 3048 # synopsis
3049 3049 if len(entry) > 2:
3050 3050 if entry[2].startswith('hg'):
3051 3051 rst += "%s\n" % entry[2]
3052 3052 else:
3053 3053 rst += 'hg %s %s\n' % (aliases[0], entry[2])
3054 3054 else:
3055 3055 rst += 'hg %s\n' % aliases[0]
3056 3056
3057 3057 # aliases
3058 3058 if full and not ui.quiet and len(aliases) > 1:
3059 3059 rst += _("\naliases: %s\n") % ', '.join(aliases[1:])
3060 3060
3061 3061 # description
3062 3062 doc = gettext(entry[0].__doc__)
3063 3063 if not doc:
3064 3064 doc = _("(no help text available)")
3065 3065 if util.safehasattr(entry[0], 'definition'): # aliased command
3066 3066 if entry[0].definition.startswith('!'): # shell alias
3067 3067 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
3068 3068 else:
3069 3069 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
3070 3070 if ui.quiet or not full:
3071 3071 doc = doc.splitlines()[0]
3072 3072 rst += "\n" + doc + "\n"
3073 3073
3074 3074 # check if this command shadows a non-trivial (multi-line)
3075 3075 # extension help text
3076 3076 try:
3077 3077 mod = extensions.find(name)
3078 3078 doc = gettext(mod.__doc__) or ''
3079 3079 if '\n' in doc.strip():
3080 3080 msg = _('use "hg help -e %s" to show help for '
3081 3081 'the %s extension') % (name, name)
3082 3082 rst += '\n%s\n' % msg
3083 3083 except KeyError:
3084 3084 pass
3085 3085
3086 3086 # options
3087 3087 if not ui.quiet and entry[1]:
3088 3088 rst += '\noptions:\n\n'
3089 3089 rst += optrst(entry[1])
3090 3090
3091 3091 if ui.verbose:
3092 3092 rst += '\nglobal options:\n\n'
3093 3093 rst += optrst(globalopts)
3094 3094
3095 3095 keep = ui.verbose and ['verbose'] or []
3096 3096 formatted, pruned = minirst.format(rst, textwidth, keep=keep)
3097 3097 ui.write(formatted)
3098 3098
3099 3099 if not ui.verbose:
3100 3100 if not full:
3101 3101 ui.write(_('\nuse "hg help %s" to show the full help text\n')
3102 3102 % name)
3103 3103 elif not ui.quiet:
3104 3104 ui.write(_('\nuse "hg -v help %s" to show more info\n') % name)
3105 3105
3106 3106
3107 3107 def helplist(select=None):
3108 3108 # list of commands
3109 3109 if name == "shortlist":
3110 3110 header = _('basic commands:\n\n')
3111 3111 else:
3112 3112 header = _('list of commands:\n\n')
3113 3113
3114 3114 h = {}
3115 3115 cmds = {}
3116 3116 for c, e in table.iteritems():
3117 3117 f = c.split("|", 1)[0]
3118 3118 if select and not select(f):
3119 3119 continue
3120 3120 if (not select and name != 'shortlist' and
3121 3121 e[0].__module__ != __name__):
3122 3122 continue
3123 3123 if name == "shortlist" and not f.startswith("^"):
3124 3124 continue
3125 3125 f = f.lstrip("^")
3126 3126 if not ui.debugflag and f.startswith("debug"):
3127 3127 continue
3128 3128 doc = e[0].__doc__
3129 3129 if doc and 'DEPRECATED' in doc and not ui.verbose:
3130 3130 continue
3131 3131 doc = gettext(doc)
3132 3132 if not doc:
3133 3133 doc = _("(no help text available)")
3134 3134 h[f] = doc.splitlines()[0].rstrip()
3135 3135 cmds[f] = c.lstrip("^")
3136 3136
3137 3137 if not h:
3138 3138 ui.status(_('no commands defined\n'))
3139 3139 return
3140 3140
3141 3141 ui.status(header)
3142 3142 fns = sorted(h)
3143 3143 m = max(map(len, fns))
3144 3144 for f in fns:
3145 3145 if ui.verbose:
3146 3146 commands = cmds[f].replace("|",", ")
3147 3147 ui.write(" %s:\n %s\n"%(commands, h[f]))
3148 3148 else:
3149 3149 ui.write('%s\n' % (util.wrap(h[f], textwidth,
3150 3150 initindent=' %-*s ' % (m, f),
3151 3151 hangindent=' ' * (m + 4))))
3152 3152
3153 3153 if not name:
3154 3154 text = help.listexts(_('enabled extensions:'), extensions.enabled())
3155 3155 if text:
3156 3156 ui.write("\n%s" % minirst.format(text, textwidth))
3157 3157
3158 3158 ui.write(_("\nadditional help topics:\n\n"))
3159 3159 topics = []
3160 3160 for names, header, doc in help.helptable:
3161 3161 topics.append((sorted(names, key=len, reverse=True)[0], header))
3162 3162 topics_len = max([len(s[0]) for s in topics])
3163 3163 for t, desc in topics:
3164 3164 ui.write(" %-*s %s\n" % (topics_len, t, desc))
3165 3165
3166 3166 optlist = []
3167 3167 addglobalopts(optlist, True)
3168 3168 ui.write(opttext(optlist, textwidth))
3169 3169
3170 3170 def helptopic(name):
3171 3171 for names, header, doc in help.helptable:
3172 3172 if name in names:
3173 3173 break
3174 3174 else:
3175 3175 raise error.UnknownCommand(name)
3176 3176
3177 3177 # description
3178 3178 if not doc:
3179 3179 doc = _("(no help text available)")
3180 3180 if util.safehasattr(doc, '__call__'):
3181 3181 doc = doc()
3182 3182
3183 3183 ui.write("%s\n\n" % header)
3184 3184 ui.write("%s" % minirst.format(doc, textwidth, indent=4))
3185 3185 try:
3186 3186 cmdutil.findcmd(name, table)
3187 3187 ui.write(_('\nuse "hg help -c %s" to see help for '
3188 3188 'the %s command\n') % (name, name))
3189 3189 except error.UnknownCommand:
3190 3190 pass
3191 3191
3192 3192 def helpext(name):
3193 3193 try:
3194 3194 mod = extensions.find(name)
3195 3195 doc = gettext(mod.__doc__) or _('no help text available')
3196 3196 except KeyError:
3197 3197 mod = None
3198 3198 doc = extensions.disabledext(name)
3199 3199 if not doc:
3200 3200 raise error.UnknownCommand(name)
3201 3201
3202 3202 if '\n' not in doc:
3203 3203 head, tail = doc, ""
3204 3204 else:
3205 3205 head, tail = doc.split('\n', 1)
3206 3206 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
3207 3207 if tail:
3208 3208 ui.write(minirst.format(tail, textwidth))
3209 3209 ui.status('\n')
3210 3210
3211 3211 if mod:
3212 3212 try:
3213 3213 ct = mod.cmdtable
3214 3214 except AttributeError:
3215 3215 ct = {}
3216 3216 modcmds = set([c.split('|', 1)[0] for c in ct])
3217 3217 helplist(modcmds.__contains__)
3218 3218 else:
3219 3219 ui.write(_('use "hg help extensions" for information on enabling '
3220 3220 'extensions\n'))
3221 3221
3222 3222 def helpextcmd(name):
3223 3223 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
3224 3224 doc = gettext(mod.__doc__).splitlines()[0]
3225 3225
3226 3226 msg = help.listexts(_("'%s' is provided by the following "
3227 3227 "extension:") % cmd, {ext: doc}, indent=4)
3228 3228 ui.write(minirst.format(msg, textwidth))
3229 3229 ui.write('\n')
3230 3230 ui.write(_('use "hg help extensions" for information on enabling '
3231 3231 'extensions\n'))
3232 3232
3233 3233 if name and name != 'shortlist':
3234 3234 i = None
3235 3235 if unknowncmd:
3236 3236 queries = (helpextcmd,)
3237 3237 elif opts.get('extension'):
3238 3238 queries = (helpext,)
3239 3239 elif opts.get('command'):
3240 3240 queries = (helpcmd,)
3241 3241 else:
3242 3242 queries = (helptopic, helpcmd, helpext, helpextcmd)
3243 3243 for f in queries:
3244 3244 try:
3245 3245 f(name)
3246 3246 i = None
3247 3247 break
3248 3248 except error.UnknownCommand, inst:
3249 3249 i = inst
3250 3250 if i:
3251 3251 raise i
3252 3252 else:
3253 3253 # program name
3254 3254 ui.status(_("Mercurial Distributed SCM\n"))
3255 3255 ui.status('\n')
3256 3256 helplist()
3257 3257
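A few typical invocations of the help command defined above; the ``mq`` extension and the ``revsets`` topic are only examples and may not exist in every installation::

    hg help             # overview of commands with one-line descriptions
    hg help log         # full help text for the log command
    hg help -c diff     # restrict the lookup to command help only
    hg help -e mq       # help for an extension
    hg help revsets     # help for a named topic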
3258 3258
3259 3259 @command('identify|id',
3260 3260 [('r', 'rev', '',
3261 3261 _('identify the specified revision'), _('REV')),
3262 3262 ('n', 'num', None, _('show local revision number')),
3263 3263 ('i', 'id', None, _('show global revision id')),
3264 3264 ('b', 'branch', None, _('show branch')),
3265 3265 ('t', 'tags', None, _('show tags')),
3266 3266 ('B', 'bookmarks', None, _('show bookmarks')),
3267 3267 ] + remoteopts,
3268 3268 _('[-nibtB] [-r REV] [SOURCE]'))
3269 3269 def identify(ui, repo, source=None, rev=None,
3270 3270 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3271 3271 """identify the working copy or specified revision
3272 3272
3273 3273 Print a summary identifying the repository state at REV using one or
3274 3274 two parent hash identifiers, followed by a "+" if the working
3275 3275 directory has uncommitted changes, the branch name (if not default),
3276 3276 a list of tags, and a list of bookmarks.
3277 3277
3278 3278 When REV is not given, print a summary of the current state of the
3279 3279 repository.
3280 3280
3281 3281 Specifying a path to a repository root or Mercurial bundle will
3282 3282 cause lookup to operate on that repository/bundle.
3283 3283
3284 3284 .. container:: verbose
3285 3285
3286 3286 Examples:
3287 3287
3288 3288 - generate a build identifier for the working directory::
3289 3289
3290 3290 hg id --id > build-id.dat
3291 3291
3292 3292 - find the revision corresponding to a tag::
3293 3293
3294 3294 hg id -n -r 1.3
3295 3295
3296 3296 - check the most recent revision of a remote repository::
3297 3297
3298 3298 hg id -r tip http://selenic.com/hg/
3299 3299
3300 3300 Returns 0 if successful.
3301 3301 """
3302 3302
3303 3303 if not repo and not source:
3304 3304 raise util.Abort(_("there is no Mercurial repository here "
3305 3305 "(.hg not found)"))
3306 3306
3307 3307 hexfunc = ui.debugflag and hex or short
3308 3308 default = not (num or id or branch or tags or bookmarks)
3309 3309 output = []
3310 3310 revs = []
3311 3311
3312 3312 if source:
3313 3313 source, branches = hg.parseurl(ui.expandpath(source))
3314 3314 repo = hg.peer(ui, opts, source)
3315 3315 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3316 3316
3317 3317 if not repo.local():
3318 3318 if num or branch or tags:
3319 3319 raise util.Abort(
3320 3320 _("can't query remote revision number, branch, or tags"))
3321 3321 if not rev and revs:
3322 3322 rev = revs[0]
3323 3323 if not rev:
3324 3324 rev = "tip"
3325 3325
3326 3326 remoterev = repo.lookup(rev)
3327 3327 if default or id:
3328 3328 output = [hexfunc(remoterev)]
3329 3329
3330 3330 def getbms():
3331 3331 bms = []
3332 3332
3333 3333 if 'bookmarks' in repo.listkeys('namespaces'):
3334 3334 hexremoterev = hex(remoterev)
3335 3335 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
3336 3336 if bmr == hexremoterev]
3337 3337
3338 3338 return bms
3339 3339
3340 3340 if bookmarks:
3341 3341 output.extend(getbms())
3342 3342 elif default and not ui.quiet:
3343 3343 # multiple bookmarks for a single parent separated by '/'
3344 3344 bm = '/'.join(getbms())
3345 3345 if bm:
3346 3346 output.append(bm)
3347 3347 else:
3348 3348 if not rev:
3349 3349 ctx = repo[None]
3350 3350 parents = ctx.parents()
3351 3351 changed = ""
3352 3352 if default or id or num:
3353 3353 changed = util.any(repo.status()) and "+" or ""
3354 3354 if default or id:
3355 3355 output = ["%s%s" %
3356 3356 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
3357 3357 if num:
3358 3358 output.append("%s%s" %
3359 3359 ('+'.join([str(p.rev()) for p in parents]), changed))
3360 3360 else:
3361 3361 ctx = scmutil.revsingle(repo, rev)
3362 3362 if default or id:
3363 3363 output = [hexfunc(ctx.node())]
3364 3364 if num:
3365 3365 output.append(str(ctx.rev()))
3366 3366
3367 3367 if default and not ui.quiet:
3368 3368 b = ctx.branch()
3369 3369 if b != 'default':
3370 3370 output.append("(%s)" % b)
3371 3371
3372 3372 # multiple tags for a single parent separated by '/'
3373 3373 t = '/'.join(ctx.tags())
3374 3374 if t:
3375 3375 output.append(t)
3376 3376
3377 3377 # multiple bookmarks for a single parent separated by '/'
3378 3378 bm = '/'.join(ctx.bookmarks())
3379 3379 if bm:
3380 3380 output.append(bm)
3381 3381 else:
3382 3382 if branch:
3383 3383 output.append(ctx.branch())
3384 3384
3385 3385 if tags:
3386 3386 output.extend(ctx.tags())
3387 3387
3388 3388 if bookmarks:
3389 3389 output.extend(ctx.bookmarks())
3390 3390
3391 3391 ui.write("%s\n" % ' '.join(output))
3392 3392
3393 3393 @command('import|patch',
3394 3394 [('p', 'strip', 1,
3395 3395 _('directory strip option for patch. This has the same '
3396 3396 'meaning as the corresponding patch option'), _('NUM')),
3397 3397 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
3398 3398 ('e', 'edit', False, _('invoke editor on commit messages')),
3399 3399 ('f', 'force', None, _('skip check for outstanding uncommitted changes')),
3400 3400 ('', 'no-commit', None,
3401 3401 _("don't commit, just update the working directory")),
3402 3402 ('', 'bypass', None,
3403 3403 _("apply patch without touching the working directory")),
3404 3404 ('', 'exact', None,
3405 3405 _('apply patch to the nodes from which it was generated')),
3406 3406 ('', 'import-branch', None,
3407 3407 _('use any branch information in patch (implied by --exact)'))] +
3408 3408 commitopts + commitopts2 + similarityopts,
3409 3409 _('[OPTION]... PATCH...'))
3410 3410 def import_(ui, repo, patch1=None, *patches, **opts):
3411 3411 """import an ordered set of patches
3412 3412
3413 3413 Import a list of patches and commit them individually (unless
3414 3414 --no-commit is specified).
3415 3415
3416 3416 If there are outstanding changes in the working directory, import
3417 3417 will abort unless given the -f/--force flag.
3418 3418
3419 3419 You can import a patch straight from a mail message. Even patches
3420 3420 as attachments work (to use the body part, it must have type
3421 3421 text/plain or text/x-patch). The From and Subject headers of the
3422 3422 email message are used as the default committer and commit message.
3423 3423 All text/plain body parts before the first diff are added to the
3424 3424 commit message.
3425 3425
3426 3426 If the imported patch was generated by :hg:`export`, user and
3427 3427 description from patch override values from message headers and
3428 3428 body. Values given on command line with -m/--message and -u/--user
3429 3429 override these.
3430 3430
3431 3431 If --exact is specified, import will set the working directory to
3432 3432 the parent of each patch before applying it, and will abort if the
3433 3433 resulting changeset has a different ID than the one recorded in
3434 3434 the patch. This may happen due to character set problems or other
3435 3435 deficiencies in the text patch format.
3436 3436
3437 3437 Use --bypass to apply and commit patches directly to the
3438 3438 repository, not touching the working directory. Without --exact,
3439 3439 patches will be applied on top of the working directory parent
3440 3440 revision.
3441 3441
3442 3442 With -s/--similarity, hg will attempt to discover renames and
3443 3443 copies in the patch in the same way as 'addremove'.
3444 3444
3445 3445 To read a patch from standard input, use "-" as the patch name. If
3446 3446 a URL is specified, the patch will be downloaded from it.
3447 3447 See :hg:`help dates` for a list of formats valid for -d/--date.
3448 3448
3449 3449 .. container:: verbose
3450 3450
3451 3451 Examples:
3452 3452
3453 3453 - import a traditional patch from a website and detect renames::
3454 3454
3455 3455 hg import -s 80 http://example.com/bugfix.patch
3456 3456
3457 3457 - import a changeset from an hgweb server::
3458 3458
3459 3459 hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
3460 3460
3461 3461 - import all the patches in a Unix-style mbox::
3462 3462
3463 3463 hg import incoming-patches.mbox
3464 3464
3465 3465 - attempt to exactly restore an exported changeset (not always
3466 3466 possible)::
3467 3467
3468 3468 hg import --exact proposed-fix.patch
3469 3469
3470 3470 Returns 0 on success.
3471 3471 """
3472 3472
3473 3473 if not patch1:
3474 3474 raise util.Abort(_('need at least one patch to import'))
3475 3475
3476 3476 patches = (patch1,) + patches
3477 3477
3478 3478 date = opts.get('date')
3479 3479 if date:
3480 3480 opts['date'] = util.parsedate(date)
3481 3481
3482 3482 editor = cmdutil.commiteditor
3483 3483 if opts.get('edit'):
3484 3484 editor = cmdutil.commitforceeditor
3485 3485
3486 3486 update = not opts.get('bypass')
3487 3487 if not update and opts.get('no_commit'):
3488 3488 raise util.Abort(_('cannot use --no-commit with --bypass'))
3489 3489 try:
3490 3490 sim = float(opts.get('similarity') or 0)
3491 3491 except ValueError:
3492 3492 raise util.Abort(_('similarity must be a number'))
3493 3493 if sim < 0 or sim > 100:
3494 3494 raise util.Abort(_('similarity must be between 0 and 100'))
3495 3495 if sim and not update:
3496 3496 raise util.Abort(_('cannot use --similarity with --bypass'))
3497 3497
3498 3498 if (opts.get('exact') or not opts.get('force')) and update:
3499 3499 cmdutil.bailifchanged(repo)
3500 3500
3501 3501 base = opts["base"]
3502 3502 strip = opts["strip"]
3503 3503 wlock = lock = tr = None
3504 3504 msgs = []
3505 3505
3506 3506 def checkexact(repo, n, nodeid):
3507 3507 if opts.get('exact') and hex(n) != nodeid:
3508 3508 repo.rollback()
3509 3509 raise util.Abort(_('patch is damaged or loses information'))
3510 3510
3511 3511 def tryone(ui, hunk, parents):
3512 3512 tmpname, message, user, date, branch, nodeid, p1, p2 = \
3513 3513 patch.extract(ui, hunk)
3514 3514
3515 3515 if not tmpname:
3516 3516 return (None, None)
3517 3517 msg = _('applied to working directory')
3518 3518
3519 3519 try:
3520 3520 cmdline_message = cmdutil.logmessage(ui, opts)
3521 3521 if cmdline_message:
3522 3522 # pickup the cmdline msg
3523 3523 message = cmdline_message
3524 3524 elif message:
3525 3525 # pickup the patch msg
3526 3526 message = message.strip()
3527 3527 else:
3528 3528 # launch the editor
3529 3529 message = None
3530 3530 ui.debug('message:\n%s\n' % message)
3531 3531
3532 3532 if len(parents) == 1:
3533 3533 parents.append(repo[nullid])
3534 3534 if opts.get('exact'):
3535 3535 if not nodeid or not p1:
3536 3536 raise util.Abort(_('not a Mercurial patch'))
3537 3537 p1 = repo[p1]
3538 3538 p2 = repo[p2 or nullid]
3539 3539 elif p2:
3540 3540 try:
3541 3541 p1 = repo[p1]
3542 3542 p2 = repo[p2]
3543 3543 # Without any options, consider p2 only if the
3544 3544 # patch is being applied on top of the recorded
3545 3545 # first parent.
3546 3546 if p1 != parents[0]:
3547 3547 p1 = parents[0]
3548 3548 p2 = repo[nullid]
3549 3549 except error.RepoError:
3550 3550 p1, p2 = parents
3551 3551 else:
3552 3552 p1, p2 = parents
3553 3553
3554 3554 n = None
3555 3555 if update:
3556 3556 if p1 != parents[0]:
3557 3557 hg.clean(repo, p1.node())
3558 3558 if p2 != parents[1]:
3559 3559 repo.dirstate.setparents(p1.node(), p2.node())
3560 3560
3561 3561 if opts.get('exact') or opts.get('import_branch'):
3562 3562 repo.dirstate.setbranch(branch or 'default')
3563 3563
3564 3564 files = set()
3565 3565 patch.patch(ui, repo, tmpname, strip=strip, files=files,
3566 3566 eolmode=None, similarity=sim / 100.0)
3567 3567 files = list(files)
3568 3568 if opts.get('no_commit'):
3569 3569 if message:
3570 3570 msgs.append(message)
3571 3571 else:
3572 3572 if opts.get('exact') or p2:
3573 3573 # If you got here, you either use --force and know what
3574 3574 # you are doing or used --exact or a merge patch while
3575 3575 # being updated to its first parent.
3576 3576 m = None
3577 3577 else:
3578 3578 m = scmutil.matchfiles(repo, files or [])
3579 3579 n = repo.commit(message, opts.get('user') or user,
3580 3580 opts.get('date') or date, match=m,
3581 3581 editor=editor)
3582 3582 checkexact(repo, n, nodeid)
3583 3583 else:
3584 3584 if opts.get('exact') or opts.get('import_branch'):
3585 3585 branch = branch or 'default'
3586 3586 else:
3587 3587 branch = p1.branch()
3588 3588 store = patch.filestore()
3589 3589 try:
3590 3590 files = set()
3591 3591 try:
3592 3592 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
3593 3593 files, eolmode=None)
3594 3594 except patch.PatchError, e:
3595 3595 raise util.Abort(str(e))
3596 3596 memctx = patch.makememctx(repo, (p1.node(), p2.node()),
3597 3597 message,
3598 3598 opts.get('user') or user,
3599 3599 opts.get('date') or date,
3600 3600 branch, files, store,
3601 3601 editor=cmdutil.commiteditor)
3602 3602 repo.savecommitmessage(memctx.description())
3603 3603 n = memctx.commit()
3604 3604 checkexact(repo, n, nodeid)
3605 3605 finally:
3606 3606 store.close()
3607 3607 if n:
3608 3608 # i18n: refers to a short changeset id
3609 3609 msg = _('created %s') % short(n)
3610 3610 return (msg, n)
3611 3611 finally:
3612 3612 os.unlink(tmpname)
3613 3613
3614 3614 try:
3615 3615 try:
3616 3616 wlock = repo.wlock()
3617 3617 lock = repo.lock()
3618 3618 tr = repo.transaction('import')
3619 3619 parents = repo.parents()
3620 3620 for patchurl in patches:
3621 3621 if patchurl == '-':
3622 3622 ui.status(_('applying patch from stdin\n'))
3623 3623 patchfile = ui.fin
3624 3624 patchurl = 'stdin' # for error message
3625 3625 else:
3626 3626 patchurl = os.path.join(base, patchurl)
3627 3627 ui.status(_('applying %s\n') % patchurl)
3628 3628 patchfile = url.open(ui, patchurl)
3629 3629
3630 3630 haspatch = False
3631 3631 for hunk in patch.split(patchfile):
3632 3632 (msg, node) = tryone(ui, hunk, parents)
3633 3633 if msg:
3634 3634 haspatch = True
3635 3635 ui.note(msg + '\n')
3636 3636 if update or opts.get('exact'):
3637 3637 parents = repo.parents()
3638 3638 else:
3639 3639 parents = [repo[node]]
3640 3640
3641 3641 if not haspatch:
3642 3642 raise util.Abort(_('%s: no diffs found') % patchurl)
3643 3643
3644 3644 tr.close()
3645 3645 if msgs:
3646 3646 repo.savecommitmessage('\n* * *\n'.join(msgs))
3647 3647 except:
3648 3648 # wlock.release() indirectly calls dirstate.write(): since
3649 3649 # we're crashing, we do not want to change the working dir
3650 3650 # parent after all, so make sure it writes nothing
3651 3651 repo.dirstate.invalidate()
3652 3652 raise
3653 3653 finally:
3654 3654 if tr:
3655 3655 tr.release()
3656 3656 release(lock, wlock)
3657 3657
3658 3658 @command('incoming|in',
3659 3659 [('f', 'force', None,
3660 3660 _('run even if remote repository is unrelated')),
3661 3661 ('n', 'newest-first', None, _('show newest record first')),
3662 3662 ('', 'bundle', '',
3663 3663 _('file to store the bundles into'), _('FILE')),
3664 3664 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
3665 3665 ('B', 'bookmarks', False, _("compare bookmarks")),
3666 3666 ('b', 'branch', [],
3667 3667 _('a specific branch you would like to pull'), _('BRANCH')),
3668 3668 ] + logopts + remoteopts + subrepoopts,
3669 3669 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
3670 3670 def incoming(ui, repo, source="default", **opts):
3671 3671 """show new changesets found in source
3672 3672
3673 3673 Show new changesets found in the specified path/URL or the default
3674 3674 pull location. These are the changesets that would have been pulled
3675 3675 if a pull had been requested at the time you issued this command.
3676 3676
3677 3677 For a remote repository, using --bundle avoids downloading the
3678 3678 changesets twice if this command is followed by a pull.
3679 3679
3680 3680 See pull for valid source format details.
3681 3681
3682 3682 Returns 0 if there are incoming changes, 1 otherwise.
3683 3683 """
3684 3684 if opts.get('bundle') and opts.get('subrepos'):
3685 3685 raise util.Abort(_('cannot combine --bundle and --subrepos'))
3686 3686
3687 3687 if opts.get('bookmarks'):
3688 3688 source, branches = hg.parseurl(ui.expandpath(source),
3689 3689 opts.get('branch'))
3690 3690 other = hg.peer(repo, opts, source)
3691 3691 if 'bookmarks' not in other.listkeys('namespaces'):
3692 3692 ui.warn(_("remote doesn't support bookmarks\n"))
3693 3693 return 0
3694 3694 ui.status(_('comparing with %s\n') % util.hidepassword(source))
3695 3695 return bookmarks.diff(ui, repo, other)
3696 3696
3697 3697 repo._subtoppath = ui.expandpath(source)
3698 3698 try:
3699 3699 return hg.incoming(ui, repo, source, opts)
3700 3700 finally:
3701 3701 del repo._subtoppath
3702 3702
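As a sketch of the --bundle workflow mentioned in the docstring, the inspected changesets can be saved and applied later without a second download; ``incoming.hg`` is just an example file name::

    hg incoming --bundle incoming.hg   # review remote changesets and save them locally
    hg pull incoming.hg                # apply the saved bundle without re-downloading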
3703 3703
3704 3704 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'))
3705 3705 def init(ui, dest=".", **opts):
3706 3706 """create a new repository in the given directory
3707 3707
3708 3708 Initialize a new repository in the given directory. If the given
3709 3709 directory does not exist, it will be created.
3710 3710
3711 3711 If no directory is given, the current directory is used.
3712 3712
3713 3713 It is possible to specify an ``ssh://`` URL as the destination.
3714 3714 See :hg:`help urls` for more information.
3715 3715
3716 3716 Returns 0 on success.
3717 3717 """
3718 3718 hg.peer(ui, opts, ui.expandpath(dest), create=True)
3719 3719
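For illustration, both local and ``ssh://`` destinations behave as described above; the directory and host names are hypothetical::

    hg init                                    # turn the current directory into a repository
    hg init myproject                          # create ./myproject and initialize it
    hg init ssh://user@example.com/myproject   # initialize on a remote host (hg must be installed there)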
3720 3720 @command('locate',
3721 3721 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
3722 3722 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
3723 3723 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
3724 3724 ] + walkopts,
3725 3725 _('[OPTION]... [PATTERN]...'))
3726 3726 def locate(ui, repo, *pats, **opts):
3727 3727 """locate files matching specific patterns
3728 3728
3729 3729 Print files under Mercurial control in the working directory whose
3730 3730 names match the given patterns.
3731 3731
3732 3732 By default, this command searches all directories in the working
3733 3733 directory. To search just the current directory and its
3734 3734 subdirectories, use "--include .".
3735 3735
3736 3736 If no patterns are given to match, this command prints the names
3737 3737 of all files under Mercurial control in the working directory.
3738 3738
3739 3739 If you want to feed the output of this command into the "xargs"
3740 3740 command, use the -0 option to both this command and "xargs". This
3741 3741 will avoid the problem of "xargs" treating single filenames that
3742 3742 contain whitespace as multiple filenames.
3743 3743
3744 3744 Returns 0 if a match is found, 1 otherwise.
3745 3745 """
3746 3746 end = opts.get('print0') and '\0' or '\n'
3747 3747 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
3748 3748
3749 3749 ret = 1
3750 3750 m = scmutil.match(repo[rev], pats, opts, default='relglob')
3751 3751 m.bad = lambda x, y: False
3752 3752 for abs in repo[rev].walk(m):
3753 3753 if not rev and abs not in repo.dirstate:
3754 3754 continue
3755 3755 if opts.get('fullpath'):
3756 3756 ui.write(repo.wjoin(abs), end)
3757 3757 else:
3758 3758 ui.write(((pats and m.rel(abs)) or abs), end)
3759 3759 ret = 0
3760 3760
3761 3761 return ret
3762 3762
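A short illustration of the pattern matching and of the -0/xargs pairing recommended above; the patterns are examples only::

    hg locate                                      # every file under Mercurial control
    hg locate -I . "*.py"                          # Python files in the current directory and below
    hg locate -0 "*.txt" | xargs -0 grep -l TODO   # NUL-separated names are safe for xargs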
3763 3763 @command('^log|history',
3764 3764 [('f', 'follow', None,
3765 3765 _('follow changeset history, or file history across copies and renames')),
3766 3766 ('', 'follow-first', None,
3767 3767 _('only follow the first parent of merge changesets (DEPRECATED)')),
3768 3768 ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
3769 3769 ('C', 'copies', None, _('show copied files')),
3770 3770 ('k', 'keyword', [],
3771 3771 _('do case-insensitive search for a given text'), _('TEXT')),
3772 3772 ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
3773 3773 ('', 'removed', None, _('include revisions where files were removed')),
3774 3774 ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
3775 3775 ('u', 'user', [], _('revisions committed by user'), _('USER')),
3776 3776 ('', 'only-branch', [],
3777 3777 _('show only changesets within the given named branch (DEPRECATED)'),
3778 3778 _('BRANCH')),
3779 3779 ('b', 'branch', [],
3780 3780 _('show changesets within the given named branch'), _('BRANCH')),
3781 3781 ('P', 'prune', [],
3782 3782 _('do not display revision or any of its ancestors'), _('REV')),
3783 3783 ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
3784 3784 ] + logopts + walkopts,
3785 3785 _('[OPTION]... [FILE]'))
3786 3786 def log(ui, repo, *pats, **opts):
3787 3787 """show revision history of entire repository or files
3788 3788
3789 3789 Print the revision history of the specified files or the entire
3790 3790 project.
3791 3791
3792 3792 If no revision range is specified, the default is ``tip:0`` unless
3793 3793 --follow is set, in which case the working directory parent is
3794 3794 used as the starting revision.
3795 3795
3796 3796 File history is shown without following rename or copy history of
3797 3797 files. Use -f/--follow with a filename to follow history across
3798 3798 renames and copies. --follow without a filename will only show
3799 3799 ancestors or descendants of the starting revision.
3800 3800
3801 3801 By default this command prints revision number and changeset id,
3802 3802 tags, non-trivial parents, user, date and time, and a summary for
3803 3803 each commit. When the -v/--verbose switch is used, the list of
3804 3804 changed files and full commit message are shown.
3805 3805
3806 3806 .. note::
3807 3807 log -p/--patch may generate unexpected diff output for merge
3808 3808 changesets, as it will only compare the merge changeset against
3809 3809 its first parent. Also, only files different from BOTH parents
3810 3810 will appear in files:.
3811 3811
3812 3812 .. note::
3813 3813 for performance reasons, log FILE may omit duplicate changes
3814 3814 made on branches and will not show deletions. To see all
3815 3815 changes including duplicates and deletions, use the --removed
3816 3816 switch.
3817 3817
3818 3818 .. container:: verbose
3819 3819
3820 3820 Some examples:
3821 3821
3822 3822 - changesets with full descriptions and file lists::
3823 3823
3824 3824 hg log -v
3825 3825
3826 3826 - changesets ancestral to the working directory::
3827 3827
3828 3828 hg log -f
3829 3829
3830 3830 - last 10 commits on the current branch::
3831 3831
3832 3832 hg log -l 10 -b .
3833 3833
3834 3834 - changesets showing all modifications of a file, including removals::
3835 3835
3836 3836 hg log --removed file.c
3837 3837
3838 3838 - all changesets that touch a directory, with diffs, excluding merges::
3839 3839
3840 3840 hg log -Mp lib/
3841 3841
3842 3842 - all revision numbers that match a keyword::
3843 3843
3844 3844 hg log -k bug --template "{rev}\\n"
3845 3845
3846 3846 - check if a given changeset is included in a tagged release::
3847 3847
3848 3848 hg log -r "a21ccf and ancestor(1.9)"
3849 3849
3850 3850 - find all changesets by some user in a date range::
3851 3851
3852 3852 hg log -k alice -d "may 2008 to jul 2008"
3853 3853
3854 3854 - summary of all changesets after the last tag::
3855 3855
3856 3856 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
3857 3857
3858 3858 See :hg:`help dates` for a list of formats valid for -d/--date.
3859 3859
3860 3860 See :hg:`help revisions` and :hg:`help revsets` for more about
3861 3861 specifying revisions.
3862 3862
3863 3863 Returns 0 on success.
3864 3864 """
3865 3865
3866 3866 matchfn = scmutil.match(repo[None], pats, opts)
3867 3867 limit = cmdutil.loglimit(opts)
3868 3868 count = 0
3869 3869
3870 3870 endrev = None
3871 3871 if opts.get('copies') and opts.get('rev'):
3872 3872 endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
3873 3873
3874 3874 df = False
3875 3875 if opts["date"]:
3876 3876 df = util.matchdate(opts["date"])
3877 3877
3878 3878 branches = opts.get('branch', []) + opts.get('only_branch', [])
3879 3879 opts['branch'] = [repo.lookupbranch(b) for b in branches]
3880 3880
3881 3881 displayer = cmdutil.show_changeset(ui, repo, opts, True)
3882 3882 def prep(ctx, fns):
3883 3883 rev = ctx.rev()
3884 3884 parents = [p for p in repo.changelog.parentrevs(rev)
3885 3885 if p != nullrev]
3886 3886 if opts.get('no_merges') and len(parents) == 2:
3887 3887 return
3888 3888 if opts.get('only_merges') and len(parents) != 2:
3889 3889 return
3890 3890 if opts.get('branch') and ctx.branch() not in opts['branch']:
3891 3891 return
3892 3892 if not opts.get('hidden') and ctx.hidden():
3893 3893 return
3894 3894 if df and not df(ctx.date()[0]):
3895 3895 return
3896 3896 if opts['user'] and not [k for k in opts['user']
3897 3897 if k.lower() in ctx.user().lower()]:
3898 3898 return
3899 3899 if opts.get('keyword'):
3900 3900 for k in [kw.lower() for kw in opts['keyword']]:
3901 3901 if (k in ctx.user().lower() or
3902 3902 k in ctx.description().lower() or
3903 3903 k in " ".join(ctx.files()).lower()):
3904 3904 break
3905 3905 else:
3906 3906 return
3907 3907
3908 3908 copies = None
3909 3909 if opts.get('copies') and rev:
3910 3910 copies = []
3911 3911 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
3912 3912 for fn in ctx.files():
3913 3913 rename = getrenamed(fn, rev)
3914 3914 if rename:
3915 3915 copies.append((fn, rename[0]))
3916 3916
3917 3917 revmatchfn = None
3918 3918 if opts.get('patch') or opts.get('stat'):
3919 3919 if opts.get('follow') or opts.get('follow_first'):
3920 3920 # note: this might be wrong when following through merges
3921 3921 revmatchfn = scmutil.match(repo[None], fns, default='path')
3922 3922 else:
3923 3923 revmatchfn = matchfn
3924 3924
3925 3925 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
3926 3926
3927 3927 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3928 3928 if count == limit:
3929 3929 break
3930 3930 if displayer.flush(ctx.rev()):
3931 3931 count += 1
3932 3932 displayer.close()
3933 3933
3934 3934 @command('manifest',
3935 3935 [('r', 'rev', '', _('revision to display'), _('REV')),
3936 3936 ('', 'all', False, _("list files from all revisions"))],
3937 3937 _('[-r REV]'))
3938 3938 def manifest(ui, repo, node=None, rev=None, **opts):
3939 3939 """output the current or given revision of the project manifest
3940 3940
3941 3941 Print a list of version controlled files for the given revision.
3942 3942 If no revision is given, the first parent of the working directory
3943 3943 is used, or the null revision if no revision is checked out.
3944 3944
3945 3945 With -v, print file permissions, symlink and executable bits.
3946 3946 With --debug, print file revision hashes.
3947 3947
3948 3948 If option --all is specified, the list of all files from all revisions
3949 3949 is printed. This includes deleted and renamed files.
3950 3950
3951 3951 Returns 0 on success.
3952 3952 """
3953 3953 if opts.get('all'):
3954 3954 if rev or node:
3955 3955 raise util.Abort(_("can't specify a revision with --all"))
3956 3956
3957 3957 res = []
3958 3958 prefix = "data/"
3959 3959 suffix = ".i"
3960 3960 plen = len(prefix)
3961 3961 slen = len(suffix)
3962 3962 lock = repo.lock()
3963 3963 try:
3964 3964 for fn, b, size in repo.store.datafiles():
3965 3965 if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
3966 3966 res.append(fn[plen:-slen])
3967 3967 finally:
3968 3968 lock.release()
3969 3969 for f in sorted(res):
3970 3970 ui.write("%s\n" % f)
3971 3971 return
3972 3972
3973 3973 if rev and node:
3974 3974 raise util.Abort(_("please specify just one revision"))
3975 3975
3976 3976 if not node:
3977 3977 node = rev
3978 3978
3979 3979 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
3980 3980 ctx = scmutil.revsingle(repo, node)
3981 3981 for f in ctx:
3982 3982 if ui.debugflag:
3983 3983 ui.write("%40s " % hex(ctx.manifest()[f]))
3984 3984 if ui.verbose:
3985 3985 ui.write(decor[ctx.flags(f)])
3986 3986 ui.write("%s\n" % f)
3987 3987
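A few representative uses of the command above; the tag ``1.0`` is hypothetical::

    hg manifest            # files tracked in the working directory's first parent
    hg manifest -r 1.0     # files as of revision (or tag) 1.0
    hg manifest -v         # include permission, symlink and executable markers
    hg manifest --all      # every file name that has ever been tracked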
3988 3988 @command('^merge',
3989 3989 [('f', 'force', None, _('force a merge with outstanding changes')),
3990 3990 ('r', 'rev', '', _('revision to merge'), _('REV')),
3991 3991 ('P', 'preview', None,
3992 3992 _('review revisions to merge (no merge is performed)'))
3993 3993 ] + mergetoolopts,
3994 3994 _('[-P] [-f] [[-r] REV]'))
3995 3995 def merge(ui, repo, node=None, **opts):
3996 3996 """merge working directory with another revision
3997 3997
3998 3998 The current working directory is updated with all changes made in
3999 3999 the requested revision since the last common predecessor revision.
4000 4000
4001 4001 Files that changed between either parent are marked as changed for
4002 4002 the next commit and a commit must be performed before any further
4003 4003 updates to the repository are allowed. The next commit will have
4004 4004 two parents.
4005 4005
4006 4006 ``--tool`` can be used to specify the merge tool used for file
4007 4007 merges. It overrides the HGMERGE environment variable and your
4008 4008 configuration files. See :hg:`help merge-tools` for options.
4009 4009
4010 4010 If no revision is specified, the working directory's parent is a
4011 4011 head revision, and the current branch contains exactly one other
4012 4012 head, the other head is merged with by default. Otherwise, an
4013 4013 explicit revision with which to merge must be provided.
4014 4014
4015 4015 :hg:`resolve` must be used to resolve unresolved files.
4016 4016
4017 4017 To undo an uncommitted merge, use :hg:`update --clean .` which
4018 4018 will check out a clean copy of the original merge parent, losing
4019 4019 all changes.
4020 4020
4021 4021 Returns 0 on success, 1 if there are unresolved files.
4022 4022 """
4023 4023
4024 4024 if opts.get('rev') and node:
4025 4025 raise util.Abort(_("please specify just one revision"))
4026 4026 if not node:
4027 4027 node = opts.get('rev')
4028 4028
4029 4029 if not node:
4030 4030 branch = repo[None].branch()
4031 4031 bheads = repo.branchheads(branch)
4032 4032 if len(bheads) > 2:
4033 4033 raise util.Abort(_("branch '%s' has %d heads - "
4034 4034 "please merge with an explicit rev")
4035 4035 % (branch, len(bheads)),
4036 4036 hint=_("run 'hg heads .' to see heads"))
4037 4037
4038 4038 parent = repo.dirstate.p1()
4039 4039 if len(bheads) == 1:
4040 4040 if len(repo.heads()) > 1:
4041 4041 raise util.Abort(_("branch '%s' has one head - "
4042 4042 "please merge with an explicit rev")
4043 4043 % branch,
4044 4044 hint=_("run 'hg heads' to see all heads"))
4045 4045 msg = _('there is nothing to merge')
4046 4046 if parent != repo.lookup(repo[None].branch()):
4047 4047 msg = _('%s - use "hg update" instead') % msg
4048 4048 raise util.Abort(msg)
4049 4049
4050 4050 if parent not in bheads:
4051 4051 raise util.Abort(_('working directory not at a head revision'),
4052 4052 hint=_("use 'hg update' or merge with an "
4053 4053 "explicit revision"))
4054 4054 node = parent == bheads[0] and bheads[-1] or bheads[0]
4055 4055 else:
4056 4056 node = scmutil.revsingle(repo, node).node()
4057 4057
4058 4058 if opts.get('preview'):
4059 4059 # find nodes that are ancestors of p2 but not of p1
4060 4060 p1 = repo.lookup('.')
4061 4061 p2 = repo.lookup(node)
4062 4062 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4063 4063
4064 4064 displayer = cmdutil.show_changeset(ui, repo, opts)
4065 4065 for node in nodes:
4066 4066 displayer.show(repo[node])
4067 4067 displayer.close()
4068 4068 return 0
4069 4069
4070 4070 try:
4071 4071 # ui.forcemerge is an internal variable, do not document
4072 4072 repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4073 4073 return hg.merge(repo, node, force=opts.get('force'))
4074 4074 finally:
4075 4075 ui.setconfig('ui', 'forcemerge', '')
4076 4076
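A typical merge session following the rules spelled out above; ``1234`` stands in for whichever revision is actually being merged::

    hg merge -P 1234                  # preview the changesets the merge would bring in
    hg merge 1234                     # perform the merge in the working directory
    hg resolve -l                     # list any files still unresolved
    hg commit -m "merge with 1234"    # record the merge; the commit has two parents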
4077 4077 @command('outgoing|out',
4078 4078 [('f', 'force', None, _('run even when the destination is unrelated')),
4079 4079 ('r', 'rev', [],
4080 4080 _('a changeset intended to be included in the destination'), _('REV')),
4081 4081 ('n', 'newest-first', None, _('show newest record first')),
4082 4082 ('B', 'bookmarks', False, _('compare bookmarks')),
4083 4083 ('b', 'branch', [], _('a specific branch you would like to push'),
4084 4084 _('BRANCH')),
4085 4085 ] + logopts + remoteopts + subrepoopts,
4086 4086 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
4087 4087 def outgoing(ui, repo, dest=None, **opts):
4088 4088 """show changesets not found in the destination
4089 4089
4090 4090 Show changesets not found in the specified destination repository
4091 4091 or the default push location. These are the changesets that would
4092 4092 be pushed if a push was requested.
4093 4093
4094 4094 See pull for details of valid destination formats.
4095 4095
4096 4096 Returns 0 if there are outgoing changes, 1 otherwise.
4097 4097 """
4098 4098
4099 4099 if opts.get('bookmarks'):
4100 4100 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4101 4101 dest, branches = hg.parseurl(dest, opts.get('branch'))
4102 4102 other = hg.peer(repo, opts, dest)
4103 4103 if 'bookmarks' not in other.listkeys('namespaces'):
4104 4104 ui.warn(_("remote doesn't support bookmarks\n"))
4105 4105 return 0
4106 4106 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
4107 4107 return bookmarks.diff(ui, other, repo)
4108 4108
4109 4109 repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
4110 4110 try:
4111 4111 return hg.outgoing(ui, repo, dest, opts)
4112 4112 finally:
4113 4113 del repo._subtoppath
4114 4114
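For example, to see what a push would send to the default destination or to an explicit one (``../backup`` is a placeholder path)::

    hg outgoing             # changesets missing from the default push location
    hg outgoing ../backup   # compare against an explicit destination instead
    hg outgoing -B          # compare bookmarks rather than changesets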
4115 4115 @command('parents',
4116 4116 [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
4117 4117 ] + templateopts,
4118 4118 _('[-r REV] [FILE]'))
4119 4119 def parents(ui, repo, file_=None, **opts):
4120 4120 """show the parents of the working directory or revision
4121 4121
4122 4122 Print the working directory's parent revisions. If a revision is
4123 4123 given via -r/--rev, the parent of that revision will be printed.
4124 4124 If a file argument is given, the revision in which the file was
4125 4125 last changed (before the working directory revision or the
4126 4126 argument to --rev if given) is printed.
4127 4127
4128 4128 Returns 0 on success.
4129 4129 """
4130 4130
4131 4131 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
4132 4132
4133 4133 if file_:
4134 4134 m = scmutil.match(ctx, (file_,), opts)
4135 4135 if m.anypats() or len(m.files()) != 1:
4136 4136 raise util.Abort(_('can only specify an explicit filename'))
4137 4137 file_ = m.files()[0]
4138 4138 filenodes = []
4139 4139 for cp in ctx.parents():
4140 4140 if not cp:
4141 4141 continue
4142 4142 try:
4143 4143 filenodes.append(cp.filenode(file_))
4144 4144 except error.LookupError:
4145 4145 pass
4146 4146 if not filenodes:
4147 4147 raise util.Abort(_("'%s' not found in manifest!") % file_)
4148 4148 fl = repo.file(file_)
4149 4149 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
4150 4150 else:
4151 4151 p = [cp.node() for cp in ctx.parents()]
4152 4152
4153 4153 displayer = cmdutil.show_changeset(ui, repo, opts)
4154 4154 for n in p:
4155 4155 if n != nullid:
4156 4156 displayer.show(repo[n])
4157 4157 displayer.close()
4158 4158
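Some example invocations of the command above; ``foo.c`` and revision ``1.2`` are hypothetical::

    hg parents                # parents of the working directory
    hg parents -r 1.2         # parents of revision 1.2
    hg parents -r 1.2 foo.c   # revision that last changed foo.c, as of revision 1.2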
4159 4159 @command('paths', [], _('[NAME]'))
4160 4160 def paths(ui, repo, search=None):
4161 4161 """show aliases for remote repositories
4162 4162
4163 4163 Show definition of symbolic path name NAME. If no name is given,
4164 4164 show definition of all available names.
4165 4165
4166 4166 Option -q/--quiet suppresses all output when searching for NAME
4167 4167 and shows only the path names when listing all definitions.
4168 4168
4169 4169 Path names are defined in the [paths] section of your
4170 4170 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
4171 4171 repository, ``.hg/hgrc`` is used, too.
4172 4172
4173 4173 The path names ``default`` and ``default-push`` have a special
4174 4174 meaning. When performing a push or pull operation, they are used
4175 4175 as fallbacks if no location is specified on the command-line.
4176 4176 When ``default-push`` is set, it will be used for push and
4177 4177 ``default`` will be used for pull; otherwise ``default`` is used
4178 4178 as the fallback for both. When cloning a repository, the clone
4179 4179 source is written as ``default`` in ``.hg/hgrc``. Note that
4180 4180 ``default`` and ``default-push`` apply to all inbound (e.g.
4181 4181 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
4182 4182 :hg:`bundle`) operations.
4183 4183
4184 4184 See :hg:`help urls` for more information.
4185 4185
4186 4186 Returns 0 on success.
4187 4187 """
4188 4188 if search:
4189 4189 for name, path in ui.configitems("paths"):
4190 4190 if name == search:
4191 4191 ui.status("%s\n" % util.hidepassword(path))
4192 4192 return
4193 4193 if not ui.quiet:
4194 4194 ui.warn(_("not found!\n"))
4195 4195 return 1
4196 4196 else:
4197 4197 for name, path in ui.configitems("paths"):
4198 4198 if ui.quiet:
4199 4199 ui.write("%s\n" % name)
4200 4200 else:
4201 4201 ui.write("%s = %s\n" % (name, util.hidepassword(path)))
4202 4202
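A sketch of how the ``[paths]`` section described above ties into the command; the URLs are examples only. In ``.hg/hgrc``::

    [paths]
    default = https://hg.example.com/project
    default-push = ssh://hg@example.com/project

and then::

    hg paths           # list both aliases with their expansions
    hg paths default   # print only the location behind "default"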
4203 4203 def postincoming(ui, repo, modheads, optupdate, checkout):
4204 4204 if modheads == 0:
4205 4205 return
4206 4206 if optupdate:
4207 4207 try:
4208 4208 return hg.update(repo, checkout)
4209 4209 except util.Abort, inst:
4210 4210 ui.warn(_("not updating: %s\n") % str(inst))
4211 4211 return 0
4212 4212 if modheads > 1:
4213 4213 currentbranchheads = len(repo.branchheads())
4214 4214 if currentbranchheads == modheads:
4215 4215 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
4216 4216 elif currentbranchheads > 1:
4217 4217 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
4218 4218 else:
4219 4219 ui.status(_("(run 'hg heads' to see heads)\n"))
4220 4220 else:
4221 4221 ui.status(_("(run 'hg update' to get a working copy)\n"))
4222 4222
4223 4223 @command('^pull',
4224 4224 [('u', 'update', None,
4225 4225 _('update to new branch head if changesets were pulled')),
4226 4226 ('f', 'force', None, _('run even when remote repository is unrelated')),
4227 4227 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4228 4228 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4229 4229 ('b', 'branch', [], _('a specific branch you would like to pull'),
4230 4230 _('BRANCH')),
4231 4231 ] + remoteopts,
4232 4232 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
4233 4233 def pull(ui, repo, source="default", **opts):
4234 4234 """pull changes from the specified source
4235 4235
4236 4236 Pull changes from a remote repository to a local one.
4237 4237
4238 4238 This finds all changes from the repository at the specified path
4239 4239 or URL and adds them to a local repository (the current one unless
4240 4240 -R is specified). By default, this does not update the copy of the
4241 4241 project in the working directory.
4242 4242
4243 4243 Use :hg:`incoming` if you want to see what would have been added
4244 4244 by a pull at the time you issued this command. If you then decide
4245 4245 to add those changes to the repository, you should use :hg:`pull
4246 4246 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
4247 4247
4248 4248 If SOURCE is omitted, the 'default' path will be used.
4249 4249 See :hg:`help urls` for more information.
4250 4250
4251 4251 Returns 0 on success, 1 if an update had unresolved files.
4252 4252 """
4253 4253 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
4254 4254 other = hg.peer(repo, opts, source)
4255 4255 ui.status(_('pulling from %s\n') % util.hidepassword(source))
4256 4256 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
4257 4257
4258 4258 if opts.get('bookmark'):
4259 4259 if not revs:
4260 4260 revs = []
4261 4261 rb = other.listkeys('bookmarks')
4262 4262 for b in opts['bookmark']:
4263 4263 if b not in rb:
4264 4264 raise util.Abort(_('remote bookmark %s not found!') % b)
4265 4265 revs.append(rb[b])
4266 4266
4267 4267 if revs:
4268 4268 try:
4269 4269 revs = [other.lookup(rev) for rev in revs]
4270 4270 except error.CapabilityError:
4271 4271 err = _("other repository doesn't support revision lookup, "
4272 4272 "so a rev cannot be specified.")
4273 4273 raise util.Abort(err)
4274 4274
4275 4275 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
4276 4276 bookmarks.updatefromremote(ui, repo, other)
4277 4277 if checkout:
4278 4278 checkout = str(repo.changelog.rev(other.lookup(checkout)))
4279 4279 repo._subtoppath = source
4280 4280 try:
4281 4281 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
4282 4282
4283 4283 finally:
4284 4284 del repo._subtoppath
4285 4285
4286 4286 # update specified bookmarks
4287 4287 if opts.get('bookmark'):
4288 4288 for b in opts['bookmark']:
4289 4289 # explicit pull overrides local bookmark if any
4290 4290 ui.status(_("importing bookmark %s\n") % b)
4291 4291 repo._bookmarks[b] = repo[rb[b]].node()
4292 4292 bookmarks.write(repo)
4293 4293
4294 4294 return ret
4295 4295
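An illustrative pull sequence built from the options above; ``X`` is a placeholder for a changeset listed by :hg:`incoming`, and ``feature`` is a hypothetical bookmark::

    hg incoming          # see what would arrive from the default source
    hg pull -r X         # pull only changeset X and its ancestors
    hg pull -u           # pull everything and update to the new branch head
    hg pull -B feature   # also pull a specific bookmark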
4296 4296 @command('^push',
4297 4297 [('f', 'force', None, _('force push')),
4298 4298 ('r', 'rev', [],
4299 4299 _('a changeset intended to be included in the destination'),
4300 4300 _('REV')),
4301 4301 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4302 4302 ('b', 'branch', [],
4303 4303 _('a specific branch you would like to push'), _('BRANCH')),
4304 4304 ('', 'new-branch', False, _('allow pushing a new branch')),
4305 4305 ] + remoteopts,
4306 4306 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
4307 4307 def push(ui, repo, dest=None, **opts):
4308 4308 """push changes to the specified destination
4309 4309
4310 4310 Push changesets from the local repository to the specified
4311 4311 destination.
4312 4312
4313 4313 This operation is symmetrical to pull: it is identical to a pull
4314 4314 in the destination repository from the current one.
4315 4315
4316 4316 By default, push will not allow creation of new heads at the
4317 4317 destination, since multiple heads would make it unclear which head
4318 4318 to use. In this situation, it is recommended to pull and merge
4319 4319 before pushing.
4320 4320
4321 4321 Use --new-branch if you want to allow push to create a new named
4322 4322 branch that is not present at the destination. This allows you to
4323 4323 only create a new branch without forcing other changes.
4324 4324
4325 4325 Use -f/--force to override the default behavior and push all
4326 4326 changesets on all branches.
4327 4327
4328 4328 If -r/--rev is used, the specified revision and all its ancestors
4329 4329 will be pushed to the remote repository.
4330 4330
4331 4331 Please see :hg:`help urls` for important details about ``ssh://``
4332 4332 URLs. If DESTINATION is omitted, a default path will be used.
4333 4333
4334 4334 Returns 0 if push was successful, 1 if nothing to push.
4335 4335 """
4336 4336
4337 4337 if opts.get('bookmark'):
4338 4338 for b in opts['bookmark']:
4339 4339 # translate -B options to -r so changesets get pushed
4340 4340 if b in repo._bookmarks:
4341 4341 opts.setdefault('rev', []).append(b)
4342 4342 else:
4343 4343 # if we try to push a deleted bookmark, translate it to null
4344 4344 # this lets simultaneous -r, -b options continue working
4345 4345 opts.setdefault('rev', []).append("null")
4346 4346
4347 4347 dest = ui.expandpath(dest or 'default-push', dest or 'default')
4348 4348 dest, branches = hg.parseurl(dest, opts.get('branch'))
4349 4349 ui.status(_('pushing to %s\n') % util.hidepassword(dest))
4350 4350 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
4351 4351 other = hg.peer(repo, opts, dest)
4352 4352 if revs:
4353 4353 revs = [repo.lookup(rev) for rev in revs]
4354 4354
4355 4355 repo._subtoppath = dest
4356 4356 try:
4357 4357 # push subrepos depth-first for coherent ordering
4358 4358 c = repo['']
4359 4359 subs = c.substate # only repos that are committed
4360 4360 for s in sorted(subs):
4361 4361 if not c.sub(s).push(opts.get('force')):
4362 4362 return False
4363 4363 finally:
4364 4364 del repo._subtoppath
4365 4365 result = repo.push(other, opts.get('force'), revs=revs,
4366 4366 newbranch=opts.get('new_branch'))
4367 4367
4368 4368 result = (result == 0)
4369 4369
4370 4370 if opts.get('bookmark'):
4371 4371 rb = other.listkeys('bookmarks')
4372 4372 for b in opts['bookmark']:
4373 4373 # explicit push overrides remote bookmark if any
4374 4374 if b in repo._bookmarks:
4375 4375 ui.status(_("exporting bookmark %s\n") % b)
4376 4376 new = repo[b].hex()
4377 4377 elif b in rb:
4378 4378 ui.status(_("deleting remote bookmark %s\n") % b)
4379 4379 new = '' # delete
4380 4380 else:
4381 4381 ui.warn(_('bookmark %s does not exist on the local '
4382 4382 'or remote repository!\n') % b)
4383 4383 return 2
4384 4384 old = rb.get(b, '')
4385 4385 r = other.pushkey('bookmarks', b, old, new)
4386 4386 if not r:
4387 4387 ui.warn(_('updating bookmark %s failed!\n') % b)
4388 4388 if not result:
4389 4389 result = 2
4390 4390
4391 4391 return result
4392 4392
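A few common push invocations matching the behavior described above; ``feature`` names a hypothetical bookmark::

    hg push                  # push to the default-push (or default) path
    hg push -r .             # push only the working directory parent and its ancestors
    hg push --new-branch     # allow creating a named branch the destination lacks
    hg push -B feature       # export the bookmark along with its changesets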
4393 4393 @command('recover', [])
4394 4394 def recover(ui, repo):
4395 4395 """roll back an interrupted transaction
4396 4396
4397 4397 Recover from an interrupted commit or pull.
4398 4398
4399 4399 This command tries to fix the repository status after an
4400 4400 interrupted operation. It should only be necessary when Mercurial
4401 4401 suggests it.
4402 4402
4403 4403 Returns 0 if successful, 1 if nothing to recover or verify fails.
4404 4404 """
4405 4405 if repo.recover():
4406 4406 return hg.verify(repo)
4407 4407 return 1
4408 4408
4409 4409 @command('^remove|rm',
4410 4410 [('A', 'after', None, _('record delete for missing files')),
4411 4411 ('f', 'force', None,
4412 4412 _('remove (and delete) file even if added or modified')),
4413 4413 ] + walkopts,
4414 4414 _('[OPTION]... FILE...'))
4415 4415 def remove(ui, repo, *pats, **opts):
4416 4416 """remove the specified files on the next commit
4417 4417
4418 4418 Schedule the indicated files for removal from the current branch.
4419 4419
4420 4420 This command schedules the files to be removed at the next commit.
4421 4421 To undo a remove before that, see :hg:`revert`. To undo added
4422 4422 files, see :hg:`forget`.
4423 4423
4424 4424 .. container:: verbose
4425 4425
4426 4426 -A/--after can be used to remove only files that have already
4427 4427 been deleted, -f/--force can be used to force deletion, and -Af
4428 4428 can be used to remove files from the next revision without
4429 4429 deleting them from the working directory.
4430 4430
4431 4431 The following table details the behavior of remove for different
4432 4432 file states (columns) and option combinations (rows). The file
4433 4433 states are Added [A], Clean [C], Modified [M] and Missing [!]
4434 4434 (as reported by :hg:`status`). The actions are Warn, Remove
4435 4435 (from branch) and Delete (from disk):
4436 4436
4437 4437 ======= == == == ==
4438 4438 A C M !
4439 4439 ======= == == == ==
4440 4440 none W RD W R
4441 4441 -f R RD RD R
4442 4442 -A W W W R
4443 4443 -Af R R R R
4444 4444 ======= == == == ==
4445 4445
4446 4446 Note that remove never deletes files in Added [A] state from the
4447 4447 working directory, not even if option --force is specified.
4448 4448
4449 4449 Returns 0 on success, 1 if any warnings encountered.
4450 4450 """
4451 4451
4452 4452 ret = 0
4453 4453 after, force = opts.get('after'), opts.get('force')
4454 4454 if not pats and not after:
4455 4455 raise util.Abort(_('no files specified'))
4456 4456
4457 4457 m = scmutil.match(repo[None], pats, opts)
4458 4458 s = repo.status(match=m, clean=True)
4459 4459 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
4460 4460
4461 4461 for f in m.files():
4462 4462 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
4463 4463 if os.path.exists(m.rel(f)):
4464 4464 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
4465 4465 ret = 1
4466 4466
4467 4467 if force:
4468 4468 list = modified + deleted + clean + added
4469 4469 elif after:
4470 4470 list = deleted
4471 4471 for f in modified + added + clean:
4472 4472 ui.warn(_('not removing %s: file still exists (use -f'
4473 4473 ' to force removal)\n') % m.rel(f))
4474 4474 ret = 1
4475 4475 else:
4476 4476 list = deleted + clean
4477 4477 for f in modified:
4478 4478 ui.warn(_('not removing %s: file is modified (use -f'
4479 4479 ' to force removal)\n') % m.rel(f))
4480 4480 ret = 1
4481 4481 for f in added:
4482 4482 ui.warn(_('not removing %s: file has been marked for add'
4483 4483 ' (use forget to undo)\n') % m.rel(f))
4484 4484 ret = 1
4485 4485
4486 4486 for f in sorted(list):
4487 4487 if ui.verbose or not m.exact(f):
4488 4488 ui.status(_('removing %s\n') % m.rel(f))
4489 4489
4490 4490 wlock = repo.wlock()
4491 4491 try:
4492 4492 if not after:
4493 4493 for f in list:
4494 4494 if f in added:
4495 4495 continue # we never unlink added files on remove
4496 4496 try:
4497 4497 util.unlinkpath(repo.wjoin(f))
4498 4498 except OSError, inst:
4499 4499 if inst.errno != errno.ENOENT:
4500 4500 raise
4501 4501 repo[None].forget(list)
4502 4502 finally:
4503 4503 wlock.release()
4504 4504
4505 4505 return ret
4506 4506
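Editor's note: as the table in the docstring above spells out, `-Af` drops files from the next revision while leaving them on disk. A rough illustrative sketch of that path (not part of this changeset), using the same wlock/forget pattern as the function above:

    def remove_after_force(repo, files):
        # Record the removal in the dirstate without unlinking the working
        # copy, mirroring the --after --force (-Af) combination.
        wlock = repo.wlock()
        try:
            repo[None].forget(files)   # stop tracking; files stay on disk
        finally:
            wlock.release()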
4507 4507 @command('rename|move|mv',
4508 4508 [('A', 'after', None, _('record a rename that has already occurred')),
4509 4509 ('f', 'force', None, _('forcibly copy over an existing managed file')),
4510 4510 ] + walkopts + dryrunopts,
4511 4511 _('[OPTION]... SOURCE... DEST'))
4512 4512 def rename(ui, repo, *pats, **opts):
4513 4513 """rename files; equivalent of copy + remove
4514 4514
4515 4515 Mark dest as copies of sources; mark sources for deletion. If dest
4516 4516 is a directory, copies are put in that directory. If dest is a
4517 4517 file, there can only be one source.
4518 4518
4519 4519 By default, this command copies the contents of files as they
4520 4520 exist in the working directory. If invoked with -A/--after, the
4521 4521 operation is recorded, but no copying is performed.
4522 4522
4523 4523 This command takes effect at the next commit. To undo a rename
4524 4524 before that, see :hg:`revert`.
4525 4525
4526 4526 Returns 0 on success, 1 if errors are encountered.
4527 4527 """
4528 4528 wlock = repo.wlock(False)
4529 4529 try:
4530 4530 return cmdutil.copy(ui, repo, pats, opts, rename=True)
4531 4531 finally:
4532 4532 wlock.release()
4533 4533
4534 4534 @command('resolve',
4535 4535 [('a', 'all', None, _('select all unresolved files')),
4536 4536 ('l', 'list', None, _('list state of files needing merge')),
4537 4537 ('m', 'mark', None, _('mark files as resolved')),
4538 4538 ('u', 'unmark', None, _('mark files as unresolved')),
4539 4539 ('n', 'no-status', None, _('hide status prefix'))]
4540 4540 + mergetoolopts + walkopts,
4541 4541 _('[OPTION]... [FILE]...'))
4542 4542 def resolve(ui, repo, *pats, **opts):
4543 4543 """redo merges or set/view the merge status of files
4544 4544
4545 4545 Merges with unresolved conflicts are often the result of
4546 4546 non-interactive merging using the ``internal:merge`` configuration
4547 4547 setting, or a command-line merge tool like ``diff3``. The resolve
4548 4548 command is used to manage the files involved in a merge, after
4549 4549 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
4550 4550 working directory must have two parents).
4551 4551
4552 4552 The resolve command can be used in the following ways:
4553 4553
4554 4554 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
4555 4555 files, discarding any previous merge attempts. Re-merging is not
4556 4556 performed for files already marked as resolved. Use ``--all/-a``
4557 4557 to select all unresolved files. ``--tool`` can be used to specify
4558 4558 the merge tool used for the given files. It overrides the HGMERGE
4559 4559 environment variable and your configuration files. Previous file
4560 4560 contents are saved with a ``.orig`` suffix.
4561 4561
4562 4562 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
4563 4563 (e.g. after having manually fixed-up the files). The default is
4564 4564 to mark all unresolved files.
4565 4565
4566 4566 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
4567 4567 default is to mark all resolved files.
4568 4568
4569 4569 - :hg:`resolve -l`: list files which had or still have conflicts.
4570 4570 In the printed list, ``U`` = unresolved and ``R`` = resolved.
4571 4571
4572 4572 Note that Mercurial will not let you commit files with unresolved
4573 4573 merge conflicts. You must use :hg:`resolve -m ...` before you can
4574 4574 commit after a conflicting merge.
4575 4575
4576 4576 Returns 0 on success, 1 if any files fail a resolve attempt.
4577 4577 """
4578 4578
4579 4579 all, mark, unmark, show, nostatus = \
4580 4580 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
4581 4581
4582 4582 if (show and (mark or unmark)) or (mark and unmark):
4583 4583 raise util.Abort(_("too many options specified"))
4584 4584 if pats and all:
4585 4585 raise util.Abort(_("can't specify --all and patterns"))
4586 4586 if not (all or pats or show or mark or unmark):
4587 4587 raise util.Abort(_('no files or directories specified; '
4588 4588 'use --all to remerge all files'))
4589 4589
4590 4590 ms = mergemod.mergestate(repo)
4591 4591 m = scmutil.match(repo[None], pats, opts)
4592 4592 ret = 0
4593 4593
4594 4594 for f in ms:
4595 4595 if m(f):
4596 4596 if show:
4597 4597 if nostatus:
4598 4598 ui.write("%s\n" % f)
4599 4599 else:
4600 4600 ui.write("%s %s\n" % (ms[f].upper(), f),
4601 4601 label='resolve.' +
4602 4602 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
4603 4603 elif mark:
4604 4604 ms.mark(f, "r")
4605 4605 elif unmark:
4606 4606 ms.mark(f, "u")
4607 4607 else:
4608 4608 wctx = repo[None]
4609 4609 mctx = wctx.parents()[-1]
4610 4610
4611 4611 # backup pre-resolve (merge uses .orig for its own purposes)
4612 4612 a = repo.wjoin(f)
4613 4613 util.copyfile(a, a + ".resolve")
4614 4614
4615 4615 try:
4616 4616 # resolve file
4617 4617 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
4618 4618 if ms.resolve(f, wctx, mctx):
4619 4619 ret = 1
4620 4620 finally:
4621 4621 ui.setconfig('ui', 'forcemerge', '')
4622 4622
4623 4623 # replace filemerge's .orig file with our resolve file
4624 4624 util.rename(a + ".resolve", a + ".orig")
4625 4625
4626 4626 ms.commit()
4627 4627 return ret
4628 4628
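Editor's note: every mode listed in the resolve docstring funnels through the merge state object used above. A small sketch (illustrative only, not part of this changeset) of reading and updating it:

    import merge as mergemod

    def list_unresolved(repo):
        # 'u' entries still need a re-merge; 'r' entries are done.
        ms = mergemod.mergestate(repo)
        return [f for f in ms if ms[f] == 'u']

    def mark_resolved(repo, path):
        ms = mergemod.mergestate(repo)
        ms.mark(path, 'r')   # the same call resolve -m makes
        ms.commit()          # persist the updated merge state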
4629 4629 @command('revert',
4630 4630 [('a', 'all', None, _('revert all changes when no arguments given')),
4631 4631 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
4632 4632 ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
4633 4633 ('C', 'no-backup', None, _('do not save backup copies of files')),
4634 4634 ] + walkopts + dryrunopts,
4635 4635 _('[OPTION]... [-r REV] [NAME]...'))
4636 4636 def revert(ui, repo, *pats, **opts):
4637 4637 """restore files to their checkout state
4638 4638
4639 4639 .. note::
4640 4640 To check out earlier revisions, you should use :hg:`update REV`.
4641 4641 To cancel a merge (and lose your changes), use :hg:`update --clean .`.
4642 4642
4643 4643 With no revision specified, revert the specified files or directories
4644 4644 to the contents they had in the parent of the working directory.
4645 4645 This restores the contents of files to an unmodified
4646 4646 state and unschedules adds, removes, copies, and renames. If the
4647 4647 working directory has two parents, you must explicitly specify a
4648 4648 revision.
4649 4649
4650 4650 Using the -r/--rev or -d/--date options, revert the given files or
4651 4651 directories to their states as of a specific revision. Because
4652 4652 revert does not change the working directory parents, this will
4653 4653 cause these files to appear modified. This can be helpful to "back
4654 4654 out" some or all of an earlier change. See :hg:`backout` for a
4655 4655 related method.
4656 4656
4657 4657 Modified files are saved with a .orig suffix before reverting.
4658 4658 To disable these backups, use --no-backup.
4659 4659
4660 4660 See :hg:`help dates` for a list of formats valid for -d/--date.
4661 4661
4662 4662 Returns 0 on success.
4663 4663 """
4664 4664
4665 4665 if opts.get("date"):
4666 4666 if opts.get("rev"):
4667 4667 raise util.Abort(_("you can't specify a revision and a date"))
4668 4668 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
4669 4669
4670 4670 parent, p2 = repo.dirstate.parents()
4671 4671 if not opts.get('rev') and p2 != nullid:
4672 4672 # revert after merge is a trap for new users (issue2915)
4673 4673 raise util.Abort(_('uncommitted merge with no revision specified'),
4674 4674 hint=_('use "hg update" or see "hg help revert"'))
4675 4675
4676 4676 ctx = scmutil.revsingle(repo, opts.get('rev'))
4677 4677 node = ctx.node()
4678 4678
4679 4679 if not pats and not opts.get('all'):
4680 4680 msg = _("no files or directories specified")
4681 4681 if p2 != nullid:
4682 4682 hint = _("uncommitted merge, use --all to discard all changes,"
4683 4683 " or 'hg update -C .' to abort the merge")
4684 4684 raise util.Abort(msg, hint=hint)
4685 4685 dirty = util.any(repo.status())
4686 4686 if node != parent:
4687 4687 if dirty:
4688 4688 hint = _("uncommitted changes, use --all to discard all"
4689 4689 " changes, or 'hg update %s' to update") % ctx.rev()
4690 4690 else:
4691 4691 hint = _("use --all to revert all files,"
4692 4692 " or 'hg update %s' to update") % ctx.rev()
4693 4693 elif dirty:
4694 4694 hint = _("uncommitted changes, use --all to discard all changes")
4695 4695 else:
4696 4696 hint = _("use --all to revert all files")
4697 4697 raise util.Abort(msg, hint=hint)
4698 4698
4699 4699 mf = ctx.manifest()
4700 4700 if node == parent:
4701 4701 pmf = mf
4702 4702 else:
4703 4703 pmf = None
4704 4704
4705 4705 # need all matching names in dirstate and manifest of target rev,
4706 4706 # so have to walk both. do not print errors if files exist in one
4707 4707 # but not other.
4708 4708
4709 4709 names = {}
4710 4710
4711 4711 wlock = repo.wlock()
4712 4712 try:
4713 4713 # walk dirstate.
4714 4714
4715 4715 m = scmutil.match(repo[None], pats, opts)
4716 4716 m.bad = lambda x, y: False
4717 4717 for abs in repo.walk(m):
4718 4718 names[abs] = m.rel(abs), m.exact(abs)
4719 4719
4720 4720 # walk target manifest.
4721 4721
4722 4722 def badfn(path, msg):
4723 4723 if path in names:
4724 4724 return
4725 4725 if path in repo[node].substate:
4726 4726 ui.warn("%s: %s\n" % (m.rel(path),
4727 4727 'reverting subrepos is unsupported'))
4728 4728 return
4729 4729 path_ = path + '/'
4730 4730 for f in names:
4731 4731 if f.startswith(path_):
4732 4732 return
4733 4733 ui.warn("%s: %s\n" % (m.rel(path), msg))
4734 4734
4735 4735 m = scmutil.match(repo[node], pats, opts)
4736 4736 m.bad = badfn
4737 4737 for abs in repo[node].walk(m):
4738 4738 if abs not in names:
4739 4739 names[abs] = m.rel(abs), m.exact(abs)
4740 4740
4741 4741 m = scmutil.matchfiles(repo, names)
4742 4742 changes = repo.status(match=m)[:4]
4743 4743 modified, added, removed, deleted = map(set, changes)
4744 4744
4745 4745 # if f is a rename, also revert the source
4746 4746 cwd = repo.getcwd()
4747 4747 for f in added:
4748 4748 src = repo.dirstate.copied(f)
4749 4749 if src and src not in names and repo.dirstate[src] == 'r':
4750 4750 removed.add(src)
4751 4751 names[src] = (repo.pathto(src, cwd), True)
4752 4752
4753 4753 def removeforget(abs):
4754 4754 if repo.dirstate[abs] == 'a':
4755 4755 return _('forgetting %s\n')
4756 4756 return _('removing %s\n')
4757 4757
4758 4758 revert = ([], _('reverting %s\n'))
4759 4759 add = ([], _('adding %s\n'))
4760 4760 remove = ([], removeforget)
4761 4761 undelete = ([], _('undeleting %s\n'))
4762 4762
4763 4763 disptable = (
4764 4764 # dispatch table:
4765 4765 # file state
4766 4766 # action if in target manifest
4767 4767 # action if not in target manifest
4768 4768 # make backup if in target manifest
4769 4769 # make backup if not in target manifest
4770 4770 (modified, revert, remove, True, True),
4771 4771 (added, revert, remove, True, False),
4772 4772 (removed, undelete, None, False, False),
4773 4773 (deleted, revert, remove, False, False),
4774 4774 )
4775 4775
4776 4776 for abs, (rel, exact) in sorted(names.items()):
4777 4777 mfentry = mf.get(abs)
4778 4778 target = repo.wjoin(abs)
4779 4779 def handle(xlist, dobackup):
4780 4780 xlist[0].append(abs)
4781 4781 if (dobackup and not opts.get('no_backup') and
4782 4782 os.path.lexists(target)):
4783 4783 bakname = "%s.orig" % rel
4784 4784 ui.note(_('saving current version of %s as %s\n') %
4785 4785 (rel, bakname))
4786 4786 if not opts.get('dry_run'):
4787 4787 util.rename(target, bakname)
4788 4788 if ui.verbose or not exact:
4789 4789 msg = xlist[1]
4790 4790 if not isinstance(msg, basestring):
4791 4791 msg = msg(abs)
4792 4792 ui.status(msg % rel)
4793 4793 for table, hitlist, misslist, backuphit, backupmiss in disptable:
4794 4794 if abs not in table:
4795 4795 continue
4796 4796 # file has changed in dirstate
4797 4797 if mfentry:
4798 4798 handle(hitlist, backuphit)
4799 4799 elif misslist is not None:
4800 4800 handle(misslist, backupmiss)
4801 4801 break
4802 4802 else:
4803 4803 if abs not in repo.dirstate:
4804 4804 if mfentry:
4805 4805 handle(add, True)
4806 4806 elif exact:
4807 4807 ui.warn(_('file not managed: %s\n') % rel)
4808 4808 continue
4809 4809 # file has not changed in dirstate
4810 4810 if node == parent:
4811 4811 if exact:
4812 4812 ui.warn(_('no changes needed to %s\n') % rel)
4813 4813 continue
4814 4814 if pmf is None:
4815 4815 # only need parent manifest in this unlikely case,
4816 4816 # so do not read by default
4817 4817 pmf = repo[parent].manifest()
4818 4818 if abs in pmf and mfentry:
4819 4819 # if version of file is same in parent and target
4820 4820 # manifests, do nothing
4821 4821 if (pmf[abs] != mfentry or
4822 4822 pmf.flags(abs) != mf.flags(abs)):
4823 4823 handle(revert, False)
4824 4824 else:
4825 4825 handle(remove, False)
4826 4826
4827 4827 if not opts.get('dry_run'):
4828 4828 def checkout(f):
4829 4829 fc = ctx[f]
4830 4830 repo.wwrite(f, fc.data(), fc.flags())
4831 4831
4832 4832 audit_path = scmutil.pathauditor(repo.root)
4833 4833 for f in remove[0]:
4834 4834 if repo.dirstate[f] == 'a':
4835 4835 repo.dirstate.drop(f)
4836 4836 continue
4837 4837 audit_path(f)
4838 4838 try:
4839 4839 util.unlinkpath(repo.wjoin(f))
4840 4840 except OSError:
4841 4841 pass
4842 4842 repo.dirstate.remove(f)
4843 4843
4844 4844 normal = None
4845 4845 if node == parent:
4846 4846 # We're reverting to our parent. If possible, we'd like status
4847 4847 # to report the file as clean. We have to use normallookup for
4848 4848 # merges to avoid losing information about merged/dirty files.
4849 4849 if p2 != nullid:
4850 4850 normal = repo.dirstate.normallookup
4851 4851 else:
4852 4852 normal = repo.dirstate.normal
4853 4853 for f in revert[0]:
4854 4854 checkout(f)
4855 4855 if normal:
4856 4856 normal(f)
4857 4857
4858 4858 for f in add[0]:
4859 4859 checkout(f)
4860 4860 repo.dirstate.add(f)
4861 4861
4862 4862 normal = repo.dirstate.normallookup
4863 4863 if node == parent and p2 == nullid:
4864 4864 normal = repo.dirstate.normal
4865 4865 for f in undelete[0]:
4866 4866 checkout(f)
4867 4867 normal(f)
4868 4868
4869 4869 finally:
4870 4870 wlock.release()
4871 4871
4872 4872 @command('rollback', dryrunopts +
4873 4873 [('f', 'force', False, _('ignore safety measures'))])
4874 4874 def rollback(ui, repo, **opts):
4875 4875 """roll back the last transaction (dangerous)
4876 4876
4877 4877 This command should be used with care. There is only one level of
4878 4878 rollback, and there is no way to undo a rollback. It will also
4879 4879 restore the dirstate at the time of the last transaction, losing
4880 4880 any dirstate changes since that time. This command does not alter
4881 4881 the working directory.
4882 4882
4883 4883 Transactions are used to encapsulate the effects of all commands
4884 4884 that create new changesets or propagate existing changesets into a
4885 4885 repository. For example, the following commands are transactional,
4886 4886 and their effects can be rolled back:
4887 4887
4888 4888 - commit
4889 4889 - import
4890 4890 - pull
4891 4891 - push (with this repository as the destination)
4892 4892 - unbundle
4893 4893
4894 4894 It's possible to lose data with rollback: commit, update back to
4895 4895 an older changeset, and then rollback. The update removes the
4896 4896 changes you committed from the working directory, and rollback
4897 4897 removes them from history. To avoid data loss, you must pass
4898 4898 --force in this case.
4899 4899
4900 4900 This command is not intended for use on public repositories. Once
4901 4901 changes are visible for pull by other users, rolling a transaction
4902 4902 back locally is ineffective (someone else may already have pulled
4903 4903 the changes). Furthermore, a race is possible with readers of the
4904 4904 repository; for example an in-progress pull from the repository
4905 4905 may fail if a rollback is performed.
4906 4906
4907 4907 Returns 0 on success, 1 if no rollback data is available.
4908 4908 """
4909 4909 return repo.rollback(dryrun=opts.get('dry_run'),
4910 4910 force=opts.get('force'))
4911 4911
4912 4912 @command('root', [])
4913 4913 def root(ui, repo):
4914 4914 """print the root (top) of the current working directory
4915 4915
4916 4916 Print the root directory of the current repository.
4917 4917
4918 4918 Returns 0 on success.
4919 4919 """
4920 4920 ui.write(repo.root + "\n")
4921 4921
4922 4922 @command('^serve',
4923 4923 [('A', 'accesslog', '', _('name of access log file to write to'),
4924 4924 _('FILE')),
4925 4925 ('d', 'daemon', None, _('run server in background')),
4926 4926 ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
4927 4927 ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
4928 4928 # use string type, then we can check if something was passed
4929 4929 ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
4930 4930 ('a', 'address', '', _('address to listen on (default: all interfaces)'),
4931 4931 _('ADDR')),
4932 4932 ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
4933 4933 _('PREFIX')),
4934 4934 ('n', 'name', '',
4935 4935 _('name to show in web pages (default: working directory)'), _('NAME')),
4936 4936 ('', 'web-conf', '',
4937 4937 _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
4938 4938 ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
4939 4939 _('FILE')),
4940 4940 ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
4941 4941 ('', 'stdio', None, _('for remote clients')),
4942 4942 ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
4943 4943 ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
4944 4944 ('', 'style', '', _('template style to use'), _('STYLE')),
4945 4945 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4946 4946 ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
4947 4947 _('[OPTION]...'))
4948 4948 def serve(ui, repo, **opts):
4949 4949 """start stand-alone webserver
4950 4950
4951 4951 Start a local HTTP repository browser and pull server. You can use
4952 4952 this for ad-hoc sharing and browsing of repositories. It is
4953 4953 recommended to use a real web server to serve a repository for
4954 4954 longer periods of time.
4955 4955
4956 4956 Please note that the server does not implement access control.
4957 4957 This means that, by default, anybody can read from the server and
4958 4958 nobody can write to it. Set the ``web.allow_push``
4959 4959 option to ``*`` to allow everybody to push to the server. You
4960 4960 should use a real web server if you need to authenticate users.
4961 4961
4962 4962 By default, the server logs accesses to stdout and errors to
4963 4963 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
4964 4964 files.
4965 4965
4966 4966 To have the server choose a free port number to listen on, specify
4967 4967 a port number of 0; in this case, the server will print the port
4968 4968 number it uses.
4969 4969
4970 4970 Returns 0 on success.
4971 4971 """
4972 4972
4973 4973 if opts["stdio"] and opts["cmdserver"]:
4974 4974 raise util.Abort(_("cannot use --stdio with --cmdserver"))
4975 4975
4976 4976 def checkrepo():
4977 4977 if repo is None:
4978 4978 raise error.RepoError(_("There is no Mercurial repository here"
4979 4979 " (.hg not found)"))
4980 4980
4981 4981 if opts["stdio"]:
4982 4982 checkrepo()
4983 4983 s = sshserver.sshserver(ui, repo)
4984 4984 s.serve_forever()
4985 4985
4986 4986 if opts["cmdserver"]:
4987 4987 checkrepo()
4988 4988 s = commandserver.server(ui, repo, opts["cmdserver"])
4989 4989 return s.serve()
4990 4990
4991 4991 # this way we can check if something was given in the command-line
4992 4992 if opts.get('port'):
4993 4993 opts['port'] = util.getport(opts.get('port'))
4994 4994
4995 4995 baseui = repo and repo.baseui or ui
4996 4996 optlist = ("name templates style address port prefix ipv6"
4997 4997 " accesslog errorlog certificate encoding")
4998 4998 for o in optlist.split():
4999 4999 val = opts.get(o, '')
5000 5000 if val in (None, ''): # should check against default options instead
5001 5001 continue
5002 5002 baseui.setconfig("web", o, val)
5003 5003 if repo and repo.ui != baseui:
5004 5004 repo.ui.setconfig("web", o, val)
5005 5005
5006 5006 o = opts.get('web_conf') or opts.get('webdir_conf')
5007 5007 if not o:
5008 5008 if not repo:
5009 5009 raise error.RepoError(_("There is no Mercurial repository"
5010 5010 " here (.hg not found)"))
5011 5011 o = repo.root
5012 5012
5013 5013 app = hgweb.hgweb(o, baseui=ui)
5014 5014
5015 5015 class service(object):
5016 5016 def init(self):
5017 5017 util.setsignalhandler()
5018 5018 self.httpd = hgweb.server.create_server(ui, app)
5019 5019
5020 5020 if opts['port'] and not ui.verbose:
5021 5021 return
5022 5022
5023 5023 if self.httpd.prefix:
5024 5024 prefix = self.httpd.prefix.strip('/') + '/'
5025 5025 else:
5026 5026 prefix = ''
5027 5027
5028 5028 port = ':%d' % self.httpd.port
5029 5029 if port == ':80':
5030 5030 port = ''
5031 5031
5032 5032 bindaddr = self.httpd.addr
5033 5033 if bindaddr == '0.0.0.0':
5034 5034 bindaddr = '*'
5035 5035 elif ':' in bindaddr: # IPv6
5036 5036 bindaddr = '[%s]' % bindaddr
5037 5037
5038 5038 fqaddr = self.httpd.fqaddr
5039 5039 if ':' in fqaddr:
5040 5040 fqaddr = '[%s]' % fqaddr
5041 5041 if opts['port']:
5042 5042 write = ui.status
5043 5043 else:
5044 5044 write = ui.write
5045 5045 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
5046 5046 (fqaddr, port, prefix, bindaddr, self.httpd.port))
5047 5047
5048 5048 def run(self):
5049 5049 self.httpd.serve_forever()
5050 5050
5051 5051 service = service()
5052 5052
5053 5053 cmdutil.service(opts, initfn=service.init, runfn=service.run)
5054 5054
5055 5055 @command('showconfig|debugconfig',
5056 5056 [('u', 'untrusted', None, _('show untrusted configuration options'))],
5057 5057 _('[-u] [NAME]...'))
5058 5058 def showconfig(ui, repo, *values, **opts):
5059 5059 """show combined config settings from all hgrc files
5060 5060
5061 5061 With no arguments, print names and values of all config items.
5062 5062
5063 5063 With one argument of the form section.name, print just the value
5064 5064 of that config item.
5065 5065
5066 5066 With multiple arguments, print names and values of all config
5067 5067 items with matching section names.
5068 5068
5069 5069 With --debug, the source (filename and line number) is printed
5070 5070 for each config item.
5071 5071
5072 5072 Returns 0 on success.
5073 5073 """
5074 5074
5075 5075 for f in scmutil.rcpath():
5076 5076 ui.debug('read config from: %s\n' % f)
5077 5077 untrusted = bool(opts.get('untrusted'))
5078 5078 if values:
5079 5079 sections = [v for v in values if '.' not in v]
5080 5080 items = [v for v in values if '.' in v]
5081 5081 if len(items) > 1 or items and sections:
5082 5082 raise util.Abort(_('only one config item permitted'))
5083 5083 for section, name, value in ui.walkconfig(untrusted=untrusted):
5084 5084 value = str(value).replace('\n', '\\n')
5085 5085 sectname = section + '.' + name
5086 5086 if values:
5087 5087 for v in values:
5088 5088 if v == section:
5089 5089 ui.debug('%s: ' %
5090 5090 ui.configsource(section, name, untrusted))
5091 5091 ui.write('%s=%s\n' % (sectname, value))
5092 5092 elif v == sectname:
5093 5093 ui.debug('%s: ' %
5094 5094 ui.configsource(section, name, untrusted))
5095 5095 ui.write(value, '\n')
5096 5096 else:
5097 5097 ui.debug('%s: ' %
5098 5098 ui.configsource(section, name, untrusted))
5099 5099 ui.write('%s=%s\n' % (sectname, value))
5100 5100
5101 5101 @command('^status|st',
5102 5102 [('A', 'all', None, _('show status of all files')),
5103 5103 ('m', 'modified', None, _('show only modified files')),
5104 5104 ('a', 'added', None, _('show only added files')),
5105 5105 ('r', 'removed', None, _('show only removed files')),
5106 5106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
5107 5107 ('c', 'clean', None, _('show only files without changes')),
5108 5108 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
5109 5109 ('i', 'ignored', None, _('show only ignored files')),
5110 5110 ('n', 'no-status', None, _('hide status prefix')),
5111 5111 ('C', 'copies', None, _('show source of copied files')),
5112 5112 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
5113 5113 ('', 'rev', [], _('show difference from revision'), _('REV')),
5114 5114 ('', 'change', '', _('list the changed files of a revision'), _('REV')),
5115 5115 ] + walkopts + subrepoopts,
5116 5116 _('[OPTION]... [FILE]...'))
5117 5117 def status(ui, repo, *pats, **opts):
5118 5118 """show changed files in the working directory
5119 5119
5120 5120 Show status of files in the repository. If names are given, only
5121 5121 files that match are shown. Files that are clean or ignored or
5122 5122 the source of a copy/move operation are not listed unless
5123 5123 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
5124 5124 Unless options described with "show only ..." are given, the
5125 5125 options -mardu are used.
5126 5126
5127 5127 Option -q/--quiet hides untracked (unknown and ignored) files
5128 5128 unless explicitly requested with -u/--unknown or -i/--ignored.
5129 5129
5130 5130 .. note::
5131 5131 status may appear to disagree with diff if permissions have
5132 5132 changed or a merge has occurred. The standard diff format does
5133 5133 not report permission changes and diff only reports changes
5134 5134 relative to one merge parent.
5135 5135
5136 5136 If one revision is given, it is used as the base revision.
5137 5137 If two revisions are given, the differences between them are
5138 5138 shown. The --change option can also be used as a shortcut to list
5139 5139 the changed files of a revision from its first parent.
5140 5140
5141 5141 The codes used to show the status of files are::
5142 5142
5143 5143 M = modified
5144 5144 A = added
5145 5145 R = removed
5146 5146 C = clean
5147 5147 ! = missing (deleted by non-hg command, but still tracked)
5148 5148 ? = not tracked
5149 5149 I = ignored
5150 5150 = origin of the previous file listed as A (added)
5151 5151
5152 5152 .. container:: verbose
5153 5153
5154 5154 Examples:
5155 5155
5156 5156 - show changes in the working directory relative to a changeset:
5157 5157
5158 5158 hg status --rev 9353
5159 5159
5160 5160 - show all changes including copies in an existing changeset::
5161 5161
5162 5162 hg status --copies --change 9353
5163 5163
5164 5164 - get a NUL separated list of added files, suitable for xargs::
5165 5165
5166 5166 hg status -an0
5167 5167
5168 5168 Returns 0 on success.
5169 5169 """
5170 5170
5171 5171 revs = opts.get('rev')
5172 5172 change = opts.get('change')
5173 5173
5174 5174 if revs and change:
5175 5175 msg = _('cannot specify --rev and --change at the same time')
5176 5176 raise util.Abort(msg)
5177 5177 elif change:
5178 5178 node2 = scmutil.revsingle(repo, change, None).node()
5179 5179 node1 = repo[node2].p1().node()
5180 5180 else:
5181 5181 node1, node2 = scmutil.revpair(repo, revs)
5182 5182
5183 5183 cwd = (pats and repo.getcwd()) or ''
5184 5184 end = opts.get('print0') and '\0' or '\n'
5185 5185 copy = {}
5186 5186 states = 'modified added removed deleted unknown ignored clean'.split()
5187 5187 show = [k for k in states if opts.get(k)]
5188 5188 if opts.get('all'):
5189 5189 show += ui.quiet and (states[:4] + ['clean']) or states
5190 5190 if not show:
5191 5191 show = ui.quiet and states[:4] or states[:5]
5192 5192
5193 5193 stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
5194 5194 'ignored' in show, 'clean' in show, 'unknown' in show,
5195 5195 opts.get('subrepos'))
5196 5196 changestates = zip(states, 'MAR!?IC', stat)
5197 5197
5198 5198 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
5199 5199 ctxn = repo[nullid]
5200 5200 ctx1 = repo[node1]
5201 5201 ctx2 = repo[node2]
5202 5202 added = stat[1]
5203 5203 if node2 is None:
5204 5204 added = stat[0] + stat[1] # merged?
5205 5205
5206 5206 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
5207 5207 if k in added:
5208 5208 copy[k] = v
5209 5209 elif v in added:
5210 5210 copy[v] = k
5211 5211
5212 5212 for state, char, files in changestates:
5213 5213 if state in show:
5214 5214 format = "%s %%s%s" % (char, end)
5215 5215 if opts.get('no_status'):
5216 5216 format = "%%s%s" % end
5217 5217
5218 5218 for f in files:
5219 5219 ui.write(format % repo.pathto(f, cwd),
5220 5220 label='status.' + state)
5221 5221 if f in copy:
5222 5222 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
5223 5223 label='status.copied')
5224 5224
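Editor's note: the display above is driven by the seven lists returned from `repo.status()`. A hedged sketch of consuming that tuple directly (field order as used elsewhere in this file; not part of this changeset):

    def summarize_status(repo):
        # repo.status() -> (modified, added, removed, deleted,
        #                   unknown, ignored, clean); the last three are
        #                   only populated when explicitly requested.
        st = repo.status(unknown=True, ignored=True, clean=True)
        modified, added, removed, deleted, unknown, ignored, clean = st
        return dict(M=len(modified), A=len(added), R=len(removed),
                    missing=len(deleted), unknown=len(unknown),
                    ignored=len(ignored), clean=len(clean))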
5225 5225 @command('^summary|sum',
5226 5226 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5227 5227 def summary(ui, repo, **opts):
5228 5228 """summarize working directory state
5229 5229
5230 5230 This generates a brief summary of the working directory state,
5231 5231 including parents, branch, commit status, and available updates.
5232 5232
5233 5233 With the --remote option, this will check the default paths for
5234 5234 incoming and outgoing changes. This can be time-consuming.
5235 5235
5236 5236 Returns 0 on success.
5237 5237 """
5238 5238
5239 5239 ctx = repo[None]
5240 5240 parents = ctx.parents()
5241 5241 pnode = parents[0].node()
5242 5242 marks = []
5243 5243
5244 5244 for p in parents:
5245 5245 # label with log.changeset (instead of log.parent) since this
5246 5246 # shows a working directory parent *changeset*:
5247 5247 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5248 5248 label='log.changeset')
5249 5249 ui.write(' '.join(p.tags()), label='log.tag')
5250 5250 if p.bookmarks():
5251 5251 marks.extend(p.bookmarks())
5252 5252 if p.rev() == -1:
5253 5253 if not len(repo):
5254 5254 ui.write(_(' (empty repository)'))
5255 5255 else:
5256 5256 ui.write(_(' (no revision checked out)'))
5257 5257 ui.write('\n')
5258 5258 if p.description():
5259 5259 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5260 5260 label='log.summary')
5261 5261
5262 5262 branch = ctx.branch()
5263 5263 bheads = repo.branchheads(branch)
5264 5264 m = _('branch: %s\n') % branch
5265 5265 if branch != 'default':
5266 5266 ui.write(m, label='log.branch')
5267 5267 else:
5268 5268 ui.status(m, label='log.branch')
5269 5269
5270 5270 if marks:
5271 5271 current = repo._bookmarkcurrent
5272 5272 ui.write(_('bookmarks:'), label='log.bookmark')
5273 5273 if current is not None:
5274 5274 try:
5275 5275 marks.remove(current)
5276 5276 ui.write(' *' + current, label='bookmarks.current')
5277 5277 except ValueError:
5278 5278 # current bookmark not in parent ctx marks
5279 5279 pass
5280 5280 for m in marks:
5281 5281 ui.write(' ' + m, label='log.bookmark')
5282 5282 ui.write('\n', label='log.bookmark')
5283 5283
5284 5284 st = list(repo.status(unknown=True))[:6]
5285 5285
5286 5286 c = repo.dirstate.copies()
5287 5287 copied, renamed = [], []
5288 5288 for d, s in c.iteritems():
5289 5289 if s in st[2]:
5290 5290 st[2].remove(s)
5291 5291 renamed.append(d)
5292 5292 else:
5293 5293 copied.append(d)
5294 5294 if d in st[1]:
5295 5295 st[1].remove(d)
5296 5296 st.insert(3, renamed)
5297 5297 st.insert(4, copied)
5298 5298
5299 5299 ms = mergemod.mergestate(repo)
5300 5300 st.append([f for f in ms if ms[f] == 'u'])
5301 5301
5302 5302 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
5303 5303 st.append(subs)
5304 5304
5305 5305 labels = [ui.label(_('%d modified'), 'status.modified'),
5306 5306 ui.label(_('%d added'), 'status.added'),
5307 5307 ui.label(_('%d removed'), 'status.removed'),
5308 5308 ui.label(_('%d renamed'), 'status.copied'),
5309 5309 ui.label(_('%d copied'), 'status.copied'),
5310 5310 ui.label(_('%d deleted'), 'status.deleted'),
5311 5311 ui.label(_('%d unknown'), 'status.unknown'),
5312 5312 ui.label(_('%d ignored'), 'status.ignored'),
5313 5313 ui.label(_('%d unresolved'), 'resolve.unresolved'),
5314 5314 ui.label(_('%d subrepos'), 'status.modified')]
5315 5315 t = []
5316 5316 for s, l in zip(st, labels):
5317 5317 if s:
5318 5318 t.append(l % len(s))
5319 5319
5320 5320 t = ', '.join(t)
5321 5321 cleanworkdir = False
5322 5322
5323 5323 if len(parents) > 1:
5324 5324 t += _(' (merge)')
5325 5325 elif branch != parents[0].branch():
5326 5326 t += _(' (new branch)')
5327 5327 elif (parents[0].extra().get('close') and
5328 5328 pnode in repo.branchheads(branch, closed=True)):
5329 5329 t += _(' (head closed)')
5330 5330 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
5331 5331 t += _(' (clean)')
5332 5332 cleanworkdir = True
5333 5333 elif pnode not in bheads:
5334 5334 t += _(' (new branch head)')
5335 5335
5336 5336 if cleanworkdir:
5337 5337 ui.status(_('commit: %s\n') % t.strip())
5338 5338 else:
5339 5339 ui.write(_('commit: %s\n') % t.strip())
5340 5340
5341 5341 # all ancestors of branch heads - all ancestors of parent = new csets
5342 5342 new = [0] * len(repo)
5343 5343 cl = repo.changelog
5344 5344 for a in [cl.rev(n) for n in bheads]:
5345 5345 new[a] = 1
5346 5346 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
5347 5347 new[a] = 1
5348 5348 for a in [p.rev() for p in parents]:
5349 5349 if a >= 0:
5350 5350 new[a] = 0
5351 5351 for a in cl.ancestors(*[p.rev() for p in parents]):
5352 5352 new[a] = 0
5353 5353 new = sum(new)
5354 5354
5355 5355 if new == 0:
5356 5356 ui.status(_('update: (current)\n'))
5357 5357 elif pnode not in bheads:
5358 5358 ui.write(_('update: %d new changesets (update)\n') % new)
5359 5359 else:
5360 5360 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
5361 5361 (new, len(bheads)))
5362 5362
5363 5363 if opts.get('remote'):
5364 5364 t = []
5365 5365 source, branches = hg.parseurl(ui.expandpath('default'))
5366 5366 other = hg.peer(repo, {}, source)
5367 5367 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
5368 5368 ui.debug('comparing with %s\n' % util.hidepassword(source))
5369 5369 repo.ui.pushbuffer()
5370 5370 commoninc = discovery.findcommonincoming(repo, other)
5371 5371 _common, incoming, _rheads = commoninc
5372 5372 repo.ui.popbuffer()
5373 5373 if incoming:
5374 5374 t.append(_('1 or more incoming'))
5375 5375
5376 5376 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
5377 5377 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
5378 5378 if source != dest:
5379 5379 other = hg.peer(repo, {}, dest)
5380 5380 commoninc = None
5381 5381 ui.debug('comparing with %s\n' % util.hidepassword(dest))
5382 5382 repo.ui.pushbuffer()
5383 5383 common, outheads = discovery.findcommonoutgoing(repo, other,
5384 5384 commoninc=commoninc)
5385 5385 repo.ui.popbuffer()
5386 5386 o = repo.changelog.findmissing(common=common, heads=outheads)
5387 5387 if o:
5388 5388 t.append(_('%d outgoing') % len(o))
5389 5389 if 'bookmarks' in other.listkeys('namespaces'):
5390 5390 lmarks = repo.listkeys('bookmarks')
5391 5391 rmarks = other.listkeys('bookmarks')
5392 5392 diff = set(rmarks) - set(lmarks)
5393 5393 if len(diff) > 0:
5394 5394 t.append(_('%d incoming bookmarks') % len(diff))
5395 5395 diff = set(lmarks) - set(rmarks)
5396 5396 if len(diff) > 0:
5397 5397 t.append(_('%d outgoing bookmarks') % len(diff))
5398 5398
5399 5399 if t:
5400 5400 ui.write(_('remote: %s\n') % (', '.join(t)))
5401 5401 else:
5402 5402 ui.status(_('remote: (synced)\n'))
5403 5403
5404 5404 @command('tag',
5405 5405 [('f', 'force', None, _('force tag')),
5406 5406 ('l', 'local', None, _('make the tag local')),
5407 5407 ('r', 'rev', '', _('revision to tag'), _('REV')),
5408 5408 ('', 'remove', None, _('remove a tag')),
5409 5409 # -l/--local is already there, commitopts cannot be used
5410 5410 ('e', 'edit', None, _('edit commit message')),
5411 5411 ('m', 'message', '', _('use <text> as commit message'), _('TEXT')),
5412 5412 ] + commitopts2,
5413 5413 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
5414 5414 def tag(ui, repo, name1, *names, **opts):
5415 5415 """add one or more tags for the current or given revision
5416 5416
5417 5417 Name a particular revision using <name>.
5418 5418
5419 5419 Tags are used to name particular revisions of the repository and are
5420 5420 very useful to compare different revisions, to go back to significant
5421 5421 earlier versions or to mark branch points as releases, etc. Changing
5422 5422 an existing tag is normally disallowed; use -f/--force to override.
5423 5423
5424 5424 If no revision is given, the parent of the working directory is
5425 5425 used, or tip if no revision is checked out.
5426 5426
5427 5427 To facilitate version control, distribution, and merging of tags,
5428 5428 they are stored as a file named ".hgtags" which is managed similarly
5429 5429 to other project files and can be hand-edited if necessary. This
5430 5430 also means that tagging creates a new commit. The file
5431 5431 ".hg/localtags" is used for local tags (not shared among
5432 5432 repositories).
5433 5433
5434 5434 Tag commits are usually made at the head of a branch. If the parent
5435 5435 of the working directory is not a branch head, :hg:`tag` aborts; use
5436 5436 -f/--force to force the tag commit to be based on a non-head
5437 5437 changeset.
5438 5438
5439 5439 See :hg:`help dates` for a list of formats valid for -d/--date.
5440 5440
5441 5441 Since tag names have priority over branch names during revision
5442 5442 lookup, using an existing branch name as a tag name is discouraged.
5443 5443
5444 5444 Returns 0 on success.
5445 5445 """
5446 5446
5447 5447 rev_ = "."
5448 5448 names = [t.strip() for t in (name1,) + names]
5449 5449 if len(names) != len(set(names)):
5450 5450 raise util.Abort(_('tag names must be unique'))
5451 5451 for n in names:
5452 5452 if n in ['tip', '.', 'null']:
5453 5453 raise util.Abort(_("the name '%s' is reserved") % n)
5454 5454 if not n:
5455 5455 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
5456 5456 if opts.get('rev') and opts.get('remove'):
5457 5457 raise util.Abort(_("--rev and --remove are incompatible"))
5458 5458 if opts.get('rev'):
5459 5459 rev_ = opts['rev']
5460 5460 message = opts.get('message')
5461 5461 if opts.get('remove'):
5462 5462 expectedtype = opts.get('local') and 'local' or 'global'
5463 5463 for n in names:
5464 5464 if not repo.tagtype(n):
5465 5465 raise util.Abort(_("tag '%s' does not exist") % n)
5466 5466 if repo.tagtype(n) != expectedtype:
5467 5467 if expectedtype == 'global':
5468 5468 raise util.Abort(_("tag '%s' is not a global tag") % n)
5469 5469 else:
5470 5470 raise util.Abort(_("tag '%s' is not a local tag") % n)
5471 5471 rev_ = nullid
5472 5472 if not message:
5473 5473 # we don't translate commit messages
5474 5474 message = 'Removed tag %s' % ', '.join(names)
5475 5475 elif not opts.get('force'):
5476 5476 for n in names:
5477 5477 if n in repo.tags():
5478 5478 raise util.Abort(_("tag '%s' already exists "
5479 5479 "(use -f to force)") % n)
5480 5480 if not opts.get('local'):
5481 5481 p1, p2 = repo.dirstate.parents()
5482 5482 if p2 != nullid:
5483 5483 raise util.Abort(_('uncommitted merge'))
5484 5484 bheads = repo.branchheads()
5485 5485 if not opts.get('force') and bheads and p1 not in bheads:
5486 5486 raise util.Abort(_('not at a branch head (use -f to force)'))
5487 5487 r = scmutil.revsingle(repo, rev_).node()
5488 5488
5489 5489 if not message:
5490 5490 # we don't translate commit messages
5491 5491 message = ('Added tag %s for changeset %s' %
5492 5492 (', '.join(names), short(r)))
5493 5493
5494 5494 date = opts.get('date')
5495 5495 if date:
5496 5496 date = util.parsedate(date)
5497 5497
5498 5498 if opts.get('edit'):
5499 5499 message = ui.edit(message, ui.username())
5500 5500
5501 5501 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
5502 5502
5503 5503 @command('tags', [], '')
5504 5504 def tags(ui, repo):
5505 5505 """list repository tags
5506 5506
5507 5507 This lists both regular and local tags. When the -v/--verbose
5508 5508 switch is used, a third column "local" is printed for local tags.
5509 5509
5510 5510 Returns 0 on success.
5511 5511 """
5512 5512
5513 5513 hexfunc = ui.debugflag and hex or short
5514 5514 tagtype = ""
5515 5515
5516 5516 for t, n in reversed(repo.tagslist()):
5517 5517 if ui.quiet:
5518 5518 ui.write("%s\n" % t, label='tags.normal')
5519 5519 continue
5520 5520
5521 5521 hn = hexfunc(n)
5522 5522 r = "%5d:%s" % (repo.changelog.rev(n), hn)
5523 5523 rev = ui.label(r, 'log.changeset')
5524 5524 spaces = " " * (30 - encoding.colwidth(t))
5525 5525
5526 5526 tag = ui.label(t, 'tags.normal')
5527 5527 if ui.verbose:
5528 5528 if repo.tagtype(t) == 'local':
5529 5529 tagtype = " local"
5530 5530 tag = ui.label(t, 'tags.local')
5531 5531 else:
5532 5532 tagtype = ""
5533 5533 ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
5534 5534
5535 5535 @command('tip',
5536 5536 [('p', 'patch', None, _('show patch')),
5537 5537 ('g', 'git', None, _('use git extended diff format')),
5538 5538 ] + templateopts,
5539 5539 _('[-p] [-g]'))
5540 5540 def tip(ui, repo, **opts):
5541 5541 """show the tip revision
5542 5542
5543 5543 The tip revision (usually just called the tip) is the changeset
5544 5544 most recently added to the repository (and therefore the most
5545 5545 recently changed head).
5546 5546
5547 5547 If you have just made a commit, that commit will be the tip. If
5548 5548 you have just pulled changes from another repository, the tip of
5549 5549 that repository becomes the current tip. The "tip" tag is special
5550 5550 and cannot be renamed or assigned to a different changeset.
5551 5551
5552 5552 Returns 0 on success.
5553 5553 """
5554 5554 displayer = cmdutil.show_changeset(ui, repo, opts)
5555 5555 displayer.show(repo[len(repo) - 1])
5556 5556 displayer.close()
5557 5557
5558 5558 @command('unbundle',
5559 5559 [('u', 'update', None,
5560 5560 _('update to new branch head if changesets were unbundled'))],
5561 5561 _('[-u] FILE...'))
5562 5562 def unbundle(ui, repo, fname1, *fnames, **opts):
5563 5563 """apply one or more changegroup files
5564 5564
5565 5565 Apply one or more compressed changegroup files generated by the
5566 5566 bundle command.
5567 5567
5568 5568 Returns 0 on success, 1 if an update has unresolved files.
5569 5569 """
5570 5570 fnames = (fname1,) + fnames
5571 5571
5572 5572 lock = repo.lock()
5573 5573 wc = repo['.']
5574 5574 try:
5575 5575 for fname in fnames:
5576 5576 f = url.open(ui, fname)
5577 5577 gen = changegroup.readbundle(f, fname)
5578 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
5579 lock=lock)
5578 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
5580 5579 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
5581 5580 finally:
5582 5581 lock.release()
5583 5582 return postincoming(ui, repo, modheads, opts.get('update'), None)
5584 5583
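Editor's note: this hunk is the core of the changeset named in the commit message: callers now take `repo.lock()` themselves and no longer pass the lock to `addchangegroup`. A condensed sketch of the calling pattern after the change (names exactly as in the function above):

    def apply_bundle(ui, repo, fname):
        lock = repo.lock()           # the caller owns the lock for the whole import
        try:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            # no lock= keyword anymore; the method runs under the caller's lock
            return repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
        finally:
            lock.release()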
5585 5584 @command('^update|up|checkout|co',
5586 5585 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
5587 5586 ('c', 'check', None,
5588 5587 _('update across branches if no uncommitted changes')),
5589 5588 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
5590 5589 ('r', 'rev', '', _('revision'), _('REV'))],
5591 5590 _('[-c] [-C] [-d DATE] [[-r] REV]'))
5592 5591 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
5593 5592 """update working directory (or switch revisions)
5594 5593
5595 5594 Update the repository's working directory to the specified
5596 5595 changeset. If no changeset is specified, update to the tip of the
5597 5596 current named branch.
5598 5597
5599 5598 If the changeset is not a descendant of the working directory's
5600 5599 parent, the update is aborted. With the -c/--check option, the
5601 5600 working directory is checked for uncommitted changes; if none are
5602 5601 found, the working directory is updated to the specified
5603 5602 changeset.
5604 5603
5605 5604 Update sets the working directory's parent revision to the specified
5606 5605 changeset (see :hg:`help parents`).
5607 5606
5608 5607 The following rules apply when the working directory contains
5609 5608 uncommitted changes:
5610 5609
5611 5610 1. If neither -c/--check nor -C/--clean is specified, and if
5612 5611 the requested changeset is an ancestor or descendant of
5613 5612 the working directory's parent, the uncommitted changes
5614 5613 are merged into the requested changeset and the merged
5615 5614 result is left uncommitted. If the requested changeset is
5616 5615 not an ancestor or descendant (that is, it is on another
5617 5616 branch), the update is aborted and the uncommitted changes
5618 5617 are preserved.
5619 5618
5620 5619 2. With the -c/--check option, the update is aborted and the
5621 5620 uncommitted changes are preserved.
5622 5621
5623 5622 3. With the -C/--clean option, uncommitted changes are discarded and
5624 5623 the working directory is updated to the requested changeset.
5625 5624
5626 5625 Use null as the changeset to remove the working directory (like
5627 5626 :hg:`clone -U`).
5628 5627
5629 5628 If you want to revert just one file to an older revision, use
5630 5629 :hg:`revert [-r REV] NAME`.
5631 5630
5632 5631 See :hg:`help dates` for a list of formats valid for -d/--date.
5633 5632
5634 5633 Returns 0 on success, 1 if there are unresolved files.
5635 5634 """
5636 5635 if rev and node:
5637 5636 raise util.Abort(_("please specify just one revision"))
5638 5637
5639 5638 if rev is None or rev == '':
5640 5639 rev = node
5641 5640
5642 5641 # if we defined a bookmark, we have to remember the original bookmark name
5643 5642 brev = rev
5644 5643 rev = scmutil.revsingle(repo, rev, rev).rev()
5645 5644
5646 5645 if check and clean:
5647 5646 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
5648 5647
5649 5648 if check:
5650 5649 # we could use dirty() but we can ignore merge and branch trivia
5651 5650 c = repo[None]
5652 5651 if c.modified() or c.added() or c.removed():
5653 5652 raise util.Abort(_("uncommitted local changes"))
5654 5653
5655 5654 if date:
5656 5655 if rev is not None:
5657 5656 raise util.Abort(_("you can't specify a revision and a date"))
5658 5657 rev = cmdutil.finddate(ui, repo, date)
5659 5658
5660 5659 if clean or check:
5661 5660 ret = hg.clean(repo, rev)
5662 5661 else:
5663 5662 ret = hg.update(repo, rev)
5664 5663
5665 5664 if brev in repo._bookmarks:
5666 5665 bookmarks.setcurrent(repo, brev)
5667 5666
5668 5667 return ret
5669 5668
5670 5669 @command('verify', [])
5671 5670 def verify(ui, repo):
5672 5671 """verify the integrity of the repository
5673 5672
5674 5673 Verify the integrity of the current repository.
5675 5674
5676 5675 This will perform an extensive check of the repository's
5677 5676 integrity, validating the hashes and checksums of each entry in
5678 5677 the changelog, manifest, and tracked files, as well as the
5679 5678 integrity of their crosslinks and indices.
5680 5679
5681 5680 Returns 0 on success, 1 if errors are encountered.
5682 5681 """
5683 5682 return hg.verify(repo)
5684 5683
5685 5684 @command('version', [])
5686 5685 def version_(ui):
5687 5686 """output version and copyright information"""
5688 5687 ui.write(_("Mercurial Distributed SCM (version %s)\n")
5689 5688 % util.version())
5690 5689 ui.status(_(
5691 5690 "(see http://mercurial.selenic.com for more information)\n"
5692 5691 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
5693 5692 "This is free software; see the source for copying conditions. "
5694 5693 "There is NO\nwarranty; "
5695 5694 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
5696 5695 ))
5697 5696
5698 5697 norepo = ("clone init version help debugcommands debugcomplete"
5699 5698 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
5700 5699 " debugknown debuggetbundle debugbundle")
5701 5700 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
5702 5701 " debugdata debugindex debugindexdot debugrevlog")
@@ -1,2160 +1,2154 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 12 import lock, transaction, store, encoding
13 13 import scmutil, util, extensions, hook, error, revset
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20 filecache = scmutil.filecache
21 21
22 22 class localrepository(repo.repository):
23 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 24 'known', 'getbundle'))
25 25 supportedformats = set(('revlogv1', 'generaldelta'))
26 26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 27 'dotencode'))
28 28
29 29 def __init__(self, baseui, path=None, create=False):
30 30 repo.repository.__init__(self)
31 31 self.root = os.path.realpath(util.expandpath(path))
32 32 self.path = os.path.join(self.root, ".hg")
33 33 self.origroot = path
34 34 self.auditor = scmutil.pathauditor(self.root, self._checknested)
35 35 self.opener = scmutil.opener(self.path)
36 36 self.wopener = scmutil.opener(self.root)
37 37 self.baseui = baseui
38 38 self.ui = baseui.copy()
39 39 self._dirtyphases = False
40 40
41 41 try:
42 42 self.ui.readconfig(self.join("hgrc"), self.root)
43 43 extensions.loadall(self.ui)
44 44 except IOError:
45 45 pass
46 46
47 47 if not os.path.isdir(self.path):
48 48 if create:
49 49 if not os.path.exists(path):
50 50 util.makedirs(path)
51 51 util.makedir(self.path, notindexed=True)
52 52 requirements = ["revlogv1"]
53 53 if self.ui.configbool('format', 'usestore', True):
54 54 os.mkdir(os.path.join(self.path, "store"))
55 55 requirements.append("store")
56 56 if self.ui.configbool('format', 'usefncache', True):
57 57 requirements.append("fncache")
58 58 if self.ui.configbool('format', 'dotencode', True):
59 59 requirements.append('dotencode')
60 60 # create an invalid changelog
61 61 self.opener.append(
62 62 "00changelog.i",
63 63 '\0\0\0\2' # represents revlogv2
64 64 ' dummy changelog to prevent using the old repo layout'
65 65 )
66 66 if self.ui.configbool('format', 'generaldelta', False):
67 67 requirements.append("generaldelta")
68 68 requirements = set(requirements)
69 69 else:
70 70 raise error.RepoError(_("repository %s not found") % path)
71 71 elif create:
72 72 raise error.RepoError(_("repository %s already exists") % path)
73 73 else:
74 74 try:
75 75 requirements = scmutil.readrequires(self.opener, self.supported)
76 76 except IOError, inst:
77 77 if inst.errno != errno.ENOENT:
78 78 raise
79 79 requirements = set()
80 80
81 81 self.sharedpath = self.path
82 82 try:
83 83 s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
84 84 if not os.path.exists(s):
85 85 raise error.RepoError(
86 86 _('.hg/sharedpath points to nonexistent directory %s') % s)
87 87 self.sharedpath = s
88 88 except IOError, inst:
89 89 if inst.errno != errno.ENOENT:
90 90 raise
91 91
92 92 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
93 93 self.spath = self.store.path
94 94 self.sopener = self.store.opener
95 95 self.sjoin = self.store.join
96 96 self.opener.createmode = self.store.createmode
97 97 self._applyrequirements(requirements)
98 98 if create:
99 99 self._writerequirements()
100 100
101 101
102 102 self._branchcache = None
103 103 self._branchcachetip = None
104 104 self.filterpats = {}
105 105 self._datafilters = {}
106 106 self._transref = self._lockref = self._wlockref = None
107 107
108 108 # A cache for various files under .hg/ that tracks file changes,
109 109 # (used by the filecache decorator)
110 110 #
111 111 # Maps a property name to its util.filecacheentry
112 112 self._filecache = {}
113 113
114 114 def _applyrequirements(self, requirements):
115 115 self.requirements = requirements
116 116 openerreqs = set(('revlogv1', 'generaldelta'))
117 117 self.sopener.options = dict((r, 1) for r in requirements
118 118 if r in openerreqs)
119 119
120 120 def _writerequirements(self):
121 121 reqfile = self.opener("requires", "w")
122 122 for r in self.requirements:
123 123 reqfile.write("%s\n" % r)
124 124 reqfile.close()
125 125
126 126 def _checknested(self, path):
127 127 """Determine if path is a legal nested repository."""
128 128 if not path.startswith(self.root):
129 129 return False
130 130 subpath = path[len(self.root) + 1:]
131 131
132 132 # XXX: Checking against the current working copy is wrong in
133 133 # the sense that it can reject things like
134 134 #
135 135 # $ hg cat -r 10 sub/x.txt
136 136 #
137 137 # if sub/ is no longer a subrepository in the working copy
138 138 # parent revision.
139 139 #
140 140 # However, it can of course also allow things that would have
141 141 # been rejected before, such as the above cat command if sub/
142 142 # is a subrepository now, but was a normal directory before.
143 143 # The old path auditor would have rejected by mistake since it
144 144 # panics when it sees sub/.hg/.
145 145 #
146 146 # All in all, checking against the working copy seems sensible
147 147 # since we want to prevent access to nested repositories on
148 148 # the filesystem *now*.
149 149 ctx = self[None]
150 150 parts = util.splitpath(subpath)
151 151 while parts:
152 152 prefix = os.sep.join(parts)
153 153 if prefix in ctx.substate:
154 154 if prefix == subpath:
155 155 return True
156 156 else:
157 157 sub = ctx.sub(prefix)
158 158 return sub.checknested(subpath[len(prefix) + 1:])
159 159 else:
160 160 parts.pop()
161 161 return False
162 162
163 163 @filecache('bookmarks')
164 164 def _bookmarks(self):
165 165 return bookmarks.read(self)
166 166
167 167 @filecache('bookmarks.current')
168 168 def _bookmarkcurrent(self):
169 169 return bookmarks.readcurrent(self)
170 170
171 171 def _writebookmarks(self, marks):
172 172 bookmarks.write(self)
173 173
174 174 @filecache('phaseroots')
175 175 def _phaseroots(self):
176 176 self._dirtyphases = False
177 177 phaseroots = phases.readroots(self)
178 178 phases.filterunknown(self, phaseroots)
179 179 return phaseroots
180 180
181 181 @propertycache
182 182 def _phaserev(self):
183 183 cache = [0] * len(self)
184 184 for phase in phases.trackedphases:
185 185 roots = map(self.changelog.rev, self._phaseroots[phase])
186 186 if roots:
187 187 for rev in roots:
188 188 cache[rev] = phase
189 189 for rev in self.changelog.descendants(*roots):
190 190 cache[rev] = phase
191 191 return cache
192 192
193 193 @filecache('00changelog.i', True)
194 194 def changelog(self):
195 195 c = changelog.changelog(self.sopener)
196 196 if 'HG_PENDING' in os.environ:
197 197 p = os.environ['HG_PENDING']
198 198 if p.startswith(self.root):
199 199 c.readpending('00changelog.i.a')
200 200 return c
201 201
202 202 @filecache('00manifest.i', True)
203 203 def manifest(self):
204 204 return manifest.manifest(self.sopener)
205 205
206 206 @filecache('dirstate')
207 207 def dirstate(self):
208 208 warned = [0]
209 209 def validate(node):
210 210 try:
211 211 self.changelog.rev(node)
212 212 return node
213 213 except error.LookupError:
214 214 if not warned[0]:
215 215 warned[0] = True
216 216 self.ui.warn(_("warning: ignoring unknown"
217 217 " working parent %s!\n") % short(node))
218 218 return nullid
219 219
220 220 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
221 221
222 222 def __getitem__(self, changeid):
223 223 if changeid is None:
224 224 return context.workingctx(self)
225 225 return context.changectx(self, changeid)
226 226
227 227 def __contains__(self, changeid):
228 228 try:
229 229 return bool(self.lookup(changeid))
230 230 except error.RepoLookupError:
231 231 return False
232 232
233 233 def __nonzero__(self):
234 234 return True
235 235
236 236 def __len__(self):
237 237 return len(self.changelog)
238 238
239 239 def __iter__(self):
240 240 for i in xrange(len(self)):
241 241 yield i
242 242
243 243 def revs(self, expr, *args):
244 244 '''Return a list of revisions matching the given revset'''
245 245 expr = revset.formatspec(expr, *args)
246 246 m = revset.match(None, expr)
247 247 return [r for r in m(self, range(len(self)))]
248 248
249 249 def set(self, expr, *args):
250 250 '''
251 251 Yield a context for each matching revision, after doing arg
252 252 replacement via revset.formatspec
253 253 '''
254 254 for r in self.revs(expr, *args):
255 255 yield self[r]
256 256
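# Editor's sketch (not part of the original file): typical use of the revs()
# and set() helpers defined above.  `repo` is assumed to be an instance of
# this localrepository class; the helper name is hypothetical.
def _print_branch_heads(repo, branch='default'):
    # set() runs the expression through revset.formatspec() and yields
    # changectx objects, so each match can be inspected directly
    for ctx in repo.set('heads(branch(%s))', branch):
        repo.ui.write('%d:%s %s\n' % (ctx.rev(), ctx.hex()[:12], ctx.branch()))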
257 257 def url(self):
258 258 return 'file:' + self.root
259 259
260 260 def hook(self, name, throw=False, **args):
261 261 return hook.hook(self.ui, self, name, throw, **args)
262 262
263 263 tag_disallowed = ':\r\n'
264 264
265 265 def _tag(self, names, node, message, local, user, date, extra={}):
266 266 if isinstance(names, str):
267 267 allchars = names
268 268 names = (names,)
269 269 else:
270 270 allchars = ''.join(names)
271 271 for c in self.tag_disallowed:
272 272 if c in allchars:
273 273 raise util.Abort(_('%r cannot be used in a tag name') % c)
274 274
275 275 branches = self.branchmap()
276 276 for name in names:
277 277 self.hook('pretag', throw=True, node=hex(node), tag=name,
278 278 local=local)
279 279 if name in branches:
280 280 self.ui.warn(_("warning: tag %s conflicts with existing"
281 281 " branch name\n") % name)
282 282
283 283 def writetags(fp, names, munge, prevtags):
284 284 fp.seek(0, 2)
285 285 if prevtags and prevtags[-1] != '\n':
286 286 fp.write('\n')
287 287 for name in names:
288 288 m = munge and munge(name) or name
289 289 if self._tagscache.tagtypes and name in self._tagscache.tagtypes:
290 290 old = self.tags().get(name, nullid)
291 291 fp.write('%s %s\n' % (hex(old), m))
292 292 fp.write('%s %s\n' % (hex(node), m))
293 293 fp.close()
294 294
295 295 prevtags = ''
296 296 if local:
297 297 try:
298 298 fp = self.opener('localtags', 'r+')
299 299 except IOError:
300 300 fp = self.opener('localtags', 'a')
301 301 else:
302 302 prevtags = fp.read()
303 303
304 304 # local tags are stored in the current charset
305 305 writetags(fp, names, None, prevtags)
306 306 for name in names:
307 307 self.hook('tag', node=hex(node), tag=name, local=local)
308 308 return
309 309
310 310 try:
311 311 fp = self.wfile('.hgtags', 'rb+')
312 312 except IOError, e:
313 313 if e.errno != errno.ENOENT:
314 314 raise
315 315 fp = self.wfile('.hgtags', 'ab')
316 316 else:
317 317 prevtags = fp.read()
318 318
319 319 # committed tags are stored in UTF-8
320 320 writetags(fp, names, encoding.fromlocal, prevtags)
321 321
322 322 fp.close()
323 323
324 324 if '.hgtags' not in self.dirstate:
325 325 self[None].add(['.hgtags'])
326 326
327 327 m = matchmod.exact(self.root, '', ['.hgtags'])
328 328 tagnode = self.commit(message, user, date, extra=extra, match=m)
329 329
330 330 for name in names:
331 331 self.hook('tag', node=hex(node), tag=name, local=local)
332 332
333 333 return tagnode
334 334
335 335 def tag(self, names, node, message, local, user, date):
336 336 '''tag a revision with one or more symbolic names.
337 337
338 338 names is a list of strings or, when adding a single tag, names may be a
339 339 string.
340 340
341 341 if local is True, the tags are stored in a per-repository file.
342 342 otherwise, they are stored in the .hgtags file, and a new
343 343 changeset is committed with the change.
344 344
345 345 keyword arguments:
346 346
347 347 local: whether to store tags in non-version-controlled file
348 348 (default False)
349 349
350 350 message: commit message to use if committing
351 351
352 352 user: name of user to use if committing
353 353
354 354 date: date tuple to use if committing'''
355 355
356 356 if not local:
357 357 for x in self.status()[:5]:
358 358 if '.hgtags' in x:
359 359 raise util.Abort(_('working copy of .hgtags is changed '
360 360 '(please commit .hgtags manually)'))
361 361
362 362 self.tags() # instantiate the cache
363 363 self._tag(names, node, message, local, user, date)
364 364
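# Editor's sketch (not part of the original file): driving the tag() wrapper
# above roughly the way the built-in tag command does.  `repo` is assumed to
# be a localrepository; a single name or a list of names is accepted.
def _add_local_tag(repo, name, rev='.'):
    node = repo[rev].node()
    # local=True writes to .hg/localtags instead of committing .hgtags
    repo.tag(name, node, 'added tag %s' % name, True, None, None)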
365 365 @propertycache
366 366 def _tagscache(self):
367 367 '''Returns a tagscache object that contains various tag-related caches.'''
368 368
369 369 # This simplifies its cache management by having one decorated
370 370 # function (this one) and the rest simply fetch things from it.
371 371 class tagscache(object):
372 372 def __init__(self):
373 373 # These two define the set of tags for this repository. tags
374 374 # maps tag name to node; tagtypes maps tag name to 'global' or
375 375 # 'local'. (Global tags are defined by .hgtags across all
376 376 # heads, and local tags are defined in .hg/localtags.)
377 377 # They constitute the in-memory cache of tags.
378 378 self.tags = self.tagtypes = None
379 379
380 380 self.nodetagscache = self.tagslist = None
381 381
382 382 cache = tagscache()
383 383 cache.tags, cache.tagtypes = self._findtags()
384 384
385 385 return cache
386 386
387 387 def tags(self):
388 388 '''return a mapping of tag to node'''
389 389 return self._tagscache.tags
390 390
391 391 def _findtags(self):
392 392 '''Do the hard work of finding tags. Return a pair of dicts
393 393 (tags, tagtypes) where tags maps tag name to node, and tagtypes
394 394 maps tag name to a string like \'global\' or \'local\'.
395 395 Subclasses or extensions are free to add their own tags, but
396 396 should be aware that the returned dicts will be retained for the
397 397 duration of the localrepo object.'''
398 398
399 399 # XXX what tagtype should subclasses/extensions use? Currently
400 400 # mq and bookmarks add tags, but do not set the tagtype at all.
401 401 # Should each extension invent its own tag type? Should there
402 402 # be one tagtype for all such "virtual" tags? Or is the status
403 403 # quo fine?
404 404
405 405 alltags = {} # map tag name to (node, hist)
406 406 tagtypes = {}
407 407
408 408 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
409 409 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
410 410
411 411 # Build the return dicts. Have to re-encode tag names because
412 412 # the tags module always uses UTF-8 (in order not to lose info
413 413 # writing to the cache), but the rest of Mercurial wants them in
414 414 # local encoding.
415 415 tags = {}
416 416 for (name, (node, hist)) in alltags.iteritems():
417 417 if node != nullid:
418 418 try:
419 419 # ignore tags to unknown nodes
420 420 self.changelog.lookup(node)
421 421 tags[encoding.tolocal(name)] = node
422 422 except error.LookupError:
423 423 pass
424 424 tags['tip'] = self.changelog.tip()
425 425 tagtypes = dict([(encoding.tolocal(name), value)
426 426 for (name, value) in tagtypes.iteritems()])
427 427 return (tags, tagtypes)
428 428
429 429 def tagtype(self, tagname):
430 430 '''
431 431 return the type of the given tag. result can be:
432 432
433 433 'local' : a local tag
434 434 'global' : a global tag
435 435 None : tag does not exist
436 436 '''
437 437
438 438 return self._tagscache.tagtypes.get(tagname)
439 439
440 440 def tagslist(self):
441 441 '''return a list of tags ordered by revision'''
442 442 if not self._tagscache.tagslist:
443 443 l = []
444 444 for t, n in self.tags().iteritems():
445 445 r = self.changelog.rev(n)
446 446 l.append((r, t, n))
447 447 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
448 448
449 449 return self._tagscache.tagslist
450 450
451 451 def nodetags(self, node):
452 452 '''return the tags associated with a node'''
453 453 if not self._tagscache.nodetagscache:
454 454 nodetagscache = {}
455 455 for t, n in self.tags().iteritems():
456 456 nodetagscache.setdefault(n, []).append(t)
457 457 for tags in nodetagscache.itervalues():
458 458 tags.sort()
459 459 self._tagscache.nodetagscache = nodetagscache
460 460 return self._tagscache.nodetagscache.get(node, [])
461 461
462 462 def nodebookmarks(self, node):
463 463 marks = []
464 464 for bookmark, n in self._bookmarks.iteritems():
465 465 if n == node:
466 466 marks.append(bookmark)
467 467 return sorted(marks)
468 468
469 469 def _branchtags(self, partial, lrev):
470 470 # TODO: rename this function?
471 471 tiprev = len(self) - 1
472 472 if lrev != tiprev:
473 473 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
474 474 self._updatebranchcache(partial, ctxgen)
475 475 self._writebranchcache(partial, self.changelog.tip(), tiprev)
476 476
477 477 return partial
478 478
479 479 def updatebranchcache(self):
480 480 tip = self.changelog.tip()
481 481 if self._branchcache is not None and self._branchcachetip == tip:
482 482 return self._branchcache
483 483
484 484 oldtip = self._branchcachetip
485 485 self._branchcachetip = tip
486 486 if oldtip is None or oldtip not in self.changelog.nodemap:
487 487 partial, last, lrev = self._readbranchcache()
488 488 else:
489 489 lrev = self.changelog.rev(oldtip)
490 490 partial = self._branchcache
491 491
492 492 self._branchtags(partial, lrev)
493 493 # this private cache holds all heads (not just tips)
494 494 self._branchcache = partial
495 495
496 496 def branchmap(self):
497 497 '''returns a dictionary {branch: [branchheads]}'''
498 498 self.updatebranchcache()
499 499 return self._branchcache
500 500
501 501 def branchtags(self):
502 502 '''return a dict where branch names map to the tipmost head of
503 503 the branch; an open head is preferred over a closed one'''
504 504 bt = {}
505 505 for bn, heads in self.branchmap().iteritems():
506 506 tip = heads[-1]
507 507 for h in reversed(heads):
508 508 if 'close' not in self.changelog.read(h)[5]:
509 509 tip = h
510 510 break
511 511 bt[bn] = tip
512 512 return bt
513 513
514 514 def _readbranchcache(self):
515 515 partial = {}
516 516 try:
517 517 f = self.opener("cache/branchheads")
518 518 lines = f.read().split('\n')
519 519 f.close()
520 520 except (IOError, OSError):
521 521 return {}, nullid, nullrev
522 522
523 523 try:
524 524 last, lrev = lines.pop(0).split(" ", 1)
525 525 last, lrev = bin(last), int(lrev)
526 526 if lrev >= len(self) or self[lrev].node() != last:
527 527 # invalidate the cache
528 528 raise ValueError('invalidating branch cache (tip differs)')
529 529 for l in lines:
530 530 if not l:
531 531 continue
532 532 node, label = l.split(" ", 1)
533 533 label = encoding.tolocal(label.strip())
534 534 partial.setdefault(label, []).append(bin(node))
535 535 except KeyboardInterrupt:
536 536 raise
537 537 except Exception, inst:
538 538 if self.ui.debugflag:
539 539 self.ui.warn(str(inst), '\n')
540 540 partial, last, lrev = {}, nullid, nullrev
541 541 return partial, last, lrev
542 542
543 543 def _writebranchcache(self, branches, tip, tiprev):
544 544 try:
545 545 f = self.opener("cache/branchheads", "w", atomictemp=True)
546 546 f.write("%s %s\n" % (hex(tip), tiprev))
547 547 for label, nodes in branches.iteritems():
548 548 for node in nodes:
549 549 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
550 550 f.close()
551 551 except (IOError, OSError):
552 552 pass
553 553
554 554 def _updatebranchcache(self, partial, ctxgen):
555 555 # collect new branch entries
556 556 newbranches = {}
557 557 for c in ctxgen:
558 558 newbranches.setdefault(c.branch(), []).append(c.node())
559 559 # if older branchheads are reachable from new ones, they aren't
560 560 # really branchheads. Note checking parents is insufficient:
561 561 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
562 562 for branch, newnodes in newbranches.iteritems():
563 563 bheads = partial.setdefault(branch, [])
564 564 bheads.extend(newnodes)
565 565 if len(bheads) <= 1:
566 566 continue
567 567 bheads = sorted(bheads, key=lambda x: self[x].rev())
568 568 # starting from tip means fewer passes over reachable
569 569 while newnodes:
570 570 latest = newnodes.pop()
571 571 if latest not in bheads:
572 572 continue
573 573 minbhrev = self[bheads[0]].node()
574 574 reachable = self.changelog.reachable(latest, minbhrev)
575 575 reachable.remove(latest)
576 576 if reachable:
577 577 bheads = [b for b in bheads if b not in reachable]
578 578 partial[branch] = bheads
579 579
580 580 def lookup(self, key):
581 581 if isinstance(key, int):
582 582 return self.changelog.node(key)
583 583 elif key == '.':
584 584 return self.dirstate.p1()
585 585 elif key == 'null':
586 586 return nullid
587 587 elif key == 'tip':
588 588 return self.changelog.tip()
589 589 n = self.changelog._match(key)
590 590 if n:
591 591 return n
592 592 if key in self._bookmarks:
593 593 return self._bookmarks[key]
594 594 if key in self.tags():
595 595 return self.tags()[key]
596 596 if key in self.branchtags():
597 597 return self.branchtags()[key]
598 598 n = self.changelog._partialmatch(key)
599 599 if n:
600 600 return n
601 601
602 602 # can't find key, check if it might have come from damaged dirstate
603 603 if key in self.dirstate.parents():
604 604 raise error.Abort(_("working directory has unknown parent '%s'!")
605 605 % short(key))
606 606 try:
607 607 if len(key) == 20:
608 608 key = hex(key)
609 609 except TypeError:
610 610 pass
611 611 raise error.RepoLookupError(_("unknown revision '%s'") % key)
612 612
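# Editor's sketch (not part of the original file): lookup() above resolves,
# in order, integer revisions, '.', 'null', 'tip', exact node ids, bookmarks,
# tags, branch names and finally unambiguous node prefixes.  hex and error
# are the module-level imports used throughout this file.
def _resolve(repo, key):
    try:
        return hex(repo.lookup(key))
    except error.RepoLookupError:
        return None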
613 613 def lookupbranch(self, key, remote=None):
614 614 repo = remote or self
615 615 if key in repo.branchmap():
616 616 return key
617 617
618 618 repo = (remote and remote.local()) and remote or self
619 619 return repo[key].branch()
620 620
621 621 def known(self, nodes):
622 622 nm = self.changelog.nodemap
623 623 return [(n in nm) for n in nodes]
624 624
625 625 def local(self):
626 626 return self
627 627
628 628 def join(self, f):
629 629 return os.path.join(self.path, f)
630 630
631 631 def wjoin(self, f):
632 632 return os.path.join(self.root, f)
633 633
634 634 def file(self, f):
635 635 if f[0] == '/':
636 636 f = f[1:]
637 637 return filelog.filelog(self.sopener, f)
638 638
639 639 def changectx(self, changeid):
640 640 return self[changeid]
641 641
642 642 def parents(self, changeid=None):
643 643 '''get list of changectxs for parents of changeid'''
644 644 return self[changeid].parents()
645 645
646 646 def filectx(self, path, changeid=None, fileid=None):
647 647 """changeid can be a changeset revision, node, or tag.
648 648 fileid can be a file revision or node."""
649 649 return context.filectx(self, path, changeid, fileid)
650 650
651 651 def getcwd(self):
652 652 return self.dirstate.getcwd()
653 653
654 654 def pathto(self, f, cwd=None):
655 655 return self.dirstate.pathto(f, cwd)
656 656
657 657 def wfile(self, f, mode='r'):
658 658 return self.wopener(f, mode)
659 659
660 660 def _link(self, f):
661 661 return os.path.islink(self.wjoin(f))
662 662
663 663 def _loadfilter(self, filter):
664 664 if filter not in self.filterpats:
665 665 l = []
666 666 for pat, cmd in self.ui.configitems(filter):
667 667 if cmd == '!':
668 668 continue
669 669 mf = matchmod.match(self.root, '', [pat])
670 670 fn = None
671 671 params = cmd
672 672 for name, filterfn in self._datafilters.iteritems():
673 673 if cmd.startswith(name):
674 674 fn = filterfn
675 675 params = cmd[len(name):].lstrip()
676 676 break
677 677 if not fn:
678 678 fn = lambda s, c, **kwargs: util.filter(s, c)
679 679 # Wrap old filters not supporting keyword arguments
680 680 if not inspect.getargspec(fn)[2]:
681 681 oldfn = fn
682 682 fn = lambda s, c, **kwargs: oldfn(s, c)
683 683 l.append((mf, fn, params))
684 684 self.filterpats[filter] = l
685 685 return self.filterpats[filter]
686 686
687 687 def _filter(self, filterpats, filename, data):
688 688 for mf, fn, cmd in filterpats:
689 689 if mf(filename):
690 690 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
691 691 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
692 692 break
693 693
694 694 return data
695 695
696 696 @propertycache
697 697 def _encodefilterpats(self):
698 698 return self._loadfilter('encode')
699 699
700 700 @propertycache
701 701 def _decodefilterpats(self):
702 702 return self._loadfilter('decode')
703 703
704 704 def adddatafilter(self, name, filter):
705 705 self._datafilters[name] = filter
706 706
707 707 def wread(self, filename):
708 708 if self._link(filename):
709 709 data = os.readlink(self.wjoin(filename))
710 710 else:
711 711 data = self.wopener.read(filename)
712 712 return self._filter(self._encodefilterpats, filename, data)
713 713
714 714 def wwrite(self, filename, data, flags):
715 715 data = self._filter(self._decodefilterpats, filename, data)
716 716 if 'l' in flags:
717 717 self.wopener.symlink(data, filename)
718 718 else:
719 719 self.wopener.write(filename, data)
720 720 if 'x' in flags:
721 721 util.setflags(self.wjoin(filename), False, True)
722 722
723 723 def wwritedata(self, filename, data):
724 724 return self._filter(self._decodefilterpats, filename, data)
725 725
726 726 def transaction(self, desc):
727 727 tr = self._transref and self._transref() or None
728 728 if tr and tr.running():
729 729 return tr.nest()
730 730
731 731 # abort here if the journal already exists
732 732 if os.path.exists(self.sjoin("journal")):
733 733 raise error.RepoError(
734 734 _("abandoned transaction found - run hg recover"))
735 735
736 736 journalfiles = self._writejournal(desc)
737 737 renames = [(x, undoname(x)) for x in journalfiles]
738 738
739 739 tr = transaction.transaction(self.ui.warn, self.sopener,
740 740 self.sjoin("journal"),
741 741 aftertrans(renames),
742 742 self.store.createmode)
743 743 self._transref = weakref.ref(tr)
744 744 return tr
745 745
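# Editor's sketch (not part of the original file): the usual pattern around
# the transaction() helper above, mirroring commitctx() further down --
# close() marks success, while release() in a finally block rolls back an
# unclosed journal.
def _with_transaction(repo, desc, work):
    tr = repo.transaction(desc)
    try:
        work(tr)
        tr.close()
    finally:
        tr.release()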
746 746 def _writejournal(self, desc):
747 747 # save dirstate for rollback
748 748 try:
749 749 ds = self.opener.read("dirstate")
750 750 except IOError:
751 751 ds = ""
752 752 self.opener.write("journal.dirstate", ds)
753 753 self.opener.write("journal.branch",
754 754 encoding.fromlocal(self.dirstate.branch()))
755 755 self.opener.write("journal.desc",
756 756 "%d\n%s\n" % (len(self), desc))
757 757
758 758 bkname = self.join('bookmarks')
759 759 if os.path.exists(bkname):
760 760 util.copyfile(bkname, self.join('journal.bookmarks'))
761 761 else:
762 762 self.opener.write('journal.bookmarks', '')
763 763 phasesname = self.sjoin('phaseroots')
764 764 if os.path.exists(phasesname):
765 765 util.copyfile(phasesname, self.sjoin('journal.phaseroots'))
766 766 else:
767 767 self.sopener.write('journal.phaseroots', '')
768 768
769 769 return (self.sjoin('journal'), self.join('journal.dirstate'),
770 770 self.join('journal.branch'), self.join('journal.desc'),
771 771 self.join('journal.bookmarks'),
772 772 self.sjoin('journal.phaseroots'))
773 773
774 774 def recover(self):
775 775 lock = self.lock()
776 776 try:
777 777 if os.path.exists(self.sjoin("journal")):
778 778 self.ui.status(_("rolling back interrupted transaction\n"))
779 779 transaction.rollback(self.sopener, self.sjoin("journal"),
780 780 self.ui.warn)
781 781 self.invalidate()
782 782 return True
783 783 else:
784 784 self.ui.warn(_("no interrupted transaction available\n"))
785 785 return False
786 786 finally:
787 787 lock.release()
788 788
789 789 def rollback(self, dryrun=False, force=False):
790 790 wlock = lock = None
791 791 try:
792 792 wlock = self.wlock()
793 793 lock = self.lock()
794 794 if os.path.exists(self.sjoin("undo")):
795 795 return self._rollback(dryrun, force)
796 796 else:
797 797 self.ui.warn(_("no rollback information available\n"))
798 798 return 1
799 799 finally:
800 800 release(lock, wlock)
801 801
802 802 def _rollback(self, dryrun, force):
803 803 ui = self.ui
804 804 try:
805 805 args = self.opener.read('undo.desc').splitlines()
806 806 (oldlen, desc, detail) = (int(args[0]), args[1], None)
807 807 if len(args) >= 3:
808 808 detail = args[2]
809 809 oldtip = oldlen - 1
810 810
811 811 if detail and ui.verbose:
812 812 msg = (_('repository tip rolled back to revision %s'
813 813 ' (undo %s: %s)\n')
814 814 % (oldtip, desc, detail))
815 815 else:
816 816 msg = (_('repository tip rolled back to revision %s'
817 817 ' (undo %s)\n')
818 818 % (oldtip, desc))
819 819 except IOError:
820 820 msg = _('rolling back unknown transaction\n')
821 821 desc = None
822 822
823 823 if not force and self['.'] != self['tip'] and desc == 'commit':
824 824 raise util.Abort(
825 825 _('rollback of last commit while not checked out '
826 826 'may lose data'), hint=_('use -f to force'))
827 827
828 828 ui.status(msg)
829 829 if dryrun:
830 830 return 0
831 831
832 832 parents = self.dirstate.parents()
833 833 transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
834 834 if os.path.exists(self.join('undo.bookmarks')):
835 835 util.rename(self.join('undo.bookmarks'),
836 836 self.join('bookmarks'))
837 837 if os.path.exists(self.sjoin('undo.phaseroots')):
838 838 util.rename(self.sjoin('undo.phaseroots'),
839 839 self.sjoin('phaseroots'))
840 840 self.invalidate()
841 841
842 842 parentgone = (parents[0] not in self.changelog.nodemap or
843 843 parents[1] not in self.changelog.nodemap)
844 844 if parentgone:
845 845 util.rename(self.join('undo.dirstate'), self.join('dirstate'))
846 846 try:
847 847 branch = self.opener.read('undo.branch')
848 848 self.dirstate.setbranch(branch)
849 849 except IOError:
850 850 ui.warn(_('named branch could not be reset: '
851 851 'current branch is still \'%s\'\n')
852 852 % self.dirstate.branch())
853 853
854 854 self.dirstate.invalidate()
855 855 self.destroyed()
856 856 parents = tuple([p.rev() for p in self.parents()])
857 857 if len(parents) > 1:
858 858 ui.status(_('working directory now based on '
859 859 'revisions %d and %d\n') % parents)
860 860 else:
861 861 ui.status(_('working directory now based on '
862 862 'revision %d\n') % parents)
863 863 return 0
864 864
865 865 def invalidatecaches(self):
866 866 try:
867 867 delattr(self, '_tagscache')
868 868 except AttributeError:
869 869 pass
870 870
871 871 self._branchcache = None # in UTF-8
872 872 self._branchcachetip = None
873 873
874 874 def invalidatedirstate(self):
875 875 '''Invalidates the dirstate, causing the next call to dirstate
876 876 to check whether it was modified since the last time it was read,
877 877 rereading it if it has changed.
878 878 
879 879 This is different from dirstate.invalidate() in that it does not
880 880 always reread the dirstate. Use dirstate.invalidate() if you want to
881 881 explicitly reread the dirstate (i.e. to restore it to a previous
882 882 known good state).'''
883 883 try:
884 884 delattr(self, 'dirstate')
885 885 except AttributeError:
886 886 pass
887 887
888 888 def invalidate(self):
889 889 for k in self._filecache:
890 890 # dirstate is invalidated separately in invalidatedirstate()
891 891 if k == 'dirstate':
892 892 continue
893 893
894 894 try:
895 895 delattr(self, k)
896 896 except AttributeError:
897 897 pass
898 898 self.invalidatecaches()
899 899
900 900 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
901 901 try:
902 902 l = lock.lock(lockname, 0, releasefn, desc=desc)
903 903 except error.LockHeld, inst:
904 904 if not wait:
905 905 raise
906 906 self.ui.warn(_("waiting for lock on %s held by %r\n") %
907 907 (desc, inst.locker))
908 908 # default to 600 seconds timeout
909 909 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
910 910 releasefn, desc=desc)
911 911 if acquirefn:
912 912 acquirefn()
913 913 return l
914 914
915 915 def _postrelease(self, callback):
916 916 """add a callback to the current repository lock.
917 917
918 918 The callback will be executed on lock release."""
919 919 l = self._lockref and self._lockref()
920 920 assert l is not None
921 921 assert l.held
922 922 l.postreleasehooks.append(callback)
923 923
924 924 def lock(self, wait=True):
925 925 '''Lock the repository store (.hg/store) and return a weak reference
926 926 to the lock. Use this before modifying the store (e.g. committing or
927 927 stripping). If you are opening a transaction, get a lock as well.'''
928 928 l = self._lockref and self._lockref()
929 929 if l is not None and l.held:
930 930 l.lock()
931 931 return l
932 932
933 933 def unlock():
934 934 self.store.write()
935 935 if self._dirtyphases:
936 936 phases.writeroots(self)
937 937 for k, ce in self._filecache.items():
938 938 if k == 'dirstate':
939 939 continue
940 940 ce.refresh()
941 941
942 942 l = self._lock(self.sjoin("lock"), wait, unlock,
943 943 self.invalidate, _('repository %s') % self.origroot)
944 944 self._lockref = weakref.ref(l)
945 945 return l
946 946
947 947 def wlock(self, wait=True):
948 948 '''Lock the non-store parts of the repository (everything under
949 949 .hg except .hg/store) and return a weak reference to the lock.
950 950 Use this before modifying files in .hg.'''
951 951 l = self._wlockref and self._wlockref()
952 952 if l is not None and l.held:
953 953 l.lock()
954 954 return l
955 955
956 956 def unlock():
957 957 self.dirstate.write()
958 958 ce = self._filecache.get('dirstate')
959 959 if ce:
960 960 ce.refresh()
961 961
962 962 l = self._lock(self.join("wlock"), wait, unlock,
963 963 self.invalidatedirstate, _('working directory of %s') %
964 964 self.origroot)
965 965 self._wlockref = weakref.ref(l)
966 966 return l
967 967
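# Editor's sketch (not part of the original file): lock() and wlock() above
# return (weakly referenced) locks that callers release in a finally block,
# as recover() and rollback() do.
def _locked_store_operation(repo, work):
    lock = repo.lock()          # lock on .hg/store; use wlock() for .hg itself
    try:
        return work()
    finally:
        lock.release()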
968 968 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
969 969 """
970 970 commit an individual file as part of a larger transaction
971 971 """
972 972
973 973 fname = fctx.path()
974 974 text = fctx.data()
975 975 flog = self.file(fname)
976 976 fparent1 = manifest1.get(fname, nullid)
977 977 fparent2 = fparent2o = manifest2.get(fname, nullid)
978 978
979 979 meta = {}
980 980 copy = fctx.renamed()
981 981 if copy and copy[0] != fname:
982 982 # Mark the new revision of this file as a copy of another
983 983 # file. This copy data will effectively act as a parent
984 984 # of this new revision. If this is a merge, the first
985 985 # parent will be the nullid (meaning "look up the copy data")
986 986 # and the second one will be the other parent. For example:
987 987 #
988 988 # 0 --- 1 --- 3 rev1 changes file foo
989 989 # \ / rev2 renames foo to bar and changes it
990 990 # \- 2 -/ rev3 should have bar with all changes and
991 991 # should record that bar descends from
992 992 # bar in rev2 and foo in rev1
993 993 #
994 994 # this allows this merge to succeed:
995 995 #
996 996 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
997 997 # \ / merging rev3 and rev4 should use bar@rev2
998 998 # \- 2 --- 4 as the merge base
999 999 #
1000 1000
1001 1001 cfname = copy[0]
1002 1002 crev = manifest1.get(cfname)
1003 1003 newfparent = fparent2
1004 1004
1005 1005 if manifest2: # branch merge
1006 1006 if fparent2 == nullid or crev is None: # copied on remote side
1007 1007 if cfname in manifest2:
1008 1008 crev = manifest2[cfname]
1009 1009 newfparent = fparent1
1010 1010
1011 1011 # find source in nearest ancestor if we've lost track
1012 1012 if not crev:
1013 1013 self.ui.debug(" %s: searching for copy revision for %s\n" %
1014 1014 (fname, cfname))
1015 1015 for ancestor in self[None].ancestors():
1016 1016 if cfname in ancestor:
1017 1017 crev = ancestor[cfname].filenode()
1018 1018 break
1019 1019
1020 1020 if crev:
1021 1021 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1022 1022 meta["copy"] = cfname
1023 1023 meta["copyrev"] = hex(crev)
1024 1024 fparent1, fparent2 = nullid, newfparent
1025 1025 else:
1026 1026 self.ui.warn(_("warning: can't find ancestor for '%s' "
1027 1027 "copied from '%s'!\n") % (fname, cfname))
1028 1028
1029 1029 elif fparent2 != nullid:
1030 1030 # is one parent an ancestor of the other?
1031 1031 fparentancestor = flog.ancestor(fparent1, fparent2)
1032 1032 if fparentancestor == fparent1:
1033 1033 fparent1, fparent2 = fparent2, nullid
1034 1034 elif fparentancestor == fparent2:
1035 1035 fparent2 = nullid
1036 1036
1037 1037 # is the file changed?
1038 1038 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1039 1039 changelist.append(fname)
1040 1040 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1041 1041
1042 1042 # are just the flags changed during merge?
1043 1043 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
1044 1044 changelist.append(fname)
1045 1045
1046 1046 return fparent1
1047 1047
1048 1048 def commit(self, text="", user=None, date=None, match=None, force=False,
1049 1049 editor=False, extra={}):
1050 1050 """Add a new revision to current repository.
1051 1051
1052 1052 Revision information is gathered from the working directory,
1053 1053 match can be used to filter the committed files. If editor is
1054 1054 supplied, it is called to get a commit message.
1055 1055 """
1056 1056
1057 1057 def fail(f, msg):
1058 1058 raise util.Abort('%s: %s' % (f, msg))
1059 1059
1060 1060 if not match:
1061 1061 match = matchmod.always(self.root, '')
1062 1062
1063 1063 if not force:
1064 1064 vdirs = []
1065 1065 match.dir = vdirs.append
1066 1066 match.bad = fail
1067 1067
1068 1068 wlock = self.wlock()
1069 1069 try:
1070 1070 wctx = self[None]
1071 1071 merge = len(wctx.parents()) > 1
1072 1072
1073 1073 if (not force and merge and match and
1074 1074 (match.files() or match.anypats())):
1075 1075 raise util.Abort(_('cannot partially commit a merge '
1076 1076 '(do not specify files or patterns)'))
1077 1077
1078 1078 changes = self.status(match=match, clean=force)
1079 1079 if force:
1080 1080 changes[0].extend(changes[6]) # mq may commit unchanged files
1081 1081
1082 1082 # check subrepos
1083 1083 subs = []
1084 1084 removedsubs = set()
1085 1085 if '.hgsub' in wctx:
1086 1086 # only manage subrepos and .hgsubstate if .hgsub is present
1087 1087 for p in wctx.parents():
1088 1088 removedsubs.update(s for s in p.substate if match(s))
1089 1089 for s in wctx.substate:
1090 1090 removedsubs.discard(s)
1091 1091 if match(s) and wctx.sub(s).dirty():
1092 1092 subs.append(s)
1093 1093 if (subs or removedsubs):
1094 1094 if (not match('.hgsub') and
1095 1095 '.hgsub' in (wctx.modified() + wctx.added())):
1096 1096 raise util.Abort(
1097 1097 _("can't commit subrepos without .hgsub"))
1098 1098 if '.hgsubstate' not in changes[0]:
1099 1099 changes[0].insert(0, '.hgsubstate')
1100 1100 if '.hgsubstate' in changes[2]:
1101 1101 changes[2].remove('.hgsubstate')
1102 1102 elif '.hgsub' in changes[2]:
1103 1103 # clean up .hgsubstate when .hgsub is removed
1104 1104 if ('.hgsubstate' in wctx and
1105 1105 '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
1106 1106 changes[2].insert(0, '.hgsubstate')
1107 1107
1108 1108 if subs and not self.ui.configbool('ui', 'commitsubrepos', False):
1109 1109 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
1110 1110 if changedsubs:
1111 1111 raise util.Abort(_("uncommitted changes in subrepo %s")
1112 1112 % changedsubs[0],
1113 1113 hint=_("use --subrepos for recursive commit"))
1114 1114
1115 1115 # make sure all explicit patterns are matched
1116 1116 if not force and match.files():
1117 1117 matched = set(changes[0] + changes[1] + changes[2])
1118 1118
1119 1119 for f in match.files():
1120 1120 if f == '.' or f in matched or f in wctx.substate:
1121 1121 continue
1122 1122 if f in changes[3]: # missing
1123 1123 fail(f, _('file not found!'))
1124 1124 if f in vdirs: # visited directory
1125 1125 d = f + '/'
1126 1126 for mf in matched:
1127 1127 if mf.startswith(d):
1128 1128 break
1129 1129 else:
1130 1130 fail(f, _("no match under directory!"))
1131 1131 elif f not in self.dirstate:
1132 1132 fail(f, _("file not tracked!"))
1133 1133
1134 1134 if (not force and not extra.get("close") and not merge
1135 1135 and not (changes[0] or changes[1] or changes[2])
1136 1136 and wctx.branch() == wctx.p1().branch()):
1137 1137 return None
1138 1138
1139 1139 ms = mergemod.mergestate(self)
1140 1140 for f in changes[0]:
1141 1141 if f in ms and ms[f] == 'u':
1142 1142 raise util.Abort(_("unresolved merge conflicts "
1143 1143 "(see hg help resolve)"))
1144 1144
1145 1145 cctx = context.workingctx(self, text, user, date, extra, changes)
1146 1146 if editor:
1147 1147 cctx._text = editor(self, cctx, subs)
1148 1148 edited = (text != cctx._text)
1149 1149
1150 1150 # commit subs
1151 1151 if subs or removedsubs:
1152 1152 state = wctx.substate.copy()
1153 1153 for s in sorted(subs):
1154 1154 sub = wctx.sub(s)
1155 1155 self.ui.status(_('committing subrepository %s\n') %
1156 1156 subrepo.subrelpath(sub))
1157 1157 sr = sub.commit(cctx._text, user, date)
1158 1158 state[s] = (state[s][0], sr)
1159 1159 subrepo.writestate(self, state)
1160 1160
1161 1161 # Save commit message in case this transaction gets rolled back
1162 1162 # (e.g. by a pretxncommit hook). Leave the content alone on
1163 1163 # the assumption that the user will use the same editor again.
1164 1164 msgfn = self.savecommitmessage(cctx._text)
1165 1165
1166 1166 p1, p2 = self.dirstate.parents()
1167 1167 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1168 1168 try:
1169 1169 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1170 1170 ret = self.commitctx(cctx, True)
1171 1171 except:
1172 1172 if edited:
1173 1173 self.ui.write(
1174 1174 _('note: commit message saved in %s\n') % msgfn)
1175 1175 raise
1176 1176
1177 1177 # update bookmarks, dirstate and mergestate
1178 1178 bookmarks.update(self, p1, ret)
1179 1179 for f in changes[0] + changes[1]:
1180 1180 self.dirstate.normal(f)
1181 1181 for f in changes[2]:
1182 1182 self.dirstate.drop(f)
1183 1183 self.dirstate.setparents(ret)
1184 1184 ms.reset()
1185 1185 finally:
1186 1186 wlock.release()
1187 1187
1188 1188 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1189 1189 return ret
1190 1190
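# Editor's sketch (not part of the original file): a minimal commit through
# the method above; match and editor are optional, and None is returned when
# there is nothing to commit, as the code above shows.
def _commit_all(repo, message, user=None):
    node = repo.commit(text=message, user=user)
    return node and hex(node) or None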
1191 1191 def commitctx(self, ctx, error=False):
1192 1192 """Add a new revision to current repository.
1193 1193 Revision information is passed via the context argument.
1194 1194 """
1195 1195
1196 1196 tr = lock = None
1197 1197 removed = list(ctx.removed())
1198 1198 p1, p2 = ctx.p1(), ctx.p2()
1199 1199 user = ctx.user()
1200 1200
1201 1201 lock = self.lock()
1202 1202 try:
1203 1203 tr = self.transaction("commit")
1204 1204 trp = weakref.proxy(tr)
1205 1205
1206 1206 if ctx.files():
1207 1207 m1 = p1.manifest().copy()
1208 1208 m2 = p2.manifest()
1209 1209
1210 1210 # check in files
1211 1211 new = {}
1212 1212 changed = []
1213 1213 linkrev = len(self)
1214 1214 for f in sorted(ctx.modified() + ctx.added()):
1215 1215 self.ui.note(f + "\n")
1216 1216 try:
1217 1217 fctx = ctx[f]
1218 1218 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1219 1219 changed)
1220 1220 m1.set(f, fctx.flags())
1221 1221 except OSError, inst:
1222 1222 self.ui.warn(_("trouble committing %s!\n") % f)
1223 1223 raise
1224 1224 except IOError, inst:
1225 1225 errcode = getattr(inst, 'errno', errno.ENOENT)
1226 1226 if error or errcode and errcode != errno.ENOENT:
1227 1227 self.ui.warn(_("trouble committing %s!\n") % f)
1228 1228 raise
1229 1229 else:
1230 1230 removed.append(f)
1231 1231
1232 1232 # update manifest
1233 1233 m1.update(new)
1234 1234 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1235 1235 drop = [f for f in removed if f in m1]
1236 1236 for f in drop:
1237 1237 del m1[f]
1238 1238 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1239 1239 p2.manifestnode(), (new, drop))
1240 1240 files = changed + removed
1241 1241 else:
1242 1242 mn = p1.manifestnode()
1243 1243 files = []
1244 1244
1245 1245 # update changelog
1246 1246 self.changelog.delayupdate()
1247 1247 n = self.changelog.add(mn, files, ctx.description(),
1248 1248 trp, p1.node(), p2.node(),
1249 1249 user, ctx.date(), ctx.extra().copy())
1250 1250 p = lambda: self.changelog.writepending() and self.root or ""
1251 1251 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1252 1252 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1253 1253 parent2=xp2, pending=p)
1254 1254 self.changelog.finalize(trp)
1255 1255 # ensure the new commit is 1-phase
1256 1256 phases.retractboundary(self, 1, [n])
1257 1257 tr.close()
1258 1258
1259 1259 if self._branchcache:
1260 1260 self.updatebranchcache()
1261 1261 return n
1262 1262 finally:
1263 1263 if tr:
1264 1264 tr.release()
1265 1265 lock.release()
1266 1266
1267 1267 def destroyed(self):
1268 1268 '''Inform the repository that nodes have been destroyed.
1269 1269 Intended for use by strip and rollback, so there's a common
1270 1270 place for anything that has to be done after destroying history.'''
1271 1271 # XXX it might be nice if we could take the list of destroyed
1272 1272 # nodes, but I don't see an easy way for rollback() to do that
1273 1273
1274 1274 # Ensure the persistent tag cache is updated. Doing it now
1275 1275 # means that the tag cache only has to worry about destroyed
1276 1276 # heads immediately after a strip/rollback. That in turn
1277 1277 # guarantees that "cachetip == currenttip" (comparing both rev
1278 1278 # and node) always means no nodes have been added or destroyed.
1279 1279
1280 1280 # XXX this is suboptimal when qrefresh'ing: we strip the current
1281 1281 # head, refresh the tag cache, then immediately add a new head.
1282 1282 # But I think doing it this way is necessary for the "instant
1283 1283 # tag cache retrieval" case to work.
1284 1284 self.invalidatecaches()
1285 1285
1286 1286 def walk(self, match, node=None):
1287 1287 '''
1288 1288 walk recursively through the directory tree or a given
1289 1289 changeset, finding all files matched by the match
1290 1290 function
1291 1291 '''
1292 1292 return self[node].walk(match)
1293 1293
1294 1294 def status(self, node1='.', node2=None, match=None,
1295 1295 ignored=False, clean=False, unknown=False,
1296 1296 listsubrepos=False):
1297 1297 """return status of files between two nodes or node and working directory
1298 1298
1299 1299 If node1 is None, use the first dirstate parent instead.
1300 1300 If node2 is None, compare node1 with working directory.
1301 1301 """
1302 1302
1303 1303 def mfmatches(ctx):
1304 1304 mf = ctx.manifest().copy()
1305 1305 for fn in mf.keys():
1306 1306 if not match(fn):
1307 1307 del mf[fn]
1308 1308 return mf
1309 1309
1310 1310 if isinstance(node1, context.changectx):
1311 1311 ctx1 = node1
1312 1312 else:
1313 1313 ctx1 = self[node1]
1314 1314 if isinstance(node2, context.changectx):
1315 1315 ctx2 = node2
1316 1316 else:
1317 1317 ctx2 = self[node2]
1318 1318
1319 1319 working = ctx2.rev() is None
1320 1320 parentworking = working and ctx1 == self['.']
1321 1321 match = match or matchmod.always(self.root, self.getcwd())
1322 1322 listignored, listclean, listunknown = ignored, clean, unknown
1323 1323
1324 1324 # load earliest manifest first for caching reasons
1325 1325 if not working and ctx2.rev() < ctx1.rev():
1326 1326 ctx2.manifest()
1327 1327
1328 1328 if not parentworking:
1329 1329 def bad(f, msg):
1330 1330 if f not in ctx1:
1331 1331 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1332 1332 match.bad = bad
1333 1333
1334 1334 if working: # we need to scan the working dir
1335 1335 subrepos = []
1336 1336 if '.hgsub' in self.dirstate:
1337 1337 subrepos = ctx2.substate.keys()
1338 1338 s = self.dirstate.status(match, subrepos, listignored,
1339 1339 listclean, listunknown)
1340 1340 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1341 1341
1342 1342 # check for any possibly clean files
1343 1343 if parentworking and cmp:
1344 1344 fixup = []
1345 1345 # do a full compare of any files that might have changed
1346 1346 for f in sorted(cmp):
1347 1347 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1348 1348 or ctx1[f].cmp(ctx2[f])):
1349 1349 modified.append(f)
1350 1350 else:
1351 1351 fixup.append(f)
1352 1352
1353 1353 # update dirstate for files that are actually clean
1354 1354 if fixup:
1355 1355 if listclean:
1356 1356 clean += fixup
1357 1357
1358 1358 try:
1359 1359 # updating the dirstate is optional
1360 1360 # so we don't wait on the lock
1361 1361 wlock = self.wlock(False)
1362 1362 try:
1363 1363 for f in fixup:
1364 1364 self.dirstate.normal(f)
1365 1365 finally:
1366 1366 wlock.release()
1367 1367 except error.LockError:
1368 1368 pass
1369 1369
1370 1370 if not parentworking:
1371 1371 mf1 = mfmatches(ctx1)
1372 1372 if working:
1373 1373 # we are comparing working dir against non-parent
1374 1374 # generate a pseudo-manifest for the working dir
1375 1375 mf2 = mfmatches(self['.'])
1376 1376 for f in cmp + modified + added:
1377 1377 mf2[f] = None
1378 1378 mf2.set(f, ctx2.flags(f))
1379 1379 for f in removed:
1380 1380 if f in mf2:
1381 1381 del mf2[f]
1382 1382 else:
1383 1383 # we are comparing two revisions
1384 1384 deleted, unknown, ignored = [], [], []
1385 1385 mf2 = mfmatches(ctx2)
1386 1386
1387 1387 modified, added, clean = [], [], []
1388 1388 for fn in mf2:
1389 1389 if fn in mf1:
1390 1390 if (fn not in deleted and
1391 1391 (mf1.flags(fn) != mf2.flags(fn) or
1392 1392 (mf1[fn] != mf2[fn] and
1393 1393 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1394 1394 modified.append(fn)
1395 1395 elif listclean:
1396 1396 clean.append(fn)
1397 1397 del mf1[fn]
1398 1398 elif fn not in deleted:
1399 1399 added.append(fn)
1400 1400 removed = mf1.keys()
1401 1401
1402 1402 if working and modified and not self.dirstate._checklink:
1403 1403 # Symlink placeholders may get non-symlink-like contents
1404 1404 # via user error or dereferencing by NFS or Samba servers,
1405 1405 # so we filter out any placeholders that don't look like a
1406 1406 # symlink
1407 1407 sane = []
1408 1408 for f in modified:
1409 1409 if ctx2.flags(f) == 'l':
1410 1410 d = ctx2[f].data()
1411 1411 if len(d) >= 1024 or '\n' in d or util.binary(d):
1412 1412 self.ui.debug('ignoring suspect symlink placeholder'
1413 1413 ' "%s"\n' % f)
1414 1414 continue
1415 1415 sane.append(f)
1416 1416 modified = sane
1417 1417
1418 1418 r = modified, added, removed, deleted, unknown, ignored, clean
1419 1419
1420 1420 if listsubrepos:
1421 1421 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1422 1422 if working:
1423 1423 rev2 = None
1424 1424 else:
1425 1425 rev2 = ctx2.substate[subpath][1]
1426 1426 try:
1427 1427 submatch = matchmod.narrowmatcher(subpath, match)
1428 1428 s = sub.status(rev2, match=submatch, ignored=listignored,
1429 1429 clean=listclean, unknown=listunknown,
1430 1430 listsubrepos=True)
1431 1431 for rfiles, sfiles in zip(r, s):
1432 1432 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1433 1433 except error.LookupError:
1434 1434 self.ui.status(_("skipping missing subrepository: %s\n")
1435 1435 % subpath)
1436 1436
1437 1437 for l in r:
1438 1438 l.sort()
1439 1439 return r
1440 1440
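# Editor's sketch (not part of the original file): status() always returns a
# 7-tuple of sorted file lists in the order assembled above.
def _summarize_status(repo):
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(unknown=True, ignored=True, clean=True)
    repo.ui.write('%d modified, %d added, %d removed\n'
                  % (len(modified), len(added), len(removed)))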
1441 1441 def heads(self, start=None):
1442 1442 heads = self.changelog.heads(start)
1443 1443 # sort the output in rev descending order
1444 1444 return sorted(heads, key=self.changelog.rev, reverse=True)
1445 1445
1446 1446 def branchheads(self, branch=None, start=None, closed=False):
1447 1447 '''return a (possibly filtered) list of heads for the given branch
1448 1448
1449 1449 Heads are returned in topological order, from newest to oldest.
1450 1450 If branch is None, use the dirstate branch.
1451 1451 If start is not None, return only heads reachable from start.
1452 1452 If closed is True, return heads that are marked as closed as well.
1453 1453 '''
1454 1454 if branch is None:
1455 1455 branch = self[None].branch()
1456 1456 branches = self.branchmap()
1457 1457 if branch not in branches:
1458 1458 return []
1459 1459 # the cache returns heads ordered lowest to highest
1460 1460 bheads = list(reversed(branches[branch]))
1461 1461 if start is not None:
1462 1462 # filter out the heads that cannot be reached from startrev
1463 1463 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1464 1464 bheads = [h for h in bheads if h in fbheads]
1465 1465 if not closed:
1466 1466 bheads = [h for h in bheads if
1467 1467 ('close' not in self.changelog.read(h)[5])]
1468 1468 return bheads
1469 1469
1470 1470 def branches(self, nodes):
1471 1471 if not nodes:
1472 1472 nodes = [self.changelog.tip()]
1473 1473 b = []
1474 1474 for n in nodes:
1475 1475 t = n
1476 1476 while True:
1477 1477 p = self.changelog.parents(n)
1478 1478 if p[1] != nullid or p[0] == nullid:
1479 1479 b.append((t, n, p[0], p[1]))
1480 1480 break
1481 1481 n = p[0]
1482 1482 return b
1483 1483
1484 1484 def between(self, pairs):
1485 1485 r = []
1486 1486
1487 1487 for top, bottom in pairs:
1488 1488 n, l, i = top, [], 0
1489 1489 f = 1
1490 1490
1491 1491 while n != bottom and n != nullid:
1492 1492 p = self.changelog.parents(n)[0]
1493 1493 if i == f:
1494 1494 l.append(n)
1495 1495 f = f * 2
1496 1496 n = p
1497 1497 i += 1
1498 1498
1499 1499 r.append(l)
1500 1500
1501 1501 return r
1502 1502
1503 1503 def pull(self, remote, heads=None, force=False):
1504 1504 lock = self.lock()
1505 1505 try:
1506 1506 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1507 1507 force=force)
1508 1508 common, fetch, rheads = tmp
1509 1509 if not fetch:
1510 1510 self.ui.status(_("no changes found\n"))
1511 1511 result = 0
1512 1512 else:
1513 1513 if heads is None and list(common) == [nullid]:
1514 1514 self.ui.status(_("requesting all changes\n"))
1515 1515 elif heads is None and remote.capable('changegroupsubset'):
1516 1516 # issue1320, avoid a race if remote changed after discovery
1517 1517 heads = rheads
1518 1518
1519 1519 if remote.capable('getbundle'):
1520 1520 cg = remote.getbundle('pull', common=common,
1521 1521 heads=heads or rheads)
1522 1522 elif heads is None:
1523 1523 cg = remote.changegroup(fetch, 'pull')
1524 1524 elif not remote.capable('changegroupsubset'):
1525 1525 raise util.Abort(_("partial pull cannot be done because "
1526 1526 "other repository doesn't support "
1527 1527 "changegroupsubset."))
1528 1528 else:
1529 1529 cg = remote.changegroupsubset(fetch, heads, 'pull')
1530 result = self.addchangegroup(cg, 'pull', remote.url(),
1531 lock=lock)
1530 result = self.addchangegroup(cg, 'pull', remote.url())
1532 1531 phases.advanceboundary(self, 0, common)
1533 1532 finally:
1534 1533 lock.release()
1535 1534
1536 1535 return result
1537 1536
1538 1537 def checkpush(self, force, revs):
1539 1538 """Extensions can override this function if additional checks have
1540 1539 to be performed before pushing, or call it if they override push
1541 1540 command.
1542 1541 """
1543 1542 pass
1544 1543
1545 1544 def push(self, remote, force=False, revs=None, newbranch=False):
1546 1545 '''Push outgoing changesets (limited by revs) from the current
1547 1546 repository to remote. Return an integer:
1548 1547 - 0 means HTTP error *or* nothing to push
1549 1548 - 1 means we pushed and remote head count is unchanged *or*
1550 1549 we have outgoing changesets but refused to push
1551 1550 - other values as described by addchangegroup()
1552 1551 '''
1553 1552 # there are two ways to push to remote repo:
1554 1553 #
1555 1554 # addchangegroup assumes local user can lock remote
1556 1555 # repo (local filesystem, old ssh servers).
1557 1556 #
1558 1557 # unbundle assumes local user cannot lock remote repo (new ssh
1559 1558 # servers, http servers).
1560 1559
1561 1560 self.checkpush(force, revs)
1562 1561 lock = None
1563 1562 unbundle = remote.capable('unbundle')
1564 1563 if not unbundle:
1565 1564 lock = remote.lock()
1566 1565 try:
1567 1566 # get local lock as we might write phase data
1568 1567 locallock = self.lock()
1569 1568 try:
1570 1569 cg, remote_heads, fut = discovery.prepush(self, remote, force,
1571 1570 revs, newbranch)
1572 1571 ret = remote_heads
1573 1572 if cg is not None:
1574 1573 if unbundle:
1575 1574 # local repo finds heads on server, finds out what
1576 1575 # revs it must push. once revs transferred, if server
1577 1576 # finds it has different heads (someone else won
1578 1577 # commit/push race), server aborts.
1579 1578 if force:
1580 1579 remote_heads = ['force']
1581 1580 # ssh: return remote's addchangegroup()
1582 1581 # http: return remote's addchangegroup() or 0 for error
1583 1582 ret = remote.unbundle(cg, remote_heads, 'push')
1584 1583 else:
1585 1584 # we return an integer indicating remote head count change
1586 ret = remote.addchangegroup(cg, 'push', self.url(),
1587 lock=lock)
1585 ret = remote.addchangegroup(cg, 'push', self.url())
1588 1586 # if we don't push, the common data is already useful
1589 1587 # everything exchange is public for now
1590 1588 phases.advanceboundary(self, 0, fut)
1591 1589 finally:
1592 1590 locallock.release()
1593 1591 finally:
1594 1592 if lock is not None:
1595 1593 lock.release()
1596 1594
1597 1595 self.ui.debug("checking for updated bookmarks\n")
1598 1596 rb = remote.listkeys('bookmarks')
1599 1597 for k in rb.keys():
1600 1598 if k in self._bookmarks:
1601 1599 nr, nl = rb[k], hex(self._bookmarks[k])
1602 1600 if nr in self:
1603 1601 cr = self[nr]
1604 1602 cl = self[nl]
1605 1603 if cl in cr.descendants():
1606 1604 r = remote.pushkey('bookmarks', k, nr, nl)
1607 1605 if r:
1608 1606 self.ui.status(_("updating bookmark %s\n") % k)
1609 1607 else:
1610 1608 self.ui.warn(_('updating bookmark %s'
1611 1609 ' failed!\n') % k)
1612 1610
1613 1611 return ret
1614 1612
1615 1613 def changegroupinfo(self, nodes, source):
1616 1614 if self.ui.verbose or source == 'bundle':
1617 1615 self.ui.status(_("%d changesets found\n") % len(nodes))
1618 1616 if self.ui.debugflag:
1619 1617 self.ui.debug("list of changesets:\n")
1620 1618 for node in nodes:
1621 1619 self.ui.debug("%s\n" % hex(node))
1622 1620
1623 1621 def changegroupsubset(self, bases, heads, source):
1624 1622 """Compute a changegroup consisting of all the nodes that are
1625 1623 descendants of any of the bases and ancestors of any of the heads.
1626 1624 Return a chunkbuffer object whose read() method will return
1627 1625 successive changegroup chunks.
1628 1626
1629 1627 It is fairly complex as determining which filenodes and which
1630 1628 manifest nodes need to be included for the changeset to be complete
1631 1629 is non-trivial.
1632 1630
1633 1631 Another wrinkle is doing the reverse, figuring out which changeset in
1634 1632 the changegroup a particular filenode or manifestnode belongs to.
1635 1633 """
1636 1634 cl = self.changelog
1637 1635 if not bases:
1638 1636 bases = [nullid]
1639 1637 csets, bases, heads = cl.nodesbetween(bases, heads)
1640 1638 # We assume that all ancestors of bases are known
1641 1639 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1642 1640 return self._changegroupsubset(common, csets, heads, source)
1643 1641
1644 1642 def getbundle(self, source, heads=None, common=None):
1645 1643 """Like changegroupsubset, but returns the set difference between the
1646 1644 ancestors of heads and the ancestors of common.
1647 1645
1648 1646 If heads is None, use the local heads. If common is None, use [nullid].
1649 1647
1650 1648 The nodes in common might not all be known locally due to the way the
1651 1649 current discovery protocol works.
1652 1650 """
1653 1651 cl = self.changelog
1654 1652 if common:
1655 1653 nm = cl.nodemap
1656 1654 common = [n for n in common if n in nm]
1657 1655 else:
1658 1656 common = [nullid]
1659 1657 if not heads:
1660 1658 heads = cl.heads()
1661 1659 common, missing = cl.findcommonmissing(common, heads)
1662 1660 if not missing:
1663 1661 return None
1664 1662 return self._changegroupsubset(common, missing, heads, source)
1665 1663
1666 1664 def _changegroupsubset(self, commonrevs, csets, heads, source):
1667 1665
1668 1666 cl = self.changelog
1669 1667 mf = self.manifest
1670 1668 mfs = {} # needed manifests
1671 1669 fnodes = {} # needed file nodes
1672 1670 changedfiles = set()
1673 1671 fstate = ['', {}]
1674 1672 count = [0]
1675 1673
1676 1674 # can we go through the fast path ?
1677 1675 heads.sort()
1678 1676 if heads == sorted(self.heads()):
1679 1677 return self._changegroup(csets, source)
1680 1678
1681 1679 # slow path
1682 1680 self.hook('preoutgoing', throw=True, source=source)
1683 1681 self.changegroupinfo(csets, source)
1684 1682
1685 1683 # filter any nodes that claim to be part of the known set
1686 1684 def prune(revlog, missing):
1687 1685 return [n for n in missing
1688 1686 if revlog.linkrev(revlog.rev(n)) not in commonrevs]
1689 1687
1690 1688 def lookup(revlog, x):
1691 1689 if revlog == cl:
1692 1690 c = cl.read(x)
1693 1691 changedfiles.update(c[3])
1694 1692 mfs.setdefault(c[0], x)
1695 1693 count[0] += 1
1696 1694 self.ui.progress(_('bundling'), count[0],
1697 1695 unit=_('changesets'), total=len(csets))
1698 1696 return x
1699 1697 elif revlog == mf:
1700 1698 clnode = mfs[x]
1701 1699 mdata = mf.readfast(x)
1702 1700 for f in changedfiles:
1703 1701 if f in mdata:
1704 1702 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1705 1703 count[0] += 1
1706 1704 self.ui.progress(_('bundling'), count[0],
1707 1705 unit=_('manifests'), total=len(mfs))
1708 1706 return mfs[x]
1709 1707 else:
1710 1708 self.ui.progress(
1711 1709 _('bundling'), count[0], item=fstate[0],
1712 1710 unit=_('files'), total=len(changedfiles))
1713 1711 return fstate[1][x]
1714 1712
1715 1713 bundler = changegroup.bundle10(lookup)
1716 1714 reorder = self.ui.config('bundle', 'reorder', 'auto')
1717 1715 if reorder == 'auto':
1718 1716 reorder = None
1719 1717 else:
1720 1718 reorder = util.parsebool(reorder)
1721 1719
1722 1720 def gengroup():
1723 1721 # Create a changenode group generator that will call our functions
1724 1722 # back to lookup the owning changenode and collect information.
1725 1723 for chunk in cl.group(csets, bundler, reorder=reorder):
1726 1724 yield chunk
1727 1725 self.ui.progress(_('bundling'), None)
1728 1726
1729 1727 # Create a generator for the manifestnodes that calls our lookup
1730 1728 # and data collection functions back.
1731 1729 count[0] = 0
1732 1730 for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
1733 1731 yield chunk
1734 1732 self.ui.progress(_('bundling'), None)
1735 1733
1736 1734 mfs.clear()
1737 1735
1738 1736 # Go through all our files in order sorted by name.
1739 1737 count[0] = 0
1740 1738 for fname in sorted(changedfiles):
1741 1739 filerevlog = self.file(fname)
1742 1740 if not len(filerevlog):
1743 1741 raise util.Abort(_("empty or missing revlog for %s") % fname)
1744 1742 fstate[0] = fname
1745 1743 fstate[1] = fnodes.pop(fname, {})
1746 1744
1747 1745 nodelist = prune(filerevlog, fstate[1])
1748 1746 if nodelist:
1749 1747 count[0] += 1
1750 1748 yield bundler.fileheader(fname)
1751 1749 for chunk in filerevlog.group(nodelist, bundler, reorder):
1752 1750 yield chunk
1753 1751
1754 1752 # Signal that no more groups are left.
1755 1753 yield bundler.close()
1756 1754 self.ui.progress(_('bundling'), None)
1757 1755
1758 1756 if csets:
1759 1757 self.hook('outgoing', node=hex(csets[0]), source=source)
1760 1758
1761 1759 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1762 1760
1763 1761 def changegroup(self, basenodes, source):
1764 1762 # to avoid a race we use changegroupsubset() (issue1320)
1765 1763 return self.changegroupsubset(basenodes, self.heads(), source)
1766 1764
1767 1765 def _changegroup(self, nodes, source):
1768 1766 """Compute the changegroup of all nodes that we have that a recipient
1769 1767 doesn't. Return a chunkbuffer object whose read() method will return
1770 1768 successive changegroup chunks.
1771 1769
1772 1770 This is much easier than the previous function as we can assume that
1773 1771 the recipient has any changenode we aren't sending them.
1774 1772
1775 1773 nodes is the set of nodes to send"""
1776 1774
1777 1775 cl = self.changelog
1778 1776 mf = self.manifest
1779 1777 mfs = {}
1780 1778 changedfiles = set()
1781 1779 fstate = ['']
1782 1780 count = [0]
1783 1781
1784 1782 self.hook('preoutgoing', throw=True, source=source)
1785 1783 self.changegroupinfo(nodes, source)
1786 1784
1787 1785 revset = set([cl.rev(n) for n in nodes])
1788 1786
1789 1787 def gennodelst(log):
1790 1788 return [log.node(r) for r in log if log.linkrev(r) in revset]
1791 1789
1792 1790 def lookup(revlog, x):
1793 1791 if revlog == cl:
1794 1792 c = cl.read(x)
1795 1793 changedfiles.update(c[3])
1796 1794 mfs.setdefault(c[0], x)
1797 1795 count[0] += 1
1798 1796 self.ui.progress(_('bundling'), count[0],
1799 1797 unit=_('changesets'), total=len(nodes))
1800 1798 return x
1801 1799 elif revlog == mf:
1802 1800 count[0] += 1
1803 1801 self.ui.progress(_('bundling'), count[0],
1804 1802 unit=_('manifests'), total=len(mfs))
1805 1803 return cl.node(revlog.linkrev(revlog.rev(x)))
1806 1804 else:
1807 1805 self.ui.progress(
1808 1806 _('bundling'), count[0], item=fstate[0],
1809 1807 total=len(changedfiles), unit=_('files'))
1810 1808 return cl.node(revlog.linkrev(revlog.rev(x)))
1811 1809
1812 1810 bundler = changegroup.bundle10(lookup)
1813 1811 reorder = self.ui.config('bundle', 'reorder', 'auto')
1814 1812 if reorder == 'auto':
1815 1813 reorder = None
1816 1814 else:
1817 1815 reorder = util.parsebool(reorder)
1818 1816
1819 1817 def gengroup():
1820 1818 '''yield a sequence of changegroup chunks (strings)'''
1821 1819 # construct a list of all changed files
1822 1820
1823 1821 for chunk in cl.group(nodes, bundler, reorder=reorder):
1824 1822 yield chunk
1825 1823 self.ui.progress(_('bundling'), None)
1826 1824
1827 1825 count[0] = 0
1828 1826 for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
1829 1827 yield chunk
1830 1828 self.ui.progress(_('bundling'), None)
1831 1829
1832 1830 count[0] = 0
1833 1831 for fname in sorted(changedfiles):
1834 1832 filerevlog = self.file(fname)
1835 1833 if not len(filerevlog):
1836 1834 raise util.Abort(_("empty or missing revlog for %s") % fname)
1837 1835 fstate[0] = fname
1838 1836 nodelist = gennodelst(filerevlog)
1839 1837 if nodelist:
1840 1838 count[0] += 1
1841 1839 yield bundler.fileheader(fname)
1842 1840 for chunk in filerevlog.group(nodelist, bundler, reorder):
1843 1841 yield chunk
1844 1842 yield bundler.close()
1845 1843 self.ui.progress(_('bundling'), None)
1846 1844
1847 1845 if nodes:
1848 1846 self.hook('outgoing', node=hex(nodes[0]), source=source)
1849 1847
1850 1848 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1851 1849
1852 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1850 def addchangegroup(self, source, srctype, url, emptyok=False):
1853 1851 """Add the changegroup returned by source.read() to this repo.
1854 1852 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1855 1853 the URL of the repo where this changegroup is coming from.
1856 If lock is not None, the function takes ownership of the lock
1857 and releases it after the changegroup is added.
1858 1854
1859 1855 Return an integer summarizing the change to this repo:
1860 1856 - nothing changed or no source: 0
1861 1857 - more heads than before: 1+added heads (2..n)
1862 1858 - fewer heads than before: -1-removed heads (-2..-n)
1863 1859 - number of heads stays the same: 1
1864 1860 """
1865 1861 def csmap(x):
1866 1862 self.ui.debug("add changeset %s\n" % short(x))
1867 1863 return len(cl)
1868 1864
1869 1865 def revmap(x):
1870 1866 return cl.rev(x)
1871 1867
1872 1868 if not source:
1873 1869 return 0
1874 1870
1875 1871 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1876 1872
1877 1873 changesets = files = revisions = 0
1878 1874 efiles = set()
1879 1875
1880 1876 # write changelog data to temp files so concurrent readers will not see
1881 1877 # inconsistent view
1882 1878 cl = self.changelog
1883 1879 cl.delayupdate()
1884 1880 oldheads = cl.heads()
1885 1881
1886 1882 tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
1887 1883 try:
1888 1884 trp = weakref.proxy(tr)
1889 1885 # pull off the changeset group
1890 1886 self.ui.status(_("adding changesets\n"))
1891 1887 clstart = len(cl)
1892 1888 class prog(object):
1893 1889 step = _('changesets')
1894 1890 count = 1
1895 1891 ui = self.ui
1896 1892 total = None
1897 1893 def __call__(self):
1898 1894 self.ui.progress(self.step, self.count, unit=_('chunks'),
1899 1895 total=self.total)
1900 1896 self.count += 1
1901 1897 pr = prog()
1902 1898 source.callback = pr
1903 1899
1904 1900 source.changelogheader()
1905 1901 if (cl.addgroup(source, csmap, trp) is None
1906 1902 and not emptyok):
1907 1903 raise util.Abort(_("received changelog group is empty"))
1908 1904 clend = len(cl)
1909 1905 changesets = clend - clstart
1910 1906 for c in xrange(clstart, clend):
1911 1907 efiles.update(self[c].files())
1912 1908 efiles = len(efiles)
1913 1909 self.ui.progress(_('changesets'), None)
1914 1910
1915 1911 # pull off the manifest group
1916 1912 self.ui.status(_("adding manifests\n"))
1917 1913 pr.step = _('manifests')
1918 1914 pr.count = 1
1919 1915 pr.total = changesets # manifests <= changesets
1920 1916 # no need to check for empty manifest group here:
1921 1917 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1922 1918 # no new manifest will be created and the manifest group will
1923 1919 # be empty during the pull
1924 1920 source.manifestheader()
1925 1921 self.manifest.addgroup(source, revmap, trp)
1926 1922 self.ui.progress(_('manifests'), None)
1927 1923
1928 1924 needfiles = {}
1929 1925 if self.ui.configbool('server', 'validate', default=False):
1930 1926 # validate incoming csets have their manifests
1931 1927 for cset in xrange(clstart, clend):
1932 1928 mfest = self.changelog.read(self.changelog.node(cset))[0]
1933 1929 mfest = self.manifest.readdelta(mfest)
1934 1930 # store file nodes we must see
1935 1931 for f, n in mfest.iteritems():
1936 1932 needfiles.setdefault(f, set()).add(n)
1937 1933
1938 1934 # process the files
1939 1935 self.ui.status(_("adding file changes\n"))
1940 1936 pr.step = _('files')
1941 1937 pr.count = 1
1942 1938 pr.total = efiles
1943 1939 source.callback = None
1944 1940
1945 1941 while True:
1946 1942 chunkdata = source.filelogheader()
1947 1943 if not chunkdata:
1948 1944 break
1949 1945 f = chunkdata["filename"]
1950 1946 self.ui.debug("adding %s revisions\n" % f)
1951 1947 pr()
1952 1948 fl = self.file(f)
1953 1949 o = len(fl)
1954 1950 if fl.addgroup(source, revmap, trp) is None:
1955 1951 raise util.Abort(_("received file revlog group is empty"))
1956 1952 revisions += len(fl) - o
1957 1953 files += 1
1958 1954 if f in needfiles:
1959 1955 needs = needfiles[f]
1960 1956 for new in xrange(o, len(fl)):
1961 1957 n = fl.node(new)
1962 1958 if n in needs:
1963 1959 needs.remove(n)
1964 1960 if not needs:
1965 1961 del needfiles[f]
1966 1962 self.ui.progress(_('files'), None)
1967 1963
1968 1964 for f, needs in needfiles.iteritems():
1969 1965 fl = self.file(f)
1970 1966 for n in needs:
1971 1967 try:
1972 1968 fl.rev(n)
1973 1969 except error.LookupError:
1974 1970 raise util.Abort(
1975 1971 _('missing file data for %s:%s - run hg verify') %
1976 1972 (f, hex(n)))
1977 1973
1978 1974 dh = 0
1979 1975 if oldheads:
1980 1976 heads = cl.heads()
1981 1977 dh = len(heads) - len(oldheads)
1982 1978 for h in heads:
1983 1979 if h not in oldheads and 'close' in self[h].extra():
1984 1980 dh -= 1
1985 1981 htext = ""
1986 1982 if dh:
1987 1983 htext = _(" (%+d heads)") % dh
1988 1984
1989 1985 self.ui.status(_("added %d changesets"
1990 1986 " with %d changes to %d files%s\n")
1991 1987 % (changesets, revisions, files, htext))
1992 1988
1993 1989 if changesets > 0:
1994 1990 p = lambda: cl.writepending() and self.root or ""
1995 1991 self.hook('pretxnchangegroup', throw=True,
1996 1992 node=hex(cl.node(clstart)), source=srctype,
1997 1993 url=url, pending=p)
1998 1994
1999 1995 added = [cl.node(r) for r in xrange(clstart, clend)]
2000 1996 if srctype != 'strip':
2001 1997 phases.advanceboundary(self, 0, added)
2002 1998 # make changelog see real files again
2003 1999 cl.finalize(trp)
2004 2000
2005 2001 tr.close()
2006 2002
2007 2003 def postaddchangegroup():
2008 2004 if changesets > 0:
2009 2005 # forcefully update the on-disk branch cache
2010 2006 self.ui.debug("updating the branch cache\n")
2011 2007 self.updatebranchcache()
2012 2008 self.hook("changegroup", node=hex(cl.node(clstart)),
2013 2009 source=srctype, url=url)
2014 2010
2015 2011 for n in added:
2016 2012 self.hook("incoming", node=hex(n), source=srctype,
2017 2013 url=url)
2018 2014 self._postrelease(postaddchangegroup)
2019 2015
2020 2016 finally:
2021 2017 tr.release()
2022 if lock:
2023 lock.release()
2024 2018 # never return 0 here:
2025 2019 if dh < 0:
2026 2020 return dh - 1
2027 2021 else:
2028 2022 return dh + 1
2029 2023
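The return-value convention documented in the addchangegroup docstring above is easiest to see from the calling side. The sketch below is illustrative only ('applybundle' is an invented helper, not part of this changeset); it shows the pattern this commit moves to, where the caller rather than addchangegroup owns and releases the repository lock, exactly as wireproto.unbundle and sshserver.do_addchangegroup do further down in this diff.

    # Hypothetical helper, for illustration only: the caller takes the lock,
    # since addchangegroup no longer releases it (the removed 'lock' argument).
    def applybundle(repo, cg, source, url):
        lock = repo.lock()
        try:
            ret = repo.addchangegroup(cg, source, url)
        finally:
            lock.release()
        # interpret the return value as documented above:
        #   0        -> nothing changed (or no source)
        #   1        -> changesets added, head count unchanged
        #   2..n     -> 1 + number of new heads
        #   -2..-n   -> -1 - number of removed heads
        if ret > 1:
            repo.ui.status("note: %d new heads\n" % (ret - 1))
        return ret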
2030 2024 def stream_in(self, remote, requirements):
2031 2025 lock = self.lock()
2032 2026 try:
2033 2027 fp = remote.stream_out()
2034 2028 l = fp.readline()
2035 2029 try:
2036 2030 resp = int(l)
2037 2031 except ValueError:
2038 2032 raise error.ResponseError(
2039 2033 _('Unexpected response from remote server:'), l)
2040 2034 if resp == 1:
2041 2035 raise util.Abort(_('operation forbidden by server'))
2042 2036 elif resp == 2:
2043 2037 raise util.Abort(_('locking the remote repository failed'))
2044 2038 elif resp != 0:
2045 2039 raise util.Abort(_('the server sent an unknown error code'))
2046 2040 self.ui.status(_('streaming all changes\n'))
2047 2041 l = fp.readline()
2048 2042 try:
2049 2043 total_files, total_bytes = map(int, l.split(' ', 1))
2050 2044 except (ValueError, TypeError):
2051 2045 raise error.ResponseError(
2052 2046 _('Unexpected response from remote server:'), l)
2053 2047 self.ui.status(_('%d files to transfer, %s of data\n') %
2054 2048 (total_files, util.bytecount(total_bytes)))
2055 2049 start = time.time()
2056 2050 for i in xrange(total_files):
2057 2051 # XXX doesn't support '\n' or '\r' in filenames
2058 2052 l = fp.readline()
2059 2053 try:
2060 2054 name, size = l.split('\0', 1)
2061 2055 size = int(size)
2062 2056 except (ValueError, TypeError):
2063 2057 raise error.ResponseError(
2064 2058 _('Unexpected response from remote server:'), l)
2065 2059 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2066 2060 # for backwards compat, name was partially encoded
2067 2061 ofp = self.sopener(store.decodedir(name), 'w')
2068 2062 for chunk in util.filechunkiter(fp, limit=size):
2069 2063 ofp.write(chunk)
2070 2064 ofp.close()
2071 2065 elapsed = time.time() - start
2072 2066 if elapsed <= 0:
2073 2067 elapsed = 0.001
2074 2068 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2075 2069 (util.bytecount(total_bytes), elapsed,
2076 2070 util.bytecount(total_bytes / elapsed)))
2077 2071
2078 2072 # new requirements = old non-format requirements + new format-related
2079 2073 # requirements from the streamed-in repository
2080 2074 requirements.update(set(self.requirements) - self.supportedformats)
2081 2075 self._applyrequirements(requirements)
2082 2076 self._writerequirements()
2083 2077
2084 2078 self.invalidate()
2085 2079 return len(self.heads()) + 1
2086 2080 finally:
2087 2081 lock.release()
2088 2082
2089 2083 def clone(self, remote, heads=[], stream=False):
2090 2084 '''clone remote repository.
2091 2085
2092 2086 keyword arguments:
2093 2087 heads: list of revs to clone (forces use of pull)
2094 2088 stream: use streaming clone if possible'''
2095 2089
2096 2090 # now, all clients that can request uncompressed clones can
2097 2091 # read repo formats supported by all servers that can serve
2098 2092 # them.
2099 2093
2100 2094 # if revlog format changes, client will have to check version
2101 2095 # and format flags on "stream" capability, and use
2102 2096 # uncompressed only if compatible.
2103 2097
2104 2098 if stream and not heads:
2105 2099 # 'stream' means remote revlog format is revlogv1 only
2106 2100 if remote.capable('stream'):
2107 2101 return self.stream_in(remote, set(('revlogv1',)))
2108 2102 # otherwise, 'streamreqs' contains the remote revlog format
2109 2103 streamreqs = remote.capable('streamreqs')
2110 2104 if streamreqs:
2111 2105 streamreqs = set(streamreqs.split(','))
2112 2106 # if we support it, stream in and adjust our requirements
2113 2107 if not streamreqs - self.supportedformats:
2114 2108 return self.stream_in(remote, streamreqs)
2115 2109 return self.pull(remote, heads)
2116 2110
2117 2111 def pushkey(self, namespace, key, old, new):
2118 2112 self.hook('prepushkey', throw=True, namespace=namespace, key=key,
2119 2113 old=old, new=new)
2120 2114 ret = pushkey.push(self, namespace, key, old, new)
2121 2115 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2122 2116 ret=ret)
2123 2117 return ret
2124 2118
2125 2119 def listkeys(self, namespace):
2126 2120 self.hook('prelistkeys', throw=True, namespace=namespace)
2127 2121 values = pushkey.list(self, namespace)
2128 2122 self.hook('listkeys', namespace=namespace, values=values)
2129 2123 return values
2130 2124
2131 2125 def debugwireargs(self, one, two, three=None, four=None, five=None):
2132 2126 '''used to test argument passing over the wire'''
2133 2127 return "%s %s %s %s %s" % (one, two, three, four, five)
2134 2128
2135 2129 def savecommitmessage(self, text):
2136 2130 fp = self.opener('last-message.txt', 'wb')
2137 2131 try:
2138 2132 fp.write(text)
2139 2133 finally:
2140 2134 fp.close()
2141 2135 return self.pathto(fp.name[len(self.root)+1:])
2142 2136
2143 2137 # used to avoid circular references so destructors work
2144 2138 def aftertrans(files):
2145 2139 renamefiles = [tuple(t) for t in files]
2146 2140 def a():
2147 2141 for src, dest in renamefiles:
2148 2142 util.rename(src, dest)
2149 2143 return a
2150 2144
2151 2145 def undoname(fn):
2152 2146 base, name = os.path.split(fn)
2153 2147 assert name.startswith('journal')
2154 2148 return os.path.join(base, name.replace('journal', 'undo', 1))
2155 2149
2156 2150 def instance(ui, path, create):
2157 2151 return localrepository(ui, util.urllocalpath(path), create)
2158 2152
2159 2153 def islocal(path):
2160 2154 return True
@@ -1,151 +1,151 b''
1 1 # sshserver.py - ssh protocol server support for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import util, hook, wireproto, changegroup
10 10 import os, sys
11 11
12 12 class sshserver(object):
13 13 def __init__(self, ui, repo):
14 14 self.ui = ui
15 15 self.repo = repo
16 16 self.lock = None
17 17 self.fin = ui.fin
18 18 self.fout = ui.fout
19 19
20 20 hook.redirect(True)
21 21 ui.fout = repo.ui.fout = ui.ferr
22 22
23 23 # Prevent insertion/deletion of CRs
24 24 util.setbinary(self.fin)
25 25 util.setbinary(self.fout)
26 26
27 27 def getargs(self, args):
28 28 data = {}
29 29 keys = args.split()
30 30 for n in xrange(len(keys)):
31 31 argline = self.fin.readline()[:-1]
32 32 arg, l = argline.split()
33 33 if arg not in keys:
34 34 raise util.Abort("unexpected parameter %r" % arg)
35 35 if arg == '*':
36 36 star = {}
37 37 for k in xrange(int(l)):
38 38 argline = self.fin.readline()[:-1]
39 39 arg, l = argline.split()
40 40 val = self.fin.read(int(l))
41 41 star[arg] = val
42 42 data['*'] = star
43 43 else:
44 44 val = self.fin.read(int(l))
45 45 data[arg] = val
46 46 return [data[k] for k in keys]
47 47
48 48 def getarg(self, name):
49 49 return self.getargs(name)[0]
50 50
51 51 def getfile(self, fpout):
52 52 self.sendresponse('')
53 53 count = int(self.fin.readline())
54 54 while count:
55 55 fpout.write(self.fin.read(count))
56 56 count = int(self.fin.readline())
57 57
58 58 def redirect(self):
59 59 pass
60 60
61 61 def groupchunks(self, changegroup):
62 62 while True:
63 63 d = changegroup.read(4096)
64 64 if not d:
65 65 break
66 66 yield d
67 67
68 68 def sendresponse(self, v):
69 69 self.fout.write("%d\n" % len(v))
70 70 self.fout.write(v)
71 71 self.fout.flush()
72 72
73 73 def sendstream(self, source):
74 74 for chunk in source.gen:
75 75 self.fout.write(chunk)
76 76 self.fout.flush()
77 77
78 78 def sendpushresponse(self, rsp):
79 79 self.sendresponse('')
80 80 self.sendresponse(str(rsp.res))
81 81
82 82 def sendpusherror(self, rsp):
83 83 self.sendresponse(rsp.res)
84 84
85 85 def sendooberror(self, rsp):
86 86 self.ui.ferr.write('%s\n-\n' % rsp.message)
87 87 self.ui.ferr.flush()
88 88 self.fout.write('\n')
89 89 self.fout.flush()
90 90
91 91 def serve_forever(self):
92 92 try:
93 93 while self.serve_one():
94 94 pass
95 95 finally:
96 96 if self.lock is not None:
97 97 self.lock.release()
98 98 sys.exit(0)
99 99
100 100 handlers = {
101 101 str: sendresponse,
102 102 wireproto.streamres: sendstream,
103 103 wireproto.pushres: sendpushresponse,
104 104 wireproto.pusherr: sendpusherror,
105 105 wireproto.ooberror: sendooberror,
106 106 }
107 107
108 108 def serve_one(self):
109 109 cmd = self.fin.readline()[:-1]
110 110 if cmd and cmd in wireproto.commands:
111 111 rsp = wireproto.dispatch(self.repo, self, cmd)
112 112 self.handlers[rsp.__class__](self, rsp)
113 113 elif cmd:
114 114 impl = getattr(self, 'do_' + cmd, None)
115 115 if impl:
116 116 r = impl()
117 117 if r is not None:
118 118 self.sendresponse(r)
119 119 else: self.sendresponse("")
120 120 return cmd != ''
121 121
122 122 def do_lock(self):
123 123 '''DEPRECATED - allowing remote client to lock repo is not safe'''
124 124
125 125 self.lock = self.repo.lock()
126 126 return ""
127 127
128 128 def do_unlock(self):
129 129 '''DEPRECATED'''
130 130
131 131 if self.lock:
132 132 self.lock.release()
133 133 self.lock = None
134 134 return ""
135 135
136 136 def do_addchangegroup(self):
137 137 '''DEPRECATED'''
138 138
139 139 if not self.lock:
140 140 self.sendresponse("not locked")
141 141 return
142 142
143 143 self.sendresponse("")
144 144 cg = changegroup.unbundle10(self.fin, "UN")
145 r = self.repo.addchangegroup(cg, 'serve', self._client(),
146 lock=self.lock)
145 r = self.repo.addchangegroup(cg, 'serve', self._client())
146 self.lock.release()
147 147 return str(r)
148 148
149 149 def _client(self):
150 150 client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0]
151 151 return 'remote:ssh:' + client
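For readers following the wire format, sendresponse() above uses a simple length-prefixed framing: a decimal byte count, a newline, then exactly that many bytes. The reader below is only a sketch of the client side implied by this server code (the real client lives in sshrepo, which is not part of this changeset); 'pipe' is assumed to be a file object wrapping the ssh connection.

    # Illustrative reader for one response framed by sendresponse() above.
    def readresponse(pipe):
        l = pipe.readline()
        try:
            size = int(l)
        except ValueError:
            raise ValueError("unexpected response size line: %r" % l)
        return pipe.read(size)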
@@ -1,607 +1,606 b''
1 1 # wireproto.py - generic wire protocol support functions
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import urllib, tempfile, os, sys
9 9 from i18n import _
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod
12 12 import repo, error, encoding, util, store
13 13
14 14 # abstract batching support
15 15
16 16 class future(object):
17 17 '''placeholder for a value to be set later'''
18 18 def set(self, value):
19 19 if util.safehasattr(self, 'value'):
20 20 raise error.RepoError("future is already set")
21 21 self.value = value
22 22
23 23 class batcher(object):
24 24 '''base class for batches of commands submittable in a single request
25 25
26 26 All methods invoked on instances of this class are simply queued and return
27 27 a future for the result. Once you call submit(), all the queued calls are
28 28 performed and the results set in their respective futures.
29 29 '''
30 30 def __init__(self):
31 31 self.calls = []
32 32 def __getattr__(self, name):
33 33 def call(*args, **opts):
34 34 resref = future()
35 35 self.calls.append((name, args, opts, resref,))
36 36 return resref
37 37 return call
38 38 def submit(self):
39 39 pass
40 40
41 41 class localbatch(batcher):
42 42 '''performs the queued calls directly'''
43 43 def __init__(self, local):
44 44 batcher.__init__(self)
45 45 self.local = local
46 46 def submit(self):
47 47 for name, args, opts, resref in self.calls:
48 48 resref.set(getattr(self.local, name)(*args, **opts))
49 49
50 50 class remotebatch(batcher):
51 51 '''batches the queued calls; uses as few roundtrips as possible'''
52 52 def __init__(self, remote):
53 53 '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
54 54 batcher.__init__(self)
55 55 self.remote = remote
56 56 def submit(self):
57 57 req, rsp = [], []
58 58 for name, args, opts, resref in self.calls:
59 59 mtd = getattr(self.remote, name)
60 60 batchablefn = getattr(mtd, 'batchable', None)
61 61 if batchablefn is not None:
62 62 batchable = batchablefn(mtd.im_self, *args, **opts)
63 63 encargsorres, encresref = batchable.next()
64 64 if encresref:
65 65 req.append((name, encargsorres,))
66 66 rsp.append((batchable, encresref, resref,))
67 67 else:
68 68 resref.set(encargsorres)
69 69 else:
70 70 if req:
71 71 self._submitreq(req, rsp)
72 72 req, rsp = [], []
73 73 resref.set(mtd(*args, **opts))
74 74 if req:
75 75 self._submitreq(req, rsp)
76 76 def _submitreq(self, req, rsp):
77 77 encresults = self.remote._submitbatch(req)
78 78 for encres, r in zip(encresults, rsp):
79 79 batchable, encresref, resref = r
80 80 encresref.set(encres)
81 81 resref.set(batchable.next())
82 82
83 83 def batchable(f):
84 84 '''annotation for batchable methods
85 85
86 86 Such methods must implement a coroutine as follows:
87 87
88 88 @batchable
89 89 def sample(self, one, two=None):
90 90 # Handle locally computable results first:
91 91 if not one:
92 92 yield "a local result", None
93 93 # Build list of encoded arguments suitable for your wire protocol:
94 94 encargs = [('one', encode(one),), ('two', encode(two),)]
95 95 # Create future for injection of encoded result:
96 96 encresref = future()
97 97 # Return encoded arguments and future:
98 98 yield encargs, encresref
99 99 # Assuming the future to be filled with the result from the batched request
100 100 # now. Decode it:
101 101 yield decode(encresref.value)
102 102
103 103 The decorator returns a function which wraps this coroutine as a plain method,
104 104 but adds the original method as an attribute called "batchable", which is
105 105 used by remotebatch to split the call into separate encoding and decoding
106 106 phases.
107 107 '''
108 108 def plain(*args, **opts):
109 109 batchable = f(*args, **opts)
110 110 encargsorres, encresref = batchable.next()
111 111 if not encresref:
112 112 return encargsorres # a local result in this case
113 113 self = args[0]
114 114 encresref.set(self._submitone(f.func_name, encargsorres))
115 115 return batchable.next()
116 116 setattr(plain, 'batchable', f)
117 117 return plain
118 118
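The batching machinery above is clearest from the calling side: attribute access on a batcher queues a call and returns a future, and submit() performs the queued calls and fills the futures. A rough usage sketch, assuming 'remote' is a wirerepository and 'nodes' a list of binary node ids:

    # Both calls are queued; remotebatch.submit() sends them in as few
    # round trips as possible and sets the futures' values.
    def probe(remote, nodes):
        b = remote.batch()        # remotebatch (localbatch for a local repo)
        fheads = b.heads()        # future, nothing sent yet
        fknown = b.known(nodes)   # future, nothing sent yet
        b.submit()                # one batched request resolves both
        return fheads.value, fknown.value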
119 119 # list of nodes encoding / decoding
120 120
121 121 def decodelist(l, sep=' '):
122 122 if l:
123 123 return map(bin, l.split(sep))
124 124 return []
125 125
126 126 def encodelist(l, sep=' '):
127 127 return sep.join(map(hex, l))
128 128
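A minimal round-trip illustration of the node-list encoding above (the node ids are made up for the example):

    # encodelist() hex-encodes and space-joins; decodelist() reverses it.
    n1, n2 = '\x11' * 20, '\x22' * 20
    wire = encodelist([n1, n2])          # '1111...11 2222...22'
    assert decodelist(wire) == [n1, n2]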
129 129 # batched call argument encoding
130 130
131 131 def escapearg(plain):
132 132 return (plain
133 133 .replace(':', '::')
134 134 .replace(',', ':,')
135 135 .replace(';', ':;')
136 136 .replace('=', ':='))
137 137
138 138 def unescapearg(escaped):
139 139 return (escaped
140 140 .replace(':=', '=')
141 141 .replace(':;', ';')
142 142 .replace(':,', ',')
143 143 .replace('::', ':'))
144 144
145 145 # client side
146 146
147 147 def todict(**args):
148 148 return args
149 149
150 150 class wirerepository(repo.repository):
151 151
152 152 def batch(self):
153 153 return remotebatch(self)
154 154 def _submitbatch(self, req):
155 155 cmds = []
156 156 for op, argsdict in req:
157 157 args = ','.join('%s=%s' % p for p in argsdict.iteritems())
158 158 cmds.append('%s %s' % (op, args))
159 159 rsp = self._call("batch", cmds=';'.join(cmds))
160 160 return rsp.split(';')
161 161 def _submitone(self, op, args):
162 162 return self._call(op, **args)
163 163
164 164 @batchable
165 165 def lookup(self, key):
166 166 self.requirecap('lookup', _('look up remote revision'))
167 167 f = future()
168 168 yield todict(key=encoding.fromlocal(key)), f
169 169 d = f.value
170 170 success, data = d[:-1].split(" ", 1)
171 171 if int(success):
172 172 yield bin(data)
173 173 self._abort(error.RepoError(data))
174 174
175 175 @batchable
176 176 def heads(self):
177 177 f = future()
178 178 yield {}, f
179 179 d = f.value
180 180 try:
181 181 yield decodelist(d[:-1])
182 182 except ValueError:
183 183 self._abort(error.ResponseError(_("unexpected response:"), d))
184 184
185 185 @batchable
186 186 def known(self, nodes):
187 187 f = future()
188 188 yield todict(nodes=encodelist(nodes)), f
189 189 d = f.value
190 190 try:
191 191 yield [bool(int(f)) for f in d]
192 192 except ValueError:
193 193 self._abort(error.ResponseError(_("unexpected response:"), d))
194 194
195 195 @batchable
196 196 def branchmap(self):
197 197 f = future()
198 198 yield {}, f
199 199 d = f.value
200 200 try:
201 201 branchmap = {}
202 202 for branchpart in d.splitlines():
203 203 branchname, branchheads = branchpart.split(' ', 1)
204 204 branchname = encoding.tolocal(urllib.unquote(branchname))
205 205 branchheads = decodelist(branchheads)
206 206 branchmap[branchname] = branchheads
207 207 yield branchmap
208 208 except TypeError:
209 209 self._abort(error.ResponseError(_("unexpected response:"), d))
210 210
211 211 def branches(self, nodes):
212 212 n = encodelist(nodes)
213 213 d = self._call("branches", nodes=n)
214 214 try:
215 215 br = [tuple(decodelist(b)) for b in d.splitlines()]
216 216 return br
217 217 except ValueError:
218 218 self._abort(error.ResponseError(_("unexpected response:"), d))
219 219
220 220 def between(self, pairs):
221 221 batch = 8 # avoid giant requests
222 222 r = []
223 223 for i in xrange(0, len(pairs), batch):
224 224 n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
225 225 d = self._call("between", pairs=n)
226 226 try:
227 227 r.extend(l and decodelist(l) or [] for l in d.splitlines())
228 228 except ValueError:
229 229 self._abort(error.ResponseError(_("unexpected response:"), d))
230 230 return r
231 231
232 232 @batchable
233 233 def pushkey(self, namespace, key, old, new):
234 234 if not self.capable('pushkey'):
235 235 yield False, None
236 236 f = future()
237 237 yield todict(namespace=encoding.fromlocal(namespace),
238 238 key=encoding.fromlocal(key),
239 239 old=encoding.fromlocal(old),
240 240 new=encoding.fromlocal(new)), f
241 241 d = f.value
242 242 try:
243 243 d = bool(int(d))
244 244 except ValueError:
245 245 raise error.ResponseError(
246 246 _('push failed (unexpected response):'), d)
247 247 yield d
248 248
249 249 @batchable
250 250 def listkeys(self, namespace):
251 251 if not self.capable('pushkey'):
252 252 yield {}, None
253 253 f = future()
254 254 yield todict(namespace=encoding.fromlocal(namespace)), f
255 255 d = f.value
256 256 r = {}
257 257 for l in d.splitlines():
258 258 k, v = l.split('\t')
259 259 r[encoding.tolocal(k)] = encoding.tolocal(v)
260 260 yield r
261 261
262 262 def stream_out(self):
263 263 return self._callstream('stream_out')
264 264
265 265 def changegroup(self, nodes, kind):
266 266 n = encodelist(nodes)
267 267 f = self._callstream("changegroup", roots=n)
268 268 return changegroupmod.unbundle10(self._decompress(f), 'UN')
269 269
270 270 def changegroupsubset(self, bases, heads, kind):
271 271 self.requirecap('changegroupsubset', _('look up remote changes'))
272 272 bases = encodelist(bases)
273 273 heads = encodelist(heads)
274 274 f = self._callstream("changegroupsubset",
275 275 bases=bases, heads=heads)
276 276 return changegroupmod.unbundle10(self._decompress(f), 'UN')
277 277
278 278 def getbundle(self, source, heads=None, common=None):
279 279 self.requirecap('getbundle', _('look up remote changes'))
280 280 opts = {}
281 281 if heads is not None:
282 282 opts['heads'] = encodelist(heads)
283 283 if common is not None:
284 284 opts['common'] = encodelist(common)
285 285 f = self._callstream("getbundle", **opts)
286 286 return changegroupmod.unbundle10(self._decompress(f), 'UN')
287 287
288 288 def unbundle(self, cg, heads, source):
289 289 '''Send cg (a readable file-like object representing the
290 290 changegroup to push, typically a chunkbuffer object) to the
291 291 remote server as a bundle. Return an integer indicating the
292 292 result of the push (see localrepository.addchangegroup()).'''
293 293
294 294 if heads != ['force'] and self.capable('unbundlehash'):
295 295 heads = encodelist(['hashed',
296 296 util.sha1(''.join(sorted(heads))).digest()])
297 297 else:
298 298 heads = encodelist(heads)
299 299
300 300 ret, output = self._callpush("unbundle", cg, heads=heads)
301 301 if ret == "":
302 302 raise error.ResponseError(
303 303 _('push failed:'), output)
304 304 try:
305 305 ret = int(ret)
306 306 except ValueError:
307 307 raise error.ResponseError(
308 308 _('push failed (unexpected response):'), ret)
309 309
310 310 for l in output.splitlines(True):
311 311 self.ui.status(_('remote: '), l)
312 312 return ret
313 313
314 314 def debugwireargs(self, one, two, three=None, four=None, five=None):
315 315 # don't pass optional arguments left at their default value
316 316 opts = {}
317 317 if three is not None:
318 318 opts['three'] = three
319 319 if four is not None:
320 320 opts['four'] = four
321 321 return self._call('debugwireargs', one=one, two=two, **opts)
322 322
323 323 # server side
324 324
325 325 class streamres(object):
326 326 def __init__(self, gen):
327 327 self.gen = gen
328 328
329 329 class pushres(object):
330 330 def __init__(self, res):
331 331 self.res = res
332 332
333 333 class pusherr(object):
334 334 def __init__(self, res):
335 335 self.res = res
336 336
337 337 class ooberror(object):
338 338 def __init__(self, message):
339 339 self.message = message
340 340
341 341 def dispatch(repo, proto, command):
342 342 func, spec = commands[command]
343 343 args = proto.getargs(spec)
344 344 return func(repo, proto, *args)
345 345
346 346 def options(cmd, keys, others):
347 347 opts = {}
348 348 for k in keys:
349 349 if k in others:
350 350 opts[k] = others[k]
351 351 del others[k]
352 352 if others:
353 353 sys.stderr.write("abort: %s got unexpected arguments %s\n"
354 354 % (cmd, ",".join(others)))
355 355 return opts
356 356
357 357 def batch(repo, proto, cmds, others):
358 358 res = []
359 359 for pair in cmds.split(';'):
360 360 op, args = pair.split(' ', 1)
361 361 vals = {}
362 362 for a in args.split(','):
363 363 if a:
364 364 n, v = a.split('=')
365 365 vals[n] = unescapearg(v)
366 366 func, spec = commands[op]
367 367 if spec:
368 368 keys = spec.split()
369 369 data = {}
370 370 for k in keys:
371 371 if k == '*':
372 372 star = {}
373 373 for key in vals.keys():
374 374 if key not in keys:
375 375 star[key] = vals[key]
376 376 data['*'] = star
377 377 else:
378 378 data[k] = vals[k]
379 379 result = func(repo, proto, *[data[k] for k in keys])
380 380 else:
381 381 result = func(repo, proto)
382 382 if isinstance(result, ooberror):
383 383 return result
384 384 res.append(escapearg(result))
385 385 return ';'.join(res)
386 386
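Tying the two ends together: wirerepository._submitbatch() above joins the queued sub-commands into one 'cmds' string, and the server-side batch() splits it again, unescaping argument values and escaping each result before joining the replies with ';'. A hedged sketch of the string that travels over the wire (node id invented):

    # Illustrative only: building 'cmds' the way _submitbatch() does for two
    # queued calls; batch() above parses exactly this shape.
    req = [('heads', {}),
           ('known', {'nodes': encodelist(['\x11' * 20])})]
    cmds = []
    for op, argsdict in req:
        args = ','.join('%s=%s' % p for p in argsdict.iteritems())
        cmds.append('%s %s' % (op, args))
    wire = ';'.join(cmds)
    # -> "heads ;known nodes=1111...11"
    # The reply is the escapearg()ed results joined with ';', which the
    # client splits and hands back to each @batchable coroutine.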
387 387 def between(repo, proto, pairs):
388 388 pairs = [decodelist(p, '-') for p in pairs.split(" ")]
389 389 r = []
390 390 for b in repo.between(pairs):
391 391 r.append(encodelist(b) + "\n")
392 392 return "".join(r)
393 393
394 394 def branchmap(repo, proto):
395 395 branchmap = repo.branchmap()
396 396 heads = []
397 397 for branch, nodes in branchmap.iteritems():
398 398 branchname = urllib.quote(encoding.fromlocal(branch))
399 399 branchnodes = encodelist(nodes)
400 400 heads.append('%s %s' % (branchname, branchnodes))
401 401 return '\n'.join(heads)
402 402
403 403 def branches(repo, proto, nodes):
404 404 nodes = decodelist(nodes)
405 405 r = []
406 406 for b in repo.branches(nodes):
407 407 r.append(encodelist(b) + "\n")
408 408 return "".join(r)
409 409
410 410 def capabilities(repo, proto):
411 411 caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
412 412 'unbundlehash batch').split()
413 413 if _allowstream(repo.ui):
414 414 requiredformats = repo.requirements & repo.supportedformats
415 415 # if our local revlogs are just revlogv1, add 'stream' cap
416 416 if not requiredformats - set(('revlogv1',)):
417 417 caps.append('stream')
418 418 # otherwise, add 'streamreqs' detailing our local revlog format
419 419 else:
420 420 caps.append('streamreqs=%s' % ','.join(requiredformats))
421 421 caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
422 422 caps.append('httpheader=1024')
423 423 return ' '.join(caps)
424 424
425 425 def changegroup(repo, proto, roots):
426 426 nodes = decodelist(roots)
427 427 cg = repo.changegroup(nodes, 'serve')
428 428 return streamres(proto.groupchunks(cg))
429 429
430 430 def changegroupsubset(repo, proto, bases, heads):
431 431 bases = decodelist(bases)
432 432 heads = decodelist(heads)
433 433 cg = repo.changegroupsubset(bases, heads, 'serve')
434 434 return streamres(proto.groupchunks(cg))
435 435
436 436 def debugwireargs(repo, proto, one, two, others):
437 437 # only accept optional args from the known set
438 438 opts = options('debugwireargs', ['three', 'four'], others)
439 439 return repo.debugwireargs(one, two, **opts)
440 440
441 441 def getbundle(repo, proto, others):
442 442 opts = options('getbundle', ['heads', 'common'], others)
443 443 for k, v in opts.iteritems():
444 444 opts[k] = decodelist(v)
445 445 cg = repo.getbundle('serve', **opts)
446 446 return streamres(proto.groupchunks(cg))
447 447
448 448 def heads(repo, proto):
449 449 h = repo.heads()
450 450 return encodelist(h) + "\n"
451 451
452 452 def hello(repo, proto):
453 453 '''the hello command returns a set of lines describing various
454 454 interesting things about the server, in an RFC822-like format.
455 455 Currently the only one defined is "capabilities", which
456 456 consists of a line in the form:
457 457
458 458 capabilities: space separated list of tokens
459 459 '''
460 460 return "capabilities: %s\n" % (capabilities(repo, proto))
461 461
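For illustration, a hello reply assembled from capabilities() above is a single line along these lines; treat it as an assumed example (the unbundle token lists the formats from changegroupmod.bundlepriority, and the stream/streamreqs and httpheader tokens depend on configuration), not canonical output:

    capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024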
462 462 def listkeys(repo, proto, namespace):
463 463 d = repo.listkeys(encoding.tolocal(namespace)).items()
464 464 t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
465 465 for k, v in d])
466 466 return t
467 467
468 468 def lookup(repo, proto, key):
469 469 try:
470 470 r = hex(repo.lookup(encoding.tolocal(key)))
471 471 success = 1
472 472 except Exception, inst:
473 473 r = str(inst)
474 474 success = 0
475 475 return "%s %s\n" % (success, r)
476 476
477 477 def known(repo, proto, nodes, others):
478 478 return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
479 479
480 480 def pushkey(repo, proto, namespace, key, old, new):
481 481 # compatibility with pre-1.8 clients which were accidentally
482 482 # sending raw binary nodes rather than utf-8-encoded hex
483 483 if len(new) == 20 and new.encode('string-escape') != new:
484 484 # looks like it could be a binary node
485 485 try:
486 486 new.decode('utf-8')
487 487 new = encoding.tolocal(new) # but cleanly decodes as UTF-8
488 488 except UnicodeDecodeError:
489 489 pass # binary, leave unmodified
490 490 else:
491 491 new = encoding.tolocal(new) # normal path
492 492
493 493 r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
494 494 encoding.tolocal(old), new)
495 495 return '%s\n' % int(r)
496 496
497 497 def _allowstream(ui):
498 498 return ui.configbool('server', 'uncompressed', True, untrusted=True)
499 499
500 500 def stream(repo, proto):
501 501 '''If the server supports streaming clone, it advertises the "stream"
502 502 capability with a value representing the version and flags of the repo
503 503 it is serving. Client checks to see if it understands the format.
504 504
505 505 The format is simple: the server writes out a line with the number
506 506 of files, then the total number of bytes to be transferred (separated
507 507 by a space). Then, for each file, the server first writes the filename
508 508 and filesize (separated by the null character), then the file contents.
509 509 '''
510 510
511 511 if not _allowstream(repo.ui):
512 512 return '1\n'
513 513
514 514 entries = []
515 515 total_bytes = 0
516 516 try:
517 517 # get consistent snapshot of repo, lock during scan
518 518 lock = repo.lock()
519 519 try:
520 520 repo.ui.debug('scanning\n')
521 521 for name, ename, size in repo.store.walk():
522 522 entries.append((name, size))
523 523 total_bytes += size
524 524 finally:
525 525 lock.release()
526 526 except error.LockError:
527 527 return '2\n' # error: 2
528 528
529 529 def streamer(repo, entries, total):
530 530 '''stream out all metadata files in repository.'''
531 531 yield '0\n' # success
532 532 repo.ui.debug('%d files, %d bytes to transfer\n' %
533 533 (len(entries), total_bytes))
534 534 yield '%d %d\n' % (len(entries), total_bytes)
535 535 for name, size in entries:
536 536 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
537 537 # partially encode name over the wire for backwards compat
538 538 yield '%s\0%d\n' % (store.encodedir(name), size)
539 539 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
540 540 yield chunk
541 541
542 542 return streamres(streamer(repo, entries, total_bytes))
543 543
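The format documented in the stream() docstring is consumed by localrepository.stream_in() earlier in this changeset; the generator below restates just the parsing side as an isolated sketch (error handling and chunked reads omitted, 'fp' assumed to be the response stream):

    # Illustrative reader for the stream format above:
    #   "<status>\n", then "<filecount> <totalbytes>\n",
    #   then per file "<name>\0<size>\n" followed by <size> bytes.
    def readstream(fp):
        status = int(fp.readline())   # 0 ok, 1 forbidden, 2 remote lock failed
        if status != 0:
            raise RuntimeError("streaming clone refused: %d" % status)
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for _unused in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            # name is partially encoded over the wire (store.encodedir above)
            yield name, fp.read(int(size))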
544 544 def unbundle(repo, proto, heads):
545 545 their_heads = decodelist(heads)
546 546
547 547 def check_heads():
548 548 heads = repo.heads()
549 549 heads_hash = util.sha1(''.join(sorted(heads))).digest()
550 550 return (their_heads == ['force'] or their_heads == heads or
551 551 their_heads == ['hashed', heads_hash])
552 552
553 553 proto.redirect()
554 554
555 555 # fail early if possible
556 556 if not check_heads():
557 557 return pusherr('unsynced changes')
558 558
559 559 # write bundle data to temporary file because it can be big
560 560 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
561 561 fp = os.fdopen(fd, 'wb+')
562 562 r = 0
563 563 try:
564 564 proto.getfile(fp)
565 565 lock = repo.lock()
566 566 try:
567 567 if not check_heads():
568 568 # someone else committed/pushed/unbundled while we
569 569 # were transferring data
570 570 return pusherr('unsynced changes')
571 571
572 572 # push can proceed
573 573 fp.seek(0)
574 574 gen = changegroupmod.readbundle(fp, None)
575 575
576 576 try:
577 r = repo.addchangegroup(gen, 'serve', proto._client(),
578 lock=lock)
577 r = repo.addchangegroup(gen, 'serve', proto._client())
579 578 except util.Abort, inst:
580 579 sys.stderr.write("abort: %s\n" % inst)
581 580 finally:
582 581 lock.release()
583 582 return pushres(r)
584 583
585 584 finally:
586 585 fp.close()
587 586 os.unlink(tempname)
588 587
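The check_heads() guard above accepts either the literal remote heads, the 'force' marker, or the hashed form produced by wirerepository.unbundle() when the server advertises 'unbundlehash'. A short sketch of that client-side computation, mirroring the code earlier in this diff:

    # Illustrative only: the ['hashed', digest] form tested by check_heads().
    def hashedheads(heads):
        # heads: binary node ids the client believes the server has
        return ['hashed', util.sha1(''.join(sorted(heads))).digest()]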
589 588 commands = {
590 589 'batch': (batch, 'cmds *'),
591 590 'between': (between, 'pairs'),
592 591 'branchmap': (branchmap, ''),
593 592 'branches': (branches, 'nodes'),
594 593 'capabilities': (capabilities, ''),
595 594 'changegroup': (changegroup, 'roots'),
596 595 'changegroupsubset': (changegroupsubset, 'bases heads'),
597 596 'debugwireargs': (debugwireargs, 'one two *'),
598 597 'getbundle': (getbundle, '*'),
599 598 'heads': (heads, ''),
600 599 'hello': (hello, ''),
601 600 'known': (known, 'nodes *'),
602 601 'listkeys': (listkeys, 'namespace'),
603 602 'lookup': (lookup, 'key'),
604 603 'pushkey': (pushkey, 'namespace key old new'),
605 604 'stream_out': (stream, ''),
606 605 'unbundle': (unbundle, 'heads'),
607 606 }