branch closing: referencing open and closed branches/heads...
John Mulligan -
r7656:6a24fb99 default
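The hunks below thread a closed-branch notion through several commands: commit() records 'close': 1 in the changeset's extra metadata when its close_branch option is set, and head queries such as repo.heads() and repo.branchheads() gain a closed= flag so that heads of closed branches can be filtered out. The following standalone sketch is only a toy model of that filtering, written to make the intent of the diff easier to follow; the dictionaries stand in for real repository state and none of this code is part of the changeset itself.

    # Toy model of the closed-branch filtering introduced by this changeset.
    # Each head is (node, extra); a 'close' key in extra is what commit()
    # records below when its close_branch option is set.
    heads = [
        ('aaa111', {}),            # ordinary open head
        ('bbb222', {'close': 1}),  # head that closes its branch
    ]

    def open_heads(heads, closed=False):
        """Return every head, or only the open ones when closed is False."""
        if closed:
            return [node for node, extra in heads]
        return [node for node, extra in heads if not extra.get('close')]

    print(open_heads(heads))               # ['aaa111']
    print(open_heads(heads, closed=True))  # ['aaa111', 'bbb222']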
@@ -1,3434 +1,3443
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _, gettext
10 10 import os, re, sys
11 11 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 12 import difflib, patch, time, help, mdiff, tempfile, url
13 13 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 14 import merge as merge_
15 15
16 16 # Commands start here, listed alphabetically
17 17
18 18 def add(ui, repo, *pats, **opts):
19 19 """add the specified files on the next commit
20 20
21 21 Schedule files to be version controlled and added to the repository.
22 22
23 23 The files will be added to the repository at the next commit. To
24 24 undo an add before that, see hg revert.
25 25
26 26 If no names are given, add all files in the repository.
27 27 """
28 28
29 29 rejected = None
30 30 exacts = {}
31 31 names = []
32 32 m = cmdutil.match(repo, pats, opts)
33 33 m.bad = lambda x,y: True
34 34 for abs in repo.walk(m):
35 35 if m.exact(abs):
36 36 if ui.verbose:
37 37 ui.status(_('adding %s\n') % m.rel(abs))
38 38 names.append(abs)
39 39 exacts[abs] = 1
40 40 elif abs not in repo.dirstate:
41 41 ui.status(_('adding %s\n') % m.rel(abs))
42 42 names.append(abs)
43 43 if not opts.get('dry_run'):
44 44 rejected = repo.add(names)
45 45 rejected = [p for p in rejected if p in exacts]
46 46 return rejected and 1 or 0
47 47
48 48 def addremove(ui, repo, *pats, **opts):
49 49 """add all new files, delete all missing files
50 50
51 51 Add all new files and remove all missing files from the repository.
52 52
53 53 New files are ignored if they match any of the patterns in .hgignore. As
54 54 with add, these changes take effect at the next commit.
55 55
56 56 Use the -s option to detect renamed files. With a parameter > 0,
57 57 this compares every removed file with every added file and records
58 58 those similar enough as renames. This option takes a percentage
59 59 between 0 (disabled) and 100 (files must be identical) as its
60 60 parameter. Detecting renamed files this way can be expensive.
61 61 """
62 62 try:
63 63 sim = float(opts.get('similarity') or 0)
64 64 except ValueError:
65 65 raise util.Abort(_('similarity must be a number'))
66 66 if sim < 0 or sim > 100:
67 67 raise util.Abort(_('similarity must be between 0 and 100'))
68 68 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
69 69
70 70 def annotate(ui, repo, *pats, **opts):
71 71 """show changeset information per file line
72 72
73 73 List changes in files, showing the revision id responsible for each line
74 74
75 75 This command is useful to discover who did a change or when a change took
76 76 place.
77 77
78 78 Without the -a option, annotate will avoid processing files it
79 79 detects as binary. With -a, annotate will generate an annotation
80 80 anyway, probably with undesirable results.
81 81 """
82 82 datefunc = ui.quiet and util.shortdate or util.datestr
83 83 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
84 84
85 85 if not pats:
86 86 raise util.Abort(_('at least one file name or pattern required'))
87 87
88 88 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
89 89 ('number', lambda x: str(x[0].rev())),
90 90 ('changeset', lambda x: short(x[0].node())),
91 91 ('date', getdate),
92 92 ('follow', lambda x: x[0].path()),
93 93 ]
94 94
95 95 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
96 96 and not opts.get('follow')):
97 97 opts['number'] = 1
98 98
99 99 linenumber = opts.get('line_number') is not None
100 100 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
101 101 raise util.Abort(_('at least one of -n/-c is required for -l'))
102 102
103 103 funcmap = [func for op, func in opmap if opts.get(op)]
104 104 if linenumber:
105 105 lastfunc = funcmap[-1]
106 106 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
107 107
108 108 ctx = repo[opts.get('rev')]
109 109
110 110 m = cmdutil.match(repo, pats, opts)
111 111 for abs in ctx.walk(m):
112 112 fctx = ctx[abs]
113 113 if not opts.get('text') and util.binary(fctx.data()):
114 114 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
115 115 continue
116 116
117 117 lines = fctx.annotate(follow=opts.get('follow'),
118 118 linenumber=linenumber)
119 119 pieces = []
120 120
121 121 for f in funcmap:
122 122 l = [f(n) for n, dummy in lines]
123 123 if l:
124 124 ml = max(map(len, l))
125 125 pieces.append(["%*s" % (ml, x) for x in l])
126 126
127 127 if pieces:
128 128 for p, l in zip(zip(*pieces), lines):
129 129 ui.write("%s: %s" % (" ".join(p), l[1]))
130 130
131 131 def archive(ui, repo, dest, **opts):
132 132 '''create unversioned archive of a repository revision
133 133
134 134 By default, the revision used is the parent of the working
135 135 directory; use "-r" to specify a different revision.
136 136
137 137 To specify the type of archive to create, use "-t". Valid
138 138 types are:
139 139
140 140 "files" (default): a directory full of files
141 141 "tar": tar archive, uncompressed
142 142 "tbz2": tar archive, compressed using bzip2
143 143 "tgz": tar archive, compressed using gzip
144 144 "uzip": zip archive, uncompressed
145 145 "zip": zip archive, compressed using deflate
146 146
147 147 The exact name of the destination archive or directory is given
148 148 using a format string; see "hg help export" for details.
149 149
150 150 Each member added to an archive file has a directory prefix
151 151 prepended. Use "-p" to specify a format string for the prefix.
152 152 The default is the basename of the archive, with suffixes removed.
153 153 '''
154 154
155 155 ctx = repo[opts.get('rev')]
156 156 if not ctx:
157 157 raise util.Abort(_('no working directory: please specify a revision'))
158 158 node = ctx.node()
159 159 dest = cmdutil.make_filename(repo, dest, node)
160 160 if os.path.realpath(dest) == repo.root:
161 161 raise util.Abort(_('repository root cannot be destination'))
162 162 matchfn = cmdutil.match(repo, [], opts)
163 163 kind = opts.get('type') or 'files'
164 164 prefix = opts.get('prefix')
165 165 if dest == '-':
166 166 if kind == 'files':
167 167 raise util.Abort(_('cannot archive plain files to stdout'))
168 168 dest = sys.stdout
169 169 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
170 170 prefix = cmdutil.make_filename(repo, prefix, node)
171 171 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
172 172 matchfn, prefix)
173 173
174 174 def backout(ui, repo, node=None, rev=None, **opts):
175 175 '''reverse effect of earlier changeset
176 176
177 177 Commit the backed out changes as a new changeset. The new
178 178 changeset is a child of the backed out changeset.
179 179
180 180 If you back out a changeset other than the tip, a new head is
181 181 created. This head will be the new tip and you should merge this
182 182 backout changeset with another head (current one by default).
183 183
184 184 The --merge option remembers the parent of the working directory
185 185 before starting the backout, then merges the new head with that
186 186 changeset afterwards. This saves you from doing the merge by
187 187 hand. The result of this merge is not committed, as for a normal
188 188 merge.
189 189
190 190 See \'hg help dates\' for a list of formats valid for -d/--date.
191 191 '''
192 192 if rev and node:
193 193 raise util.Abort(_("please specify just one revision"))
194 194
195 195 if not rev:
196 196 rev = node
197 197
198 198 if not rev:
199 199 raise util.Abort(_("please specify a revision to backout"))
200 200
201 201 date = opts.get('date')
202 202 if date:
203 203 opts['date'] = util.parsedate(date)
204 204
205 205 cmdutil.bail_if_changed(repo)
206 206 node = repo.lookup(rev)
207 207
208 208 op1, op2 = repo.dirstate.parents()
209 209 a = repo.changelog.ancestor(op1, node)
210 210 if a != node:
211 211 raise util.Abort(_('cannot back out change on a different branch'))
212 212
213 213 p1, p2 = repo.changelog.parents(node)
214 214 if p1 == nullid:
215 215 raise util.Abort(_('cannot back out a change with no parents'))
216 216 if p2 != nullid:
217 217 if not opts.get('parent'):
218 218 raise util.Abort(_('cannot back out a merge changeset without '
219 219 '--parent'))
220 220 p = repo.lookup(opts['parent'])
221 221 if p not in (p1, p2):
222 222 raise util.Abort(_('%s is not a parent of %s') %
223 223 (short(p), short(node)))
224 224 parent = p
225 225 else:
226 226 if opts.get('parent'):
227 227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
228 228 parent = p1
229 229
230 230 # the backout should appear on the same branch
231 231 branch = repo.dirstate.branch()
232 232 hg.clean(repo, node, show_stats=False)
233 233 repo.dirstate.setbranch(branch)
234 234 revert_opts = opts.copy()
235 235 revert_opts['date'] = None
236 236 revert_opts['all'] = True
237 237 revert_opts['rev'] = hex(parent)
238 238 revert_opts['no_backup'] = None
239 239 revert(ui, repo, **revert_opts)
240 240 commit_opts = opts.copy()
241 241 commit_opts['addremove'] = False
242 242 if not commit_opts['message'] and not commit_opts['logfile']:
243 243 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
244 244 commit_opts['force_editor'] = True
245 245 commit(ui, repo, **commit_opts)
246 246 def nice(node):
247 247 return '%d:%s' % (repo.changelog.rev(node), short(node))
248 248 ui.status(_('changeset %s backs out changeset %s\n') %
249 249 (nice(repo.changelog.tip()), nice(node)))
250 250 if op1 != node:
251 251 hg.clean(repo, op1, show_stats=False)
252 252 if opts.get('merge'):
253 253 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
254 254 hg.merge(repo, hex(repo.changelog.tip()))
255 255 else:
256 256 ui.status(_('the backout changeset is a new head - '
257 257 'do not forget to merge\n'))
258 258 ui.status(_('(use "backout --merge" '
259 259 'if you want to auto-merge)\n'))
260 260
261 261 def bisect(ui, repo, rev=None, extra=None, command=None,
262 262 reset=None, good=None, bad=None, skip=None, noupdate=None):
263 263 """subdivision search of changesets
264 264
265 265 This command helps to find changesets which introduce problems.
266 266 To use, mark the earliest changeset you know exhibits the problem
267 267 as bad, then mark the latest changeset which is free from the
268 268 problem as good. Bisect will update your working directory to a
269 269 revision for testing (unless the --noupdate option is specified).
270 270 Once you have performed tests, mark the working directory as bad
271 271 or good and bisect will either update to another candidate changeset
272 272 or announce that it has found the bad revision.
273 273
274 274 As a shortcut, you can also use the revision argument to mark a
275 275 revision as good or bad without checking it out first.
276 276
277 277 If you supply a command it will be used for automatic bisection. Its exit
278 278 status will be used as flag to mark revision as bad or good. In case exit
279 279 status is 0 the revision is marked as good, 125 - skipped, 127 (command not
280 280 found) - bisection will be aborted and any other status bigger than 0 will
281 281 mark revision as bad.
282 282 """
283 283 def print_result(nodes, good):
284 284 displayer = cmdutil.show_changeset(ui, repo, {})
285 285 transition = (good and "good" or "bad")
286 286 if len(nodes) == 1:
287 287 # narrowed it down to a single revision
288 288 ui.write(_("The first %s revision is:\n") % transition)
289 289 displayer.show(repo[nodes[0]])
290 290 else:
291 291 # multiple possible revisions
292 292 ui.write(_("Due to skipped revisions, the first "
293 293 "%s revision could be any of:\n") % transition)
294 294 for n in nodes:
295 295 displayer.show(repo[n])
296 296
297 297 def check_state(state, interactive=True):
298 298 if not state['good'] or not state['bad']:
299 299 if (good or bad or skip or reset) and interactive:
300 300 return
301 301 if not state['good']:
302 302 raise util.Abort(_('cannot bisect (no known good revisions)'))
303 303 else:
304 304 raise util.Abort(_('cannot bisect (no known bad revisions)'))
305 305 return True
306 306
307 307 # backward compatibility
308 308 if rev in "good bad reset init".split():
309 309 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
310 310 cmd, rev, extra = rev, extra, None
311 311 if cmd == "good":
312 312 good = True
313 313 elif cmd == "bad":
314 314 bad = True
315 315 else:
316 316 reset = True
317 317 elif extra or good + bad + skip + reset + bool(command) > 1:
318 318 raise util.Abort(_('incompatible arguments'))
319 319
320 320 if reset:
321 321 p = repo.join("bisect.state")
322 322 if os.path.exists(p):
323 323 os.unlink(p)
324 324 return
325 325
326 326 state = hbisect.load_state(repo)
327 327
328 328 if command:
329 329 commandpath = util.find_exe(command)
330 330 changesets = 1
331 331 try:
332 332 while changesets:
333 333 # update state
334 334 status = os.spawnl(os.P_WAIT, commandpath)
335 335 if status == 125:
336 336 transition = "skip"
337 337 elif status == 0:
338 338 transition = "good"
339 339 # status < 0 means process was killed
340 340 elif status == 127:
341 341 raise util.Abort(_("failed to execute %s") % command)
342 342 elif status < 0:
343 343 raise util.Abort(_("%s killed") % command)
344 344 else:
345 345 transition = "bad"
346 346 node = repo.lookup(rev or '.')
347 347 state[transition].append(node)
348 348 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
349 349 check_state(state, interactive=False)
350 350 # bisect
351 351 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
352 352 # update to next check
353 353 cmdutil.bail_if_changed(repo)
354 354 hg.clean(repo, nodes[0], show_stats=False)
355 355 finally:
356 356 hbisect.save_state(repo, state)
357 357 return print_result(nodes, not status)
358 358
359 359 # update state
360 360 node = repo.lookup(rev or '.')
361 361 if good:
362 362 state['good'].append(node)
363 363 elif bad:
364 364 state['bad'].append(node)
365 365 elif skip:
366 366 state['skip'].append(node)
367 367
368 368 hbisect.save_state(repo, state)
369 369
370 370 if not check_state(state):
371 371 return
372 372
373 373 # actually bisect
374 374 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
375 375 if changesets == 0:
376 376 print_result(nodes, good)
377 377 else:
378 378 assert len(nodes) == 1 # only a single node can be tested next
379 379 node = nodes[0]
380 380 # compute the approximate number of remaining tests
381 381 tests, size = 0, 2
382 382 while size <= changesets:
383 383 tests, size = tests + 1, size * 2
384 384 rev = repo.changelog.rev(node)
385 385 ui.write(_("Testing changeset %s:%s "
386 386 "(%s changesets remaining, ~%s tests)\n")
387 387 % (rev, short(node), changesets, tests))
388 388 if not noupdate:
389 389 cmdutil.bail_if_changed(repo)
390 390 return hg.clean(repo, node)
391 391
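The exit-status protocol spelled out in the bisect docstring above (0 marks the revision good, 125 skips it, 127 aborts because the command was not found, any other positive status marks it bad, and a negative status means the command was killed) is exactly what the --command loop applies. As a quick reference, here is that mapping restated as a standalone helper; it is an illustration only, not part of the change.

    def classify_bisect_status(status):
        """Map a test command's exit status to a bisect transition.

        Mirrors the handling in bisect()'s --command loop above:
        125 -> skip, 0 -> good, 127 or negative -> abort, otherwise bad.
        """
        if status == 125:
            return 'skip'
        if status == 0:
            return 'good'
        if status == 127:
            raise RuntimeError('failed to execute command')
        if status < 0:
            raise RuntimeError('command killed')
        return 'bad'

    assert classify_bisect_status(0) == 'good'
    assert classify_bisect_status(125) == 'skip'
    assert classify_bisect_status(1) == 'bad'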
392 392 def branch(ui, repo, label=None, **opts):
393 393 """set or show the current branch name
394 394
395 395 With no argument, show the current branch name. With one argument,
396 396 set the working directory branch name (the branch does not exist in
397 397 the repository until the next commit).
398 398
399 399 Unless --force is specified, branch will not let you set a
400 400 branch name that shadows an existing branch.
401 401
402 402 Use --clean to reset the working directory branch to that of the
403 403 parent of the working directory, negating a previous branch change.
404 404
405 405 Use the command 'hg update' to switch to an existing branch.
406 406 """
407 407
408 408 if opts.get('clean'):
409 409 label = repo[None].parents()[0].branch()
410 410 repo.dirstate.setbranch(label)
411 411 ui.status(_('reset working directory to branch %s\n') % label)
412 412 elif label:
413 413 if not opts.get('force') and label in repo.branchtags():
414 414 if label not in [p.branch() for p in repo.parents()]:
415 415 raise util.Abort(_('a branch of the same name already exists'
416 416 ' (use --force to override)'))
417 417 repo.dirstate.setbranch(util.fromlocal(label))
418 418 ui.status(_('marked working directory as branch %s\n') % label)
419 419 else:
420 420 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
421 421
422 422 def branches(ui, repo, active=False):
423 423 """list repository named branches
424 424
425 425 List the repository's named branches, indicating which ones are
426 426 inactive. If active is specified, only show active branches.
427 427
428 428 A branch is considered active if it contains repository heads.
429 429
430 430 Use the command 'hg update' to switch to an existing branch.
431 431 """
432 432 hexfunc = ui.debugflag and hex or short
433 433 activebranches = [util.tolocal(repo[n].branch())
434 for n in repo.heads()]
434 for n in repo.heads(closed=False)]
435 435 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
436 436 for tag, node in repo.branchtags().items()])
437 437 branches.reverse()
438 438
439 439 for isactive, node, tag in branches:
440 440 if (not active) or isactive:
441 441 if ui.quiet:
442 442 ui.write("%s\n" % tag)
443 443 else:
444 hn = repo.lookup(node)
445 if isactive:
446 notice = ''
447 elif hn not in repo.branchheads(tag, closed=False):
448 notice = ' (closed)'
449 else:
450 notice = ' (inactive)'
444 451 rev = str(node).rjust(31 - util.locallen(tag))
445 isinactive = ((not isactive) and " (inactive)") or ''
446 data = tag, rev, hexfunc(repo.lookup(node)), isinactive
452 data = tag, rev, hexfunc(hn), notice
447 453 ui.write("%s %s:%s%s\n" % data)
448 454
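The notice logic added to branches() above distinguishes three states: a branch is printed with no notice when it is active (it owns an open repository head), with ' (closed)' when its tip is no longer among its open branch heads (repo.branchheads(tag, closed=False)), and with ' (inactive)' otherwise. A self-contained sketch of that classification, using plain sets in place of the repo object (all names here are illustrative):

    def branch_notice(tag, tip, active_branches, open_branch_heads):
        """Classify a named branch the way branches() annotates it above.

        active_branches:   branch names that currently own an open head
        open_branch_heads: head nodes of this branch with closed heads removed
        """
        if tag in active_branches:
            return ''              # active branches get no notice
        if tip not in open_branch_heads:
            return ' (closed)'
        return ' (inactive)'

    print('stable' + branch_notice('stable', 'abc123', {'default'}, set()))       # stable (closed)
    print('stable' + branch_notice('stable', 'abc123', {'default'}, {'abc123'}))  # stable (inactive)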
449 455 def bundle(ui, repo, fname, dest=None, **opts):
450 456 """create a changegroup file
451 457
452 458 Generate a compressed changegroup file collecting changesets not
453 459 found in the other repository.
454 460
455 461 If no destination repository is specified the destination is
456 462 assumed to have all the nodes specified by one or more --base
457 463 parameters. To create a bundle containing all changesets, use
458 464 --all (or --base null). To change the compression method applied,
459 465 use the -t option (by default, bundles are compressed using bz2).
460 466
461 467 The bundle file can then be transferred using conventional means and
462 468 applied to another repository with the unbundle or pull command.
463 469 This is useful when direct push and pull are not available or when
464 470 exporting an entire repository is undesirable.
465 471
466 472 Applying bundles preserves all changeset contents including
467 473 permissions, copy/rename information, and revision history.
468 474 """
469 475 revs = opts.get('rev') or None
470 476 if revs:
471 477 revs = [repo.lookup(rev) for rev in revs]
472 478 if opts.get('all'):
473 479 base = ['null']
474 480 else:
475 481 base = opts.get('base')
476 482 if base:
477 483 if dest:
478 484 raise util.Abort(_("--base is incompatible with specifying "
479 485 "a destination"))
480 486 base = [repo.lookup(rev) for rev in base]
481 487 # create the right base
482 488 # XXX: nodesbetween / changegroup* should be "fixed" instead
483 489 o = []
484 490 has = {nullid: None}
485 491 for n in base:
486 492 has.update(repo.changelog.reachable(n))
487 493 if revs:
488 494 visit = list(revs)
489 495 else:
490 496 visit = repo.changelog.heads()
491 497 seen = {}
492 498 while visit:
493 499 n = visit.pop(0)
494 500 parents = [p for p in repo.changelog.parents(n) if p not in has]
495 501 if len(parents) == 0:
496 502 o.insert(0, n)
497 503 else:
498 504 for p in parents:
499 505 if p not in seen:
500 506 seen[p] = 1
501 507 visit.append(p)
502 508 else:
503 509 cmdutil.setremoteconfig(ui, opts)
504 510 dest, revs, checkout = hg.parseurl(
505 511 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
506 512 other = hg.repository(ui, dest)
507 513 o = repo.findoutgoing(other, force=opts.get('force'))
508 514
509 515 if revs:
510 516 cg = repo.changegroupsubset(o, revs, 'bundle')
511 517 else:
512 518 cg = repo.changegroup(o, 'bundle')
513 519
514 520 bundletype = opts.get('type', 'bzip2').lower()
515 521 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
516 522 bundletype = btypes.get(bundletype)
517 523 if bundletype not in changegroup.bundletypes:
518 524 raise util.Abort(_('unknown bundle type specified with --type'))
519 525
520 526 changegroup.writebundle(cg, fname, bundletype)
521 527
522 528 def cat(ui, repo, file1, *pats, **opts):
523 529 """output the current or given revision of files
524 530
525 531 Print the specified files as they were at the given revision.
526 532 If no revision is given, the parent of the working directory is used,
527 533 or tip if no revision is checked out.
528 534
529 535 Output may be to a file, in which case the name of the file is
530 536 given using a format string. The formatting rules are the same as
531 537 for the export command, with the following additions:
532 538
533 539 %s basename of file being printed
534 540 %d dirname of file being printed, or '.' if in repo root
535 541 %p root-relative path name of file being printed
536 542 """
537 543 ctx = repo[opts.get('rev')]
538 544 err = 1
539 545 m = cmdutil.match(repo, (file1,) + pats, opts)
540 546 for abs in ctx.walk(m):
541 547 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
542 548 data = ctx[abs].data()
543 549 if opts.get('decode'):
544 550 data = repo.wwritedata(abs, data)
545 551 fp.write(data)
546 552 err = 0
547 553 return err
548 554
549 555 def clone(ui, source, dest=None, **opts):
550 556 """make a copy of an existing repository
551 557
552 558 Create a copy of an existing repository in a new directory.
553 559
554 560 If no destination directory name is specified, it defaults to the
555 561 basename of the source.
556 562
557 563 The location of the source is added to the new repository's
558 564 .hg/hgrc file, as the default to be used for future pulls.
559 565
560 566 For efficiency, hardlinks are used for cloning whenever the source
561 567 and destination are on the same filesystem (note this applies only
562 568 to the repository data, not to the checked out files). Some
563 569 filesystems, such as AFS, implement hardlinking incorrectly, but
564 570 do not report errors. In these cases, use the --pull option to
565 571 avoid hardlinking.
566 572
567 573 In some cases, you can clone repositories and checked out files
568 574 using full hardlinks with
569 575
570 576 $ cp -al REPO REPOCLONE
571 577
572 578 This is the fastest way to clone, but it is not always safe. The
573 579 operation is not atomic (making sure REPO is not modified during
574 580 the operation is up to you) and you have to make sure your editor
575 581 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
576 582 this is not compatible with certain extensions that place their
577 583 metadata under the .hg directory, such as mq.
578 584
579 585 If you use the -r option to clone up to a specific revision, no
580 586 subsequent revisions will be present in the cloned repository.
581 587 This option implies --pull, even on local repositories.
582 588
583 589 If the -U option is used, the new clone will contain only a repository
584 590 (.hg) and no working copy (the working copy parent is the null revision).
585 591
586 592 See pull for valid source format details.
587 593
588 594 It is possible to specify an ssh:// URL as the destination, but no
589 595 .hg/hgrc and working directory will be created on the remote side.
590 596 Look at the help text for the pull command for important details
591 597 about ssh:// URLs.
592 598 """
593 599 cmdutil.setremoteconfig(ui, opts)
594 600 hg.clone(ui, source, dest,
595 601 pull=opts.get('pull'),
596 602 stream=opts.get('uncompressed'),
597 603 rev=opts.get('rev'),
598 604 update=not opts.get('noupdate'))
599 605
600 606 def commit(ui, repo, *pats, **opts):
601 607 """commit the specified files or all outstanding changes
602 608
603 609 Commit changes to the given files into the repository.
604 610
605 611 If a list of files is omitted, all changes reported by "hg status"
606 612 will be committed.
607 613
608 614 If you are committing the result of a merge, do not provide any
609 615 file names or -I/-X filters.
610 616
611 617 If no commit message is specified, the configured editor is started to
612 618 enter a message.
613 619
614 620 See 'hg help dates' for a list of formats valid for -d/--date.
615 621 """
616 622 extra = {}
617 623 if opts.get('close_branch'):
618 624 extra['close'] = 1
619 625 def commitfunc(ui, repo, message, match, opts):
620 626 return repo.commit(match.files(), message, opts.get('user'),
621 627 opts.get('date'), match, force_editor=opts.get('force_editor'),
622 628 extra=extra)
623 629
624 630 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
625 631 if not node:
626 632 return
627 633 cl = repo.changelog
628 634 rev = cl.rev(node)
629 635 parents = cl.parentrevs(rev)
630 636 if rev - 1 in parents:
631 637 # one of the parents was the old tip
632 638 pass
633 639 elif (parents == (nullrev, nullrev) or
634 640 len(cl.heads(cl.node(parents[0]))) > 1 and
635 641 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
636 642 ui.status(_('created new head\n'))
637 643
638 644 if ui.debugflag:
639 645 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
640 646 elif ui.verbose:
641 647 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
642 648
643 649 def copy(ui, repo, *pats, **opts):
644 650 """mark files as copied for the next commit
645 651
646 652 Mark dest as having copies of source files. If dest is a
647 653 directory, copies are put in that directory. If dest is a file,
648 654 there can only be one source.
649 655
650 656 By default, this command copies the contents of files as they
651 657 stand in the working directory. If invoked with --after, the
652 658 operation is recorded, but no copying is performed.
653 659
654 660 This command takes effect in the next commit. To undo a copy
655 661 before that, see hg revert.
656 662 """
657 663 wlock = repo.wlock(False)
658 664 try:
659 665 return cmdutil.copy(ui, repo, pats, opts)
660 666 finally:
661 667 del wlock
662 668
663 669 def debugancestor(ui, repo, *args):
664 670 """find the ancestor revision of two revisions in a given index"""
665 671 if len(args) == 3:
666 672 index, rev1, rev2 = args
667 673 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
668 674 lookup = r.lookup
669 675 elif len(args) == 2:
670 676 if not repo:
671 677 raise util.Abort(_("There is no Mercurial repository here "
672 678 "(.hg not found)"))
673 679 rev1, rev2 = args
674 680 r = repo.changelog
675 681 lookup = repo.lookup
676 682 else:
677 683 raise util.Abort(_('either two or three arguments required'))
678 684 a = r.ancestor(lookup(rev1), lookup(rev2))
679 685 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
680 686
681 687 def debugcomplete(ui, cmd='', **opts):
682 688 """returns the completion list associated with the given command"""
683 689
684 690 if opts.get('options'):
685 691 options = []
686 692 otables = [globalopts]
687 693 if cmd:
688 694 aliases, entry = cmdutil.findcmd(cmd, table, False)
689 695 otables.append(entry[1])
690 696 for t in otables:
691 697 for o in t:
692 698 if o[0]:
693 699 options.append('-%s' % o[0])
694 700 options.append('--%s' % o[1])
695 701 ui.write("%s\n" % "\n".join(options))
696 702 return
697 703
698 704 cmdlist = cmdutil.findpossible(cmd, table)
699 705 if ui.verbose:
700 706 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
701 707 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
702 708
703 709 def debugfsinfo(ui, path = "."):
704 710 file('.debugfsinfo', 'w').write('')
705 711 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
706 712 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
707 713 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
708 714 and 'yes' or 'no'))
709 715 os.unlink('.debugfsinfo')
710 716
711 717 def debugrebuildstate(ui, repo, rev="tip"):
712 718 """rebuild the dirstate as it would look like for the given revision"""
713 719 ctx = repo[rev]
714 720 wlock = repo.wlock()
715 721 try:
716 722 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
717 723 finally:
718 724 del wlock
719 725
720 726 def debugcheckstate(ui, repo):
721 727 """validate the correctness of the current dirstate"""
722 728 parent1, parent2 = repo.dirstate.parents()
723 729 m1 = repo[parent1].manifest()
724 730 m2 = repo[parent2].manifest()
725 731 errors = 0
726 732 for f in repo.dirstate:
727 733 state = repo.dirstate[f]
728 734 if state in "nr" and f not in m1:
729 735 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
730 736 errors += 1
731 737 if state in "a" and f in m1:
732 738 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
733 739 errors += 1
734 740 if state in "m" and f not in m1 and f not in m2:
735 741 ui.warn(_("%s in state %s, but not in either manifest\n") %
736 742 (f, state))
737 743 errors += 1
738 744 for f in m1:
739 745 state = repo.dirstate[f]
740 746 if state not in "nrm":
741 747 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
742 748 errors += 1
743 749 if errors:
744 750 error = _(".hg/dirstate inconsistent with current parent's manifest")
745 751 raise util.Abort(error)
746 752
747 753 def showconfig(ui, repo, *values, **opts):
748 754 """show combined config settings from all hgrc files
749 755
750 756 With no args, print names and values of all config items.
751 757
752 758 With one arg of the form section.name, print just the value of
753 759 that config item.
754 760
755 761 With multiple args, print names and values of all config items
756 762 with matching section names."""
757 763
758 764 untrusted = bool(opts.get('untrusted'))
759 765 if values:
760 766 if len([v for v in values if '.' in v]) > 1:
761 767 raise util.Abort(_('only one config item permitted'))
762 768 for section, name, value in ui.walkconfig(untrusted=untrusted):
763 769 sectname = section + '.' + name
764 770 if values:
765 771 for v in values:
766 772 if v == section:
767 773 ui.write('%s=%s\n' % (sectname, value))
768 774 elif v == sectname:
769 775 ui.write(value, '\n')
770 776 else:
771 777 ui.write('%s=%s\n' % (sectname, value))
772 778
773 779 def debugsetparents(ui, repo, rev1, rev2=None):
774 780 """manually set the parents of the current working directory
775 781
776 782 This is useful for writing repository conversion tools, but should
777 783 be used with care.
778 784 """
779 785
780 786 if not rev2:
781 787 rev2 = hex(nullid)
782 788
783 789 wlock = repo.wlock()
784 790 try:
785 791 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
786 792 finally:
787 793 del wlock
788 794
789 795 def debugstate(ui, repo, nodates=None):
790 796 """show the contents of the current dirstate"""
791 797 timestr = ""
792 798 showdate = not nodates
793 799 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
794 800 if showdate:
795 801 if ent[3] == -1:
796 802 # Pad or slice to locale representation
797 803 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
798 804 timestr = 'unset'
799 805 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
800 806 else:
801 807 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
802 808 if ent[1] & 020000:
803 809 mode = 'lnk'
804 810 else:
805 811 mode = '%3o' % (ent[1] & 0777)
806 812 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
807 813 for f in repo.dirstate.copies():
808 814 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
809 815
810 816 def debugdata(ui, file_, rev):
811 817 """dump the contents of a data file revision"""
812 818 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
813 819 try:
814 820 ui.write(r.revision(r.lookup(rev)))
815 821 except KeyError:
816 822 raise util.Abort(_('invalid revision identifier %s') % rev)
817 823
818 824 def debugdate(ui, date, range=None, **opts):
819 825 """parse and display a date"""
820 826 if opts["extended"]:
821 827 d = util.parsedate(date, util.extendeddateformats)
822 828 else:
823 829 d = util.parsedate(date)
824 830 ui.write("internal: %s %s\n" % d)
825 831 ui.write("standard: %s\n" % util.datestr(d))
826 832 if range:
827 833 m = util.matchdate(range)
828 834 ui.write("match: %s\n" % m(d[0]))
829 835
830 836 def debugindex(ui, file_):
831 837 """dump the contents of an index file"""
832 838 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
833 839 ui.write(" rev offset length base linkrev" +
834 840 " nodeid p1 p2\n")
835 841 for i in r:
836 842 node = r.node(i)
837 843 try:
838 844 pp = r.parents(node)
839 845 except:
840 846 pp = [nullid, nullid]
841 847 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
842 848 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
843 849 short(node), short(pp[0]), short(pp[1])))
844 850
845 851 def debugindexdot(ui, file_):
846 852 """dump an index DAG as a .dot file"""
847 853 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
848 854 ui.write("digraph G {\n")
849 855 for i in r:
850 856 node = r.node(i)
851 857 pp = r.parents(node)
852 858 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
853 859 if pp[1] != nullid:
854 860 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
855 861 ui.write("}\n")
856 862
857 863 def debuginstall(ui):
858 864 '''test Mercurial installation'''
859 865
860 866 def writetemp(contents):
861 867 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
862 868 f = os.fdopen(fd, "wb")
863 869 f.write(contents)
864 870 f.close()
865 871 return name
866 872
867 873 problems = 0
868 874
869 875 # encoding
870 876 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
871 877 try:
872 878 util.fromlocal("test")
873 879 except util.Abort, inst:
874 880 ui.write(" %s\n" % inst)
875 881 ui.write(_(" (check that your locale is properly set)\n"))
876 882 problems += 1
877 883
878 884 # compiled modules
879 885 ui.status(_("Checking extensions...\n"))
880 886 try:
881 887 import bdiff, mpatch, base85
882 888 except Exception, inst:
883 889 ui.write(" %s\n" % inst)
884 890 ui.write(_(" One or more extensions could not be found"))
885 891 ui.write(_(" (check that you compiled the extensions)\n"))
886 892 problems += 1
887 893
888 894 # templates
889 895 ui.status(_("Checking templates...\n"))
890 896 try:
891 897 import templater
892 898 t = templater.templater(templater.templatepath("map-cmdline.default"))
893 899 except Exception, inst:
894 900 ui.write(" %s\n" % inst)
895 901 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
896 902 problems += 1
897 903
898 904 # patch
899 905 ui.status(_("Checking patch...\n"))
900 906 patchproblems = 0
901 907 a = "1\n2\n3\n4\n"
902 908 b = "1\n2\n3\ninsert\n4\n"
903 909 fa = writetemp(a)
904 910 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
905 911 os.path.basename(fa))
906 912 fd = writetemp(d)
907 913
908 914 files = {}
909 915 try:
910 916 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 917 except util.Abort, e:
912 918 ui.write(_(" patch call failed:\n"))
913 919 ui.write(" " + str(e) + "\n")
914 920 patchproblems += 1
915 921 else:
916 922 if list(files) != [os.path.basename(fa)]:
917 923 ui.write(_(" unexpected patch output!\n"))
918 924 patchproblems += 1
919 925 a = file(fa).read()
920 926 if a != b:
921 927 ui.write(_(" patch test failed!\n"))
922 928 patchproblems += 1
923 929
924 930 if patchproblems:
925 931 if ui.config('ui', 'patch'):
926 932 ui.write(_(" (Current patch tool may be incompatible with patch,"
927 933 " or misconfigured. Please check your .hgrc file)\n"))
928 934 else:
929 935 ui.write(_(" Internal patcher failure, please report this error"
930 936 " to http://www.selenic.com/mercurial/bts\n"))
931 937 problems += patchproblems
932 938
933 939 os.unlink(fa)
934 940 os.unlink(fd)
935 941
936 942 # editor
937 943 ui.status(_("Checking commit editor...\n"))
938 944 editor = ui.geteditor()
939 945 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
940 946 if not cmdpath:
941 947 if editor == 'vi':
942 948 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
943 949 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
944 950 else:
945 951 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
946 952 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
947 953 problems += 1
948 954
949 955 # check username
950 956 ui.status(_("Checking username...\n"))
951 957 user = os.environ.get("HGUSER")
952 958 if user is None:
953 959 user = ui.config("ui", "username")
954 960 if user is None:
955 961 user = os.environ.get("EMAIL")
956 962 if not user:
957 963 ui.warn(" ")
958 964 ui.username()
959 965 ui.write(_(" (specify a username in your .hgrc file)\n"))
960 966
961 967 if not problems:
962 968 ui.status(_("No problems detected\n"))
963 969 else:
964 970 ui.write(_("%s problems detected,"
965 971 " please check your install!\n") % problems)
966 972
967 973 return problems
968 974
969 975 def debugrename(ui, repo, file1, *pats, **opts):
970 976 """dump rename information"""
971 977
972 978 ctx = repo[opts.get('rev')]
973 979 m = cmdutil.match(repo, (file1,) + pats, opts)
974 980 for abs in ctx.walk(m):
975 981 fctx = ctx[abs]
976 982 o = fctx.filelog().renamed(fctx.filenode())
977 983 rel = m.rel(abs)
978 984 if o:
979 985 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
980 986 else:
981 987 ui.write(_("%s not renamed\n") % rel)
982 988
983 989 def debugwalk(ui, repo, *pats, **opts):
984 990 """show how files match on given patterns"""
985 991 m = cmdutil.match(repo, pats, opts)
986 992 items = list(repo.walk(m))
987 993 if not items:
988 994 return
989 995 fmt = 'f %%-%ds %%-%ds %%s' % (
990 996 max([len(abs) for abs in items]),
991 997 max([len(m.rel(abs)) for abs in items]))
992 998 for abs in items:
993 999 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
994 1000 ui.write("%s\n" % line.rstrip())
995 1001
996 1002 def diff(ui, repo, *pats, **opts):
997 1003 """diff repository (or selected files)
998 1004
999 1005 Show differences between revisions for the specified files.
1000 1006
1001 1007 Differences between files are shown using the unified diff format.
1002 1008
1003 1009 NOTE: diff may generate unexpected results for merges, as it will
1004 1010 default to comparing against the working directory's first parent
1005 1011 changeset if no revisions are specified.
1006 1012
1007 1013 When two revision arguments are given, then changes are shown
1008 1014 between those revisions. If only one revision is specified then
1009 1015 that revision is compared to the working directory, and, when no
1010 1016 revisions are specified, the working directory files are compared
1011 1017 to its parent.
1012 1018
1013 1019 Without the -a option, diff will avoid generating diffs of files
1014 1020 it detects as binary. With -a, diff will generate a diff anyway,
1015 1021 probably with undesirable results.
1016 1022
1017 1023 Use the --git option to generate diffs in the git extended diff
1018 1024 format. Read the diffs help topic for more information.
1019 1025 """
1020 1026
1021 1027 revs = opts.get('rev')
1022 1028 change = opts.get('change')
1023 1029
1024 1030 if revs and change:
1025 1031 msg = _('cannot specify --rev and --change at the same time')
1026 1032 raise util.Abort(msg)
1027 1033 elif change:
1028 1034 node2 = repo.lookup(change)
1029 1035 node1 = repo[node2].parents()[0].node()
1030 1036 else:
1031 1037 node1, node2 = cmdutil.revpair(repo, revs)
1032 1038
1033 1039 m = cmdutil.match(repo, pats, opts)
1034 1040 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1035 1041 for chunk in it:
1036 1042 repo.ui.write(chunk)
1037 1043
1038 1044 def export(ui, repo, *changesets, **opts):
1039 1045 """dump the header and diffs for one or more changesets
1040 1046
1041 1047 Print the changeset header and diffs for one or more revisions.
1042 1048
1043 1049 The information shown in the changeset header is: author,
1044 1050 changeset hash, parent(s) and commit comment.
1045 1051
1046 1052 NOTE: export may generate unexpected diff output for merge changesets,
1047 1053 as it will compare the merge changeset against its first parent only.
1048 1054
1049 1055 Output may be to a file, in which case the name of the file is
1050 1056 given using a format string. The formatting rules are as follows:
1051 1057
1052 1058 %% literal "%" character
1053 1059 %H changeset hash (40 bytes of hexadecimal)
1054 1060 %N number of patches being generated
1055 1061 %R changeset revision number
1056 1062 %b basename of the exporting repository
1057 1063 %h short-form changeset hash (12 bytes of hexadecimal)
1058 1064 %n zero-padded sequence number, starting at 1
1059 1065 %r zero-padded changeset revision number
1060 1066
1061 1067 Without the -a option, export will avoid generating diffs of files
1062 1068 it detects as binary. With -a, export will generate a diff anyway,
1063 1069 probably with undesirable results.
1064 1070
1065 1071 Use the --git option to generate diffs in the git extended diff
1066 1072 format. Read the diffs help topic for more information.
1067 1073
1068 1074 With the --switch-parent option, the diff will be against the second
1069 1075 parent. It can be useful to review a merge.
1070 1076 """
1071 1077 if not changesets:
1072 1078 raise util.Abort(_("export requires at least one changeset"))
1073 1079 revs = cmdutil.revrange(repo, changesets)
1074 1080 if len(revs) > 1:
1075 1081 ui.note(_('exporting patches:\n'))
1076 1082 else:
1077 1083 ui.note(_('exporting patch:\n'))
1078 1084 patch.export(repo, revs, template=opts.get('output'),
1079 1085 switch_parent=opts.get('switch_parent'),
1080 1086 opts=patch.diffopts(ui, opts))
1081 1087
1082 1088 def grep(ui, repo, pattern, *pats, **opts):
1083 1089 """search for a pattern in specified files and revisions
1084 1090
1085 1091 Search revisions of files for a regular expression.
1086 1092
1087 1093 This command behaves differently than Unix grep. It only accepts
1088 1094 Python/Perl regexps. It searches repository history, not the
1089 1095 working directory. It always prints the revision number in which
1090 1096 a match appears.
1091 1097
1092 1098 By default, grep only prints output for the first revision of a
1093 1099 file in which it finds a match. To get it to print every revision
1094 1100 that contains a change in match status ("-" for a match that
1095 1101 becomes a non-match, or "+" for a non-match that becomes a match),
1096 1102 use the --all flag.
1097 1103 """
1098 1104 reflags = 0
1099 1105 if opts.get('ignore_case'):
1100 1106 reflags |= re.I
1101 1107 try:
1102 1108 regexp = re.compile(pattern, reflags)
1103 1109 except Exception, inst:
1104 1110 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1105 1111 return None
1106 1112 sep, eol = ':', '\n'
1107 1113 if opts.get('print0'):
1108 1114 sep = eol = '\0'
1109 1115
1110 1116 fcache = {}
1111 1117 def getfile(fn):
1112 1118 if fn not in fcache:
1113 1119 fcache[fn] = repo.file(fn)
1114 1120 return fcache[fn]
1115 1121
1116 1122 def matchlines(body):
1117 1123 begin = 0
1118 1124 linenum = 0
1119 1125 while True:
1120 1126 match = regexp.search(body, begin)
1121 1127 if not match:
1122 1128 break
1123 1129 mstart, mend = match.span()
1124 1130 linenum += body.count('\n', begin, mstart) + 1
1125 1131 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1126 1132 begin = body.find('\n', mend) + 1 or len(body)
1127 1133 lend = begin - 1
1128 1134 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1129 1135
1130 1136 class linestate(object):
1131 1137 def __init__(self, line, linenum, colstart, colend):
1132 1138 self.line = line
1133 1139 self.linenum = linenum
1134 1140 self.colstart = colstart
1135 1141 self.colend = colend
1136 1142
1137 1143 def __hash__(self):
1138 1144 return hash((self.linenum, self.line))
1139 1145
1140 1146 def __eq__(self, other):
1141 1147 return self.line == other.line
1142 1148
1143 1149 matches = {}
1144 1150 copies = {}
1145 1151 def grepbody(fn, rev, body):
1146 1152 matches[rev].setdefault(fn, [])
1147 1153 m = matches[rev][fn]
1148 1154 for lnum, cstart, cend, line in matchlines(body):
1149 1155 s = linestate(line, lnum, cstart, cend)
1150 1156 m.append(s)
1151 1157
1152 1158 def difflinestates(a, b):
1153 1159 sm = difflib.SequenceMatcher(None, a, b)
1154 1160 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1155 1161 if tag == 'insert':
1156 1162 for i in xrange(blo, bhi):
1157 1163 yield ('+', b[i])
1158 1164 elif tag == 'delete':
1159 1165 for i in xrange(alo, ahi):
1160 1166 yield ('-', a[i])
1161 1167 elif tag == 'replace':
1162 1168 for i in xrange(alo, ahi):
1163 1169 yield ('-', a[i])
1164 1170 for i in xrange(blo, bhi):
1165 1171 yield ('+', b[i])
1166 1172
1167 1173 prev = {}
1168 1174 def display(fn, rev, states, prevstates):
1169 1175 datefunc = ui.quiet and util.shortdate or util.datestr
1170 1176 found = False
1171 1177 filerevmatches = {}
1172 1178 r = prev.get(fn, -1)
1173 1179 if opts.get('all'):
1174 1180 iter = difflinestates(states, prevstates)
1175 1181 else:
1176 1182 iter = [('', l) for l in prevstates]
1177 1183 for change, l in iter:
1178 1184 cols = [fn, str(r)]
1179 1185 if opts.get('line_number'):
1180 1186 cols.append(str(l.linenum))
1181 1187 if opts.get('all'):
1182 1188 cols.append(change)
1183 1189 if opts.get('user'):
1184 1190 cols.append(ui.shortuser(get(r)[1]))
1185 1191 if opts.get('date'):
1186 1192 cols.append(datefunc(get(r)[2]))
1187 1193 if opts.get('files_with_matches'):
1188 1194 c = (fn, r)
1189 1195 if c in filerevmatches:
1190 1196 continue
1191 1197 filerevmatches[c] = 1
1192 1198 else:
1193 1199 cols.append(l.line)
1194 1200 ui.write(sep.join(cols), eol)
1195 1201 found = True
1196 1202 return found
1197 1203
1198 1204 fstate = {}
1199 1205 skip = {}
1200 1206 get = util.cachefunc(lambda r: repo[r].changeset())
1201 1207 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1202 1208 found = False
1203 1209 follow = opts.get('follow')
1204 1210 for st, rev, fns in changeiter:
1205 1211 if st == 'window':
1206 1212 matches.clear()
1207 1213 elif st == 'add':
1208 1214 ctx = repo[rev]
1209 1215 matches[rev] = {}
1210 1216 for fn in fns:
1211 1217 if fn in skip:
1212 1218 continue
1213 1219 try:
1214 1220 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1215 1221 fstate.setdefault(fn, [])
1216 1222 if follow:
1217 1223 copied = getfile(fn).renamed(ctx.filenode(fn))
1218 1224 if copied:
1219 1225 copies.setdefault(rev, {})[fn] = copied[0]
1220 1226 except error.LookupError:
1221 1227 pass
1222 1228 elif st == 'iter':
1223 1229 for fn, m in util.sort(matches[rev].items()):
1224 1230 copy = copies.get(rev, {}).get(fn)
1225 1231 if fn in skip:
1226 1232 if copy:
1227 1233 skip[copy] = True
1228 1234 continue
1229 1235 if fn in prev or fstate[fn]:
1230 1236 r = display(fn, rev, m, fstate[fn])
1231 1237 found = found or r
1232 1238 if r and not opts.get('all'):
1233 1239 skip[fn] = True
1234 1240 if copy:
1235 1241 skip[copy] = True
1236 1242 fstate[fn] = m
1237 1243 if copy:
1238 1244 fstate[copy] = m
1239 1245 prev[fn] = rev
1240 1246
1241 1247 for fn, state in util.sort(fstate.items()):
1242 1248 if fn in skip:
1243 1249 continue
1244 1250 if fn not in copies.get(prev[fn], {}):
1245 1251 found = display(fn, rev, {}, state) or found
1246 1252 return (not found and 1) or 0
1247 1253
1248 1254 def heads(ui, repo, *branchrevs, **opts):
1249 1255 """show current repository heads or show branch heads
1250 1256
1251 1257 With no arguments, show all repository head changesets.
1252 1258
1253 1259 If branch or revisions names are given this will show the heads of
1254 1260 the specified branches or the branches those revisions are tagged
1255 1261 with.
1256 1262
1257 1263 Repository "heads" are changesets that don't have child
1258 1264 changesets. They are where development generally takes place and
1259 1265 are the usual targets for update and merge operations.
1260 1266
1261 1267 Branch heads are changesets that have a given branch tag, but have
1262 1268 no child changesets with that tag. They are usually where
1263 1269 development on the given branch takes place.
1264 1270 """
1265 1271 if opts.get('rev'):
1266 1272 start = repo.lookup(opts['rev'])
1267 1273 else:
1268 1274 start = None
1275 closed = not opts.get('active')
1269 1276 if not branchrevs:
1270 1277 # Assume we're looking repo-wide heads if no revs were specified.
1271 heads = repo.heads(start)
1278 heads = repo.heads(start, closed=closed)
1272 1279 else:
1273 1280 heads = []
1274 1281 visitedset = util.set()
1275 1282 for branchrev in branchrevs:
1276 1283 branch = repo[branchrev].branch()
1277 1284 if branch in visitedset:
1278 1285 continue
1279 1286 visitedset.add(branch)
1280 bheads = repo.branchheads(branch, start)
1287 bheads = repo.branchheads(branch, start, closed=closed)
1281 1288 if not bheads:
1282 1289 if branch != branchrev:
1283 1290 ui.warn(_("no changes on branch %s containing %s are "
1284 1291 "reachable from %s\n")
1285 1292 % (branch, branchrev, opts.get('rev')))
1286 1293 else:
1287 1294 ui.warn(_("no changes on branch %s are reachable from %s\n")
1288 1295 % (branch, opts.get('rev')))
1289 1296 heads.extend(bheads)
1290 1297 if not heads:
1291 1298 return 1
1292 1299 displayer = cmdutil.show_changeset(ui, repo, opts)
1293 1300 for n in heads:
1294 1301 displayer.show(repo[n])
1295 1302
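In heads() above, the command's active option (presumably --active on the command line) is folded into the new closed= flag: without it the command keeps listing heads of closed branches (closed = True), and with it they are dropped from both repo.heads() and repo.branchheads(). A small model of that translation, again with plain Python data standing in for the repository (illustrative only):

    def select_heads(head_is_closed, active_only=False):
        """Model of heads(): active_only corresponds to the active option.

        head_is_closed maps a head node to True when its changeset carries
        the 'close' marker, i.e. the branch was closed at that head.
        """
        closed = not active_only          # same translation as in heads()
        if closed:
            return list(head_is_closed)   # include closed heads
        return [n for n, is_closed in head_is_closed.items() if not is_closed]

    heads = {'aaa111': False, 'bbb222': True}
    print(select_heads(heads))                    # ['aaa111', 'bbb222']
    print(select_heads(heads, active_only=True))  # ['aaa111']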
1296 1303 def help_(ui, name=None, with_version=False):
1297 1304 """show help for a given topic or a help overview
1298 1305
1299 1306 With no arguments, print a list of commands and short help.
1300 1307
1301 1308 Given a topic, extension, or command name, print help for that topic."""
1302 1309 option_lists = []
1303 1310
1304 1311 def addglobalopts(aliases):
1305 1312 if ui.verbose:
1306 1313 option_lists.append((_("global options:"), globalopts))
1307 1314 if name == 'shortlist':
1308 1315 option_lists.append((_('use "hg help" for the full list '
1309 1316 'of commands'), ()))
1310 1317 else:
1311 1318 if name == 'shortlist':
1312 1319 msg = _('use "hg help" for the full list of commands '
1313 1320 'or "hg -v" for details')
1314 1321 elif aliases:
1315 1322 msg = _('use "hg -v help%s" to show aliases and '
1316 1323 'global options') % (name and " " + name or "")
1317 1324 else:
1318 1325 msg = _('use "hg -v help %s" to show global options') % name
1319 1326 option_lists.append((msg, ()))
1320 1327
1321 1328 def helpcmd(name):
1322 1329 if with_version:
1323 1330 version_(ui)
1324 1331 ui.write('\n')
1325 1332
1326 1333 try:
1327 1334 aliases, i = cmdutil.findcmd(name, table, False)
1328 1335 except error.AmbiguousCommand, inst:
1329 1336 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1330 1337 helplist(_('list of commands:\n\n'), select)
1331 1338 return
1332 1339
1333 1340 # synopsis
1334 1341 if len(i) > 2:
1335 1342 if i[2].startswith('hg'):
1336 1343 ui.write("%s\n" % i[2])
1337 1344 else:
1338 1345 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1339 1346 else:
1340 1347 ui.write('hg %s\n' % aliases[0])
1341 1348
1342 1349 # aliases
1343 1350 if not ui.quiet and len(aliases) > 1:
1344 1351 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1345 1352
1346 1353 # description
1347 1354 doc = gettext(i[0].__doc__)
1348 1355 if not doc:
1349 1356 doc = _("(no help text available)")
1350 1357 if ui.quiet:
1351 1358 doc = doc.splitlines(0)[0]
1352 1359 ui.write("\n%s\n" % doc.rstrip())
1353 1360
1354 1361 if not ui.quiet:
1355 1362 # options
1356 1363 if i[1]:
1357 1364 option_lists.append((_("options:\n"), i[1]))
1358 1365
1359 1366 addglobalopts(False)
1360 1367
1361 1368 def helplist(header, select=None):
1362 1369 h = {}
1363 1370 cmds = {}
1364 1371 for c, e in table.iteritems():
1365 1372 f = c.split("|", 1)[0]
1366 1373 if select and not select(f):
1367 1374 continue
1368 1375 if (not select and name != 'shortlist' and
1369 1376 e[0].__module__ != __name__):
1370 1377 continue
1371 1378 if name == "shortlist" and not f.startswith("^"):
1372 1379 continue
1373 1380 f = f.lstrip("^")
1374 1381 if not ui.debugflag and f.startswith("debug"):
1375 1382 continue
1376 1383 doc = gettext(e[0].__doc__)
1377 1384 if not doc:
1378 1385 doc = _("(no help text available)")
1379 1386 h[f] = doc.splitlines(0)[0].rstrip()
1380 1387 cmds[f] = c.lstrip("^")
1381 1388
1382 1389 if not h:
1383 1390 ui.status(_('no commands defined\n'))
1384 1391 return
1385 1392
1386 1393 ui.status(header)
1387 1394 fns = util.sort(h)
1388 1395 m = max(map(len, fns))
1389 1396 for f in fns:
1390 1397 if ui.verbose:
1391 1398 commands = cmds[f].replace("|",", ")
1392 1399 ui.write(" %s:\n %s\n"%(commands, h[f]))
1393 1400 else:
1394 1401 ui.write(' %-*s %s\n' % (m, f, h[f]))
1395 1402
1396 1403 exts = list(extensions.extensions())
1397 1404 if exts and name != 'shortlist':
1398 1405 ui.write(_('\nenabled extensions:\n\n'))
1399 1406 maxlength = 0
1400 1407 exthelps = []
1401 1408 for ename, ext in exts:
1402 1409 doc = (ext.__doc__ or _('(no help text available)'))
1403 1410 ename = ename.split('.')[-1]
1404 1411 maxlength = max(len(ename), maxlength)
1405 1412 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1406 1413 for ename, text in exthelps:
1407 1414 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1408 1415
1409 1416 if not ui.quiet:
1410 1417 addglobalopts(True)
1411 1418
1412 1419 def helptopic(name):
1413 1420 for names, header, doc in help.helptable:
1414 1421 if name in names:
1415 1422 break
1416 1423 else:
1417 1424 raise error.UnknownCommand(name)
1418 1425
1419 1426 # description
1420 1427 if not doc:
1421 1428 doc = _("(no help text available)")
1422 1429 if callable(doc):
1423 1430 doc = doc()
1424 1431
1425 1432 ui.write("%s\n" % header)
1426 1433 ui.write("%s\n" % doc.rstrip())
1427 1434
1428 1435 def helpext(name):
1429 1436 try:
1430 1437 mod = extensions.find(name)
1431 1438 except KeyError:
1432 1439 raise error.UnknownCommand(name)
1433 1440
1434 1441 doc = gettext(mod.__doc__) or _('no help text available')
1435 1442 doc = doc.splitlines(0)
1436 1443 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1437 1444 for d in doc[1:]:
1438 1445 ui.write(d, '\n')
1439 1446
1440 1447 ui.status('\n')
1441 1448
1442 1449 try:
1443 1450 ct = mod.cmdtable
1444 1451 except AttributeError:
1445 1452 ct = {}
1446 1453
1447 1454 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1448 1455 helplist(_('list of commands:\n\n'), modcmds.has_key)
1449 1456
1450 1457 if name and name != 'shortlist':
1451 1458 i = None
1452 1459 for f in (helptopic, helpcmd, helpext):
1453 1460 try:
1454 1461 f(name)
1455 1462 i = None
1456 1463 break
1457 1464 except error.UnknownCommand, inst:
1458 1465 i = inst
1459 1466 if i:
1460 1467 raise i
1461 1468
1462 1469 else:
1463 1470 # program name
1464 1471 if ui.verbose or with_version:
1465 1472 version_(ui)
1466 1473 else:
1467 1474 ui.status(_("Mercurial Distributed SCM\n"))
1468 1475 ui.status('\n')
1469 1476
1470 1477 # list of commands
1471 1478 if name == "shortlist":
1472 1479 header = _('basic commands:\n\n')
1473 1480 else:
1474 1481 header = _('list of commands:\n\n')
1475 1482
1476 1483 helplist(header)
1477 1484
1478 1485 # list all option lists
1479 1486 opt_output = []
1480 1487 for title, options in option_lists:
1481 1488 opt_output.append(("\n%s" % title, None))
1482 1489 for shortopt, longopt, default, desc in options:
1483 1490 if "DEPRECATED" in desc and not ui.verbose: continue
1484 1491 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1485 1492 longopt and " --%s" % longopt),
1486 1493 "%s%s" % (desc,
1487 1494 default
1488 1495 and _(" (default: %s)") % default
1489 1496 or "")))
1490 1497
1491 1498 if not name:
1492 1499 ui.write(_("\nadditional help topics:\n\n"))
1493 1500 topics = []
1494 1501 for names, header, doc in help.helptable:
1495 1502 names = [(-len(name), name) for name in names]
1496 1503 names.sort()
1497 1504 topics.append((names[0][1], header))
1498 1505 topics_len = max([len(s[0]) for s in topics])
1499 1506 for t, desc in topics:
1500 1507 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1501 1508
1502 1509 if opt_output:
1503 1510 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1504 1511 for first, second in opt_output:
1505 1512 if second:
1506 1513 ui.write(" %-*s %s\n" % (opts_len, first, second))
1507 1514 else:
1508 1515 ui.write("%s\n" % first)
1509 1516
1510 1517 def identify(ui, repo, source=None,
1511 1518 rev=None, num=None, id=None, branch=None, tags=None):
1512 1519 """identify the working copy or specified revision
1513 1520
1514 1521 With no revision, print a summary of the current state of the repo.
1515 1522
1516 1523 With a path, do a lookup in another repository.
1517 1524
1518 1525 This summary identifies the repository state using one or two parent
1519 1526 hash identifiers, followed by a "+" if there are uncommitted changes
1520 1527 in the working directory, a list of tags for this revision and a branch
1521 1528 name for non-default branches.
1522 1529 """
1523 1530
1524 1531 if not repo and not source:
1525 1532 raise util.Abort(_("There is no Mercurial repository here "
1526 1533 "(.hg not found)"))
1527 1534
1528 1535 hexfunc = ui.debugflag and hex or short
1529 1536 default = not (num or id or branch or tags)
1530 1537 output = []
1531 1538
1532 1539 if source:
1533 1540 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1534 1541 srepo = hg.repository(ui, source)
1535 1542 if not rev and revs:
1536 1543 rev = revs[0]
1537 1544 if not rev:
1538 1545 rev = "tip"
1539 1546 if num or branch or tags:
1540 1547 raise util.Abort(
1541 1548                 _("can't query remote revision number, branch, or tags"))
1542 1549 output = [hexfunc(srepo.lookup(rev))]
1543 1550 elif not rev:
1544 1551 ctx = repo[None]
1545 1552 parents = ctx.parents()
1546 1553 changed = False
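        # a non-empty list of modified or deleted files marks the working
        # directory as dirty and adds the trailing "+" to the output below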
1547 1554 if default or id or num:
1548 1555 changed = ctx.files() + ctx.deleted()
1549 1556 if default or id:
1550 1557 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1551 1558 (changed) and "+" or "")]
1552 1559 if num:
1553 1560 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1554 1561 (changed) and "+" or ""))
1555 1562 else:
1556 1563 ctx = repo[rev]
1557 1564 if default or id:
1558 1565 output = [hexfunc(ctx.node())]
1559 1566 if num:
1560 1567 output.append(str(ctx.rev()))
1561 1568
1562 1569 if not source and default and not ui.quiet:
1563 1570 b = util.tolocal(ctx.branch())
1564 1571 if b != 'default':
1565 1572 output.append("(%s)" % b)
1566 1573
1567 1574 # multiple tags for a single parent separated by '/'
1568 1575 t = "/".join(ctx.tags())
1569 1576 if t:
1570 1577 output.append(t)
1571 1578
1572 1579 if branch:
1573 1580 output.append(util.tolocal(ctx.branch()))
1574 1581
1575 1582 if tags:
1576 1583 output.extend(ctx.tags())
1577 1584
1578 1585 ui.write("%s\n" % ' '.join(output))
1579 1586
1580 1587 def import_(ui, repo, patch1, *patches, **opts):
1581 1588 """import an ordered set of patches
1582 1589
1583 1590 Import a list of patches and commit them individually.
1584 1591
1585 1592 If there are outstanding changes in the working directory, import
1586 1593 will abort unless given the -f flag.
1587 1594
1588 1595     You can import a patch straight from a mail message. Even patches
1589 1596     as attachments work (the body part must be of type text/plain or
1590 1597     text/x-patch to be used). The From and Subject headers of the email
1591 1598     message are used as the default committer and commit message. All
1592 1599     text/plain body parts before the first diff are added to the commit
1593 1600     message.
1594 1601
1595 1602     If the imported patch was generated by hg export, the user and
1596 1603     description from the patch override values from the message headers
1597 1604     and body. Values given on the command line with -m and -u override these.
1598 1605
1599 1606 If --exact is specified, import will set the working directory
1600 1607 to the parent of each patch before applying it, and will abort
1601 1608 if the resulting changeset has a different ID than the one
1602 1609 recorded in the patch. This may happen due to character set
1603 1610 problems or other deficiencies in the text patch format.
1604 1611
1605 1612 With --similarity, hg will attempt to discover renames and copies
1606 1613 in the patch in the same way as 'addremove'.
1607 1614
1608 1615 To read a patch from standard input, use patch name "-".
1609 1616 See 'hg help dates' for a list of formats valid for -d/--date.
1610 1617 """
1611 1618 patches = (patch1,) + patches
1612 1619
1613 1620 date = opts.get('date')
1614 1621 if date:
1615 1622 opts['date'] = util.parsedate(date)
1616 1623
1617 1624 try:
1618 1625 sim = float(opts.get('similarity') or 0)
1619 1626 except ValueError:
1620 1627 raise util.Abort(_('similarity must be a number'))
1621 1628 if sim < 0 or sim > 100:
1622 1629 raise util.Abort(_('similarity must be between 0 and 100'))
1623 1630
1624 1631 if opts.get('exact') or not opts.get('force'):
1625 1632 cmdutil.bail_if_changed(repo)
1626 1633
1627 1634 d = opts["base"]
1628 1635 strip = opts["strip"]
1629 1636 wlock = lock = None
1630 1637 try:
1631 1638 wlock = repo.wlock()
1632 1639 lock = repo.lock()
1633 1640 for p in patches:
1634 1641 pf = os.path.join(d, p)
1635 1642
1636 1643 if pf == '-':
1637 1644 ui.status(_("applying patch from stdin\n"))
1638 1645 pf = sys.stdin
1639 1646 else:
1640 1647 ui.status(_("applying %s\n") % p)
1641 1648 pf = url.open(ui, pf)
1642 1649 data = patch.extract(ui, pf)
1643 1650 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1644 1651
1645 1652 if tmpname is None:
1646 1653 raise util.Abort(_('no diffs found'))
1647 1654
1648 1655 try:
1649 1656 cmdline_message = cmdutil.logmessage(opts)
1650 1657 if cmdline_message:
1651 1658 # pickup the cmdline msg
1652 1659 message = cmdline_message
1653 1660 elif message:
1654 1661 # pickup the patch msg
1655 1662 message = message.strip()
1656 1663 else:
1657 1664 # launch the editor
1658 1665 message = None
1659 1666 ui.debug(_('message:\n%s\n') % message)
1660 1667
1661 1668 wp = repo.parents()
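                # --exact: check out the parents recorded in the patch before
                # applying it, so the resulting changeset ID can be verified
                # against the one stored in the patch after the commit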
1662 1669 if opts.get('exact'):
1663 1670 if not nodeid or not p1:
1664 1671 raise util.Abort(_('not a mercurial patch'))
1665 1672 p1 = repo.lookup(p1)
1666 1673 p2 = repo.lookup(p2 or hex(nullid))
1667 1674
1668 1675 if p1 != wp[0].node():
1669 1676 hg.clean(repo, p1)
1670 1677 repo.dirstate.setparents(p1, p2)
1671 1678 elif p2:
1672 1679 try:
1673 1680 p1 = repo.lookup(p1)
1674 1681 p2 = repo.lookup(p2)
1675 1682 if p1 == wp[0].node():
1676 1683 repo.dirstate.setparents(p1, p2)
1677 1684 except error.RepoError:
1678 1685 pass
1679 1686 if opts.get('exact') or opts.get('import_branch'):
1680 1687 repo.dirstate.setbranch(branch or 'default')
1681 1688
1682 1689 files = {}
1683 1690 try:
1684 1691 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1685 1692 files=files)
1686 1693 finally:
1687 1694 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1688 1695 if not opts.get('no_commit'):
1689 1696 n = repo.commit(files, message, opts.get('user') or user,
1690 1697 opts.get('date') or date)
1691 1698 if opts.get('exact'):
1692 1699 if hex(n) != nodeid:
1693 1700 repo.rollback()
1694 1701 raise util.Abort(_('patch is damaged'
1695 1702 ' or loses information'))
1696 1703 # Force a dirstate write so that the next transaction
1697 1704                     # backs up an up-to-date file.
1698 1705 repo.dirstate.write()
1699 1706 finally:
1700 1707 os.unlink(tmpname)
1701 1708 finally:
1702 1709 del lock, wlock
1703 1710
1704 1711 def incoming(ui, repo, source="default", **opts):
1705 1712 """show new changesets found in source
1706 1713
1707 1714 Show new changesets found in the specified path/URL or the default
1708 1715 pull location. These are the changesets that would be pulled if a pull
1709 1716 was requested.
1710 1717
1711 1718     For remote repositories, using --bundle avoids downloading the changesets
1712 1719     twice if the incoming command is followed by a pull.
1713 1720
1714 1721 See pull for valid source format details.
1715 1722 """
1716 1723 limit = cmdutil.loglimit(opts)
1717 1724 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1718 1725 cmdutil.setremoteconfig(ui, opts)
1719 1726
1720 1727 other = hg.repository(ui, source)
1721 1728 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1722 1729 if revs:
1723 1730 revs = [other.lookup(rev) for rev in revs]
1724 1731 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1725 1732 force=opts["force"])
1726 1733 if not incoming:
1727 1734 try:
1728 1735 os.unlink(opts["bundle"])
1729 1736 except:
1730 1737 pass
1731 1738 ui.status(_("no changes found\n"))
1732 1739 return 1
1733 1740
1734 1741 cleanup = None
1735 1742 try:
1736 1743 fname = opts["bundle"]
1737 1744 if fname or not other.local():
1738 1745 # create a bundle (uncompressed if other repo is not local)
1739 1746
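            # changegroupsubset needs an explicit list of heads; when the
            # user gave no --rev, fall back to the remote heads found by
            # findcommonincoming above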
1740 1747 if revs is None and other.capable('changegroupsubset'):
1741 1748 revs = rheads
1742 1749
1743 1750 if revs is None:
1744 1751 cg = other.changegroup(incoming, "incoming")
1745 1752 else:
1746 1753 cg = other.changegroupsubset(incoming, revs, 'incoming')
1747 1754 bundletype = other.local() and "HG10BZ" or "HG10UN"
1748 1755 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1749 1756 # keep written bundle?
1750 1757 if opts["bundle"]:
1751 1758 cleanup = None
1752 1759 if not other.local():
1753 1760 # use the created uncompressed bundlerepo
1754 1761 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1755 1762
1756 1763 o = other.changelog.nodesbetween(incoming, revs)[0]
1757 1764 if opts.get('newest_first'):
1758 1765 o.reverse()
1759 1766 displayer = cmdutil.show_changeset(ui, other, opts)
1760 1767 count = 0
1761 1768 for n in o:
1762 1769 if count >= limit:
1763 1770 break
1764 1771 parents = [p for p in other.changelog.parents(n) if p != nullid]
1765 1772 if opts.get('no_merges') and len(parents) == 2:
1766 1773 continue
1767 1774 count += 1
1768 1775 displayer.show(other[n])
1769 1776 finally:
1770 1777 if hasattr(other, 'close'):
1771 1778 other.close()
1772 1779 if cleanup:
1773 1780 os.unlink(cleanup)
1774 1781
1775 1782 def init(ui, dest=".", **opts):
1776 1783 """create a new repository in the given directory
1777 1784
1778 1785 Initialize a new repository in the given directory. If the given
1779 1786 directory does not exist, it is created.
1780 1787
1781 1788 If no directory is given, the current directory is used.
1782 1789
1783 1790 It is possible to specify an ssh:// URL as the destination.
1784 1791 Look at the help text for the pull command for important details
1785 1792 about ssh:// URLs.
1786 1793 """
1787 1794 cmdutil.setremoteconfig(ui, opts)
1788 1795 hg.repository(ui, dest, create=1)
1789 1796
1790 1797 def locate(ui, repo, *pats, **opts):
1791 1798 """locate files matching specific patterns
1792 1799
1793 1800 Print all files under Mercurial control whose names match the
1794 1801 given patterns.
1795 1802
1796 1803 This command searches the entire repository by default. To search
1797 1804 just the current directory and its subdirectories, use
1798 1805 "--include .".
1799 1806
1800 1807 If no patterns are given to match, this command prints all file
1801 1808 names.
1802 1809
1803 1810 If you want to feed the output of this command into the "xargs"
1804 1811 command, use the "-0" option to both this command and "xargs".
1805 1812 This will avoid the problem of "xargs" treating single filenames
1806 1813 that contain white space as multiple filenames.
1807 1814 """
1808 1815 end = opts.get('print0') and '\0' or '\n'
1809 1816 rev = opts.get('rev') or None
1810 1817
1811 1818 ret = 1
1812 1819 m = cmdutil.match(repo, pats, opts, default='relglob')
1813 1820 m.bad = lambda x,y: False
1814 1821 for abs in repo[rev].walk(m):
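        # without --rev we walk the working directory, which may also yield
        # unknown files; only report files that are actually tracked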
1815 1822 if not rev and abs not in repo.dirstate:
1816 1823 continue
1817 1824 if opts.get('fullpath'):
1818 1825 ui.write(repo.wjoin(abs), end)
1819 1826 else:
1820 1827 ui.write(((pats and m.rel(abs)) or abs), end)
1821 1828 ret = 0
1822 1829
1823 1830 return ret
1824 1831
1825 1832 def log(ui, repo, *pats, **opts):
1826 1833 """show revision history of entire repository or files
1827 1834
1828 1835 Print the revision history of the specified files or the entire
1829 1836 project.
1830 1837
1831 1838 File history is shown without following rename or copy history of
1832 1839 files. Use -f/--follow with a file name to follow history across
1833 1840 renames and copies. --follow without a file name will only show
1834 1841 ancestors or descendants of the starting revision. --follow-first
1835 1842 only follows the first parent of merge revisions.
1836 1843
1837 1844 If no revision range is specified, the default is tip:0 unless
1838 1845 --follow is set, in which case the working directory parent is
1839 1846 used as the starting revision.
1840 1847
1841 1848 See 'hg help dates' for a list of formats valid for -d/--date.
1842 1849
1843 1850 By default this command outputs: changeset id and hash, tags,
1844 1851 non-trivial parents, user, date and time, and a summary for each
1845 1852 commit. When the -v/--verbose switch is used, the list of changed
1846 1853 files and full commit message is shown.
1847 1854
1848 1855 NOTE: log -p may generate unexpected diff output for merge
1849 1856 changesets, as it will compare the merge changeset against its
1850 1857 first parent only. Also, the files: list will only reflect files
1851 1858 that are different from BOTH parents.
1852 1859
1853 1860 """
1854 1861
1855 1862 get = util.cachefunc(lambda r: repo[r].changeset())
1856 1863 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1857 1864
1858 1865 limit = cmdutil.loglimit(opts)
1859 1866 count = 0
1860 1867
1861 1868 if opts.get('copies') and opts.get('rev'):
1862 1869 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1863 1870 else:
1864 1871 endrev = len(repo)
1865 1872 rcache = {}
1866 1873 ncache = {}
1867 1874 def getrenamed(fn, rev):
1868 1875 '''looks up all renames for a file (up to endrev) the first
1869 1876 time the file is given. It indexes on the changerev and only
1870 1877 parses the manifest if linkrev != changerev.
1871 1878 Returns rename info for fn at changerev rev.'''
1872 1879 if fn not in rcache:
1873 1880 rcache[fn] = {}
1874 1881 ncache[fn] = {}
1875 1882 fl = repo.file(fn)
1876 1883 for i in fl:
1877 1884 node = fl.node(i)
1878 1885 lr = fl.linkrev(i)
1879 1886 renamed = fl.renamed(node)
1880 1887 rcache[fn][lr] = renamed
1881 1888 if renamed:
1882 1889 ncache[fn][node] = renamed
1883 1890 if lr >= endrev:
1884 1891 break
1885 1892 if rev in rcache[fn]:
1886 1893 return rcache[fn][rev]
1887 1894
1888 1895 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1889 1896 # filectx logic.
1890 1897
1891 1898 try:
1892 1899 return repo[rev][fn].renamed()
1893 1900 except error.LookupError:
1894 1901 pass
1895 1902 return None
1896 1903
1897 1904 df = False
1898 1905 if opts["date"]:
1899 1906 df = util.matchdate(opts["date"])
1900 1907
1901 1908 only_branches = opts.get('only_branch')
1902 1909
1903 1910 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1904 1911 for st, rev, fns in changeiter:
1905 1912 if st == 'add':
1906 1913 parents = [p for p in repo.changelog.parentrevs(rev)
1907 1914 if p != nullrev]
1908 1915 if opts.get('no_merges') and len(parents) == 2:
1909 1916 continue
1910 1917 if opts.get('only_merges') and len(parents) != 2:
1911 1918 continue
1912 1919
1913 1920 if only_branches:
1914 1921 revbranch = get(rev)[5]['branch']
1915 1922 if revbranch not in only_branches:
1916 1923 continue
1917 1924
1918 1925 if df:
1919 1926 changes = get(rev)
1920 1927 if not df(changes[2][0]):
1921 1928 continue
1922 1929
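        # -k/--keyword: every keyword must match the committer, the commit
        # message, or a changed file name (case-insensitively); otherwise
        # the revision is skipped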
1923 1930 if opts.get('keyword'):
1924 1931 changes = get(rev)
1925 1932 miss = 0
1926 1933 for k in [kw.lower() for kw in opts['keyword']]:
1927 1934 if not (k in changes[1].lower() or
1928 1935 k in changes[4].lower() or
1929 1936 k in " ".join(changes[3]).lower()):
1930 1937 miss = 1
1931 1938 break
1932 1939 if miss:
1933 1940 continue
1934 1941
1935 1942 if opts['user']:
1936 1943 changes = get(rev)
1937 1944 miss = 0
1938 1945 for k in opts['user']:
1939 1946 if k != changes[1]:
1940 1947 miss = 1
1941 1948 break
1942 1949 if miss:
1943 1950 continue
1944 1951
1945 1952 copies = []
1946 1953 if opts.get('copies') and rev:
1947 1954 for fn in get(rev)[3]:
1948 1955 rename = getrenamed(fn, rev)
1949 1956 if rename:
1950 1957 copies.append((fn, rename[0]))
1951 1958 displayer.show(context.changectx(repo, rev), copies=copies)
1952 1959 elif st == 'iter':
1953 1960 if count == limit: break
1954 1961 if displayer.flush(rev):
1955 1962 count += 1
1956 1963
1957 1964 def manifest(ui, repo, node=None, rev=None):
1958 1965 """output the current or given revision of the project manifest
1959 1966
1960 1967 Print a list of version controlled files for the given revision.
1961 1968 If no revision is given, the parent of the working directory is used,
1962 1969 or tip if no revision is checked out.
1963 1970
1964 1971     The manifest is the list of files being version controlled.
1965 1972
1966 1973     With the -v flag, print file permissions, symlink and executable bits.
1967 1974     With the --debug flag, print file revision hashes.
1969 1976 """
1970 1977
1971 1978 if rev and node:
1972 1979 raise util.Abort(_("please specify just one revision"))
1973 1980
1974 1981 if not node:
1975 1982 node = rev
1976 1983
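    # decoration used with -v: 'l' marks a symlink (@), 'x' an executable
    # file (*), '' a regular file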
1977 1984 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1978 1985 ctx = repo[node]
1979 1986 for f in ctx:
1980 1987 if ui.debugflag:
1981 1988 ui.write("%40s " % hex(ctx.manifest()[f]))
1982 1989 if ui.verbose:
1983 1990 ui.write(decor[ctx.flags(f)])
1984 1991 ui.write("%s\n" % f)
1985 1992
1986 1993 def merge(ui, repo, node=None, force=None, rev=None):
1987 1994 """merge working directory with another revision
1988 1995
1989 1996 Merge the contents of the current working directory and the
1990 1997 requested revision. Files that changed between either parent are
1991 1998 marked as changed for the next commit and a commit must be
1992 1999 performed before any further updates are allowed.
1993 2000
1994 2001     If no revision is specified, and the working directory's parent is a
1995 2002     head revision whose branch contains exactly one other head, that
1996 2003     other head is merged by default. Otherwise, an explicit revision
1997 2004     to merge with must be provided.
1998 2005 """
1999 2006
2000 2007 if rev and node:
2001 2008 raise util.Abort(_("please specify just one revision"))
2002 2009 if not node:
2003 2010 node = rev
2004 2011
2005 2012 if not node:
2006 2013 branch = repo.changectx(None).branch()
2007 2014 bheads = repo.branchheads(branch)
2008 2015 if len(bheads) > 2:
2009 2016 raise util.Abort(_("branch '%s' has %d heads - "
2010 2017 "please merge with an explicit rev") %
2011 2018 (branch, len(bheads)))
2012 2019
2013 2020 parent = repo.dirstate.parents()[0]
2014 2021 if len(bheads) == 1:
2015 2022 if len(repo.heads()) > 1:
2016 2023 raise util.Abort(_("branch '%s' has one head - "
2017 2024 "please merge with an explicit rev") %
2018 2025 branch)
2019 2026 msg = _('there is nothing to merge')
2020 2027 if parent != repo.lookup(repo[None].branch()):
2021 2028 msg = _('%s - use "hg update" instead') % msg
2022 2029 raise util.Abort(msg)
2023 2030
2024 2031 if parent not in bheads:
2025 2032 raise util.Abort(_('working dir not at a head rev - '
2026 2033 'use "hg update" or merge with an explicit rev'))
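        # exactly two heads remain on this branch at this point; merge with
        # whichever one is not the working directory parent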
2027 2034 node = parent == bheads[0] and bheads[-1] or bheads[0]
2028 2035 return hg.merge(repo, node, force=force)
2029 2036
2030 2037 def outgoing(ui, repo, dest=None, **opts):
2031 2038 """show changesets not found in destination
2032 2039
2033 2040 Show changesets not found in the specified destination repository or
2034 2041 the default push location. These are the changesets that would be pushed
2035 2042 if a push was requested.
2036 2043
2037 2044 See pull for valid destination format details.
2038 2045 """
2039 2046 limit = cmdutil.loglimit(opts)
2040 2047 dest, revs, checkout = hg.parseurl(
2041 2048 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2042 2049 cmdutil.setremoteconfig(ui, opts)
2043 2050 if revs:
2044 2051 revs = [repo.lookup(rev) for rev in revs]
2045 2052
2046 2053 other = hg.repository(ui, dest)
2047 2054 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2048 2055 o = repo.findoutgoing(other, force=opts.get('force'))
2049 2056 if not o:
2050 2057 ui.status(_("no changes found\n"))
2051 2058 return 1
2052 2059 o = repo.changelog.nodesbetween(o, revs)[0]
2053 2060 if opts.get('newest_first'):
2054 2061 o.reverse()
2055 2062 displayer = cmdutil.show_changeset(ui, repo, opts)
2056 2063 count = 0
2057 2064 for n in o:
2058 2065 if count >= limit:
2059 2066 break
2060 2067 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2061 2068 if opts.get('no_merges') and len(parents) == 2:
2062 2069 continue
2063 2070 count += 1
2064 2071 displayer.show(repo[n])
2065 2072
2066 2073 def parents(ui, repo, file_=None, **opts):
2067 2074 """show the parents of the working dir or revision
2068 2075
2069 2076 Print the working directory's parent revisions. If a
2070 2077 revision is given via --rev, the parent of that revision
2071 2078     will be printed. If a file argument is given, the revision in
2072 2079     which the file was last changed (before the working directory
2073 2080     revision, or the argument to --rev if given) is printed.
2074 2081 """
2075 2082 rev = opts.get('rev')
2076 2083 if rev:
2077 2084 ctx = repo[rev]
2078 2085 else:
2079 2086 ctx = repo[None]
2080 2087
2081 2088 if file_:
2082 2089 m = cmdutil.match(repo, (file_,), opts)
2083 2090 if m.anypats() or len(m.files()) != 1:
2084 2091 raise util.Abort(_('can only specify an explicit file name'))
2085 2092 file_ = m.files()[0]
2086 2093 filenodes = []
2087 2094 for cp in ctx.parents():
2088 2095 if not cp:
2089 2096 continue
2090 2097 try:
2091 2098 filenodes.append(cp.filenode(file_))
2092 2099 except error.LookupError:
2093 2100 pass
2094 2101 if not filenodes:
2095 2102 raise util.Abort(_("'%s' not found in manifest!") % file_)
2096 2103 fl = repo.file(file_)
2097 2104 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2098 2105 else:
2099 2106 p = [cp.node() for cp in ctx.parents()]
2100 2107
2101 2108 displayer = cmdutil.show_changeset(ui, repo, opts)
2102 2109 for n in p:
2103 2110 if n != nullid:
2104 2111 displayer.show(repo[n])
2105 2112
2106 2113 def paths(ui, repo, search=None):
2107 2114 """show definition of symbolic path names
2108 2115
2109 2116 Show definition of symbolic path name NAME. If no name is given, show
2110 2117 definition of available names.
2111 2118
2112 2119 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2113 2120 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2114 2121 """
2115 2122 if search:
2116 2123 for name, path in ui.configitems("paths"):
2117 2124 if name == search:
2118 2125 ui.write("%s\n" % url.hidepassword(path))
2119 2126 return
2120 2127 ui.warn(_("not found!\n"))
2121 2128 return 1
2122 2129 else:
2123 2130 for name, path in ui.configitems("paths"):
2124 2131 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2125 2132
2126 2133 def postincoming(ui, repo, modheads, optupdate, checkout):
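    # modheads is the value returned by addchangegroup() for the preceding
    # pull/unbundle; anything above 1 means new heads were added, in which
    # case we refuse to update automatically and tell the user to merge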
2127 2134 if modheads == 0:
2128 2135 return
2129 2136 if optupdate:
2130 2137 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2131 2138 return hg.update(repo, checkout)
2132 2139 else:
2133 2140 ui.status(_("not updating, since new heads added\n"))
2134 2141 if modheads > 1:
2135 2142 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2136 2143 else:
2137 2144 ui.status(_("(run 'hg update' to get a working copy)\n"))
2138 2145
2139 2146 def pull(ui, repo, source="default", **opts):
2140 2147 """pull changes from the specified source
2141 2148
2142 2149 Pull changes from a remote repository to a local one.
2143 2150
2144 2151 This finds all changes from the repository at the specified path
2145 2152 or URL and adds them to the local repository. By default, this
2146 2153 does not update the copy of the project in the working directory.
2147 2154
2148 2155 Valid URLs are of the form:
2149 2156
2150 2157 local/filesystem/path (or file://local/filesystem/path)
2151 2158 http://[user[:pass]@]host[:port]/[path]
2152 2159 https://[user[:pass]@]host[:port]/[path]
2153 2160 ssh://[user[:pass]@]host[:port]/[path]
2154 2161
2155 2162 Paths in the local filesystem can either point to Mercurial
2156 2163 repositories or to bundle files (as created by 'hg bundle' or
2157 2164 'hg incoming --bundle').
2158 2165
2159 2166 An optional identifier after # indicates a particular branch, tag,
2160 2167 or changeset to pull.
2161 2168
2162 2169 Some notes about using SSH with Mercurial:
2163 2170 - SSH requires an accessible shell account on the destination machine
2164 2171       and a copy of hg in the remote path, or one specified with --remotecmd.
2165 2172 - path is relative to the remote user's home directory by default.
2166 2173 Use an extra slash at the start of a path to specify an absolute path:
2167 2174 ssh://example.com//tmp/repository
2168 2175 - Mercurial doesn't use its own compression via SSH; the right thing
2169 2176 to do is to configure it in your ~/.ssh/config, e.g.:
2170 2177 Host *.mylocalnetwork.example.com
2171 2178 Compression no
2172 2179 Host *
2173 2180 Compression yes
2174 2181 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2175 2182 with the --ssh command line option.
2176 2183 """
2177 2184 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2178 2185 cmdutil.setremoteconfig(ui, opts)
2179 2186
2180 2187 other = hg.repository(ui, source)
2181 2188 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2182 2189 if revs:
2183 2190 try:
2184 2191 revs = [other.lookup(rev) for rev in revs]
2185 2192 except error.CapabilityError:
2186 2193 err = _("Other repository doesn't support revision lookup, "
2187 2194 "so a rev cannot be specified.")
2188 2195 raise util.Abort(err)
2189 2196
2190 2197 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2191 2198 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2192 2199
2193 2200 def push(ui, repo, dest=None, **opts):
2194 2201 """push changes to the specified destination
2195 2202
2196 2203 Push changes from the local repository to the given destination.
2197 2204
2198 2205 This is the symmetrical operation for pull. It helps to move
2199 2206 changes from the current repository to a different one. If the
2200 2207 destination is local this is identical to a pull in that directory
2201 2208 from the current one.
2202 2209
2203 2210 By default, push will refuse to run if it detects the result would
2204 2211     increase the number of remote heads. This generally indicates that
2205 2212     the client has forgotten to pull and merge before pushing.
2206 2213
2207 2214 Valid URLs are of the form:
2208 2215
2209 2216 local/filesystem/path (or file://local/filesystem/path)
2210 2217 ssh://[user[:pass]@]host[:port]/[path]
2211 2218 http://[user[:pass]@]host[:port]/[path]
2212 2219 https://[user[:pass]@]host[:port]/[path]
2213 2220
2214 2221 An optional identifier after # indicates a particular branch, tag,
2215 2222 or changeset to push. If -r is used, the named changeset and all its
2216 2223 ancestors will be pushed to the remote repository.
2217 2224
2218 2225 Look at the help text for the pull command for important details
2219 2226 about ssh:// URLs.
2220 2227
2221 2228     Pushing to http:// and https:// URLs is only possible if this
2222 2229 feature is explicitly enabled on the remote Mercurial server.
2223 2230 """
2224 2231 dest, revs, checkout = hg.parseurl(
2225 2232 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2226 2233 cmdutil.setremoteconfig(ui, opts)
2227 2234
2228 2235 other = hg.repository(ui, dest)
2229 2236 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2230 2237 if revs:
2231 2238 revs = [repo.lookup(rev) for rev in revs]
2232 2239 r = repo.push(other, opts.get('force'), revs=revs)
2233 2240 return r == 0
2234 2241
2235 2242 def rawcommit(ui, repo, *pats, **opts):
2236 2243 """raw commit interface (DEPRECATED)
2237 2244
2238 2245 (DEPRECATED)
2239 2246 Lowlevel commit, for use in helper scripts.
2240 2247
2241 2248 This command is not intended to be used by normal users, as it is
2242 2249 primarily useful for importing from other SCMs.
2243 2250
2244 2251 This command is now deprecated and will be removed in a future
2245 2252 release, please use debugsetparents and commit instead.
2246 2253 """
2247 2254
2248 2255 ui.warn(_("(the rawcommit command is deprecated)\n"))
2249 2256
2250 2257 message = cmdutil.logmessage(opts)
2251 2258
2252 2259 files = cmdutil.match(repo, pats, opts).files()
2253 2260 if opts.get('files'):
2254 2261 files += open(opts['files']).read().splitlines()
2255 2262
2256 2263 parents = [repo.lookup(p) for p in opts['parent']]
2257 2264
2258 2265 try:
2259 2266 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2260 2267 except ValueError, inst:
2261 2268 raise util.Abort(str(inst))
2262 2269
2263 2270 def recover(ui, repo):
2264 2271 """roll back an interrupted transaction
2265 2272
2266 2273 Recover from an interrupted commit or pull.
2267 2274
2268 2275 This command tries to fix the repository status after an interrupted
2269 2276 operation. It should only be necessary when Mercurial suggests it.
2270 2277 """
2271 2278 if repo.recover():
2272 2279 return hg.verify(repo)
2273 2280 return 1
2274 2281
2275 2282 def remove(ui, repo, *pats, **opts):
2276 2283 """remove the specified files on the next commit
2277 2284
2278 2285 Schedule the indicated files for removal from the repository.
2279 2286
2280 2287 This only removes files from the current branch, not from the entire
2281 2288 project history. -A can be used to remove only files that have already
2282 2289 been deleted, -f can be used to force deletion, and -Af can be used
2283 2290 to remove files from the next revision without deleting them.
2284 2291
2285 2292 The following table details the behavior of remove for different file
2286 2293 states (columns) and option combinations (rows). The file states are
2287 2294 Added, Clean, Modified and Missing (as reported by hg status). The
2288 2295 actions are Warn, Remove (from branch) and Delete (from disk).
2289 2296
2290 2297 A C M !
2291 2298 none W RD W R
2292 2299 -f R RD RD R
2293 2300 -A W W W R
2294 2301 -Af R R R R
2295 2302
2296 2303 This command schedules the files to be removed at the next commit.
2297 2304 To undo a remove before that, see hg revert.
2298 2305 """
2299 2306
2300 2307 after, force = opts.get('after'), opts.get('force')
2301 2308 if not pats and not after:
2302 2309 raise util.Abort(_('no files specified'))
2303 2310
2304 2311 m = cmdutil.match(repo, pats, opts)
2305 2312 s = repo.status(match=m, clean=True)
2306 2313 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2307 2314
2308 2315 def warn(files, reason):
2309 2316 for f in files:
2310 2317 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2311 2318 % (m.rel(f), reason))
2312 2319
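    # translate the option table from the docstring into two lists:
    # 'remove' (drop from the branch and, unless --after, from disk) and
    # 'forget' (cancel a pending add)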
2313 2320 if force:
2314 2321 remove, forget = modified + deleted + clean, added
2315 2322 elif after:
2316 2323 remove, forget = deleted, []
2317 2324 warn(modified + added + clean, _('still exists'))
2318 2325 else:
2319 2326 remove, forget = deleted + clean, []
2320 2327 warn(modified, _('is modified'))
2321 2328 warn(added, _('has been marked for add'))
2322 2329
2323 2330 for f in util.sort(remove + forget):
2324 2331 if ui.verbose or not m.exact(f):
2325 2332 ui.status(_('removing %s\n') % m.rel(f))
2326 2333
2327 2334 repo.forget(forget)
2328 2335 repo.remove(remove, unlink=not after)
2329 2336
2330 2337 def rename(ui, repo, *pats, **opts):
2331 2338 """rename files; equivalent of copy + remove
2332 2339
2333 2340 Mark dest as copies of sources; mark sources for deletion. If
2334 2341 dest is a directory, copies are put in that directory. If dest is
2335 2342 a file, there can only be one source.
2336 2343
2337 2344 By default, this command copies the contents of files as they
2338 2345 stand in the working directory. If invoked with --after, the
2339 2346 operation is recorded, but no copying is performed.
2340 2347
2341 2348 This command takes effect in the next commit. To undo a rename
2342 2349 before that, see hg revert.
2343 2350 """
2344 2351 wlock = repo.wlock(False)
2345 2352 try:
2346 2353 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2347 2354 finally:
2348 2355 del wlock
2349 2356
2350 2357 def resolve(ui, repo, *pats, **opts):
2351 2358 """retry file merges from a merge or update
2352 2359
2353 2360 This command will cleanly retry unresolved file merges using file
2354 2361 revisions preserved from the last update or merge. To attempt to
2355 2362 resolve all unresolved files, use the -a switch.
2356 2363
2357 2364 This command will also allow listing resolved files and manually
2358 2365 marking and unmarking files as resolved.
2359 2366
2360 2367 The codes used to show the status of files are:
2361 2368 U = unresolved
2362 2369 R = resolved
2363 2370 """
2364 2371
2365 2372 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2366 2373
2367 2374 if (show and (mark or unmark)) or (mark and unmark):
2368 2375 raise util.Abort(_("too many options specified"))
2369 2376 if pats and all:
2370 2377 raise util.Abort(_("can't specify --all and patterns"))
2371 2378 if not (all or pats or show or mark or unmark):
2372 2379 raise util.Abort(_('no files or directories specified; '
2373 2380 'use --all to remerge all files'))
2374 2381
2375 2382 ms = merge_.mergestate(repo)
2376 2383 m = cmdutil.match(repo, pats, opts)
2377 2384
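    # the merge state lists every file from the last merge together with
    # its resolution status ('u' unresolved, 'r' resolved)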
2378 2385 for f in ms:
2379 2386 if m(f):
2380 2387 if show:
2381 2388 ui.write("%s %s\n" % (ms[f].upper(), f))
2382 2389 elif mark:
2383 2390 ms.mark(f, "r")
2384 2391 elif unmark:
2385 2392 ms.mark(f, "u")
2386 2393 else:
2387 2394 wctx = repo[None]
2388 2395 mctx = wctx.parents()[-1]
2389 2396 ms.resolve(f, wctx, mctx)
2390 2397
2391 2398 def revert(ui, repo, *pats, **opts):
2392 2399 """restore individual files or dirs to an earlier state
2393 2400
2394 2401 (use update -r to check out earlier revisions, revert does not
2395 2402 change the working dir parents)
2396 2403
2397 2404 With no revision specified, revert the named files or directories
2398 2405 to the contents they had in the parent of the working directory.
2399 2406 This restores the contents of the affected files to an unmodified
2400 2407 state and unschedules adds, removes, copies, and renames. If the
2401 2408 working directory has two parents, you must explicitly specify the
2402 2409 revision to revert to.
2403 2410
2404 2411 Using the -r option, revert the given files or directories to their
2405 2412 contents as of a specific revision. This can be helpful to "roll
2406 2413 back" some or all of an earlier change.
2407 2414 See 'hg help dates' for a list of formats valid for -d/--date.
2408 2415
2409 2416 Revert modifies the working directory. It does not commit any
2410 2417 changes, or change the parent of the working directory. If you
2411 2418 revert to a revision other than the parent of the working
2412 2419 directory, the reverted files will thus appear modified
2413 2420 afterwards.
2414 2421
2415 2422 If a file has been deleted, it is restored. If the executable
2416 2423 mode of a file was changed, it is reset.
2417 2424
2418 2425 If names are given, all files matching the names are reverted.
2419 2426 If no arguments are given, no files are reverted.
2420 2427
2421 2428 Modified files are saved with a .orig suffix before reverting.
2422 2429 To disable these backups, use --no-backup.
2423 2430 """
2424 2431
2425 2432 if opts["date"]:
2426 2433 if opts["rev"]:
2427 2434 raise util.Abort(_("you can't specify a revision and a date"))
2428 2435 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2429 2436
2430 2437 if not pats and not opts.get('all'):
2431 2438 raise util.Abort(_('no files or directories specified; '
2432 2439 'use --all to revert the whole repo'))
2433 2440
2434 2441 parent, p2 = repo.dirstate.parents()
2435 2442 if not opts.get('rev') and p2 != nullid:
2436 2443 raise util.Abort(_('uncommitted merge - please provide a '
2437 2444 'specific revision'))
2438 2445 ctx = repo[opts.get('rev')]
2439 2446 node = ctx.node()
2440 2447 mf = ctx.manifest()
2441 2448 if node == parent:
2442 2449 pmf = mf
2443 2450 else:
2444 2451 pmf = None
2445 2452
2446 2453 # need all matching names in dirstate and manifest of target rev,
2447 2454 # so have to walk both. do not print errors if files exist in one
2448 2455 # but not other.
2449 2456
2450 2457 names = {}
2451 2458
2452 2459 wlock = repo.wlock()
2453 2460 try:
2454 2461 # walk dirstate.
2455 2462 files = []
2456 2463
2457 2464 m = cmdutil.match(repo, pats, opts)
2458 2465 m.bad = lambda x,y: False
2459 2466 for abs in repo.walk(m):
2460 2467 names[abs] = m.rel(abs), m.exact(abs)
2461 2468
2462 2469 # walk target manifest.
2463 2470
2464 2471 def badfn(path, msg):
2465 2472 if path in names:
2466 2473 return False
2467 2474 path_ = path + '/'
2468 2475 for f in names:
2469 2476 if f.startswith(path_):
2470 2477 return False
2471 2478 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2472 2479 return False
2473 2480
2474 2481 m = cmdutil.match(repo, pats, opts)
2475 2482 m.bad = badfn
2476 2483 for abs in repo[node].walk(m):
2477 2484 if abs not in names:
2478 2485 names[abs] = m.rel(abs), m.exact(abs)
2479 2486
2480 2487 m = cmdutil.matchfiles(repo, names)
2481 2488 changes = repo.status(match=m)[:4]
2482 2489 modified, added, removed, deleted = map(dict.fromkeys, changes)
2483 2490
2484 2491 # if f is a rename, also revert the source
2485 2492 cwd = repo.getcwd()
2486 2493 for f in added:
2487 2494 src = repo.dirstate.copied(f)
2488 2495 if src and src not in names and repo.dirstate[src] == 'r':
2489 2496 removed[src] = None
2490 2497 names[src] = (repo.pathto(src, cwd), True)
2491 2498
2492 2499 def removeforget(abs):
2493 2500 if repo.dirstate[abs] == 'a':
2494 2501 return _('forgetting %s\n')
2495 2502 return _('removing %s\n')
2496 2503
2497 2504 revert = ([], _('reverting %s\n'))
2498 2505 add = ([], _('adding %s\n'))
2499 2506 remove = ([], removeforget)
2500 2507 undelete = ([], _('undeleting %s\n'))
2501 2508
2502 2509 disptable = (
2503 2510 # dispatch table:
2504 2511 # file state
2505 2512 # action if in target manifest
2506 2513 # action if not in target manifest
2507 2514 # make backup if in target manifest
2508 2515 # make backup if not in target manifest
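            # for example, a locally modified file that exists in the target
            # revision is reverted to the target contents and backed up first
            # (the 'modified' row below)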
2509 2516 (modified, revert, remove, True, True),
2510 2517 (added, revert, remove, True, False),
2511 2518 (removed, undelete, None, False, False),
2512 2519 (deleted, revert, remove, False, False),
2513 2520 )
2514 2521
2515 2522 for abs, (rel, exact) in util.sort(names.items()):
2516 2523 mfentry = mf.get(abs)
2517 2524 target = repo.wjoin(abs)
2518 2525 def handle(xlist, dobackup):
2519 2526 xlist[0].append(abs)
2520 2527 if dobackup and not opts.get('no_backup') and util.lexists(target):
2521 2528 bakname = "%s.orig" % rel
2522 2529 ui.note(_('saving current version of %s as %s\n') %
2523 2530 (rel, bakname))
2524 2531 if not opts.get('dry_run'):
2525 2532 util.copyfile(target, bakname)
2526 2533 if ui.verbose or not exact:
2527 2534 msg = xlist[1]
2528 2535 if not isinstance(msg, basestring):
2529 2536 msg = msg(abs)
2530 2537 ui.status(msg % rel)
2531 2538 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2532 2539 if abs not in table: continue
2533 2540 # file has changed in dirstate
2534 2541 if mfentry:
2535 2542 handle(hitlist, backuphit)
2536 2543 elif misslist is not None:
2537 2544 handle(misslist, backupmiss)
2538 2545 break
2539 2546 else:
2540 2547 if abs not in repo.dirstate:
2541 2548 if mfentry:
2542 2549 handle(add, True)
2543 2550 elif exact:
2544 2551 ui.warn(_('file not managed: %s\n') % rel)
2545 2552 continue
2546 2553 # file has not changed in dirstate
2547 2554 if node == parent:
2548 2555 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2549 2556 continue
2550 2557 if pmf is None:
2551 2558 # only need parent manifest in this unlikely case,
2552 2559 # so do not read by default
2553 2560 pmf = repo[parent].manifest()
2554 2561 if abs in pmf:
2555 2562 if mfentry:
2556 2563 # if version of file is same in parent and target
2557 2564 # manifests, do nothing
2558 2565 if (pmf[abs] != mfentry or
2559 2566 pmf.flags(abs) != mf.flags(abs)):
2560 2567 handle(revert, False)
2561 2568 else:
2562 2569 handle(remove, False)
2563 2570
2564 2571 if not opts.get('dry_run'):
2565 2572 def checkout(f):
2566 2573 fc = ctx[f]
2567 2574 repo.wwrite(f, fc.data(), fc.flags())
2568 2575
2569 2576 audit_path = util.path_auditor(repo.root)
2570 2577 for f in remove[0]:
2571 2578 if repo.dirstate[f] == 'a':
2572 2579 repo.dirstate.forget(f)
2573 2580 continue
2574 2581 audit_path(f)
2575 2582 try:
2576 2583 util.unlink(repo.wjoin(f))
2577 2584 except OSError:
2578 2585 pass
2579 2586 repo.dirstate.remove(f)
2580 2587
2581 2588 normal = None
2582 2589 if node == parent:
2583 2590 # We're reverting to our parent. If possible, we'd like status
2584 2591 # to report the file as clean. We have to use normallookup for
2585 2592 # merges to avoid losing information about merged/dirty files.
2586 2593 if p2 != nullid:
2587 2594 normal = repo.dirstate.normallookup
2588 2595 else:
2589 2596 normal = repo.dirstate.normal
2590 2597 for f in revert[0]:
2591 2598 checkout(f)
2592 2599 if normal:
2593 2600 normal(f)
2594 2601
2595 2602 for f in add[0]:
2596 2603 checkout(f)
2597 2604 repo.dirstate.add(f)
2598 2605
2599 2606 normal = repo.dirstate.normallookup
2600 2607 if node == parent and p2 == nullid:
2601 2608 normal = repo.dirstate.normal
2602 2609 for f in undelete[0]:
2603 2610 checkout(f)
2604 2611 normal(f)
2605 2612
2606 2613 finally:
2607 2614 del wlock
2608 2615
2609 2616 def rollback(ui, repo):
2610 2617 """roll back the last transaction
2611 2618
2612 2619 This command should be used with care. There is only one level of
2613 2620 rollback, and there is no way to undo a rollback. It will also
2614 2621 restore the dirstate at the time of the last transaction, losing
2615 2622 any dirstate changes since that time.
2616 2623
2617 2624 Transactions are used to encapsulate the effects of all commands
2618 2625 that create new changesets or propagate existing changesets into a
2619 2626 repository. For example, the following commands are transactional,
2620 2627 and their effects can be rolled back:
2621 2628
2622 2629 commit
2623 2630 import
2624 2631 pull
2625 2632 push (with this repository as destination)
2626 2633 unbundle
2627 2634
2628 2635 This command is not intended for use on public repositories. Once
2629 2636 changes are visible for pull by other users, rolling a transaction
2630 2637 back locally is ineffective (someone else may already have pulled
2631 2638 the changes). Furthermore, a race is possible with readers of the
2632 2639 repository; for example an in-progress pull from the repository
2633 2640 may fail if a rollback is performed.
2634 2641 """
2635 2642 repo.rollback()
2636 2643
2637 2644 def root(ui, repo):
2638 2645 """print the root (top) of the current working dir
2639 2646
2640 2647 Print the root directory of the current repository.
2641 2648 """
2642 2649 ui.write(repo.root + "\n")
2643 2650
2644 2651 def serve(ui, repo, **opts):
2645 2652 """export the repository via HTTP
2646 2653
2647 2654 Start a local HTTP repository browser and pull server.
2648 2655
2649 2656 By default, the server logs accesses to stdout and errors to
2650 2657 stderr. Use the "-A" and "-E" options to log to files.
2651 2658 """
2652 2659
2653 2660 if opts["stdio"]:
2654 2661 if repo is None:
2655 2662 raise error.RepoError(_("There is no Mercurial repository here"
2656 2663 " (.hg not found)"))
2657 2664 s = sshserver.sshserver(ui, repo)
2658 2665 s.serve_forever()
2659 2666
2660 2667 parentui = ui.parentui or ui
2661 2668 optlist = ("name templates style address port prefix ipv6"
2662 2669 " accesslog errorlog webdir_conf certificate")
2663 2670 for o in optlist.split():
2664 2671 if opts[o]:
2665 2672 parentui.setconfig("web", o, str(opts[o]))
2666 2673 if (repo is not None) and (repo.ui != parentui):
2667 2674 repo.ui.setconfig("web", o, str(opts[o]))
2668 2675
2669 2676 if repo is None and not ui.config("web", "webdir_conf"):
2670 2677 raise error.RepoError(_("There is no Mercurial repository here"
2671 2678 " (.hg not found)"))
2672 2679
2673 2680 class service:
2674 2681 def init(self):
2675 2682 util.set_signal_handler()
2676 2683 self.httpd = hgweb.server.create_server(parentui, repo)
2677 2684
2678 2685 if not ui.verbose: return
2679 2686
2680 2687 if self.httpd.prefix:
2681 2688 prefix = self.httpd.prefix.strip('/') + '/'
2682 2689 else:
2683 2690 prefix = ''
2684 2691
2685 2692 port = ':%d' % self.httpd.port
2686 2693 if port == ':80':
2687 2694 port = ''
2688 2695
2689 2696 bindaddr = self.httpd.addr
2690 2697 if bindaddr == '0.0.0.0':
2691 2698 bindaddr = '*'
2692 2699 elif ':' in bindaddr: # IPv6
2693 2700 bindaddr = '[%s]' % bindaddr
2694 2701
2695 2702 fqaddr = self.httpd.fqaddr
2696 2703 if ':' in fqaddr:
2697 2704 fqaddr = '[%s]' % fqaddr
2698 2705 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2699 2706 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2700 2707
2701 2708 def run(self):
2702 2709 self.httpd.serve_forever()
2703 2710
2704 2711 service = service()
2705 2712
2706 2713 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2707 2714
2708 2715 def status(ui, repo, *pats, **opts):
2709 2716 """show changed files in the working directory
2710 2717
2711 2718 Show status of files in the repository. If names are given, only
2712 2719 files that match are shown. Files that are clean or ignored or
2713 2720     files that match are shown. Files that are clean or ignored or the
2714 2721     source of a copy/move operation are not listed unless -c (clean),
2715 2722 with "show only ..." are given, the options -mardu are used.
2716 2723
2717 2724 Option -q/--quiet hides untracked (unknown and ignored) files
2718 2725     unless explicitly requested with -u/--unknown or -i/--ignored.
2719 2726
2720 2727 NOTE: status may appear to disagree with diff if permissions have
2721 2728 changed or a merge has occurred. The standard diff format does not
2722 2729 report permission changes and diff only reports changes relative
2723 2730 to one merge parent.
2724 2731
2725 2732 If one revision is given, it is used as the base revision.
2726 2733 If two revisions are given, the difference between them is shown.
2727 2734
2728 2735 The codes used to show the status of files are:
2729 2736 M = modified
2730 2737 A = added
2731 2738 R = removed
2732 2739 C = clean
2733 2740 ! = deleted, but still tracked
2734 2741 ? = not tracked
2735 2742 I = ignored
2736 2743       = the previously added file was copied from here
2737 2744 """
2738 2745
2739 2746 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2740 2747 cwd = (pats and repo.getcwd()) or ''
2741 2748 end = opts.get('print0') and '\0' or '\n'
2742 2749 copy = {}
2743 2750 states = 'modified added removed deleted unknown ignored clean'.split()
2744 2751 show = [k for k in states if opts[k]]
2745 2752 if opts.get('all'):
2746 2753 show += ui.quiet and (states[:4] + ['clean']) or states
2747 2754 if not show:
2748 2755 show = ui.quiet and states[:4] or states[:5]
2749 2756
2750 2757 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2751 2758 'ignored' in show, 'clean' in show, 'unknown' in show)
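    # pair each state name with its one-letter status code and the matching
    # file list returned by repo.status()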
2752 2759 changestates = zip(states, 'MAR!?IC', stat)
2753 2760
2754 2761 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2755 2762 ctxn = repo[nullid]
2756 2763 ctx1 = repo[node1]
2757 2764 ctx2 = repo[node2]
2758 2765 added = stat[1]
2759 2766 if node2 is None:
2760 2767 added = stat[0] + stat[1] # merged?
2761 2768
2762 2769 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2763 2770 if k in added:
2764 2771 copy[k] = v
2765 2772 elif v in added:
2766 2773 copy[v] = k
2767 2774
2768 2775 for state, char, files in changestates:
2769 2776 if state in show:
2770 2777 format = "%s %%s%s" % (char, end)
2771 2778 if opts.get('no_status'):
2772 2779 format = "%%s%s" % end
2773 2780
2774 2781 for f in files:
2775 2782 ui.write(format % repo.pathto(f, cwd))
2776 2783 if f in copy:
2777 2784 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2778 2785
2779 2786 def tag(ui, repo, name1, *names, **opts):
2780 2787 """add one or more tags for the current or given revision
2781 2788
2782 2789 Name a particular revision using <name>.
2783 2790
2784 2791     Tags are used to name particular revisions of the repository and are
2785 2792     very useful for comparing different revisions, going back to significant
2786 2793     earlier versions, or marking branch points as releases.
2787 2794
2788 2795 If no revision is given, the parent of the working directory is used,
2789 2796 or tip if no revision is checked out.
2790 2797
2791 2798 To facilitate version control, distribution, and merging of tags,
2792 2799 they are stored as a file named ".hgtags" which is managed
2793 2800 similarly to other project files and can be hand-edited if
2794 2801 necessary. The file '.hg/localtags' is used for local tags (not
2795 2802 shared among repositories).
2796 2803
2797 2804 See 'hg help dates' for a list of formats valid for -d/--date.
2798 2805 """
2799 2806
2800 2807 rev_ = "."
2801 2808 names = (name1,) + names
2802 2809 if len(names) != len(dict.fromkeys(names)):
2803 2810 raise util.Abort(_('tag names must be unique'))
2804 2811 for n in names:
2805 2812 if n in ['tip', '.', 'null']:
2806 2813 raise util.Abort(_('the name \'%s\' is reserved') % n)
2807 2814 if opts.get('rev') and opts.get('remove'):
2808 2815 raise util.Abort(_("--rev and --remove are incompatible"))
2809 2816 if opts.get('rev'):
2810 2817 rev_ = opts['rev']
2811 2818 message = opts.get('message')
2812 2819 if opts.get('remove'):
2813 2820 expectedtype = opts.get('local') and 'local' or 'global'
2814 2821 for n in names:
2815 2822 if not repo.tagtype(n):
2816 2823 raise util.Abort(_('tag \'%s\' does not exist') % n)
2817 2824 if repo.tagtype(n) != expectedtype:
2818 2825 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2819 2826 (n, expectedtype))
2820 2827 rev_ = nullid
2821 2828 if not message:
2822 2829 message = _('Removed tag %s') % ', '.join(names)
2823 2830 elif not opts.get('force'):
2824 2831 for n in names:
2825 2832 if n in repo.tags():
2826 2833 raise util.Abort(_('tag \'%s\' already exists '
2827 2834 '(use -f to force)') % n)
2828 2835 if not rev_ and repo.dirstate.parents()[1] != nullid:
2829 2836 raise util.Abort(_('uncommitted merge - please provide a '
2830 2837 'specific revision'))
2831 2838 r = repo[rev_].node()
2832 2839
2833 2840 if not message:
2834 2841 message = (_('Added tag %s for changeset %s') %
2835 2842 (', '.join(names), short(r)))
2836 2843
2837 2844 date = opts.get('date')
2838 2845 if date:
2839 2846 date = util.parsedate(date)
2840 2847
2841 2848 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2842 2849
2843 2850 def tags(ui, repo):
2844 2851 """list repository tags
2845 2852
2846 2853 This lists both regular and local tags. When the -v/--verbose switch
2847 2854 is used, a third column "local" is printed for local tags.
2848 2855 """
2849 2856
2850 2857 l = repo.tagslist()
2851 2858 l.reverse()
2852 2859 hexfunc = ui.debugflag and hex or short
2853 2860 tagtype = ""
2854 2861
2855 2862 for t, n in l:
2856 2863 if ui.quiet:
2857 2864 ui.write("%s\n" % t)
2858 2865 continue
2859 2866
2860 2867 try:
2861 2868 hn = hexfunc(n)
2862 2869 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2863 2870 except error.LookupError:
2864 2871 r = " ?:%s" % hn
2865 2872 else:
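            # pad the tag name to a 30 character column; locallen counts
            # characters in the local encoding so multi-byte names line up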
2866 2873 spaces = " " * (30 - util.locallen(t))
2867 2874 if ui.verbose:
2868 2875 if repo.tagtype(t) == 'local':
2869 2876 tagtype = " local"
2870 2877 else:
2871 2878 tagtype = ""
2872 2879 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2873 2880
2874 2881 def tip(ui, repo, **opts):
2875 2882 """show the tip revision
2876 2883
2877 2884 The tip revision (usually just called the tip) is the most
2878 2885 recently added changeset in the repository, the most recently
2879 2886 changed head.
2880 2887
2881 2888 If you have just made a commit, that commit will be the tip. If
2882 2889 you have just pulled changes from another repository, the tip of
2883 2890 that repository becomes the current tip. The "tip" tag is special
2884 2891 and cannot be renamed or assigned to a different changeset.
2885 2892 """
2886 2893 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2887 2894
2888 2895 def unbundle(ui, repo, fname1, *fnames, **opts):
2889 2896 """apply one or more changegroup files
2890 2897
2891 2898 Apply one or more compressed changegroup files generated by the
2892 2899 bundle command.
2893 2900 """
2894 2901 fnames = (fname1,) + fnames
2895 2902
2896 2903 lock = None
2897 2904 try:
2898 2905 lock = repo.lock()
2899 2906 for fname in fnames:
2900 2907 f = url.open(ui, fname)
2901 2908 gen = changegroup.readbundle(f, fname)
2902 2909 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2903 2910 finally:
2904 2911 del lock
2905 2912
2906 2913 return postincoming(ui, repo, modheads, opts.get('update'), None)
2907 2914
2908 2915 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2909 2916 """update working directory
2910 2917
2911 2918 Update the repository's working directory to the specified revision,
2912 2919 or the tip of the current branch if none is specified. Use null as
2913 2920 the revision to remove the working copy (like 'hg clone -U').
2914 2921
2915 2922 When the working dir contains no uncommitted changes, it will be
2916 2923 replaced by the state of the requested revision from the repo. When
2917 2924 the requested revision is on a different branch, the working dir
2918 2925 will additionally be switched to that branch.
2919 2926
2920 2927 When there are uncommitted changes, use option -C to discard them,
2921 2928 forcibly replacing the state of the working dir with the requested
2922 2929 revision.
2923 2930
2924 2931 When there are uncommitted changes and option -C is not used, and
2925 2932 the parent revision and requested revision are on the same branch,
2926 2933 and one of them is an ancestor of the other, then the new working
2927 2934 directory will contain the requested revision merged with the
2928 2935 uncommitted changes. Otherwise, the update will fail with a
2929 2936 suggestion to use 'merge' or 'update -C' instead.
2930 2937
2931 2938 If you want to update just one file to an older revision, use revert.
2932 2939
2933 2940 See 'hg help dates' for a list of formats valid for --date.
2934 2941 """
2935 2942 if rev and node:
2936 2943 raise util.Abort(_("please specify just one revision"))
2937 2944
2938 2945 if not rev:
2939 2946 rev = node
2940 2947
2941 2948 if date:
2942 2949 if rev:
2943 2950 raise util.Abort(_("you can't specify a revision and a date"))
2944 2951 rev = cmdutil.finddate(ui, repo, date)
2945 2952
2946 2953 if clean:
2947 2954 return hg.clean(repo, rev)
2948 2955 else:
2949 2956 return hg.update(repo, rev)
2950 2957
2951 2958 def verify(ui, repo):
2952 2959 """verify the integrity of the repository
2953 2960
2954 2961 Verify the integrity of the current repository.
2955 2962
2956 2963 This will perform an extensive check of the repository's
2957 2964 integrity, validating the hashes and checksums of each entry in
2958 2965 the changelog, manifest, and tracked files, as well as the
2959 2966 integrity of their crosslinks and indices.
2960 2967 """
2961 2968 return hg.verify(repo)
2962 2969
2963 2970 def version_(ui):
2964 2971 """output version and copyright information"""
2965 2972 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2966 2973 % util.version())
2967 2974 ui.status(_(
2968 2975 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2969 2976 "This is free software; see the source for copying conditions. "
2970 2977 "There is NO\nwarranty; "
2971 2978 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2972 2979 ))
2973 2980
2974 2981 # Command options and aliases are listed here, alphabetically
2975 2982
2976 2983 globalopts = [
2977 2984 ('R', 'repository', '',
2978 2985 _('repository root directory or symbolic path name')),
2979 2986 ('', 'cwd', '', _('change working directory')),
2980 2987 ('y', 'noninteractive', None,
2981 2988 _('do not prompt, assume \'yes\' for any required answers')),
2982 2989 ('q', 'quiet', None, _('suppress output')),
2983 2990 ('v', 'verbose', None, _('enable additional output')),
2984 2991 ('', 'config', [], _('set/override config option')),
2985 2992 ('', 'debug', None, _('enable debugging output')),
2986 2993 ('', 'debugger', None, _('start debugger')),
2987 2994 ('', 'encoding', util._encoding, _('set the charset encoding')),
2988 2995 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2989 2996 ('', 'lsprof', None, _('print improved command execution profile')),
2990 2997 ('', 'traceback', None, _('print traceback on exception')),
2991 2998 ('', 'time', None, _('time how long the command takes')),
2992 2999 ('', 'profile', None, _('print command execution profile')),
2993 3000 ('', 'version', None, _('output version information and exit')),
2994 3001 ('h', 'help', None, _('display help and exit')),
2995 3002 ]
2996 3003
2997 3004 dryrunopts = [('n', 'dry-run', None,
2998 3005 _('do not perform actions, just print output'))]
2999 3006
3000 3007 remoteopts = [
3001 3008 ('e', 'ssh', '', _('specify ssh command to use')),
3002 3009 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3003 3010 ]
3004 3011
3005 3012 walkopts = [
3006 3013 ('I', 'include', [], _('include names matching the given patterns')),
3007 3014 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3008 3015 ]
3009 3016
3010 3017 commitopts = [
3011 3018 ('m', 'message', '', _('use <text> as commit message')),
3012 3019 ('l', 'logfile', '', _('read commit message from <file>')),
3013 3020 ]
3014 3021
3015 3022 commitopts2 = [
3016 3023 ('d', 'date', '', _('record datecode as commit date')),
3017 3024 ('u', 'user', '', _('record user as committer')),
3018 3025 ]
3019 3026
3020 3027 templateopts = [
3021 3028 ('', 'style', '', _('display using template map file')),
3022 3029 ('', 'template', '', _('display with template')),
3023 3030 ]
3024 3031
3025 3032 logopts = [
3026 3033 ('p', 'patch', None, _('show patch')),
3027 3034 ('l', 'limit', '', _('limit number of changes displayed')),
3028 3035 ('M', 'no-merges', None, _('do not show merges')),
3029 3036 ] + templateopts
3030 3037
3031 3038 diffopts = [
3032 3039 ('a', 'text', None, _('treat all files as text')),
3033 3040 ('g', 'git', None, _('use git extended diff format')),
3034 3041 ('', 'nodates', None, _("don't include dates in diff headers"))
3035 3042 ]
3036 3043
3037 3044 diffopts2 = [
3038 3045 ('p', 'show-function', None, _('show which function each change is in')),
3039 3046 ('w', 'ignore-all-space', None,
3040 3047 _('ignore white space when comparing lines')),
3041 3048 ('b', 'ignore-space-change', None,
3042 3049 _('ignore changes in the amount of white space')),
3043 3050 ('B', 'ignore-blank-lines', None,
3044 3051 _('ignore changes whose lines are all blank')),
3045 3052 ('U', 'unified', '', _('number of lines of context to show'))
3046 3053 ]
3047 3054
3048 3055 similarityopts = [
3049 3056 ('s', 'similarity', '',
3050 3057 _('guess renamed files by similarity (0<=s<=100)'))
3051 3058 ]
3052 3059
3053 3060 table = {
3054 3061 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3055 3062 "addremove":
3056 3063 (addremove, similarityopts + walkopts + dryrunopts,
3057 3064 _('[OPTION]... [FILE]...')),
3058 3065 "^annotate|blame":
3059 3066 (annotate,
3060 3067 [('r', 'rev', '', _('annotate the specified revision')),
3061 3068 ('f', 'follow', None, _('follow file copies and renames')),
3062 3069 ('a', 'text', None, _('treat all files as text')),
3063 3070 ('u', 'user', None, _('list the author (long with -v)')),
3064 3071 ('d', 'date', None, _('list the date (short with -q)')),
3065 3072 ('n', 'number', None, _('list the revision number (default)')),
3066 3073 ('c', 'changeset', None, _('list the changeset')),
3067 3074 ('l', 'line-number', None,
3068 3075 _('show line number at the first appearance'))
3069 3076 ] + walkopts,
3070 3077 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3071 3078 "archive":
3072 3079 (archive,
3073 3080 [('', 'no-decode', None, _('do not pass files through decoders')),
3074 3081 ('p', 'prefix', '', _('directory prefix for files in archive')),
3075 3082 ('r', 'rev', '', _('revision to distribute')),
3076 3083 ('t', 'type', '', _('type of distribution to create')),
3077 3084 ] + walkopts,
3078 3085 _('[OPTION]... DEST')),
3079 3086 "backout":
3080 3087 (backout,
3081 3088 [('', 'merge', None,
3082 3089 _('merge with old dirstate parent after backout')),
3083 3090 ('', 'parent', '', _('parent to choose when backing out merge')),
3084 3091 ('r', 'rev', '', _('revision to backout')),
3085 3092 ] + walkopts + commitopts + commitopts2,
3086 3093 _('[OPTION]... [-r] REV')),
3087 3094 "bisect":
3088 3095 (bisect,
3089 3096 [('r', 'reset', False, _('reset bisect state')),
3090 3097 ('g', 'good', False, _('mark changeset good')),
3091 3098 ('b', 'bad', False, _('mark changeset bad')),
3092 3099 ('s', 'skip', False, _('skip testing changeset')),
3093 3100 ('c', 'command', '', _('use command to check changeset state')),
3094 3101 ('U', 'noupdate', False, _('do not update to target'))],
3095 3102 _("[-gbsr] [-c CMD] [REV]")),
3096 3103 "branch":
3097 3104 (branch,
3098 3105 [('f', 'force', None,
3099 3106 _('set branch name even if it shadows an existing branch')),
3100 3107 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3101 3108 _('[-fC] [NAME]')),
3102 3109 "branches":
3103 3110 (branches,
3104 3111 [('a', 'active', False,
3105 3112 _('show only branches that have unmerged heads'))],
3106 3113 _('[-a]')),
3107 3114 "bundle":
3108 3115 (bundle,
3109 3116 [('f', 'force', None,
3110 3117 _('run even when remote repository is unrelated')),
3111 3118 ('r', 'rev', [],
3112 3119 _('a changeset up to which you would like to bundle')),
3113 3120 ('', 'base', [],
3114 3121 _('a base changeset to specify instead of a destination')),
3115 3122 ('a', 'all', None, _('bundle all changesets in the repository')),
3116 3123 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3117 3124 ] + remoteopts,
3118 3125 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3119 3126 "cat":
3120 3127 (cat,
3121 3128 [('o', 'output', '', _('print output to file with formatted name')),
3122 3129 ('r', 'rev', '', _('print the given revision')),
3123 3130 ('', 'decode', None, _('apply any matching decode filter')),
3124 3131 ] + walkopts,
3125 3132 _('[OPTION]... FILE...')),
3126 3133 "^clone":
3127 3134 (clone,
3128 3135 [('U', 'noupdate', None,
3129 3136 _('the clone will only contain a repository (no working copy)')),
3130 3137 ('r', 'rev', [],
3131 3138 _('a changeset you would like to have after cloning')),
3132 3139 ('', 'pull', None, _('use pull protocol to copy metadata')),
3133 3140 ('', 'uncompressed', None,
3134 3141 _('use uncompressed transfer (fast over LAN)')),
3135 3142 ] + remoteopts,
3136 3143 _('[OPTION]... SOURCE [DEST]')),
3137 3144 "^commit|ci":
3138 3145 (commit,
3139 3146 [('A', 'addremove', None,
3140 3147 _('mark new/missing files as added/removed before committing')),
3141 3148 ('', 'close-branch', None,
3142 3149 _('mark a branch as closed, hiding it from the branch list')),
3143 3150 ] + walkopts + commitopts + commitopts2,
3144 3151 _('[OPTION]... [FILE]...')),
3145 3152 "copy|cp":
3146 3153 (copy,
3147 3154 [('A', 'after', None, _('record a copy that has already occurred')),
3148 3155 ('f', 'force', None,
3149 3156 _('forcibly copy over an existing managed file')),
3150 3157 ] + walkopts + dryrunopts,
3151 3158 _('[OPTION]... [SOURCE]... DEST')),
3152 3159 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3153 3160 "debugcheckstate": (debugcheckstate, []),
3154 3161 "debugcomplete":
3155 3162 (debugcomplete,
3156 3163 [('o', 'options', None, _('show the command options'))],
3157 3164 _('[-o] CMD')),
3158 3165 "debugdate":
3159 3166 (debugdate,
3160 3167 [('e', 'extended', None, _('try extended date formats'))],
3161 3168 _('[-e] DATE [RANGE]')),
3162 3169 "debugdata": (debugdata, [], _('FILE REV')),
3163 3170 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3164 3171 "debugindex": (debugindex, [], _('FILE')),
3165 3172 "debugindexdot": (debugindexdot, [], _('FILE')),
3166 3173 "debuginstall": (debuginstall, []),
3167 3174 "debugrawcommit|rawcommit":
3168 3175 (rawcommit,
3169 3176 [('p', 'parent', [], _('parent')),
3170 3177 ('F', 'files', '', _('file list'))
3171 3178 ] + commitopts + commitopts2,
3172 3179 _('[OPTION]... [FILE]...')),
3173 3180 "debugrebuildstate":
3174 3181 (debugrebuildstate,
3175 3182 [('r', 'rev', '', _('revision to rebuild to'))],
3176 3183 _('[-r REV] [REV]')),
3177 3184 "debugrename":
3178 3185 (debugrename,
3179 3186 [('r', 'rev', '', _('revision to debug'))],
3180 3187 _('[-r REV] FILE')),
3181 3188 "debugsetparents":
3182 3189 (debugsetparents, [], _('REV1 [REV2]')),
3183 3190 "debugstate":
3184 3191 (debugstate,
3185 3192 [('', 'nodates', None, _('do not display the saved mtime'))],
3186 3193 _('[OPTION]...')),
3187 3194 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3188 3195 "^diff":
3189 3196 (diff,
3190 3197 [('r', 'rev', [], _('revision')),
3191 3198 ('c', 'change', '', _('change made by revision'))
3192 3199 ] + diffopts + diffopts2 + walkopts,
3193 3200 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3194 3201 "^export":
3195 3202 (export,
3196 3203 [('o', 'output', '', _('print output to file with formatted name')),
3197 3204 ('', 'switch-parent', None, _('diff against the second parent'))
3198 3205 ] + diffopts,
3199 3206 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3200 3207 "grep":
3201 3208 (grep,
3202 3209 [('0', 'print0', None, _('end fields with NUL')),
3203 3210 ('', 'all', None, _('print all revisions that match')),
3204 3211 ('f', 'follow', None,
3205 3212 _('follow changeset history, or file history across copies and renames')),
3206 3213 ('i', 'ignore-case', None, _('ignore case when matching')),
3207 3214 ('l', 'files-with-matches', None,
3208 3215 _('print only filenames and revs that match')),
3209 3216 ('n', 'line-number', None, _('print matching line numbers')),
3210 3217 ('r', 'rev', [], _('search in given revision range')),
3211 3218 ('u', 'user', None, _('list the author (long with -v)')),
3212 3219 ('d', 'date', None, _('list the date (short with -q)')),
3213 3220 ] + walkopts,
3214 3221 _('[OPTION]... PATTERN [FILE]...')),
3215 3222 "heads":
3216 3223 (heads,
3217 3224 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3225 ('a', 'active', False,
3226 _('show only the active heads from open branches')),
3218 3227 ] + templateopts,
3219 3228 _('[-r REV] [REV]...')),
3220 3229 "help": (help_, [], _('[TOPIC]')),
3221 3230 "identify|id":
3222 3231 (identify,
3223 3232 [('r', 'rev', '', _('identify the specified rev')),
3224 3233 ('n', 'num', None, _('show local revision number')),
3225 3234 ('i', 'id', None, _('show global revision id')),
3226 3235 ('b', 'branch', None, _('show branch')),
3227 3236 ('t', 'tags', None, _('show tags'))],
3228 3237 _('[-nibt] [-r REV] [SOURCE]')),
3229 3238 "import|patch":
3230 3239 (import_,
3231 3240 [('p', 'strip', 1,
3232 3241 _('directory strip option for patch. This has the same\n'
3233 3242 'meaning as the corresponding patch option')),
3234 3243 ('b', 'base', '', _('base path')),
3235 3244 ('f', 'force', None,
3236 3245 _('skip check for outstanding uncommitted changes')),
3237 3246 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3238 3247 ('', 'exact', None,
3239 3248 _('apply patch to the nodes from which it was generated')),
3240 3249 ('', 'import-branch', None,
3241 3250 _('use any branch information in patch (implied by --exact)'))] +
3242 3251 commitopts + commitopts2 + similarityopts,
3243 3252 _('[OPTION]... PATCH...')),
3244 3253 "incoming|in":
3245 3254 (incoming,
3246 3255 [('f', 'force', None,
3247 3256 _('run even when remote repository is unrelated')),
3248 3257 ('n', 'newest-first', None, _('show newest record first')),
3249 3258 ('', 'bundle', '', _('file to store the bundles into')),
3250 3259 ('r', 'rev', [],
3251 3260 _('a specific revision up to which you would like to pull')),
3252 3261 ] + logopts + remoteopts,
3253 3262 _('[-p] [-n] [-M] [-f] [-r REV]...'
3254 3263 ' [--bundle FILENAME] [SOURCE]')),
3255 3264 "^init":
3256 3265 (init,
3257 3266 remoteopts,
3258 3267 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3259 3268 "locate":
3260 3269 (locate,
3261 3270 [('r', 'rev', '', _('search the repository as it stood at rev')),
3262 3271 ('0', 'print0', None,
3263 3272 _('end filenames with NUL, for use with xargs')),
3264 3273 ('f', 'fullpath', None,
3265 3274 _('print complete paths from the filesystem root')),
3266 3275 ] + walkopts,
3267 3276 _('[OPTION]... [PATTERN]...')),
3268 3277 "^log|history":
3269 3278 (log,
3270 3279 [('f', 'follow', None,
3271 3280 _('follow changeset history, or file history across copies and renames')),
3272 3281 ('', 'follow-first', None,
3273 3282 _('only follow the first parent of merge changesets')),
3274 3283 ('d', 'date', '', _('show revs matching date spec')),
3275 3284 ('C', 'copies', None, _('show copied files')),
3276 3285 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3277 3286 ('r', 'rev', [], _('show the specified revision or range')),
3278 3287 ('', 'removed', None, _('include revs where files were removed')),
3279 3288 ('m', 'only-merges', None, _('show only merges')),
3280 3289 ('u', 'user', [], _('revs committed by user')),
3281 3290 ('b', 'only-branch', [],
3282 3291 _('show only changesets within the given named branch')),
3283 3292 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3284 3293 ] + logopts + walkopts,
3285 3294 _('[OPTION]... [FILE]')),
3286 3295 "manifest":
3287 3296 (manifest,
3288 3297 [('r', 'rev', '', _('revision to display'))],
3289 3298 _('[-r REV]')),
3290 3299 "^merge":
3291 3300 (merge,
3292 3301 [('f', 'force', None, _('force a merge with outstanding changes')),
3293 3302 ('r', 'rev', '', _('revision to merge')),
3294 3303 ],
3295 3304 _('[-f] [[-r] REV]')),
3296 3305 "outgoing|out":
3297 3306 (outgoing,
3298 3307 [('f', 'force', None,
3299 3308 _('run even when remote repository is unrelated')),
3300 3309 ('r', 'rev', [],
3301 3310 _('a specific revision up to which you would like to push')),
3302 3311 ('n', 'newest-first', None, _('show newest record first')),
3303 3312 ] + logopts + remoteopts,
3304 3313 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3305 3314 "^parents":
3306 3315 (parents,
3307 3316 [('r', 'rev', '', _('show parents from the specified rev')),
3308 3317 ] + templateopts,
3309 3318 _('hg parents [-r REV] [FILE]')),
3310 3319 "paths": (paths, [], _('[NAME]')),
3311 3320 "^pull":
3312 3321 (pull,
3313 3322 [('u', 'update', None,
3314 3323 _('update to new tip if changesets were pulled')),
3315 3324 ('f', 'force', None,
3316 3325 _('run even when remote repository is unrelated')),
3317 3326 ('r', 'rev', [],
3318 3327 _('a specific revision up to which you would like to pull')),
3319 3328 ] + remoteopts,
3320 3329 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3321 3330 "^push":
3322 3331 (push,
3323 3332 [('f', 'force', None, _('force push')),
3324 3333 ('r', 'rev', [],
3325 3334 _('a specific revision up to which you would like to push')),
3326 3335 ] + remoteopts,
3327 3336 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3328 3337 "recover": (recover, []),
3329 3338 "^remove|rm":
3330 3339 (remove,
3331 3340 [('A', 'after', None, _('record delete for missing files')),
3332 3341 ('f', 'force', None,
3333 3342 _('remove (and delete) file even if added or modified')),
3334 3343 ] + walkopts,
3335 3344 _('[OPTION]... FILE...')),
3336 3345 "rename|mv":
3337 3346 (rename,
3338 3347 [('A', 'after', None, _('record a rename that has already occurred')),
3339 3348 ('f', 'force', None,
3340 3349 _('forcibly copy over an existing managed file')),
3341 3350 ] + walkopts + dryrunopts,
3342 3351 _('[OPTION]... SOURCE... DEST')),
3343 3352 "resolve":
3344 3353 (resolve,
3345 3354 [('a', 'all', None, _('remerge all unresolved files')),
3346 3355 ('l', 'list', None, _('list state of files needing merge')),
3347 3356 ('m', 'mark', None, _('mark files as resolved')),
3348 3357 ('u', 'unmark', None, _('unmark files as resolved'))],
3349 3358 _('[OPTION]... [FILE]...')),
3350 3359 "revert":
3351 3360 (revert,
3352 3361 [('a', 'all', None, _('revert all changes when no arguments given')),
3353 3362 ('d', 'date', '', _('tipmost revision matching date')),
3354 3363 ('r', 'rev', '', _('revision to revert to')),
3355 3364 ('', 'no-backup', None, _('do not save backup copies of files')),
3356 3365 ] + walkopts + dryrunopts,
3357 3366 _('[OPTION]... [-r REV] [NAME]...')),
3358 3367 "rollback": (rollback, []),
3359 3368 "root": (root, []),
3360 3369 "^serve":
3361 3370 (serve,
3362 3371 [('A', 'accesslog', '', _('name of access log file to write to')),
3363 3372 ('d', 'daemon', None, _('run server in background')),
3364 3373 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3365 3374 ('E', 'errorlog', '', _('name of error log file to write to')),
3366 3375 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3367 3376 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3368 3377 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3369 3378 ('n', 'name', '',
3370 3379 _('name to show in web pages (default: working dir)')),
3371 3380 ('', 'webdir-conf', '', _('name of the webdir config file'
3372 3381 ' (serve more than one repo)')),
3373 3382 ('', 'pid-file', '', _('name of file to write process ID to')),
3374 3383 ('', 'stdio', None, _('for remote clients')),
3375 3384 ('t', 'templates', '', _('web templates to use')),
3376 3385 ('', 'style', '', _('template style to use')),
3377 3386 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3378 3387 ('', 'certificate', '', _('SSL certificate file'))],
3379 3388 _('[OPTION]...')),
3380 3389 "showconfig|debugconfig":
3381 3390 (showconfig,
3382 3391 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3383 3392 _('[-u] [NAME]...')),
3384 3393 "^status|st":
3385 3394 (status,
3386 3395 [('A', 'all', None, _('show status of all files')),
3387 3396 ('m', 'modified', None, _('show only modified files')),
3388 3397 ('a', 'added', None, _('show only added files')),
3389 3398 ('r', 'removed', None, _('show only removed files')),
3390 3399 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3391 3400 ('c', 'clean', None, _('show only files without changes')),
3392 3401 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3393 3402 ('i', 'ignored', None, _('show only ignored files')),
3394 3403 ('n', 'no-status', None, _('hide status prefix')),
3395 3404 ('C', 'copies', None, _('show source of copied files')),
3396 3405 ('0', 'print0', None,
3397 3406 _('end filenames with NUL, for use with xargs')),
3398 3407 ('', 'rev', [], _('show difference from revision')),
3399 3408 ] + walkopts,
3400 3409 _('[OPTION]... [FILE]...')),
3401 3410 "tag":
3402 3411 (tag,
3403 3412 [('f', 'force', None, _('replace existing tag')),
3404 3413 ('l', 'local', None, _('make the tag local')),
3405 3414 ('r', 'rev', '', _('revision to tag')),
3406 3415 ('', 'remove', None, _('remove a tag')),
3407 3416 # -l/--local is already there, commitopts cannot be used
3408 3417 ('m', 'message', '', _('use <text> as commit message')),
3409 3418 ] + commitopts2,
3410 3419 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3411 3420 "tags": (tags, []),
3412 3421 "tip":
3413 3422 (tip,
3414 3423 [('p', 'patch', None, _('show patch')),
3415 3424 ] + templateopts,
3416 3425 _('[-p]')),
3417 3426 "unbundle":
3418 3427 (unbundle,
3419 3428 [('u', 'update', None,
3420 3429 _('update to new tip if changesets were unbundled'))],
3421 3430 _('[-u] FILE...')),
3422 3431 "^update|up|checkout|co":
3423 3432 (update,
3424 3433 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3425 3434 ('d', 'date', '', _('tipmost revision matching date')),
3426 3435 ('r', 'rev', '', _('revision'))],
3427 3436 _('[-C] [-d DATE] [[-r] REV]')),
3428 3437 "verify": (verify, []),
3429 3438 "version": (version_, []),
3430 3439 }
3431 3440
3432 3441 norepo = ("clone init version help debugcomplete debugdata"
3433 3442 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3434 3443 optionalrepo = ("identify paths serve showconfig debugancestor")
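Two entries in the table above are new in this change: commit grows --close-branch and heads grows -a/--active. A rough usage sketch through the command dispatcher (run from inside a working copy; the commit message is made up):

    from mercurial import dispatch

    # mark the current branch as closed so it drops out of 'hg branches'
    dispatch.dispatch(['commit', '--close-branch', '-m', 'no more work planned here'])

    # list only heads that belong to branches which are still open
    dispatch.dispatch(['heads', '--active'])
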
@@ -1,2135 +1,2156
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import lock, transaction, stat, errno, ui, store
13 13 import os, time, util, extensions, hook, inspect, error
14 14 import match as match_
15 15 import merge as merge_
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 19 supported = ('revlogv1', 'store', 'fncache')
20 20
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 self.root = os.path.realpath(path)
24 24 self.path = os.path.join(self.root, ".hg")
25 25 self.origroot = path
26 26 self.opener = util.opener(self.path)
27 27 self.wopener = util.opener(self.root)
28 28
29 29 if not os.path.isdir(self.path):
30 30 if create:
31 31 if not os.path.exists(path):
32 32 os.mkdir(path)
33 33 os.mkdir(self.path)
34 34 requirements = ["revlogv1"]
35 35 if parentui.configbool('format', 'usestore', True):
36 36 os.mkdir(os.path.join(self.path, "store"))
37 37 requirements.append("store")
38 38 if parentui.configbool('format', 'usefncache', True):
39 39 requirements.append("fncache")
40 40 # create an invalid changelog
41 41 self.opener("00changelog.i", "a").write(
42 42 '\0\0\0\2' # represents revlogv2
43 43 ' dummy changelog to prevent using the old repo layout'
44 44 )
45 45 reqfile = self.opener("requires", "w")
46 46 for r in requirements:
47 47 reqfile.write("%s\n" % r)
48 48 reqfile.close()
49 49 else:
50 50 raise error.RepoError(_("repository %s not found") % path)
51 51 elif create:
52 52 raise error.RepoError(_("repository %s already exists") % path)
53 53 else:
54 54 # find requirements
55 55 requirements = []
56 56 try:
57 57 requirements = self.opener("requires").read().splitlines()
58 58 for r in requirements:
59 59 if r not in self.supported:
60 60 raise error.RepoError(_("requirement '%s' not supported") % r)
61 61 except IOError, inst:
62 62 if inst.errno != errno.ENOENT:
63 63 raise
64 64
65 65 self.store = store.store(requirements, self.path, util.opener)
66 66 self.spath = self.store.path
67 67 self.sopener = self.store.opener
68 68 self.sjoin = self.store.join
69 69 self.opener.createmode = self.store.createmode
70 70
71 71 self.ui = ui.ui(parentui=parentui)
72 72 try:
73 73 self.ui.readconfig(self.join("hgrc"), self.root)
74 74 extensions.loadall(self.ui)
75 75 except IOError:
76 76 pass
77 77
78 78 self.tagscache = None
79 79 self._tagstypecache = None
80 80 self.branchcache = None
81 81 self._ubranchcache = None # UTF-8 version of branchcache
82 82 self._branchcachetip = None
83 83 self.nodetagscache = None
84 84 self.filterpats = {}
85 85 self._datafilters = {}
86 86 self._transref = self._lockref = self._wlockref = None
87 87
88 88 def __getattr__(self, name):
89 89 if name == 'changelog':
90 90 self.changelog = changelog.changelog(self.sopener)
91 91 self.sopener.defversion = self.changelog.version
92 92 return self.changelog
93 93 if name == 'manifest':
94 94 self.changelog
95 95 self.manifest = manifest.manifest(self.sopener)
96 96 return self.manifest
97 97 if name == 'dirstate':
98 98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 99 return self.dirstate
100 100 else:
101 101 raise AttributeError(name)
102 102
103 103 def __getitem__(self, changeid):
104 104 if changeid == None:
105 105 return context.workingctx(self)
106 106 return context.changectx(self, changeid)
107 107
108 108 def __nonzero__(self):
109 109 return True
110 110
111 111 def __len__(self):
112 112 return len(self.changelog)
113 113
114 114 def __iter__(self):
115 115 for i in xrange(len(self)):
116 116 yield i
117 117
118 118 def url(self):
119 119 return 'file:' + self.root
120 120
121 121 def hook(self, name, throw=False, **args):
122 122 return hook.hook(self.ui, self, name, throw, **args)
123 123
124 124 tag_disallowed = ':\r\n'
125 125
126 126 def _tag(self, names, node, message, local, user, date, parent=None,
127 127 extra={}):
128 128 use_dirstate = parent is None
129 129
130 130 if isinstance(names, str):
131 131 allchars = names
132 132 names = (names,)
133 133 else:
134 134 allchars = ''.join(names)
135 135 for c in self.tag_disallowed:
136 136 if c in allchars:
137 137 raise util.Abort(_('%r cannot be used in a tag name') % c)
138 138
139 139 for name in names:
140 140 self.hook('pretag', throw=True, node=hex(node), tag=name,
141 141 local=local)
142 142
143 143 def writetags(fp, names, munge, prevtags):
144 144 fp.seek(0, 2)
145 145 if prevtags and prevtags[-1] != '\n':
146 146 fp.write('\n')
147 147 for name in names:
148 148 m = munge and munge(name) or name
149 149 if self._tagstypecache and name in self._tagstypecache:
150 150 old = self.tagscache.get(name, nullid)
151 151 fp.write('%s %s\n' % (hex(old), m))
152 152 fp.write('%s %s\n' % (hex(node), m))
153 153 fp.close()
154 154
155 155 prevtags = ''
156 156 if local:
157 157 try:
158 158 fp = self.opener('localtags', 'r+')
159 159 except IOError, err:
160 160 fp = self.opener('localtags', 'a')
161 161 else:
162 162 prevtags = fp.read()
163 163
164 164 # local tags are stored in the current charset
165 165 writetags(fp, names, None, prevtags)
166 166 for name in names:
167 167 self.hook('tag', node=hex(node), tag=name, local=local)
168 168 return
169 169
170 170 if use_dirstate:
171 171 try:
172 172 fp = self.wfile('.hgtags', 'rb+')
173 173 except IOError, err:
174 174 fp = self.wfile('.hgtags', 'ab')
175 175 else:
176 176 prevtags = fp.read()
177 177 else:
178 178 try:
179 179 prevtags = self.filectx('.hgtags', parent).data()
180 180 except error.LookupError:
181 181 pass
182 182 fp = self.wfile('.hgtags', 'wb')
183 183 if prevtags:
184 184 fp.write(prevtags)
185 185
186 186 # committed tags are stored in UTF-8
187 187 writetags(fp, names, util.fromlocal, prevtags)
188 188
189 189 if use_dirstate and '.hgtags' not in self.dirstate:
190 190 self.add(['.hgtags'])
191 191
192 192 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
193 193 extra=extra)
194 194
195 195 for name in names:
196 196 self.hook('tag', node=hex(node), tag=name, local=local)
197 197
198 198 return tagnode
199 199
200 200 def tag(self, names, node, message, local, user, date):
201 201 '''tag a revision with one or more symbolic names.
202 202
203 203 names is a list of strings or, when adding a single tag, names may be a
204 204 string.
205 205
206 206 if local is True, the tags are stored in a per-repository file.
207 207 otherwise, they are stored in the .hgtags file, and a new
208 208 changeset is committed with the change.
209 209
210 210 keyword arguments:
211 211
212 212 local: whether to store tags in non-version-controlled file
213 213 (default False)
214 214
215 215 message: commit message to use if committing
216 216
217 217 user: name of user to use if committing
218 218
219 219 date: date tuple to use if committing'''
220 220
221 221 for x in self.status()[:5]:
222 222 if '.hgtags' in x:
223 223 raise util.Abort(_('working copy of .hgtags is changed '
224 224 '(please commit .hgtags manually)'))
225 225
226 226 self._tag(names, node, message, local, user, date)
227 227
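A short usage sketch of the tagging API documented above (the repo handle and node are assumed to exist already):

    # global tag: writes .hgtags and commits it
    repo.tag('v1.0', node, 'Added tag v1.0 for changeset', local=False,
             user=None, date=None)

    # local tags: written to .hg/localtags, no changeset is created
    repo.tag(['nightly', 'qa-passed'], node, '', local=True, user=None, date=None)
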
228 228 def tags(self):
229 229 '''return a mapping of tag to node'''
230 230 if self.tagscache:
231 231 return self.tagscache
232 232
233 233 globaltags = {}
234 234 tagtypes = {}
235 235
236 236 def readtags(lines, fn, tagtype):
237 237 filetags = {}
238 238 count = 0
239 239
240 240 def warn(msg):
241 241 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
242 242
243 243 for l in lines:
244 244 count += 1
245 245 if not l:
246 246 continue
247 247 s = l.split(" ", 1)
248 248 if len(s) != 2:
249 249 warn(_("cannot parse entry"))
250 250 continue
251 251 node, key = s
252 252 key = util.tolocal(key.strip()) # stored in UTF-8
253 253 try:
254 254 bin_n = bin(node)
255 255 except TypeError:
256 256 warn(_("node '%s' is not well formed") % node)
257 257 continue
258 258 if bin_n not in self.changelog.nodemap:
259 259 warn(_("tag '%s' refers to unknown node") % key)
260 260 continue
261 261
262 262 h = []
263 263 if key in filetags:
264 264 n, h = filetags[key]
265 265 h.append(n)
266 266 filetags[key] = (bin_n, h)
267 267
268 268 for k, nh in filetags.iteritems():
269 269 if k not in globaltags:
270 270 globaltags[k] = nh
271 271 tagtypes[k] = tagtype
272 272 continue
273 273
274 274 # we prefer the global tag if:
275 275 # it supersedes us OR
276 276 # mutual supersedes and it has a higher rank
277 277 # otherwise we win because we're tip-most
278 278 an, ah = nh
279 279 bn, bh = globaltags[k]
280 280 if (bn != an and an in bh and
281 281 (bn not in ah or len(bh) > len(ah))):
282 282 an = bn
283 283 ah.extend([n for n in bh if n not in ah])
284 284 globaltags[k] = an, ah
285 285 tagtypes[k] = tagtype
286 286
287 287 # read the tags file from each head, ending with the tip
288 288 f = None
289 289 for rev, node, fnode in self._hgtagsnodes():
290 290 f = (f and f.filectx(fnode) or
291 291 self.filectx('.hgtags', fileid=fnode))
292 292 readtags(f.data().splitlines(), f, "global")
293 293
294 294 try:
295 295 data = util.fromlocal(self.opener("localtags").read())
296 296 # localtags are stored in the local character set
297 297 # while the internal tag table is stored in UTF-8
298 298 readtags(data.splitlines(), "localtags", "local")
299 299 except IOError:
300 300 pass
301 301
302 302 self.tagscache = {}
303 303 self._tagstypecache = {}
304 304 for k, nh in globaltags.iteritems():
305 305 n = nh[0]
306 306 if n != nullid:
307 307 self.tagscache[k] = n
308 308 self._tagstypecache[k] = tagtypes[k]
309 309 self.tagscache['tip'] = self.changelog.tip()
310 310 return self.tagscache
311 311
312 312 def tagtype(self, tagname):
313 313 '''
314 314 return the type of the given tag. result can be:
315 315
316 316 'local' : a local tag
317 317 'global' : a global tag
318 318 None : tag does not exist
319 319 '''
320 320
321 321 self.tags()
322 322
323 323 return self._tagstypecache.get(tagname)
324 324
325 325 def _hgtagsnodes(self):
326 326 heads = self.heads()
327 327 heads.reverse()
328 328 last = {}
329 329 ret = []
330 330 for node in heads:
331 331 c = self[node]
332 332 rev = c.rev()
333 333 try:
334 334 fnode = c.filenode('.hgtags')
335 335 except error.LookupError:
336 336 continue
337 337 ret.append((rev, node, fnode))
338 338 if fnode in last:
339 339 ret[last[fnode]] = None
340 340 last[fnode] = len(ret) - 1
341 341 return [item for item in ret if item]
342 342
343 343 def tagslist(self):
344 344 '''return a list of tags ordered by revision'''
345 345 l = []
346 346 for t, n in self.tags().iteritems():
347 347 try:
348 348 r = self.changelog.rev(n)
349 349 except:
350 350 r = -2 # sort to the beginning of the list if unknown
351 351 l.append((r, t, n))
352 352 return [(t, n) for r, t, n in util.sort(l)]
353 353
354 354 def nodetags(self, node):
355 355 '''return the tags associated with a node'''
356 356 if not self.nodetagscache:
357 357 self.nodetagscache = {}
358 358 for t, n in self.tags().iteritems():
359 359 self.nodetagscache.setdefault(n, []).append(t)
360 360 return self.nodetagscache.get(node, [])
361 361
362 362 def _branchtags(self, partial, lrev):
363 363 # TODO: rename this function?
364 364 tiprev = len(self) - 1
365 365 if lrev != tiprev:
366 366 self._updatebranchcache(partial, lrev+1, tiprev+1)
367 367 self._writebranchcache(partial, self.changelog.tip(), tiprev)
368 368
369 369 return partial
370 370
371 371 def _branchheads(self):
372 372 tip = self.changelog.tip()
373 373 if self.branchcache is not None and self._branchcachetip == tip:
374 374 return self.branchcache
375 375
376 376 oldtip = self._branchcachetip
377 377 self._branchcachetip = tip
378 378 if self.branchcache is None:
379 379 self.branchcache = {} # avoid recursion in changectx
380 380 else:
381 381 self.branchcache.clear() # keep using the same dict
382 382 if oldtip is None or oldtip not in self.changelog.nodemap:
383 383 partial, last, lrev = self._readbranchcache()
384 384 else:
385 385 lrev = self.changelog.rev(oldtip)
386 386 partial = self._ubranchcache
387 387
388 388 self._branchtags(partial, lrev)
389 389 # this private cache holds all heads (not just tips)
390 390 self._ubranchcache = partial
391 391
392 392 # the branch cache is stored on disk as UTF-8, but in the local
393 393 # charset internally
394 394 for k, v in partial.iteritems():
395 395 self.branchcache[util.tolocal(k)] = v
396 396 return self.branchcache
397 397
398 398
399 399 def branchtags(self):
400 400 '''return a dict where branch names map to the tipmost head of
401 the branch'''
402 return dict([(k, v[-1]) for (k, v) in self._branchheads().iteritems()])
401 the branch; open heads come before closed ones'''
402 bt = {}
403 for bn, heads in self._branchheads().iteritems():
404 head = None
405 for i in range(len(heads)-1, -1, -1):
406 h = heads[i]
407 if 'close' not in self.changelog.read(h)[5]:
408 head = h
409 break
410 # no open head was found; fall back to the tip-most (closed) head
411 if head is None:
412 head = heads[-1]
413 bt[bn] = head
414 return bt
415
403 416
404 417 def _readbranchcache(self):
405 418 partial = {}
406 419 try:
407 420 f = self.opener("branchheads.cache")
408 421 lines = f.read().split('\n')
409 422 f.close()
410 423 except (IOError, OSError):
411 424 return {}, nullid, nullrev
412 425
413 426 try:
414 427 last, lrev = lines.pop(0).split(" ", 1)
415 428 last, lrev = bin(last), int(lrev)
416 429 if lrev >= len(self) or self[lrev].node() != last:
417 430 # invalidate the cache
418 431 raise ValueError('invalidating branch cache (tip differs)')
419 432 for l in lines:
420 433 if not l: continue
421 434 node, label = l.split(" ", 1)
422 435 partial.setdefault(label.strip(), []).append(bin(node))
423 436 except KeyboardInterrupt:
424 437 raise
425 438 except Exception, inst:
426 439 if self.ui.debugflag:
427 440 self.ui.warn(str(inst), '\n')
428 441 partial, last, lrev = {}, nullid, nullrev
429 442 return partial, last, lrev
430 443
431 444 def _writebranchcache(self, branches, tip, tiprev):
432 445 try:
433 446 f = self.opener("branchheads.cache", "w", atomictemp=True)
434 447 f.write("%s %s\n" % (hex(tip), tiprev))
435 448 for label, nodes in branches.iteritems():
436 449 for node in nodes:
437 450 f.write("%s %s\n" % (hex(node), label))
438 451 f.rename()
439 452 except (IOError, OSError):
440 453 pass
441 454
442 455 def _updatebranchcache(self, partial, start, end):
443 456 for r in xrange(start, end):
444 457 c = self[r]
445 458 b = c.branch()
446 459 bheads = partial.setdefault(b, [])
447 460 bheads.append(c.node())
448 461 for p in c.parents():
449 462 pn = p.node()
450 463 if pn in bheads:
451 464 bheads.remove(pn)
452 465
453 466 def lookup(self, key):
454 467 if isinstance(key, int):
455 468 return self.changelog.node(key)
456 469 elif key == '.':
457 470 return self.dirstate.parents()[0]
458 471 elif key == 'null':
459 472 return nullid
460 473 elif key == 'tip':
461 474 return self.changelog.tip()
462 475 n = self.changelog._match(key)
463 476 if n:
464 477 return n
465 478 if key in self.tags():
466 479 return self.tags()[key]
467 480 if key in self.branchtags():
468 481 return self.branchtags()[key]
469 482 n = self.changelog._partialmatch(key)
470 483 if n:
471 484 return n
472 485 try:
473 486 if len(key) == 20:
474 487 key = hex(key)
475 488 except:
476 489 pass
477 490 raise error.RepoError(_("unknown revision '%s'") % key)
478 491
479 492 def local(self):
480 493 return True
481 494
482 495 def join(self, f):
483 496 return os.path.join(self.path, f)
484 497
485 498 def wjoin(self, f):
486 499 return os.path.join(self.root, f)
487 500
488 501 def rjoin(self, f):
489 502 return os.path.join(self.root, util.pconvert(f))
490 503
491 504 def file(self, f):
492 505 if f[0] == '/':
493 506 f = f[1:]
494 507 return filelog.filelog(self.sopener, f)
495 508
496 509 def changectx(self, changeid):
497 510 return self[changeid]
498 511
499 512 def parents(self, changeid=None):
500 513 '''get list of changectxs for parents of changeid'''
501 514 return self[changeid].parents()
502 515
503 516 def filectx(self, path, changeid=None, fileid=None):
504 517 """changeid can be a changeset revision, node, or tag.
505 518 fileid can be a file revision or node."""
506 519 return context.filectx(self, path, changeid, fileid)
507 520
508 521 def getcwd(self):
509 522 return self.dirstate.getcwd()
510 523
511 524 def pathto(self, f, cwd=None):
512 525 return self.dirstate.pathto(f, cwd)
513 526
514 527 def wfile(self, f, mode='r'):
515 528 return self.wopener(f, mode)
516 529
517 530 def _link(self, f):
518 531 return os.path.islink(self.wjoin(f))
519 532
520 533 def _filter(self, filter, filename, data):
521 534 if filter not in self.filterpats:
522 535 l = []
523 536 for pat, cmd in self.ui.configitems(filter):
524 537 if cmd == '!':
525 538 continue
526 539 mf = util.matcher(self.root, "", [pat], [], [])[1]
527 540 fn = None
528 541 params = cmd
529 542 for name, filterfn in self._datafilters.iteritems():
530 543 if cmd.startswith(name):
531 544 fn = filterfn
532 545 params = cmd[len(name):].lstrip()
533 546 break
534 547 if not fn:
535 548 fn = lambda s, c, **kwargs: util.filter(s, c)
536 549 # Wrap old filters not supporting keyword arguments
537 550 if not inspect.getargspec(fn)[2]:
538 551 oldfn = fn
539 552 fn = lambda s, c, **kwargs: oldfn(s, c)
540 553 l.append((mf, fn, params))
541 554 self.filterpats[filter] = l
542 555
543 556 for mf, fn, cmd in self.filterpats[filter]:
544 557 if mf(filename):
545 558 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
546 559 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
547 560 break
548 561
549 562 return data
550 563
551 564 def adddatafilter(self, name, filter):
552 565 self._datafilters[name] = filter
553 566
554 567 def wread(self, filename):
555 568 if self._link(filename):
556 569 data = os.readlink(self.wjoin(filename))
557 570 else:
558 571 data = self.wopener(filename, 'r').read()
559 572 return self._filter("encode", filename, data)
560 573
561 574 def wwrite(self, filename, data, flags):
562 575 data = self._filter("decode", filename, data)
563 576 try:
564 577 os.unlink(self.wjoin(filename))
565 578 except OSError:
566 579 pass
567 580 if 'l' in flags:
568 581 self.wopener.symlink(data, filename)
569 582 else:
570 583 self.wopener(filename, 'w').write(data)
571 584 if 'x' in flags:
572 585 util.set_flags(self.wjoin(filename), False, True)
573 586
574 587 def wwritedata(self, filename, data):
575 588 return self._filter("decode", filename, data)
576 589
577 590 def transaction(self):
578 591 if self._transref and self._transref():
579 592 return self._transref().nest()
580 593
581 594 # abort here if the journal already exists
582 595 if os.path.exists(self.sjoin("journal")):
583 596 raise error.RepoError(_("journal already exists - run hg recover"))
584 597
585 598 # save dirstate for rollback
586 599 try:
587 600 ds = self.opener("dirstate").read()
588 601 except IOError:
589 602 ds = ""
590 603 self.opener("journal.dirstate", "w").write(ds)
591 604 self.opener("journal.branch", "w").write(self.dirstate.branch())
592 605
593 606 renames = [(self.sjoin("journal"), self.sjoin("undo")),
594 607 (self.join("journal.dirstate"), self.join("undo.dirstate")),
595 608 (self.join("journal.branch"), self.join("undo.branch"))]
596 609 tr = transaction.transaction(self.ui.warn, self.sopener,
597 610 self.sjoin("journal"),
598 611 aftertrans(renames),
599 612 self.store.createmode)
600 613 self._transref = weakref.ref(tr)
601 614 return tr
602 615
603 616 def recover(self):
604 617 l = self.lock()
605 618 try:
606 619 if os.path.exists(self.sjoin("journal")):
607 620 self.ui.status(_("rolling back interrupted transaction\n"))
608 621 transaction.rollback(self.sopener, self.sjoin("journal"))
609 622 self.invalidate()
610 623 return True
611 624 else:
612 625 self.ui.warn(_("no interrupted transaction available\n"))
613 626 return False
614 627 finally:
615 628 del l
616 629
617 630 def rollback(self):
618 631 wlock = lock = None
619 632 try:
620 633 wlock = self.wlock()
621 634 lock = self.lock()
622 635 if os.path.exists(self.sjoin("undo")):
623 636 self.ui.status(_("rolling back last transaction\n"))
624 637 transaction.rollback(self.sopener, self.sjoin("undo"))
625 638 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
626 639 try:
627 640 branch = self.opener("undo.branch").read()
628 641 self.dirstate.setbranch(branch)
629 642 except IOError:
630 643 self.ui.warn(_("Named branch could not be reset, "
631 644 "current branch still is: %s\n")
632 645 % util.tolocal(self.dirstate.branch()))
633 646 self.invalidate()
634 647 self.dirstate.invalidate()
635 648 else:
636 649 self.ui.warn(_("no rollback information available\n"))
637 650 finally:
638 651 del lock, wlock
639 652
640 653 def invalidate(self):
641 654 for a in "changelog manifest".split():
642 655 if a in self.__dict__:
643 656 delattr(self, a)
644 657 self.tagscache = None
645 658 self._tagstypecache = None
646 659 self.nodetagscache = None
647 660 self.branchcache = None
648 661 self._ubranchcache = None
649 662 self._branchcachetip = None
650 663
651 664 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
652 665 try:
653 666 l = lock.lock(lockname, 0, releasefn, desc=desc)
654 667 except error.LockHeld, inst:
655 668 if not wait:
656 669 raise
657 670 self.ui.warn(_("waiting for lock on %s held by %r\n") %
658 671 (desc, inst.locker))
659 672 # default to 600 seconds timeout
660 673 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
661 674 releasefn, desc=desc)
662 675 if acquirefn:
663 676 acquirefn()
664 677 return l
665 678
666 679 def lock(self, wait=True):
667 680 if self._lockref and self._lockref():
668 681 return self._lockref()
669 682
670 683 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
671 684 _('repository %s') % self.origroot)
672 685 self._lockref = weakref.ref(l)
673 686 return l
674 687
675 688 def wlock(self, wait=True):
676 689 if self._wlockref and self._wlockref():
677 690 return self._wlockref()
678 691
679 692 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
680 693 self.dirstate.invalidate, _('working directory of %s') %
681 694 self.origroot)
682 695 self._wlockref = weakref.ref(l)
683 696 return l
684 697
685 698 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
686 699 """
687 700 commit an individual file as part of a larger transaction
688 701 """
689 702
690 703 fn = fctx.path()
691 704 t = fctx.data()
692 705 fl = self.file(fn)
693 706 fp1 = manifest1.get(fn, nullid)
694 707 fp2 = manifest2.get(fn, nullid)
695 708
696 709 meta = {}
697 710 cp = fctx.renamed()
698 711 if cp and cp[0] != fn:
699 712 # Mark the new revision of this file as a copy of another
700 713 # file. This copy data will effectively act as a parent
701 714 # of this new revision. If this is a merge, the first
702 715 # parent will be the nullid (meaning "look up the copy data")
703 716 # and the second one will be the other parent. For example:
704 717 #
705 718 # 0 --- 1 --- 3 rev1 changes file foo
706 719 # \ / rev2 renames foo to bar and changes it
707 720 # \- 2 -/ rev3 should have bar with all changes and
708 721 # should record that bar descends from
709 722 # bar in rev2 and foo in rev1
710 723 #
711 724 # this allows this merge to succeed:
712 725 #
713 726 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
714 727 # \ / merging rev3 and rev4 should use bar@rev2
715 728 # \- 2 --- 4 as the merge base
716 729 #
717 730
718 731 cf = cp[0]
719 732 cr = manifest1.get(cf)
720 733 nfp = fp2
721 734
722 735 if manifest2: # branch merge
723 736 if fp2 == nullid: # copied on remote side
724 737 if fp1 != nullid or cf in manifest2:
725 738 cr = manifest2[cf]
726 739 nfp = fp1
727 740
728 741 # find source in nearest ancestor if we've lost track
729 742 if not cr:
730 743 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
731 744 (fn, cf))
732 745 for a in self['.'].ancestors():
733 746 if cf in a:
734 747 cr = a[cf].filenode()
735 748 break
736 749
737 750 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
738 751 meta["copy"] = cf
739 752 meta["copyrev"] = hex(cr)
740 753 fp1, fp2 = nullid, nfp
741 754 elif fp2 != nullid:
742 755 # is one parent an ancestor of the other?
743 756 fpa = fl.ancestor(fp1, fp2)
744 757 if fpa == fp1:
745 758 fp1, fp2 = fp2, nullid
746 759 elif fpa == fp2:
747 760 fp2 = nullid
748 761
749 762 # is the file unmodified from the parent? report existing entry
750 763 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
751 764 return fp1
752 765
753 766 changelist.append(fn)
754 767 return fl.add(t, meta, tr, linkrev, fp1, fp2)
755 768
756 769 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
757 770 if p1 is None:
758 771 p1, p2 = self.dirstate.parents()
759 772 return self.commit(files=files, text=text, user=user, date=date,
760 773 p1=p1, p2=p2, extra=extra, empty_ok=True)
761 774
762 775 def commit(self, files=None, text="", user=None, date=None,
763 776 match=None, force=False, force_editor=False,
764 777 p1=None, p2=None, extra={}, empty_ok=False):
765 778 wlock = lock = None
766 779 if extra.get("close"):
767 780 force = True
768 781 if files:
769 782 files = util.unique(files)
770 783 try:
771 784 wlock = self.wlock()
772 785 lock = self.lock()
773 786 use_dirstate = (p1 is None) # not rawcommit
774 787
775 788 if use_dirstate:
776 789 p1, p2 = self.dirstate.parents()
777 790 update_dirstate = True
778 791
779 792 if (not force and p2 != nullid and
780 793 (match and (match.files() or match.anypats()))):
781 794 raise util.Abort(_('cannot partially commit a merge '
782 795 '(do not specify files or patterns)'))
783 796
784 797 if files:
785 798 modified, removed = [], []
786 799 for f in files:
787 800 s = self.dirstate[f]
788 801 if s in 'nma':
789 802 modified.append(f)
790 803 elif s == 'r':
791 804 removed.append(f)
792 805 else:
793 806 self.ui.warn(_("%s not tracked!\n") % f)
794 807 changes = [modified, [], removed, [], []]
795 808 else:
796 809 changes = self.status(match=match)
797 810 else:
798 811 p1, p2 = p1, p2 or nullid
799 812 update_dirstate = (self.dirstate.parents()[0] == p1)
800 813 changes = [files, [], [], [], []]
801 814
802 815 ms = merge_.mergestate(self)
803 816 for f in changes[0]:
804 817 if f in ms and ms[f] == 'u':
805 818 raise util.Abort(_("unresolved merge conflicts "
806 819 "(see hg resolve)"))
807 820 wctx = context.workingctx(self, (p1, p2), text, user, date,
808 821 extra, changes)
809 822 return self._commitctx(wctx, force, force_editor, empty_ok,
810 823 use_dirstate, update_dirstate)
811 824 finally:
812 825 del lock, wlock
813 826
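The branch-closing behaviour in commit() above and _commitctx() below keys off the 'close' entry of the extra dict; the plumbing from the --close-branch flag to that dict lives in commands.py and is not part of this hunk, so the call below is only an assumed illustration:

    # hypothetical: commit outstanding changes while marking the branch closed;
    # extra.get('close') forces the commit and is later rejected on 'default'
    repo.commit(text='closing this branch', user=None, date=None,
                extra={'close': 1})
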
814 827 def commitctx(self, ctx):
815 828 """Add a new revision to current repository.
816 829
817 830 Revision information is passed in the context.memctx argument.
818 831 commitctx() does not touch the working directory.
819 832 """
820 833 wlock = lock = None
821 834 try:
822 835 wlock = self.wlock()
823 836 lock = self.lock()
824 837 return self._commitctx(ctx, force=True, force_editor=False,
825 838 empty_ok=True, use_dirstate=False,
826 839 update_dirstate=False)
827 840 finally:
828 841 del lock, wlock
829 842
830 843 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
831 844 use_dirstate=True, update_dirstate=True):
832 845 tr = None
833 846 valid = 0 # don't save the dirstate if this isn't set
834 847 try:
835 848 commit = util.sort(wctx.modified() + wctx.added())
836 849 remove = wctx.removed()
837 850 extra = wctx.extra().copy()
838 851 branchname = extra['branch']
839 852 user = wctx.user()
840 853 text = wctx.description()
841 854
842 855 if branchname == 'default' and extra.get('close'):
843 856 raise util.Abort(_('closing the default branch is invalid'))
844 857 p1, p2 = [p.node() for p in wctx.parents()]
845 858 c1 = self.changelog.read(p1)
846 859 c2 = self.changelog.read(p2)
847 860 m1 = self.manifest.read(c1[0]).copy()
848 861 m2 = self.manifest.read(c2[0])
849 862
850 863 if use_dirstate:
851 864 oldname = c1[5].get("branch") # stored in UTF-8
852 865 if (not commit and not remove and not force and p2 == nullid
853 866 and branchname == oldname):
854 867 self.ui.status(_("nothing changed\n"))
855 868 return None
856 869
857 870 xp1 = hex(p1)
858 871 if p2 == nullid: xp2 = ''
859 872 else: xp2 = hex(p2)
860 873
861 874 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
862 875
863 876 tr = self.transaction()
864 877 trp = weakref.proxy(tr)
865 878
866 879 # check in files
867 880 new = {}
868 881 changed = []
869 882 linkrev = len(self)
870 883 for f in commit:
871 884 self.ui.note(f + "\n")
872 885 try:
873 886 fctx = wctx.filectx(f)
874 887 newflags = fctx.flags()
875 888 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
876 889 if ((not changed or changed[-1] != f) and
877 890 m2.get(f) != new[f]):
878 891 # mention the file in the changelog if some
879 892 # flag changed, even if there was no content
880 893 # change.
881 894 if m1.flags(f) != newflags:
882 895 changed.append(f)
883 896 m1.set(f, newflags)
884 897 if use_dirstate:
885 898 self.dirstate.normal(f)
886 899
887 900 except (OSError, IOError):
888 901 if use_dirstate:
889 902 self.ui.warn(_("trouble committing %s!\n") % f)
890 903 raise
891 904 else:
892 905 remove.append(f)
893 906
894 907 updated, added = [], []
895 908 for f in util.sort(changed):
896 909 if f in m1 or f in m2:
897 910 updated.append(f)
898 911 else:
899 912 added.append(f)
900 913
901 914 # update manifest
902 915 m1.update(new)
903 916 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
904 917 removed1 = []
905 918
906 919 for f in removed:
907 920 if f in m1:
908 921 del m1[f]
909 922 removed1.append(f)
910 923 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
911 924 (new, removed1))
912 925
913 926 # add changeset
914 927 if (not empty_ok and not text) or force_editor:
915 928 edittext = []
916 929 if text:
917 930 edittext.append(text)
918 931 edittext.append("")
919 932 edittext.append("") # Empty line between message and comments.
920 933 edittext.append(_("HG: Enter commit message."
921 934 " Lines beginning with 'HG:' are removed."))
922 935 edittext.append("HG: --")
923 936 edittext.append("HG: user: %s" % user)
924 937 if p2 != nullid:
925 938 edittext.append("HG: branch merge")
926 939 if branchname:
927 940 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
928 941 edittext.extend(["HG: added %s" % f for f in added])
929 942 edittext.extend(["HG: changed %s" % f for f in updated])
930 943 edittext.extend(["HG: removed %s" % f for f in removed])
931 944 if not added and not updated and not removed:
932 945 edittext.append("HG: no files changed")
933 946 edittext.append("")
934 947 # run editor in the repository root
935 948 olddir = os.getcwd()
936 949 os.chdir(self.root)
937 950 text = self.ui.edit("\n".join(edittext), user)
938 951 os.chdir(olddir)
939 952
940 953 lines = [line.rstrip() for line in text.rstrip().splitlines()]
941 954 while lines and not lines[0]:
942 955 del lines[0]
943 956 if not lines and use_dirstate:
944 957 raise util.Abort(_("empty commit message"))
945 958 text = '\n'.join(lines)
946 959
947 960 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
948 961 user, wctx.date(), extra)
949 962 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
950 963 parent2=xp2)
951 964 tr.close()
952 965
953 966 if self.branchcache:
954 967 self.branchtags()
955 968
956 969 if use_dirstate or update_dirstate:
957 970 self.dirstate.setparents(n)
958 971 if use_dirstate:
959 972 for f in removed:
960 973 self.dirstate.forget(f)
961 974 valid = 1 # our dirstate updates are complete
962 975
963 976 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
964 977 return n
965 978 finally:
966 979 if not valid: # don't save our updated dirstate
967 980 self.dirstate.invalidate()
968 981 del tr
969 982
970 983 def walk(self, match, node=None):
971 984 '''
972 985 walk recursively through the directory tree or a given
973 986 changeset, finding all files matched by the match
974 987 function
975 988 '''
976 989 return self[node].walk(match)
977 990
978 991 def status(self, node1='.', node2=None, match=None,
979 992 ignored=False, clean=False, unknown=False):
980 993 """return status of files between two nodes or node and working directory
981 994
982 995 If node1 is None, use the first dirstate parent instead.
983 996 If node2 is None, compare node1 with working directory.
984 997 """
985 998
986 999 def mfmatches(ctx):
987 1000 mf = ctx.manifest().copy()
988 1001 for fn in mf.keys():
989 1002 if not match(fn):
990 1003 del mf[fn]
991 1004 return mf
992 1005
993 1006 if isinstance(node1, context.changectx):
994 1007 ctx1 = node1
995 1008 else:
996 1009 ctx1 = self[node1]
997 1010 if isinstance(node2, context.changectx):
998 1011 ctx2 = node2
999 1012 else:
1000 1013 ctx2 = self[node2]
1001 1014
1002 1015 working = ctx2.rev() is None
1003 1016 parentworking = working and ctx1 == self['.']
1004 1017 match = match or match_.always(self.root, self.getcwd())
1005 1018 listignored, listclean, listunknown = ignored, clean, unknown
1006 1019
1007 1020 # load earliest manifest first for caching reasons
1008 1021 if not working and ctx2.rev() < ctx1.rev():
1009 1022 ctx2.manifest()
1010 1023
1011 1024 if not parentworking:
1012 1025 def bad(f, msg):
1013 1026 if f not in ctx1:
1014 1027 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1015 1028 return False
1016 1029 match.bad = bad
1017 1030
1018 1031 if working: # we need to scan the working dir
1019 1032 s = self.dirstate.status(match, listignored, listclean, listunknown)
1020 1033 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1021 1034
1022 1035 # check for any possibly clean files
1023 1036 if parentworking and cmp:
1024 1037 fixup = []
1025 1038 # do a full compare of any files that might have changed
1026 1039 for f in cmp:
1027 1040 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1028 1041 or ctx1[f].cmp(ctx2[f].data())):
1029 1042 modified.append(f)
1030 1043 else:
1031 1044 fixup.append(f)
1032 1045
1033 1046 if listclean:
1034 1047 clean += fixup
1035 1048
1036 1049 # update dirstate for files that are actually clean
1037 1050 if fixup:
1038 1051 wlock = None
1039 1052 try:
1040 1053 try:
1041 1054 wlock = self.wlock(False)
1042 1055 for f in fixup:
1043 1056 self.dirstate.normal(f)
1044 1057 except lock.LockError:
1045 1058 pass
1046 1059 finally:
1047 1060 del wlock
1048 1061
1049 1062 if not parentworking:
1050 1063 mf1 = mfmatches(ctx1)
1051 1064 if working:
1052 1065 # we are comparing working dir against non-parent
1053 1066 # generate a pseudo-manifest for the working dir
1054 1067 mf2 = mfmatches(self['.'])
1055 1068 for f in cmp + modified + added:
1056 1069 mf2[f] = None
1057 1070 mf2.set(f, ctx2.flags(f))
1058 1071 for f in removed:
1059 1072 if f in mf2:
1060 1073 del mf2[f]
1061 1074 else:
1062 1075 # we are comparing two revisions
1063 1076 deleted, unknown, ignored = [], [], []
1064 1077 mf2 = mfmatches(ctx2)
1065 1078
1066 1079 modified, added, clean = [], [], []
1067 1080 for fn in mf2:
1068 1081 if fn in mf1:
1069 1082 if (mf1.flags(fn) != mf2.flags(fn) or
1070 1083 (mf1[fn] != mf2[fn] and
1071 1084 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1072 1085 modified.append(fn)
1073 1086 elif listclean:
1074 1087 clean.append(fn)
1075 1088 del mf1[fn]
1076 1089 else:
1077 1090 added.append(fn)
1078 1091 removed = mf1.keys()
1079 1092
1080 1093 r = modified, added, removed, deleted, unknown, ignored, clean
1081 1094 [l.sort() for l in r]
1082 1095 return r
1083 1096
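A usage note for the status() method above; the revision names are hypothetical and the seven lists come back in the order assembled at the end of the method:

    # working directory against its first parent (the defaults)
    modified, added, removed, deleted, unknown, ignored, clean = repo.status()

    # two committed revisions; clean files are only listed when asked for
    changes = repo.status(node1='1.0', node2='tip', clean=True)
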
1084 1097 def add(self, list):
1085 1098 wlock = self.wlock()
1086 1099 try:
1087 1100 rejected = []
1088 1101 for f in list:
1089 1102 p = self.wjoin(f)
1090 1103 try:
1091 1104 st = os.lstat(p)
1092 1105 except:
1093 1106 self.ui.warn(_("%s does not exist!\n") % f)
1094 1107 rejected.append(f)
1095 1108 continue
1096 1109 if st.st_size > 10000000:
1097 1110 self.ui.warn(_("%s: files over 10MB may cause memory and"
1098 1111 " performance problems\n"
1099 1112 "(use 'hg revert %s' to unadd the file)\n")
1100 1113 % (f, f))
1101 1114 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1102 1115 self.ui.warn(_("%s not added: only files and symlinks "
1103 1116 "supported currently\n") % f)
1104 1117 rejected.append(p)
1105 1118 elif self.dirstate[f] in 'amn':
1106 1119 self.ui.warn(_("%s already tracked!\n") % f)
1107 1120 elif self.dirstate[f] == 'r':
1108 1121 self.dirstate.normallookup(f)
1109 1122 else:
1110 1123 self.dirstate.add(f)
1111 1124 return rejected
1112 1125 finally:
1113 1126 del wlock
1114 1127
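# Informal reminder (added commentary, not from this changeset): the
# one-letter dirstate states tested in add/forget/remove/undelete/copy are
# believed to be 'n' normal, 'a' added, 'r' removed, 'm' merged and
# '?' untracked.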
1115 1128 def forget(self, list):
1116 1129 wlock = self.wlock()
1117 1130 try:
1118 1131 for f in list:
1119 1132 if self.dirstate[f] != 'a':
1120 1133 self.ui.warn(_("%s not added!\n") % f)
1121 1134 else:
1122 1135 self.dirstate.forget(f)
1123 1136 finally:
1124 1137 del wlock
1125 1138
1126 1139 def remove(self, list, unlink=False):
1127 1140 wlock = None
1128 1141 try:
1129 1142 if unlink:
1130 1143 for f in list:
1131 1144 try:
1132 1145 util.unlink(self.wjoin(f))
1133 1146 except OSError, inst:
1134 1147 if inst.errno != errno.ENOENT:
1135 1148 raise
1136 1149 wlock = self.wlock()
1137 1150 for f in list:
1138 1151 if unlink and os.path.exists(self.wjoin(f)):
1139 1152 self.ui.warn(_("%s still exists!\n") % f)
1140 1153 elif self.dirstate[f] == 'a':
1141 1154 self.dirstate.forget(f)
1142 1155 elif f not in self.dirstate:
1143 1156 self.ui.warn(_("%s not tracked!\n") % f)
1144 1157 else:
1145 1158 self.dirstate.remove(f)
1146 1159 finally:
1147 1160 del wlock
1148 1161
1149 1162 def undelete(self, list):
1150 1163 wlock = None
1151 1164 try:
1152 1165 manifests = [self.manifest.read(self.changelog.read(p)[0])
1153 1166 for p in self.dirstate.parents() if p != nullid]
1154 1167 wlock = self.wlock()
1155 1168 for f in list:
1156 1169 if self.dirstate[f] != 'r':
1157 1170 self.ui.warn(_("%s not removed!\n") % f)
1158 1171 else:
1159 1172 m = f in manifests[0] and manifests[0] or manifests[1]
1160 1173 t = self.file(f).read(m[f])
1161 1174 self.wwrite(f, t, m.flags(f))
1162 1175 self.dirstate.normal(f)
1163 1176 finally:
1164 1177 del wlock
1165 1178
1166 1179 def copy(self, source, dest):
1167 1180 wlock = None
1168 1181 try:
1169 1182 p = self.wjoin(dest)
1170 1183 if not (os.path.exists(p) or os.path.islink(p)):
1171 1184 self.ui.warn(_("%s does not exist!\n") % dest)
1172 1185 elif not (os.path.isfile(p) or os.path.islink(p)):
1173 1186 self.ui.warn(_("copy failed: %s is not a file or a "
1174 1187 "symbolic link\n") % dest)
1175 1188 else:
1176 1189 wlock = self.wlock()
1177 1190 if self.dirstate[dest] in '?r':
1178 1191 self.dirstate.add(dest)
1179 1192 self.dirstate.copy(source, dest)
1180 1193 finally:
1181 1194 del wlock
1182 1195
1183 def heads(self, start=None):
1196 def heads(self, start=None, closed=True):
1184 1197 heads = self.changelog.heads(start)
1198 def display(head):
1199 if closed:
1200 return True
1201 extras = self.changelog.read(head)[5]
1202 return ('close' not in extras)
1185 1203 # sort the output in rev descending order
1186 heads = [(-self.changelog.rev(h), h) for h in heads]
1204 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1187 1205 return [n for (r, n) in util.sort(heads)]
1188 1206
1189 def branchheads(self, branch=None, start=None):
1207 def branchheads(self, branch=None, start=None, closed=True):
1190 1208 if branch is None:
1191 1209 branch = self[None].branch()
1192 1210 branches = self._branchheads()
1193 1211 if branch not in branches:
1194 1212 return []
1195 1213 bheads = branches[branch]
1196 1214 # the cache returns heads ordered lowest to highest
1197 1215 bheads.reverse()
1198 1216 if start is not None:
1199 1217 # filter out the heads that cannot be reached from startrev
1200 1218 bheads = self.changelog.nodesbetween([start], bheads)[2]
1219 if not closed:
1220 bheads = [h for h in bheads if
1221 ('close' not in self.changelog.read(h)[5])]
1201 1222 return bheads
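# Illustrative sketch of the new 'closed' flag (hypothetical caller, not part
# of the diff): a client that only wants heads of branches that have not been
# closed could do
#   open_heads = repo.heads(closed=False)
#   open_branch_heads = repo.branchheads('default', closed=False)
# while the default closed=True keeps the previous behaviour and also returns
# heads whose changeset extras carry the 'close' marker.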
1202 1223
1203 1224 def branches(self, nodes):
1204 1225 if not nodes:
1205 1226 nodes = [self.changelog.tip()]
1206 1227 b = []
1207 1228 for n in nodes:
1208 1229 t = n
1209 1230 while 1:
1210 1231 p = self.changelog.parents(n)
1211 1232 if p[1] != nullid or p[0] == nullid:
1212 1233 b.append((t, n, p[0], p[1]))
1213 1234 break
1214 1235 n = p[0]
1215 1236 return b
1216 1237
1217 1238 def between(self, pairs):
1218 1239 r = []
1219 1240
1220 1241 for top, bottom in pairs:
1221 1242 n, l, i = top, [], 0
1222 1243 f = 1
1223 1244
1224 1245 while n != bottom:
1225 1246 p = self.changelog.parents(n)[0]
1226 1247 if i == f:
1227 1248 l.append(n)
1228 1249 f = f * 2
1229 1250 n = p
1230 1251 i += 1
1231 1252
1232 1253 r.append(l)
1233 1254
1234 1255 return r
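# Added note (an informal reading of the loop above): for each (top, bottom)
# pair the returned list samples first-parent ancestors of top at
# exponentially growing distances (1, 2, 4, ...), which is what the discovery
# code in findcommonincoming uses when it binary-searches a branch.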
1235 1256
1236 1257 def findincoming(self, remote, base=None, heads=None, force=False):
1237 1258 """Return list of roots of the subsets of missing nodes from remote
1238 1259
1239 1260 If base dict is specified, assume that these nodes and their parents
1240 1261 exist on the remote side and that no child of a node of base exists
1241 1262 in both remote and self.
1242 1263 Furthermore base will be updated to include the nodes that exist
1243 1264 in self and remote but whose children do not exist in self and remote.
1244 1265 If a list of heads is specified, return only nodes which are heads
1245 1266 or ancestors of these heads.
1246 1267
1247 1268 All the ancestors of base are in self and in remote.
1248 1269 All the descendants of the list returned are missing in self.
1249 1270 (and so we know that the rest of the nodes are missing in remote, see
1250 1271 outgoing)
1251 1272 """
1252 1273 return self.findcommonincoming(remote, base, heads, force)[1]
1253 1274
1254 1275 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1255 1276 """Return a tuple (common, missing roots, heads) used to identify
1256 1277 missing nodes from remote.
1257 1278
1258 1279 If base dict is specified, assume that these nodes and their parents
1259 1280 exist on the remote side and that no child of a node of base exists
1260 1281 in both remote and self.
1261 1282 Furthermore base will be updated to include the nodes that exist
1262 1283 in self and remote but whose children do not exist in self and remote.
1263 1284 If a list of heads is specified, return only nodes which are heads
1264 1285 or ancestors of these heads.
1265 1286
1266 1287 All the ancestors of base are in self and in remote.
1267 1288 """
1268 1289 m = self.changelog.nodemap
1269 1290 search = []
1270 1291 fetch = {}
1271 1292 seen = {}
1272 1293 seenbranch = {}
1273 1294 if base == None:
1274 1295 base = {}
1275 1296
1276 1297 if not heads:
1277 1298 heads = remote.heads()
1278 1299
1279 1300 if self.changelog.tip() == nullid:
1280 1301 base[nullid] = 1
1281 1302 if heads != [nullid]:
1282 1303 return [nullid], [nullid], list(heads)
1283 1304 return [nullid], [], []
1284 1305
1285 1306 # assume we're closer to the tip than the root
1286 1307 # and start by examining the heads
1287 1308 self.ui.status(_("searching for changes\n"))
1288 1309
1289 1310 unknown = []
1290 1311 for h in heads:
1291 1312 if h not in m:
1292 1313 unknown.append(h)
1293 1314 else:
1294 1315 base[h] = 1
1295 1316
1296 1317 heads = unknown
1297 1318 if not unknown:
1298 1319 return base.keys(), [], []
1299 1320
1300 1321 req = dict.fromkeys(unknown)
1301 1322 reqcnt = 0
1302 1323
1303 1324 # search through remote branches
1304 1325 # a 'branch' here is a linear segment of history, with four parts:
1305 1326 # head, root, first parent, second parent
1306 1327 # (a branch always has two parents (or none) by definition)
1307 1328 unknown = remote.branches(unknown)
1308 1329 while unknown:
1309 1330 r = []
1310 1331 while unknown:
1311 1332 n = unknown.pop(0)
1312 1333 if n[0] in seen:
1313 1334 continue
1314 1335
1315 1336 self.ui.debug(_("examining %s:%s\n")
1316 1337 % (short(n[0]), short(n[1])))
1317 1338 if n[0] == nullid: # found the end of the branch
1318 1339 pass
1319 1340 elif n in seenbranch:
1320 1341 self.ui.debug(_("branch already found\n"))
1321 1342 continue
1322 1343 elif n[1] and n[1] in m: # do we know the base?
1323 1344 self.ui.debug(_("found incomplete branch %s:%s\n")
1324 1345 % (short(n[0]), short(n[1])))
1325 1346 search.append(n[0:2]) # schedule branch range for scanning
1326 1347 seenbranch[n] = 1
1327 1348 else:
1328 1349 if n[1] not in seen and n[1] not in fetch:
1329 1350 if n[2] in m and n[3] in m:
1330 1351 self.ui.debug(_("found new changeset %s\n") %
1331 1352 short(n[1]))
1332 1353 fetch[n[1]] = 1 # earliest unknown
1333 1354 for p in n[2:4]:
1334 1355 if p in m:
1335 1356 base[p] = 1 # latest known
1336 1357
1337 1358 for p in n[2:4]:
1338 1359 if p not in req and p not in m:
1339 1360 r.append(p)
1340 1361 req[p] = 1
1341 1362 seen[n[0]] = 1
1342 1363
1343 1364 if r:
1344 1365 reqcnt += 1
1345 1366 self.ui.debug(_("request %d: %s\n") %
1346 1367 (reqcnt, " ".join(map(short, r))))
1347 1368 for p in xrange(0, len(r), 10):
1348 1369 for b in remote.branches(r[p:p+10]):
1349 1370 self.ui.debug(_("received %s:%s\n") %
1350 1371 (short(b[0]), short(b[1])))
1351 1372 unknown.append(b)
1352 1373
1353 1374 # do binary search on the branches we found
1354 1375 while search:
1355 1376 newsearch = []
1356 1377 reqcnt += 1
1357 1378 for n, l in zip(search, remote.between(search)):
1358 1379 l.append(n[1])
1359 1380 p = n[0]
1360 1381 f = 1
1361 1382 for i in l:
1362 1383 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1363 1384 if i in m:
1364 1385 if f <= 2:
1365 1386 self.ui.debug(_("found new branch changeset %s\n") %
1366 1387 short(p))
1367 1388 fetch[p] = 1
1368 1389 base[i] = 1
1369 1390 else:
1370 1391 self.ui.debug(_("narrowed branch search to %s:%s\n")
1371 1392 % (short(p), short(i)))
1372 1393 newsearch.append((p, i))
1373 1394 break
1374 1395 p, f = i, f * 2
1375 1396 search = newsearch
1376 1397
1377 1398 # sanity check our fetch list
1378 1399 for f in fetch.keys():
1379 1400 if f in m:
1380 1401 raise error.RepoError(_("already have changeset ")
1381 1402 + short(f[:4]))
1382 1403
1383 1404 if base.keys() == [nullid]:
1384 1405 if force:
1385 1406 self.ui.warn(_("warning: repository is unrelated\n"))
1386 1407 else:
1387 1408 raise util.Abort(_("repository is unrelated"))
1388 1409
1389 1410 self.ui.debug(_("found new changesets starting at ") +
1390 1411 " ".join([short(f) for f in fetch]) + "\n")
1391 1412
1392 1413 self.ui.debug(_("%d total queries\n") % reqcnt)
1393 1414
1394 1415 return base.keys(), fetch.keys(), heads
1395 1416
1396 1417 def findoutgoing(self, remote, base=None, heads=None, force=False):
1397 1418 """Return list of nodes that are roots of subsets not in remote
1398 1419
1399 1420 If base dict is specified, assume that these nodes and their parents
1400 1421 exist on the remote side.
1401 1422 If a list of heads is specified, return only nodes which are heads
1402 1423 or ancestors of these heads, and return a second element which
1403 1424 contains all remote heads which get new children.
1404 1425 """
1405 1426 if base == None:
1406 1427 base = {}
1407 1428 self.findincoming(remote, base, heads, force=force)
1408 1429
1409 1430 self.ui.debug(_("common changesets up to ")
1410 1431 + " ".join(map(short, base.keys())) + "\n")
1411 1432
1412 1433 remain = dict.fromkeys(self.changelog.nodemap)
1413 1434
1414 1435 # prune everything remote has from the tree
1415 1436 del remain[nullid]
1416 1437 remove = base.keys()
1417 1438 while remove:
1418 1439 n = remove.pop(0)
1419 1440 if n in remain:
1420 1441 del remain[n]
1421 1442 for p in self.changelog.parents(n):
1422 1443 remove.append(p)
1423 1444
1424 1445 # find every node whose parents have been pruned
1425 1446 subset = []
1426 1447 # find every remote head that will get new children
1427 1448 updated_heads = {}
1428 1449 for n in remain:
1429 1450 p1, p2 = self.changelog.parents(n)
1430 1451 if p1 not in remain and p2 not in remain:
1431 1452 subset.append(n)
1432 1453 if heads:
1433 1454 if p1 in heads:
1434 1455 updated_heads[p1] = True
1435 1456 if p2 in heads:
1436 1457 updated_heads[p2] = True
1437 1458
1438 1459 # this is the set of all roots we have to push
1439 1460 if heads:
1440 1461 return subset, updated_heads.keys()
1441 1462 else:
1442 1463 return subset
1443 1464
1444 1465 def pull(self, remote, heads=None, force=False):
1445 1466 lock = self.lock()
1446 1467 try:
1447 1468 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1448 1469 force=force)
1449 1470 if fetch == [nullid]:
1450 1471 self.ui.status(_("requesting all changes\n"))
1451 1472
1452 1473 if not fetch:
1453 1474 self.ui.status(_("no changes found\n"))
1454 1475 return 0
1455 1476
1456 1477 if heads is None and remote.capable('changegroupsubset'):
1457 1478 heads = rheads
1458 1479
1459 1480 if heads is None:
1460 1481 cg = remote.changegroup(fetch, 'pull')
1461 1482 else:
1462 1483 if not remote.capable('changegroupsubset'):
1463 1484 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1464 1485 cg = remote.changegroupsubset(fetch, heads, 'pull')
1465 1486 return self.addchangegroup(cg, 'pull', remote.url())
1466 1487 finally:
1467 1488 del lock
1468 1489
1469 1490 def push(self, remote, force=False, revs=None):
1470 1491 # there are two ways to push to remote repo:
1471 1492 #
1472 1493 # addchangegroup assumes local user can lock remote
1473 1494 # repo (local filesystem, old ssh servers).
1474 1495 #
1475 1496 # unbundle assumes local user cannot lock remote repo (new ssh
1476 1497 # servers, http servers).
1477 1498
1478 1499 if remote.capable('unbundle'):
1479 1500 return self.push_unbundle(remote, force, revs)
1480 1501 return self.push_addchangegroup(remote, force, revs)
1481 1502
1482 1503 def prepush(self, remote, force, revs):
1483 1504 common = {}
1484 1505 remote_heads = remote.heads()
1485 1506 inc = self.findincoming(remote, common, remote_heads, force=force)
1486 1507
1487 1508 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1488 1509 if revs is not None:
1489 1510 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1490 1511 else:
1491 1512 bases, heads = update, self.changelog.heads()
1492 1513
1493 1514 if not bases:
1494 1515 self.ui.status(_("no changes found\n"))
1495 1516 return None, 1
1496 1517 elif not force:
1497 1518 # check if we're creating new remote heads
1498 1519 # to be a remote head after push, node must be either
1499 1520 # - unknown locally
1500 1521 # - a local outgoing head descended from update
1501 1522 # - a remote head that's known locally and not
1502 1523 # ancestral to an outgoing head
1503 1524
1504 1525 warn = 0
1505 1526
1506 1527 if remote_heads == [nullid]:
1507 1528 warn = 0
1508 1529 elif not revs and len(heads) > len(remote_heads):
1509 1530 warn = 1
1510 1531 else:
1511 1532 newheads = list(heads)
1512 1533 for r in remote_heads:
1513 1534 if r in self.changelog.nodemap:
1514 1535 desc = self.changelog.heads(r, heads)
1515 1536 l = [h for h in heads if h in desc]
1516 1537 if not l:
1517 1538 newheads.append(r)
1518 1539 else:
1519 1540 newheads.append(r)
1520 1541 if len(newheads) > len(remote_heads):
1521 1542 warn = 1
1522 1543
1523 1544 if warn:
1524 1545 self.ui.warn(_("abort: push creates new remote heads!\n"))
1525 1546 self.ui.status(_("(did you forget to merge?"
1526 1547 " use push -f to force)\n"))
1527 1548 return None, 0
1528 1549 elif inc:
1529 1550 self.ui.warn(_("note: unsynced remote changes!\n"))
1530 1551
1531 1552
1532 1553 if revs is None:
1533 1554 # use the fast path, no race possible on push
1534 1555 cg = self._changegroup(common.keys(), 'push')
1535 1556 else:
1536 1557 cg = self.changegroupsubset(update, revs, 'push')
1537 1558 return cg, remote_heads
1538 1559
1539 1560 def push_addchangegroup(self, remote, force, revs):
1540 1561 lock = remote.lock()
1541 1562 try:
1542 1563 ret = self.prepush(remote, force, revs)
1543 1564 if ret[0] is not None:
1544 1565 cg, remote_heads = ret
1545 1566 return remote.addchangegroup(cg, 'push', self.url())
1546 1567 return ret[1]
1547 1568 finally:
1548 1569 del lock
1549 1570
1550 1571 def push_unbundle(self, remote, force, revs):
1551 1572 # local repo finds heads on server, finds out what revs it
1552 1573 # must push. once revs transferred, if server finds it has
1553 1574 # different heads (someone else won commit/push race), server
1554 1575 # aborts.
1555 1576
1556 1577 ret = self.prepush(remote, force, revs)
1557 1578 if ret[0] is not None:
1558 1579 cg, remote_heads = ret
1559 1580 if force: remote_heads = ['force']
1560 1581 return remote.unbundle(cg, remote_heads, 'push')
1561 1582 return ret[1]
1562 1583
1563 1584 def changegroupinfo(self, nodes, source):
1564 1585 if self.ui.verbose or source == 'bundle':
1565 1586 self.ui.status(_("%d changesets found\n") % len(nodes))
1566 1587 if self.ui.debugflag:
1567 1588 self.ui.debug(_("list of changesets:\n"))
1568 1589 for node in nodes:
1569 1590 self.ui.debug("%s\n" % hex(node))
1570 1591
1571 1592 def changegroupsubset(self, bases, heads, source, extranodes=None):
1572 1593 """This function generates a changegroup consisting of all the nodes
1573 1594 that are descendants of any of the bases, and ancestors of any of
1574 1595 the heads.
1575 1596
1576 1597 It is fairly complex as determining which filenodes and which
1577 1598 manifest nodes need to be included for the changeset to be complete
1578 1599 is non-trivial.
1579 1600
1580 1601 Another wrinkle is doing the reverse, figuring out which changeset in
1581 1602 the changegroup a particular filenode or manifestnode belongs to.
1582 1603
1583 1604 The caller can specify some nodes that must be included in the
1584 1605 changegroup using the extranodes argument. It should be a dict
1585 1606 where the keys are the filenames (or 1 for the manifest), and the
1586 1607 values are lists of (node, linknode) tuples, where node is a wanted
1587 1608 node and linknode is the changelog node that should be transmitted as
1588 1609 the linkrev.
1589 1610 """
1590 1611
1591 1612 if extranodes is None:
1592 1613 # can we go through the fast path ?
1593 1614 heads.sort()
1594 1615 allheads = self.heads()
1595 1616 allheads.sort()
1596 1617 if heads == allheads:
1597 1618 common = []
1598 1619 # parents of bases are known from both sides
1599 1620 for n in bases:
1600 1621 for p in self.changelog.parents(n):
1601 1622 if p != nullid:
1602 1623 common.append(p)
1603 1624 return self._changegroup(common, source)
1604 1625
1605 1626 self.hook('preoutgoing', throw=True, source=source)
1606 1627
1607 1628 # Set up some initial variables
1608 1629 # Make it easy to refer to self.changelog
1609 1630 cl = self.changelog
1610 1631 # msng is short for missing - compute the list of changesets in this
1611 1632 # changegroup.
1612 1633 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1613 1634 self.changegroupinfo(msng_cl_lst, source)
1614 1635 # Some bases may turn out to be superfluous, and some heads may be
1615 1636 # too. nodesbetween will return the minimal set of bases and heads
1616 1637 # necessary to re-create the changegroup.
1617 1638
1618 1639 # Known heads are the list of heads that it is assumed the recipient
1619 1640 # of this changegroup will know about.
1620 1641 knownheads = {}
1621 1642 # We assume that all parents of bases are known heads.
1622 1643 for n in bases:
1623 1644 for p in cl.parents(n):
1624 1645 if p != nullid:
1625 1646 knownheads[p] = 1
1626 1647 knownheads = knownheads.keys()
1627 1648 if knownheads:
1628 1649 # Now that we know what heads are known, we can compute which
1629 1650 # changesets are known. The recipient must know about all
1630 1651 # changesets required to reach the known heads from the null
1631 1652 # changeset.
1632 1653 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1633 1654 junk = None
1634 1655 # Transform the list into an ersatz set.
1635 1656 has_cl_set = dict.fromkeys(has_cl_set)
1636 1657 else:
1637 1658 # If there were no known heads, the recipient cannot be assumed to
1638 1659 # know about any changesets.
1639 1660 has_cl_set = {}
1640 1661
1641 1662 # Make it easy to refer to self.manifest
1642 1663 mnfst = self.manifest
1643 1664 # We don't know which manifests are missing yet
1644 1665 msng_mnfst_set = {}
1645 1666 # Nor do we know which filenodes are missing.
1646 1667 msng_filenode_set = {}
1647 1668
1648 1669 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1649 1670 junk = None
1650 1671
1651 1672 # A changeset always belongs to itself, so the changenode lookup
1652 1673 # function for a changenode is identity.
1653 1674 def identity(x):
1654 1675 return x
1655 1676
1656 1677 # A function generating function. Sets up an environment for the
1657 1678 # inner function.
1658 1679 def cmp_by_rev_func(revlog):
1659 1680 # Compare two nodes by their revision number in the environment's
1660 1681 # revision history. Since the revision number both represents the
1661 1682 # most efficient order to read the nodes in, and represents a
1662 1683 # topological sorting of the nodes, this function is often useful.
1663 1684 def cmp_by_rev(a, b):
1664 1685 return cmp(revlog.rev(a), revlog.rev(b))
1665 1686 return cmp_by_rev
1666 1687
1667 1688 # If we determine that a particular file or manifest node must be a
1668 1689 # node that the recipient of the changegroup will already have, we can
1669 1690 # also assume the recipient will have all the parents. This function
1670 1691 # prunes them from the set of missing nodes.
1671 1692 def prune_parents(revlog, hasset, msngset):
1672 1693 haslst = hasset.keys()
1673 1694 haslst.sort(cmp_by_rev_func(revlog))
1674 1695 for node in haslst:
1675 1696 parentlst = [p for p in revlog.parents(node) if p != nullid]
1676 1697 while parentlst:
1677 1698 n = parentlst.pop()
1678 1699 if n not in hasset:
1679 1700 hasset[n] = 1
1680 1701 p = [p for p in revlog.parents(n) if p != nullid]
1681 1702 parentlst.extend(p)
1682 1703 for n in hasset:
1683 1704 msngset.pop(n, None)
1684 1705
1685 1706 # This is a function generating function used to set up an environment
1686 1707 # for the inner function to execute in.
1687 1708 def manifest_and_file_collector(changedfileset):
1688 1709 # This is an information gathering function that gathers
1689 1710 # information from each changeset node that goes out as part of
1690 1711 # the changegroup. The information gathered is a list of which
1691 1712 # manifest nodes are potentially required (the recipient may
1692 1713 # already have them) and total list of all files which were
1693 1714 # changed in any changeset in the changegroup.
1694 1715 #
1695 1716 # We also remember the first changenode we saw any manifest
1696 1717 # referenced by so we can later determine which changenode 'owns'
1697 1718 # the manifest.
1698 1719 def collect_manifests_and_files(clnode):
1699 1720 c = cl.read(clnode)
1700 1721 for f in c[3]:
1701 1722 # This is to make sure we only have one instance of each
1702 1723 # filename string for each filename.
1703 1724 changedfileset.setdefault(f, f)
1704 1725 msng_mnfst_set.setdefault(c[0], clnode)
1705 1726 return collect_manifests_and_files
1706 1727
1707 1728 # Figure out which manifest nodes (of the ones we think might be part
1708 1729 # of the changegroup) the recipient must know about and remove them
1709 1730 # from the changegroup.
1710 1731 def prune_manifests():
1711 1732 has_mnfst_set = {}
1712 1733 for n in msng_mnfst_set:
1713 1734 # If a 'missing' manifest thinks it belongs to a changenode
1714 1735 # the recipient is assumed to have, obviously the recipient
1715 1736 # must have that manifest.
1716 1737 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1717 1738 if linknode in has_cl_set:
1718 1739 has_mnfst_set[n] = 1
1719 1740 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1720 1741
1721 1742 # Use the information collected in collect_manifests_and_files to say
1722 1743 # which changenode any manifestnode belongs to.
1723 1744 def lookup_manifest_link(mnfstnode):
1724 1745 return msng_mnfst_set[mnfstnode]
1725 1746
1726 1747 # A function generating function that sets up the initial environment
1727 1748 # for the inner function.
1728 1749 def filenode_collector(changedfiles):
1729 1750 next_rev = [0]
1730 1751 # This gathers information from each manifestnode included in the
1731 1752 # changegroup about which filenodes the manifest node references
1732 1753 # so we can include those in the changegroup too.
1733 1754 #
1734 1755 # It also remembers which changenode each filenode belongs to. It
1735 1756 # does this by assuming that a filenode belongs to the changenode
1736 1757 # that the first manifest referencing it belongs to.
1737 1758 def collect_msng_filenodes(mnfstnode):
1738 1759 r = mnfst.rev(mnfstnode)
1739 1760 if r == next_rev[0]:
1740 1761 # If the last rev we looked at was the one just previous,
1741 1762 # we only need to see a diff.
1742 1763 deltamf = mnfst.readdelta(mnfstnode)
1743 1764 # For each line in the delta
1744 1765 for f, fnode in deltamf.iteritems():
1745 1766 f = changedfiles.get(f, None)
1746 1767 # And if the file is in the list of files we care
1747 1768 # about.
1748 1769 if f is not None:
1749 1770 # Get the changenode this manifest belongs to
1750 1771 clnode = msng_mnfst_set[mnfstnode]
1751 1772 # Create the set of filenodes for the file if
1752 1773 # there isn't one already.
1753 1774 ndset = msng_filenode_set.setdefault(f, {})
1754 1775 # And set the filenode's changelog node to the
1755 1776 # manifest's if it hasn't been set already.
1756 1777 ndset.setdefault(fnode, clnode)
1757 1778 else:
1758 1779 # Otherwise we need a full manifest.
1759 1780 m = mnfst.read(mnfstnode)
1760 1781 # For every file we care about.
1761 1782 for f in changedfiles:
1762 1783 fnode = m.get(f, None)
1763 1784 # If it's in the manifest
1764 1785 if fnode is not None:
1765 1786 # See comments above.
1766 1787 clnode = msng_mnfst_set[mnfstnode]
1767 1788 ndset = msng_filenode_set.setdefault(f, {})
1768 1789 ndset.setdefault(fnode, clnode)
1769 1790 # Remember the revision we hope to see next.
1770 1791 next_rev[0] = r + 1
1771 1792 return collect_msng_filenodes
1772 1793
1773 1794 # We have a list of filenodes we think we need for a file; let's remove
1774 1795 # all those we know the recipient must have.
1775 1796 def prune_filenodes(f, filerevlog):
1776 1797 msngset = msng_filenode_set[f]
1777 1798 hasset = {}
1778 1799 # If a 'missing' filenode thinks it belongs to a changenode we
1779 1800 # assume the recipient must have, then the recipient must have
1780 1801 # that filenode.
1781 1802 for n in msngset:
1782 1803 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1783 1804 if clnode in has_cl_set:
1784 1805 hasset[n] = 1
1785 1806 prune_parents(filerevlog, hasset, msngset)
1786 1807
1787 1808 # A function generating function that sets up a context for the
1788 1809 # inner function.
1789 1810 def lookup_filenode_link_func(fname):
1790 1811 msngset = msng_filenode_set[fname]
1791 1812 # Lookup the changenode the filenode belongs to.
1792 1813 def lookup_filenode_link(fnode):
1793 1814 return msngset[fnode]
1794 1815 return lookup_filenode_link
1795 1816
1796 1817 # Add the nodes that were explicitly requested.
1797 1818 def add_extra_nodes(name, nodes):
1798 1819 if not extranodes or name not in extranodes:
1799 1820 return
1800 1821
1801 1822 for node, linknode in extranodes[name]:
1802 1823 if node not in nodes:
1803 1824 nodes[node] = linknode
1804 1825
1805 1826 # Now that we have all these utility functions to help out and
1806 1827 # logically divide up the task, generate the group.
1807 1828 def gengroup():
1808 1829 # The set of changed files starts empty.
1809 1830 changedfiles = {}
1810 1831 # Create a changenode group generator that will call our functions
1811 1832 # back to lookup the owning changenode and collect information.
1812 1833 group = cl.group(msng_cl_lst, identity,
1813 1834 manifest_and_file_collector(changedfiles))
1814 1835 for chnk in group:
1815 1836 yield chnk
1816 1837
1817 1838 # The list of manifests has been collected by the generator
1818 1839 # calling our functions back.
1819 1840 prune_manifests()
1820 1841 add_extra_nodes(1, msng_mnfst_set)
1821 1842 msng_mnfst_lst = msng_mnfst_set.keys()
1822 1843 # Sort the manifestnodes by revision number.
1823 1844 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1824 1845 # Create a generator for the manifestnodes that calls our lookup
1825 1846 # and data collection functions back.
1826 1847 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1827 1848 filenode_collector(changedfiles))
1828 1849 for chnk in group:
1829 1850 yield chnk
1830 1851
1831 1852 # These are no longer needed, dereference and toss the memory for
1832 1853 # them.
1833 1854 msng_mnfst_lst = None
1834 1855 msng_mnfst_set.clear()
1835 1856
1836 1857 if extranodes:
1837 1858 for fname in extranodes:
1838 1859 if isinstance(fname, int):
1839 1860 continue
1840 1861 msng_filenode_set.setdefault(fname, {})
1841 1862 changedfiles[fname] = 1
1842 1863 # Go through all our files in order sorted by name.
1843 1864 for fname in util.sort(changedfiles):
1844 1865 filerevlog = self.file(fname)
1845 1866 if not len(filerevlog):
1846 1867 raise util.Abort(_("empty or missing revlog for %s") % fname)
1847 1868 # Toss out the filenodes that the recipient isn't really
1848 1869 # missing.
1849 1870 if fname in msng_filenode_set:
1850 1871 prune_filenodes(fname, filerevlog)
1851 1872 add_extra_nodes(fname, msng_filenode_set[fname])
1852 1873 msng_filenode_lst = msng_filenode_set[fname].keys()
1853 1874 else:
1854 1875 msng_filenode_lst = []
1855 1876 # If any filenodes are left, generate the group for them,
1856 1877 # otherwise don't bother.
1857 1878 if len(msng_filenode_lst) > 0:
1858 1879 yield changegroup.chunkheader(len(fname))
1859 1880 yield fname
1860 1881 # Sort the filenodes by their revision #
1861 1882 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1862 1883 # Create a group generator and only pass in a changenode
1863 1884 # lookup function as we need to collect no information
1864 1885 # from filenodes.
1865 1886 group = filerevlog.group(msng_filenode_lst,
1866 1887 lookup_filenode_link_func(fname))
1867 1888 for chnk in group:
1868 1889 yield chnk
1869 1890 if fname in msng_filenode_set:
1870 1891 # Don't need this anymore, toss it to free memory.
1871 1892 del msng_filenode_set[fname]
1872 1893 # Signal that no more groups are left.
1873 1894 yield changegroup.closechunk()
1874 1895
1875 1896 if msng_cl_lst:
1876 1897 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1877 1898
1878 1899 return util.chunkbuffer(gengroup())
1879 1900
1880 1901 def changegroup(self, basenodes, source):
1881 1902 # to avoid a race we use changegroupsubset() (issue1320)
1882 1903 return self.changegroupsubset(basenodes, self.heads(), source)
1883 1904
1884 1905 def _changegroup(self, common, source):
1885 1906 """Generate a changegroup of all nodes that we have that a recipient
1886 1907 doesn't.
1887 1908
1888 1909 This is much easier than the previous function as we can assume that
1889 1910 the recipient has any changenode we aren't sending them.
1890 1911
1891 1912 common is the set of common nodes between remote and self"""
1892 1913
1893 1914 self.hook('preoutgoing', throw=True, source=source)
1894 1915
1895 1916 cl = self.changelog
1896 1917 nodes = cl.findmissing(common)
1897 1918 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1898 1919 self.changegroupinfo(nodes, source)
1899 1920
1900 1921 def identity(x):
1901 1922 return x
1902 1923
1903 1924 def gennodelst(log):
1904 1925 for r in log:
1905 1926 if log.linkrev(r) in revset:
1906 1927 yield log.node(r)
1907 1928
1908 1929 def changed_file_collector(changedfileset):
1909 1930 def collect_changed_files(clnode):
1910 1931 c = cl.read(clnode)
1911 1932 for fname in c[3]:
1912 1933 changedfileset[fname] = 1
1913 1934 return collect_changed_files
1914 1935
1915 1936 def lookuprevlink_func(revlog):
1916 1937 def lookuprevlink(n):
1917 1938 return cl.node(revlog.linkrev(revlog.rev(n)))
1918 1939 return lookuprevlink
1919 1940
1920 1941 def gengroup():
1921 1942 # construct a list of all changed files
1922 1943 changedfiles = {}
1923 1944
1924 1945 for chnk in cl.group(nodes, identity,
1925 1946 changed_file_collector(changedfiles)):
1926 1947 yield chnk
1927 1948
1928 1949 mnfst = self.manifest
1929 1950 nodeiter = gennodelst(mnfst)
1930 1951 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1931 1952 yield chnk
1932 1953
1933 1954 for fname in util.sort(changedfiles):
1934 1955 filerevlog = self.file(fname)
1935 1956 if not len(filerevlog):
1936 1957 raise util.Abort(_("empty or missing revlog for %s") % fname)
1937 1958 nodeiter = gennodelst(filerevlog)
1938 1959 nodeiter = list(nodeiter)
1939 1960 if nodeiter:
1940 1961 yield changegroup.chunkheader(len(fname))
1941 1962 yield fname
1942 1963 lookup = lookuprevlink_func(filerevlog)
1943 1964 for chnk in filerevlog.group(nodeiter, lookup):
1944 1965 yield chnk
1945 1966
1946 1967 yield changegroup.closechunk()
1947 1968
1948 1969 if nodes:
1949 1970 self.hook('outgoing', node=hex(nodes[0]), source=source)
1950 1971
1951 1972 return util.chunkbuffer(gengroup())
1952 1973
1953 1974 def addchangegroup(self, source, srctype, url, emptyok=False):
1954 1975 """add changegroup to repo.
1955 1976
1956 1977 return values:
1957 1978 - nothing changed or no source: 0
1958 1979 - more heads than before: 1+added heads (2..n)
1959 1980 - fewer heads than before: -1-removed heads (-2..-n)
1960 1981 - number of heads stays the same: 1
1961 1982 """
1962 1983 def csmap(x):
1963 1984 self.ui.debug(_("add changeset %s\n") % short(x))
1964 1985 return len(cl)
1965 1986
1966 1987 def revmap(x):
1967 1988 return cl.rev(x)
1968 1989
1969 1990 if not source:
1970 1991 return 0
1971 1992
1972 1993 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1973 1994
1974 1995 changesets = files = revisions = 0
1975 1996
1976 1997 # write changelog data to temp files so concurrent readers will not see
1977 1998 # inconsistent view
1978 1999 cl = self.changelog
1979 2000 cl.delayupdate()
1980 2001 oldheads = len(cl.heads())
1981 2002
1982 2003 tr = self.transaction()
1983 2004 try:
1984 2005 trp = weakref.proxy(tr)
1985 2006 # pull off the changeset group
1986 2007 self.ui.status(_("adding changesets\n"))
1987 2008 cor = len(cl) - 1
1988 2009 chunkiter = changegroup.chunkiter(source)
1989 2010 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1990 2011 raise util.Abort(_("received changelog group is empty"))
1991 2012 cnr = len(cl) - 1
1992 2013 changesets = cnr - cor
1993 2014
1994 2015 # pull off the manifest group
1995 2016 self.ui.status(_("adding manifests\n"))
1996 2017 chunkiter = changegroup.chunkiter(source)
1997 2018 # no need to check for empty manifest group here:
1998 2019 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1999 2020 # no new manifest will be created and the manifest group will
2000 2021 # be empty during the pull
2001 2022 self.manifest.addgroup(chunkiter, revmap, trp)
2002 2023
2003 2024 # process the files
2004 2025 self.ui.status(_("adding file changes\n"))
2005 2026 while 1:
2006 2027 f = changegroup.getchunk(source)
2007 2028 if not f:
2008 2029 break
2009 2030 self.ui.debug(_("adding %s revisions\n") % f)
2010 2031 fl = self.file(f)
2011 2032 o = len(fl)
2012 2033 chunkiter = changegroup.chunkiter(source)
2013 2034 if fl.addgroup(chunkiter, revmap, trp) is None:
2014 2035 raise util.Abort(_("received file revlog group is empty"))
2015 2036 revisions += len(fl) - o
2016 2037 files += 1
2017 2038
2018 2039 # make changelog see real files again
2019 2040 cl.finalize(trp)
2020 2041
2021 2042 newheads = len(self.changelog.heads())
2022 2043 heads = ""
2023 2044 if oldheads and newheads != oldheads:
2024 2045 heads = _(" (%+d heads)") % (newheads - oldheads)
2025 2046
2026 2047 self.ui.status(_("added %d changesets"
2027 2048 " with %d changes to %d files%s\n")
2028 2049 % (changesets, revisions, files, heads))
2029 2050
2030 2051 if changesets > 0:
2031 2052 self.hook('pretxnchangegroup', throw=True,
2032 2053 node=hex(self.changelog.node(cor+1)), source=srctype,
2033 2054 url=url)
2034 2055
2035 2056 tr.close()
2036 2057 finally:
2037 2058 del tr
2038 2059
2039 2060 if changesets > 0:
2040 2061 # forcefully update the on-disk branch cache
2041 2062 self.ui.debug(_("updating the branch cache\n"))
2042 2063 self.branchtags()
2043 2064 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2044 2065 source=srctype, url=url)
2045 2066
2046 2067 for i in xrange(cor + 1, cnr + 1):
2047 2068 self.hook("incoming", node=hex(self.changelog.node(i)),
2048 2069 source=srctype, url=url)
2049 2070
2050 2071 # never return 0 here:
2051 2072 if newheads < oldheads:
2052 2073 return newheads - oldheads - 1
2053 2074 else:
2054 2075 return newheads - oldheads + 1
2055 2076
2056 2077
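# Added summary of the stream_out wire format as consumed below (an informal
# description, not normative): the first line is a status code (0 ok,
# 1 operation forbidden, 2 remote locking failed), the second line is
# "<total files> <total bytes>", and each file is then announced as
# "<name>\0<size>" followed by <size> raw bytes of revlog data.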
2057 2078 def stream_in(self, remote):
2058 2079 fp = remote.stream_out()
2059 2080 l = fp.readline()
2060 2081 try:
2061 2082 resp = int(l)
2062 2083 except ValueError:
2063 2084 raise error.ResponseError(
2064 2085 _('Unexpected response from remote server:'), l)
2065 2086 if resp == 1:
2066 2087 raise util.Abort(_('operation forbidden by server'))
2067 2088 elif resp == 2:
2068 2089 raise util.Abort(_('locking the remote repository failed'))
2069 2090 elif resp != 0:
2070 2091 raise util.Abort(_('the server sent an unknown error code'))
2071 2092 self.ui.status(_('streaming all changes\n'))
2072 2093 l = fp.readline()
2073 2094 try:
2074 2095 total_files, total_bytes = map(int, l.split(' ', 1))
2075 2096 except (ValueError, TypeError):
2076 2097 raise error.ResponseError(
2077 2098 _('Unexpected response from remote server:'), l)
2078 2099 self.ui.status(_('%d files to transfer, %s of data\n') %
2079 2100 (total_files, util.bytecount(total_bytes)))
2080 2101 start = time.time()
2081 2102 for i in xrange(total_files):
2082 2103 # XXX doesn't support '\n' or '\r' in filenames
2083 2104 l = fp.readline()
2084 2105 try:
2085 2106 name, size = l.split('\0', 1)
2086 2107 size = int(size)
2087 2108 except (ValueError, TypeError):
2088 2109 raise error.ResponseError(
2089 2110 _('Unexpected response from remote server:'), l)
2090 2111 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2091 2112 ofp = self.sopener(name, 'w')
2092 2113 for chunk in util.filechunkiter(fp, limit=size):
2093 2114 ofp.write(chunk)
2094 2115 ofp.close()
2095 2116 elapsed = time.time() - start
2096 2117 if elapsed <= 0:
2097 2118 elapsed = 0.001
2098 2119 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2099 2120 (util.bytecount(total_bytes), elapsed,
2100 2121 util.bytecount(total_bytes / elapsed)))
2101 2122 self.invalidate()
2102 2123 return len(self.heads()) + 1
2103 2124
2104 2125 def clone(self, remote, heads=[], stream=False):
2105 2126 '''clone remote repository.
2106 2127
2107 2128 keyword arguments:
2108 2129 heads: list of revs to clone (forces use of pull)
2109 2130 stream: use streaming clone if possible'''
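# Illustrative sketch (hypothetical caller): repo.clone(remote, stream=True)
# attempts a streaming clone only when no heads are given and the remote
# advertises the 'stream' capability; otherwise it falls back to pull().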
2110 2131
2111 2132 # now, all clients that can request uncompressed clones can
2112 2133 # read repo formats supported by all servers that can serve
2113 2134 # them.
2114 2135
2115 2136 # if revlog format changes, client will have to check version
2116 2137 # and format flags on "stream" capability, and use
2117 2138 # uncompressed only if compatible.
2118 2139
2119 2140 if stream and not heads and remote.capable('stream'):
2120 2141 return self.stream_in(remote)
2121 2142 return self.pull(remote, heads)
2122 2143
2123 2144 # used to avoid circular references so destructors work
2124 2145 def aftertrans(files):
2125 2146 renamefiles = [tuple(t) for t in files]
2126 2147 def a():
2127 2148 for src, dest in renamefiles:
2128 2149 util.rename(src, dest)
2129 2150 return a
2130 2151
2131 2152 def instance(ui, path, create):
2132 2153 return localrepository(ui, util.drop_scheme('file', path), create)
2133 2154
2134 2155 def islocal(path):
2135 2156 return True