revlog: use set instead of dict
Benoit Boissinot
r8464:7af92e70 default
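The hunk below reproduces commands.py with its old and new line numbers; the change visible in this section is at line 506 of bundle(), where a dict whose values were never read is replaced by a set built with set((nullid,)). What follows is a minimal standalone sketch of that idiom, not code from the Mercurial source: nullid is redefined locally as a stand-in for mercurial.node.nullid, and the extra node ids are made up for illustration.

    nullid = "\0" * 20       # stand-in for mercurial.node.nullid (20-byte null node id)

    # before r8464: a dict abused as a set, its values never read
    has = {nullid: None}
    has["node-a"] = None     # "node-a"/"node-b" are hypothetical node ids
    has["node-b"] = None
    assert "node-a" in has and nullid in has

    # after r8464: a set states the membership-only intent directly
    has = set((nullid,))     # set((...,)) rather than {nullid}: the code predates Python 2.7 set literals
    has.add("node-a")
    has.add("node-b")
    assert "node-a" in has and nullid in has

Both forms give constant-time membership tests; the set simply drops the dummy None values. And because set.update() accepts any iterable (iterating a dict yields its keys), the has.update(repo.changelog.reachable(n)) call a couple of lines later works unchanged whichever container reachable() returns.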
@@ -1,3458 +1,3458 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, textwrap, subprocess, difflib, time
12 12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 13 import patch, help, mdiff, tempfile, url, encoding
14 14 import archival, changegroup, cmdutil, sshserver, hbisect
15 15 from hgweb import server
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
20 20 def add(ui, repo, *pats, **opts):
21 21 """add the specified files on the next commit
22 22
23 23 Schedule files to be version controlled and added to the
24 24 repository.
25 25
26 26 The files will be added to the repository at the next commit. To
27 27 undo an add before that, see hg revert.
28 28
29 29 If no names are given, add all files to the repository.
30 30 """
31 31
32 32 rejected = None
33 33 exacts = {}
34 34 names = []
35 35 m = cmdutil.match(repo, pats, opts)
36 36 m.bad = lambda x,y: True
37 37 for abs in repo.walk(m):
38 38 if m.exact(abs):
39 39 if ui.verbose:
40 40 ui.status(_('adding %s\n') % m.rel(abs))
41 41 names.append(abs)
42 42 exacts[abs] = 1
43 43 elif abs not in repo.dirstate:
44 44 ui.status(_('adding %s\n') % m.rel(abs))
45 45 names.append(abs)
46 46 if not opts.get('dry_run'):
47 47 rejected = repo.add(names)
48 48 rejected = [p for p in rejected if p in exacts]
49 49 return rejected and 1 or 0
50 50
51 51 def addremove(ui, repo, *pats, **opts):
52 52 """add all new files, delete all missing files
53 53
54 54 Add all new files and remove all missing files from the
55 55 repository.
56 56
57 57 New files are ignored if they match any of the patterns in
58 58 .hgignore. As with add, these changes take effect at the next
59 59 commit.
60 60
61 61 Use the -s/--similarity option to detect renamed files. With a
62 62 parameter > 0, this compares every removed file with every added
63 63 file and records those similar enough as renames. This option
64 64 takes a percentage between 0 (disabled) and 100 (files must be
65 65 identical) as its parameter. Detecting renamed files this way can
66 66 be expensive.
67 67 """
68 68 try:
69 69 sim = float(opts.get('similarity') or 0)
70 70 except ValueError:
71 71 raise util.Abort(_('similarity must be a number'))
72 72 if sim < 0 or sim > 100:
73 73 raise util.Abort(_('similarity must be between 0 and 100'))
74 74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
75 75
76 76 def annotate(ui, repo, *pats, **opts):
77 77 """show changeset information per file line
78 78
79 79 List changes in files, showing the revision id responsible for
80 80 each line
81 81
82 82 This command is useful to discover who did a change or when a
83 83 change took place.
84 84
85 85 Without the -a/--text option, annotate will avoid processing files
86 86 it detects as binary. With -a, annotate will generate an
87 87 annotation anyway, probably with undesirable results.
88 88 """
89 89 datefunc = ui.quiet and util.shortdate or util.datestr
90 90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
91 91
92 92 if not pats:
93 93 raise util.Abort(_('at least one file name or pattern required'))
94 94
95 95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
96 96 ('number', lambda x: str(x[0].rev())),
97 97 ('changeset', lambda x: short(x[0].node())),
98 98 ('date', getdate),
99 99 ('follow', lambda x: x[0].path()),
100 100 ]
101 101
102 102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
103 103 and not opts.get('follow')):
104 104 opts['number'] = 1
105 105
106 106 linenumber = opts.get('line_number') is not None
107 107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
108 108 raise util.Abort(_('at least one of -n/-c is required for -l'))
109 109
110 110 funcmap = [func for op, func in opmap if opts.get(op)]
111 111 if linenumber:
112 112 lastfunc = funcmap[-1]
113 113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
114 114
115 115 ctx = repo[opts.get('rev')]
116 116
117 117 m = cmdutil.match(repo, pats, opts)
118 118 for abs in ctx.walk(m):
119 119 fctx = ctx[abs]
120 120 if not opts.get('text') and util.binary(fctx.data()):
121 121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
122 122 continue
123 123
124 124 lines = fctx.annotate(follow=opts.get('follow'),
125 125 linenumber=linenumber)
126 126 pieces = []
127 127
128 128 for f in funcmap:
129 129 l = [f(n) for n, dummy in lines]
130 130 if l:
131 131 ml = max(map(len, l))
132 132 pieces.append(["%*s" % (ml, x) for x in l])
133 133
134 134 if pieces:
135 135 for p, l in zip(zip(*pieces), lines):
136 136 ui.write("%s: %s" % (" ".join(p), l[1]))
137 137
138 138 def archive(ui, repo, dest, **opts):
139 139 '''create unversioned archive of a repository revision
140 140
141 141 By default, the revision used is the parent of the working
142 142 directory; use -r/--rev to specify a different revision.
143 143
144 144 To specify the type of archive to create, use -t/--type. Valid
145 145 types are:
146 146
147 147 "files" (default): a directory full of files
148 148 "tar": tar archive, uncompressed
149 149 "tbz2": tar archive, compressed using bzip2
150 150 "tgz": tar archive, compressed using gzip
151 151 "uzip": zip archive, uncompressed
152 152 "zip": zip archive, compressed using deflate
153 153
154 154 The exact name of the destination archive or directory is given
155 155 using a format string; see 'hg help export' for details.
156 156
157 157 Each member added to an archive file has a directory prefix
158 158 prepended. Use -p/--prefix to specify a format string for the
159 159 prefix. The default is the basename of the archive, with suffixes
160 160 removed.
161 161 '''
162 162
163 163 ctx = repo[opts.get('rev')]
164 164 if not ctx:
165 165 raise util.Abort(_('no working directory: please specify a revision'))
166 166 node = ctx.node()
167 167 dest = cmdutil.make_filename(repo, dest, node)
168 168 if os.path.realpath(dest) == repo.root:
169 169 raise util.Abort(_('repository root cannot be destination'))
170 170 matchfn = cmdutil.match(repo, [], opts)
171 171 kind = opts.get('type') or 'files'
172 172 prefix = opts.get('prefix')
173 173 if dest == '-':
174 174 if kind == 'files':
175 175 raise util.Abort(_('cannot archive plain files to stdout'))
176 176 dest = sys.stdout
177 177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
178 178 prefix = cmdutil.make_filename(repo, prefix, node)
179 179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
180 180 matchfn, prefix)
181 181
182 182 def backout(ui, repo, node=None, rev=None, **opts):
183 183 '''reverse effect of earlier changeset
184 184
185 185 Commit the backed out changes as a new changeset. The new
186 186 changeset is a child of the backed out changeset.
187 187
188 188 If you back out a changeset other than the tip, a new head is
189 189 created. This head will be the new tip and you should merge this
190 190 backout changeset with another head (current one by default).
191 191
192 192 The --merge option remembers the parent of the working directory
193 193 before starting the backout, then merges the new head with that
194 194 changeset afterwards. This saves you from doing the merge by hand.
195 195 The result of this merge is not committed, as with a normal merge.
196 196
197 197 See \'hg help dates\' for a list of formats valid for -d/--date.
198 198 '''
199 199 if rev and node:
200 200 raise util.Abort(_("please specify just one revision"))
201 201
202 202 if not rev:
203 203 rev = node
204 204
205 205 if not rev:
206 206 raise util.Abort(_("please specify a revision to backout"))
207 207
208 208 date = opts.get('date')
209 209 if date:
210 210 opts['date'] = util.parsedate(date)
211 211
212 212 cmdutil.bail_if_changed(repo)
213 213 node = repo.lookup(rev)
214 214
215 215 op1, op2 = repo.dirstate.parents()
216 216 a = repo.changelog.ancestor(op1, node)
217 217 if a != node:
218 218 raise util.Abort(_('cannot back out change on a different branch'))
219 219
220 220 p1, p2 = repo.changelog.parents(node)
221 221 if p1 == nullid:
222 222 raise util.Abort(_('cannot back out a change with no parents'))
223 223 if p2 != nullid:
224 224 if not opts.get('parent'):
225 225 raise util.Abort(_('cannot back out a merge changeset without '
226 226 '--parent'))
227 227 p = repo.lookup(opts['parent'])
228 228 if p not in (p1, p2):
229 229 raise util.Abort(_('%s is not a parent of %s') %
230 230 (short(p), short(node)))
231 231 parent = p
232 232 else:
233 233 if opts.get('parent'):
234 234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
235 235 parent = p1
236 236
237 237 # the backout should appear on the same branch
238 238 branch = repo.dirstate.branch()
239 239 hg.clean(repo, node, show_stats=False)
240 240 repo.dirstate.setbranch(branch)
241 241 revert_opts = opts.copy()
242 242 revert_opts['date'] = None
243 243 revert_opts['all'] = True
244 244 revert_opts['rev'] = hex(parent)
245 245 revert_opts['no_backup'] = None
246 246 revert(ui, repo, **revert_opts)
247 247 commit_opts = opts.copy()
248 248 commit_opts['addremove'] = False
249 249 if not commit_opts['message'] and not commit_opts['logfile']:
250 250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
251 251 commit_opts['force_editor'] = True
252 252 commit(ui, repo, **commit_opts)
253 253 def nice(node):
254 254 return '%d:%s' % (repo.changelog.rev(node), short(node))
255 255 ui.status(_('changeset %s backs out changeset %s\n') %
256 256 (nice(repo.changelog.tip()), nice(node)))
257 257 if op1 != node:
258 258 hg.clean(repo, op1, show_stats=False)
259 259 if opts.get('merge'):
260 260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
261 261 hg.merge(repo, hex(repo.changelog.tip()))
262 262 else:
263 263 ui.status(_('the backout changeset is a new head - '
264 264 'do not forget to merge\n'))
265 265 ui.status(_('(use "backout --merge" '
266 266 'if you want to auto-merge)\n'))
267 267
268 268 def bisect(ui, repo, rev=None, extra=None, command=None,
269 269 reset=None, good=None, bad=None, skip=None, noupdate=None):
270 270 """subdivision search of changesets
271 271
272 272 This command helps to find changesets which introduce problems. To
273 273 use, mark the earliest changeset you know exhibits the problem as
274 274 bad, then mark the latest changeset which is free from the problem
275 275 as good. Bisect will update your working directory to a revision
276 276 for testing (unless the -U/--noupdate option is specified). Once
277 277 you have performed tests, mark the working directory as bad or
278 278 good and bisect will either update to another candidate changeset
279 279 or announce that it has found the bad revision.
280 280
281 281 As a shortcut, you can also use the revision argument to mark a
282 282 revision as good or bad without checking it out first.
283 283
284 284 If you supply a command it will be used for automatic bisection.
285 285 Its exit status will be used as flag to mark revision as bad or
286 286 good. In case exit status is 0 the revision is marked as good, 125
287 287 - skipped, 127 (command not found) - bisection will be aborted;
288 288 any other status bigger than 0 will mark revision as bad.
289 289 """
290 290 def print_result(nodes, good):
291 291 displayer = cmdutil.show_changeset(ui, repo, {})
292 292 if len(nodes) == 1:
293 293 # narrowed it down to a single revision
294 294 if good:
295 295 ui.write(_("The first good revision is:\n"))
296 296 else:
297 297 ui.write(_("The first bad revision is:\n"))
298 298 displayer.show(repo[nodes[0]])
299 299 else:
300 300 # multiple possible revisions
301 301 if good:
302 302 ui.write(_("Due to skipped revisions, the first "
303 303 "good revision could be any of:\n"))
304 304 else:
305 305 ui.write(_("Due to skipped revisions, the first "
306 306 "bad revision could be any of:\n"))
307 307 for n in nodes:
308 308 displayer.show(repo[n])
309 309
310 310 def check_state(state, interactive=True):
311 311 if not state['good'] or not state['bad']:
312 312 if (good or bad or skip or reset) and interactive:
313 313 return
314 314 if not state['good']:
315 315 raise util.Abort(_('cannot bisect (no known good revisions)'))
316 316 else:
317 317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
318 318 return True
319 319
320 320 # backward compatibility
321 321 if rev in "good bad reset init".split():
322 322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
323 323 cmd, rev, extra = rev, extra, None
324 324 if cmd == "good":
325 325 good = True
326 326 elif cmd == "bad":
327 327 bad = True
328 328 else:
329 329 reset = True
330 330 elif extra or good + bad + skip + reset + bool(command) > 1:
331 331 raise util.Abort(_('incompatible arguments'))
332 332
333 333 if reset:
334 334 p = repo.join("bisect.state")
335 335 if os.path.exists(p):
336 336 os.unlink(p)
337 337 return
338 338
339 339 state = hbisect.load_state(repo)
340 340
341 341 if command:
342 342 commandpath = util.find_exe(command)
343 343 changesets = 1
344 344 try:
345 345 while changesets:
346 346 # update state
347 347 status = subprocess.call([commandpath])
348 348 if status == 125:
349 349 transition = "skip"
350 350 elif status == 0:
351 351 transition = "good"
352 352 # status < 0 means process was killed
353 353 elif status == 127:
354 354 raise util.Abort(_("failed to execute %s") % command)
355 355 elif status < 0:
356 356 raise util.Abort(_("%s killed") % command)
357 357 else:
358 358 transition = "bad"
359 359 node = repo.lookup(rev or '.')
360 360 state[transition].append(node)
361 361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
362 362 check_state(state, interactive=False)
363 363 # bisect
364 364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 365 # update to next check
366 366 cmdutil.bail_if_changed(repo)
367 367 hg.clean(repo, nodes[0], show_stats=False)
368 368 finally:
369 369 hbisect.save_state(repo, state)
370 370 return print_result(nodes, not status)
371 371
372 372 # update state
373 373 node = repo.lookup(rev or '.')
374 374 if good:
375 375 state['good'].append(node)
376 376 elif bad:
377 377 state['bad'].append(node)
378 378 elif skip:
379 379 state['skip'].append(node)
380 380
381 381 hbisect.save_state(repo, state)
382 382
383 383 if not check_state(state):
384 384 return
385 385
386 386 # actually bisect
387 387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 388 if changesets == 0:
389 389 print_result(nodes, good)
390 390 else:
391 391 assert len(nodes) == 1 # only a single node can be tested next
392 392 node = nodes[0]
393 393 # compute the approximate number of remaining tests
394 394 tests, size = 0, 2
395 395 while size <= changesets:
396 396 tests, size = tests + 1, size * 2
397 397 rev = repo.changelog.rev(node)
398 398 ui.write(_("Testing changeset %s:%s "
399 399 "(%s changesets remaining, ~%s tests)\n")
400 400 % (rev, short(node), changesets, tests))
401 401 if not noupdate:
402 402 cmdutil.bail_if_changed(repo)
403 403 return hg.clean(repo, node)
404 404
405 405 def branch(ui, repo, label=None, **opts):
406 406 """set or show the current branch name
407 407
408 408 With no argument, show the current branch name. With one argument,
409 409 set the working directory branch name (the branch does not exist
410 410 in the repository until the next commit). It is recommended to use
411 411 the 'default' branch as your primary development branch.
412 412
413 413 Unless -f/--force is specified, branch will not let you set a
414 414 branch name that shadows an existing branch.
415 415
416 416 Use -C/--clean to reset the working directory branch to that of
417 417 the parent of the working directory, negating a previous branch
418 418 change.
419 419
420 420 Use the command 'hg update' to switch to an existing branch.
421 421 """
422 422
423 423 if opts.get('clean'):
424 424 label = repo[None].parents()[0].branch()
425 425 repo.dirstate.setbranch(label)
426 426 ui.status(_('reset working directory to branch %s\n') % label)
427 427 elif label:
428 428 if not opts.get('force') and label in repo.branchtags():
429 429 if label not in [p.branch() for p in repo.parents()]:
430 430 raise util.Abort(_('a branch of the same name already exists'
431 431 ' (use --force to override)'))
432 432 repo.dirstate.setbranch(encoding.fromlocal(label))
433 433 ui.status(_('marked working directory as branch %s\n') % label)
434 434 else:
435 435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
436 436
437 437 def branches(ui, repo, active=False):
438 438 """list repository named branches
439 439
440 440 List the repository's named branches, indicating which ones are
441 441 inactive. If active is specified, only show active branches.
442 442
443 443 A branch is considered active if it contains repository heads.
444 444
445 445 Use the command 'hg update' to switch to an existing branch.
446 446 """
447 447 hexfunc = ui.debugflag and hex or short
448 448 activebranches = [encoding.tolocal(repo[n].branch())
449 449 for n in repo.heads(closed=False)]
450 450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
451 451 for tag, node in repo.branchtags().items()],
452 452 reverse=True)
453 453
454 454 for isactive, node, tag in branches:
455 455 if (not active) or isactive:
456 456 if ui.quiet:
457 457 ui.write("%s\n" % tag)
458 458 else:
459 459 hn = repo.lookup(node)
460 460 if isactive:
461 461 notice = ''
462 462 elif hn not in repo.branchheads(tag, closed=False):
463 463 notice = ' (closed)'
464 464 else:
465 465 notice = ' (inactive)'
466 466 rev = str(node).rjust(31 - encoding.colwidth(tag))
467 467 data = tag, rev, hexfunc(hn), notice
468 468 ui.write("%s %s:%s%s\n" % data)
469 469
470 470 def bundle(ui, repo, fname, dest=None, **opts):
471 471 """create a changegroup file
472 472
473 473 Generate a compressed changegroup file collecting changesets not
474 474 known to be in another repository.
475 475
476 476 If no destination repository is specified the destination is
477 477 assumed to have all the nodes specified by one or more --base
478 478 parameters. To create a bundle containing all changesets, use
479 479 -a/--all (or --base null). To change the compression method
480 480 applied, use the -t/--type option (by default, bundles are
481 481 compressed using bz2).
482 482
483 483 The bundle file can then be transferred using conventional means
484 484 and applied to another repository with the unbundle or pull
485 485 command. This is useful when direct push and pull are not
486 486 available or when exporting an entire repository is undesirable.
487 487
488 488 Applying bundles preserves all changeset contents including
489 489 permissions, copy/rename information, and revision history.
490 490 """
491 491 revs = opts.get('rev') or None
492 492 if revs:
493 493 revs = [repo.lookup(rev) for rev in revs]
494 494 if opts.get('all'):
495 495 base = ['null']
496 496 else:
497 497 base = opts.get('base')
498 498 if base:
499 499 if dest:
500 500 raise util.Abort(_("--base is incompatible with specifiying "
501 501 "a destination"))
502 502 base = [repo.lookup(rev) for rev in base]
503 503 # create the right base
504 504 # XXX: nodesbetween / changegroup* should be "fixed" instead
505 505 o = []
506 has = {nullid: None}
506 has = set((nullid,))
507 507 for n in base:
508 508 has.update(repo.changelog.reachable(n))
509 509 if revs:
510 510 visit = list(revs)
511 511 else:
512 512 visit = repo.changelog.heads()
513 513 seen = {}
514 514 while visit:
515 515 n = visit.pop(0)
516 516 parents = [p for p in repo.changelog.parents(n) if p not in has]
517 517 if len(parents) == 0:
518 518 o.insert(0, n)
519 519 else:
520 520 for p in parents:
521 521 if p not in seen:
522 522 seen[p] = 1
523 523 visit.append(p)
524 524 else:
525 525 dest, revs, checkout = hg.parseurl(
526 526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
527 527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
528 528 o = repo.findoutgoing(other, force=opts.get('force'))
529 529
530 530 if revs:
531 531 cg = repo.changegroupsubset(o, revs, 'bundle')
532 532 else:
533 533 cg = repo.changegroup(o, 'bundle')
534 534
535 535 bundletype = opts.get('type', 'bzip2').lower()
536 536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
537 537 bundletype = btypes.get(bundletype)
538 538 if bundletype not in changegroup.bundletypes:
539 539 raise util.Abort(_('unknown bundle type specified with --type'))
540 540
541 541 changegroup.writebundle(cg, fname, bundletype)
542 542
543 543 def cat(ui, repo, file1, *pats, **opts):
544 544 """output the current or given revision of files
545 545
546 546 Print the specified files as they were at the given revision. If
547 547 no revision is given, the parent of the working directory is used,
548 548 or tip if no revision is checked out.
549 549
550 550 Output may be to a file, in which case the name of the file is
551 551 given using a format string. The formatting rules are the same as
552 552 for the export command, with the following additions:
553 553
554 554 %s basename of file being printed
555 555 %d dirname of file being printed, or '.' if in repository root
556 556 %p root-relative path name of file being printed
557 557 """
558 558 ctx = repo[opts.get('rev')]
559 559 err = 1
560 560 m = cmdutil.match(repo, (file1,) + pats, opts)
561 561 for abs in ctx.walk(m):
562 562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
563 563 data = ctx[abs].data()
564 564 if opts.get('decode'):
565 565 data = repo.wwritedata(abs, data)
566 566 fp.write(data)
567 567 err = 0
568 568 return err
569 569
570 570 def clone(ui, source, dest=None, **opts):
571 571 """make a copy of an existing repository
572 572
573 573 Create a copy of an existing repository in a new directory.
574 574
575 575 If no destination directory name is specified, it defaults to the
576 576 basename of the source.
577 577
578 578 The location of the source is added to the new repository's
579 579 .hg/hgrc file, as the default to be used for future pulls.
580 580
581 581 If you use the -r/--rev option to clone up to a specific revision,
582 582 no subsequent revisions (including subsequent tags) will be
583 583 present in the cloned repository. This option implies --pull, even
584 584 on local repositories.
585 585
586 586 By default, clone will check out the head of the 'default' branch.
587 587 If the -U/--noupdate option is used, the new clone will contain
588 588 only a repository (.hg) and no working copy (the working copy
589 589 parent is the null revision).
590 590
591 591 See 'hg help urls' for valid source format details.
592 592
593 593 It is possible to specify an ssh:// URL as the destination, but no
594 594 .hg/hgrc and working directory will be created on the remote side.
595 595 Look at the help text for URLs for important details about ssh://
596 596 URLs.
597 597
598 598 For efficiency, hardlinks are used for cloning whenever the source
599 599 and destination are on the same filesystem (note this applies only
600 600 to the repository data, not to the checked out files). Some
601 601 filesystems, such as AFS, implement hardlinking incorrectly, but
602 602 do not report errors. In these cases, use the --pull option to
603 603 avoid hardlinking.
604 604
605 605 In some cases, you can clone repositories and checked out files
606 606 using full hardlinks with
607 607
608 608 $ cp -al REPO REPOCLONE
609 609
610 610 This is the fastest way to clone, but it is not always safe. The
611 611 operation is not atomic (making sure REPO is not modified during
612 612 the operation is up to you) and you have to make sure your editor
613 613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
614 614 this is not compatible with certain extensions that place their
615 615 metadata under the .hg directory, such as mq.
616 616
617 617 """
618 618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
619 619 pull=opts.get('pull'),
620 620 stream=opts.get('uncompressed'),
621 621 rev=opts.get('rev'),
622 622 update=not opts.get('noupdate'))
623 623
624 624 def commit(ui, repo, *pats, **opts):
625 625 """commit the specified files or all outstanding changes
626 626
627 627 Commit changes to the given files into the repository. Unlike a
628 628 centralized RCS, this operation is a local operation. See hg push
629 629 for means to actively distribute your changes.
630 630
631 631 If a list of files is omitted, all changes reported by "hg status"
632 632 will be committed.
633 633
634 634 If you are committing the result of a merge, do not provide any
635 635 file names or -I/-X filters.
636 636
637 637 If no commit message is specified, the configured editor is
638 638 started to prompt you for a message.
639 639
640 640 See 'hg help dates' for a list of formats valid for -d/--date.
641 641 """
642 642 extra = {}
643 643 if opts.get('close_branch'):
644 644 extra['close'] = 1
645 645 e = cmdutil.commiteditor
646 646 if opts.get('force_editor'):
647 647 e = cmdutil.commitforceeditor
648 648
649 649 def commitfunc(ui, repo, message, match, opts):
650 650 return repo.commit(match.files(), message, opts.get('user'),
651 651 opts.get('date'), match, editor=e, extra=extra)
652 652
653 653 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
654 654 if not node:
655 655 return
656 656 cl = repo.changelog
657 657 rev = cl.rev(node)
658 658 parents = cl.parentrevs(rev)
659 659 if rev - 1 in parents:
660 660 # one of the parents was the old tip
661 661 pass
662 662 elif (parents == (nullrev, nullrev) or
663 663 len(cl.heads(cl.node(parents[0]))) > 1 and
664 664 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
665 665 ui.status(_('created new head\n'))
666 666
667 667 if ui.debugflag:
668 668 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
669 669 elif ui.verbose:
670 670 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
671 671
672 672 def copy(ui, repo, *pats, **opts):
673 673 """mark files as copied for the next commit
674 674
675 675 Mark dest as having copies of source files. If dest is a
676 676 directory, copies are put in that directory. If dest is a file,
677 677 the source must be a single file.
678 678
679 679 By default, this command copies the contents of files as they
680 680 stand in the working directory. If invoked with -A/--after, the
681 681 operation is recorded, but no copying is performed.
682 682
683 683 This command takes effect with the next commit. To undo a copy
684 684 before that, see hg revert.
685 685 """
686 686 wlock = repo.wlock(False)
687 687 try:
688 688 return cmdutil.copy(ui, repo, pats, opts)
689 689 finally:
690 690 wlock.release()
691 691
692 692 def debugancestor(ui, repo, *args):
693 693 """find the ancestor revision of two revisions in a given index"""
694 694 if len(args) == 3:
695 695 index, rev1, rev2 = args
696 696 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
697 697 lookup = r.lookup
698 698 elif len(args) == 2:
699 699 if not repo:
700 700 raise util.Abort(_("There is no Mercurial repository here "
701 701 "(.hg not found)"))
702 702 rev1, rev2 = args
703 703 r = repo.changelog
704 704 lookup = repo.lookup
705 705 else:
706 706 raise util.Abort(_('either two or three arguments required'))
707 707 a = r.ancestor(lookup(rev1), lookup(rev2))
708 708 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
709 709
710 710 def debugcommands(ui, cmd='', *args):
711 711 for cmd, vals in sorted(table.iteritems()):
712 712 cmd = cmd.split('|')[0].strip('^')
713 713 opts = ', '.join([i[1] for i in vals[1]])
714 714 ui.write('%s: %s\n' % (cmd, opts))
715 715
716 716 def debugcomplete(ui, cmd='', **opts):
717 717 """returns the completion list associated with the given command"""
718 718
719 719 if opts.get('options'):
720 720 options = []
721 721 otables = [globalopts]
722 722 if cmd:
723 723 aliases, entry = cmdutil.findcmd(cmd, table, False)
724 724 otables.append(entry[1])
725 725 for t in otables:
726 726 for o in t:
727 727 if o[0]:
728 728 options.append('-%s' % o[0])
729 729 options.append('--%s' % o[1])
730 730 ui.write("%s\n" % "\n".join(options))
731 731 return
732 732
733 733 cmdlist = cmdutil.findpossible(cmd, table)
734 734 if ui.verbose:
735 735 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
736 736 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
737 737
738 738 def debugfsinfo(ui, path = "."):
739 739 file('.debugfsinfo', 'w').write('')
740 740 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
741 741 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
742 742 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
743 743 and 'yes' or 'no'))
744 744 os.unlink('.debugfsinfo')
745 745
746 746 def debugrebuildstate(ui, repo, rev="tip"):
747 747 """rebuild the dirstate as it would look like for the given revision"""
748 748 ctx = repo[rev]
749 749 wlock = repo.wlock()
750 750 try:
751 751 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
752 752 finally:
753 753 wlock.release()
754 754
755 755 def debugcheckstate(ui, repo):
756 756 """validate the correctness of the current dirstate"""
757 757 parent1, parent2 = repo.dirstate.parents()
758 758 m1 = repo[parent1].manifest()
759 759 m2 = repo[parent2].manifest()
760 760 errors = 0
761 761 for f in repo.dirstate:
762 762 state = repo.dirstate[f]
763 763 if state in "nr" and f not in m1:
764 764 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
765 765 errors += 1
766 766 if state in "a" and f in m1:
767 767 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
768 768 errors += 1
769 769 if state in "m" and f not in m1 and f not in m2:
770 770 ui.warn(_("%s in state %s, but not in either manifest\n") %
771 771 (f, state))
772 772 errors += 1
773 773 for f in m1:
774 774 state = repo.dirstate[f]
775 775 if state not in "nrm":
776 776 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
777 777 errors += 1
778 778 if errors:
779 779 error = _(".hg/dirstate inconsistent with current parent's manifest")
780 780 raise util.Abort(error)
781 781
782 782 def showconfig(ui, repo, *values, **opts):
783 783 """show combined config settings from all hgrc files
784 784
785 785 With no args, print names and values of all config items.
786 786
787 787 With one arg of the form section.name, print just the value of
788 788 that config item.
789 789
790 790 With multiple args, print names and values of all config items
791 791 with matching section names.
792 792
793 793 With the --debug flag, the source (filename and line number) is
794 794 printed for each config item.
795 795 """
796 796
797 797 untrusted = bool(opts.get('untrusted'))
798 798 if values:
799 799 if len([v for v in values if '.' in v]) > 1:
800 800 raise util.Abort(_('only one config item permitted'))
801 801 for section, name, value in ui.walkconfig(untrusted=untrusted):
802 802 sectname = section + '.' + name
803 803 if values:
804 804 for v in values:
805 805 if v == section:
806 806 ui.debug('%s: ' %
807 807 ui.configsource(section, name, untrusted))
808 808 ui.write('%s=%s\n' % (sectname, value))
809 809 elif v == sectname:
810 810 ui.debug('%s: ' %
811 811 ui.configsource(section, name, untrusted))
812 812 ui.write(value, '\n')
813 813 else:
814 814 ui.debug('%s: ' %
815 815 ui.configsource(section, name, untrusted))
816 816 ui.write('%s=%s\n' % (sectname, value))
817 817
818 818 def debugsetparents(ui, repo, rev1, rev2=None):
819 819 """manually set the parents of the current working directory
820 820
821 821 This is useful for writing repository conversion tools, but should
822 822 be used with care.
823 823 """
824 824
825 825 if not rev2:
826 826 rev2 = hex(nullid)
827 827
828 828 wlock = repo.wlock()
829 829 try:
830 830 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
831 831 finally:
832 832 wlock.release()
833 833
834 834 def debugstate(ui, repo, nodates=None):
835 835 """show the contents of the current dirstate"""
836 836 timestr = ""
837 837 showdate = not nodates
838 838 for file_, ent in sorted(repo.dirstate._map.iteritems()):
839 839 if showdate:
840 840 if ent[3] == -1:
841 841 # Pad or slice to locale representation
842 842 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
843 843 timestr = 'unset'
844 844 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
845 845 else:
846 846 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
847 847 if ent[1] & 020000:
848 848 mode = 'lnk'
849 849 else:
850 850 mode = '%3o' % (ent[1] & 0777)
851 851 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
852 852 for f in repo.dirstate.copies():
853 853 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
854 854
855 855 def debugdata(ui, file_, rev):
856 856 """dump the contents of a data file revision"""
857 857 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
858 858 try:
859 859 ui.write(r.revision(r.lookup(rev)))
860 860 except KeyError:
861 861 raise util.Abort(_('invalid revision identifier %s') % rev)
862 862
863 863 def debugdate(ui, date, range=None, **opts):
864 864 """parse and display a date"""
865 865 if opts["extended"]:
866 866 d = util.parsedate(date, util.extendeddateformats)
867 867 else:
868 868 d = util.parsedate(date)
869 869 ui.write("internal: %s %s\n" % d)
870 870 ui.write("standard: %s\n" % util.datestr(d))
871 871 if range:
872 872 m = util.matchdate(range)
873 873 ui.write("match: %s\n" % m(d[0]))
874 874
875 875 def debugindex(ui, file_):
876 876 """dump the contents of an index file"""
877 877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
878 878 ui.write(" rev offset length base linkrev"
879 879 " nodeid p1 p2\n")
880 880 for i in r:
881 881 node = r.node(i)
882 882 try:
883 883 pp = r.parents(node)
884 884 except:
885 885 pp = [nullid, nullid]
886 886 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
887 887 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
888 888 short(node), short(pp[0]), short(pp[1])))
889 889
890 890 def debugindexdot(ui, file_):
891 891 """dump an index DAG as a .dot file"""
892 892 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
893 893 ui.write("digraph G {\n")
894 894 for i in r:
895 895 node = r.node(i)
896 896 pp = r.parents(node)
897 897 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
898 898 if pp[1] != nullid:
899 899 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
900 900 ui.write("}\n")
901 901
902 902 def debuginstall(ui):
903 903 '''test Mercurial installation'''
904 904
905 905 def writetemp(contents):
906 906 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
907 907 f = os.fdopen(fd, "wb")
908 908 f.write(contents)
909 909 f.close()
910 910 return name
911 911
912 912 problems = 0
913 913
914 914 # encoding
915 915 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
916 916 try:
917 917 encoding.fromlocal("test")
918 918 except util.Abort, inst:
919 919 ui.write(" %s\n" % inst)
920 920 ui.write(_(" (check that your locale is properly set)\n"))
921 921 problems += 1
922 922
923 923 # compiled modules
924 924 ui.status(_("Checking extensions...\n"))
925 925 try:
926 926 import bdiff, mpatch, base85
927 927 except Exception, inst:
928 928 ui.write(" %s\n" % inst)
929 929 ui.write(_(" One or more extensions could not be found"))
930 930 ui.write(_(" (check that you compiled the extensions)\n"))
931 931 problems += 1
932 932
933 933 # templates
934 934 ui.status(_("Checking templates...\n"))
935 935 try:
936 936 import templater
937 937 templater.templater(templater.templatepath("map-cmdline.default"))
938 938 except Exception, inst:
939 939 ui.write(" %s\n" % inst)
940 940 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
941 941 problems += 1
942 942
943 943 # patch
944 944 ui.status(_("Checking patch...\n"))
945 945 patchproblems = 0
946 946 a = "1\n2\n3\n4\n"
947 947 b = "1\n2\n3\ninsert\n4\n"
948 948 fa = writetemp(a)
949 949 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
950 950 os.path.basename(fa))
951 951 fd = writetemp(d)
952 952
953 953 files = {}
954 954 try:
955 955 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
956 956 except util.Abort, e:
957 957 ui.write(_(" patch call failed:\n"))
958 958 ui.write(" " + str(e) + "\n")
959 959 patchproblems += 1
960 960 else:
961 961 if list(files) != [os.path.basename(fa)]:
962 962 ui.write(_(" unexpected patch output!\n"))
963 963 patchproblems += 1
964 964 a = file(fa).read()
965 965 if a != b:
966 966 ui.write(_(" patch test failed!\n"))
967 967 patchproblems += 1
968 968
969 969 if patchproblems:
970 970 if ui.config('ui', 'patch'):
971 971 ui.write(_(" (Current patch tool may be incompatible with patch,"
972 972 " or misconfigured. Please check your .hgrc file)\n"))
973 973 else:
974 974 ui.write(_(" Internal patcher failure, please report this error"
975 975 " to http://www.selenic.com/mercurial/bts\n"))
976 976 problems += patchproblems
977 977
978 978 os.unlink(fa)
979 979 os.unlink(fd)
980 980
981 981 # editor
982 982 ui.status(_("Checking commit editor...\n"))
983 983 editor = ui.geteditor()
984 984 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
985 985 if not cmdpath:
986 986 if editor == 'vi':
987 987 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
988 988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
989 989 else:
990 990 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
991 991 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
992 992 problems += 1
993 993
994 994 # check username
995 995 ui.status(_("Checking username...\n"))
996 996 user = os.environ.get("HGUSER")
997 997 if user is None:
998 998 user = ui.config("ui", "username")
999 999 if user is None:
1000 1000 user = os.environ.get("EMAIL")
1001 1001 if not user:
1002 1002 ui.warn(" ")
1003 1003 ui.username()
1004 1004 ui.write(_(" (specify a username in your .hgrc file)\n"))
1005 1005
1006 1006 if not problems:
1007 1007 ui.status(_("No problems detected\n"))
1008 1008 else:
1009 1009 ui.write(_("%s problems detected,"
1010 1010 " please check your install!\n") % problems)
1011 1011
1012 1012 return problems
1013 1013
1014 1014 def debugrename(ui, repo, file1, *pats, **opts):
1015 1015 """dump rename information"""
1016 1016
1017 1017 ctx = repo[opts.get('rev')]
1018 1018 m = cmdutil.match(repo, (file1,) + pats, opts)
1019 1019 for abs in ctx.walk(m):
1020 1020 fctx = ctx[abs]
1021 1021 o = fctx.filelog().renamed(fctx.filenode())
1022 1022 rel = m.rel(abs)
1023 1023 if o:
1024 1024 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1025 1025 else:
1026 1026 ui.write(_("%s not renamed\n") % rel)
1027 1027
1028 1028 def debugwalk(ui, repo, *pats, **opts):
1029 1029 """show how files match on given patterns"""
1030 1030 m = cmdutil.match(repo, pats, opts)
1031 1031 items = list(repo.walk(m))
1032 1032 if not items:
1033 1033 return
1034 1034 fmt = 'f %%-%ds %%-%ds %%s' % (
1035 1035 max([len(abs) for abs in items]),
1036 1036 max([len(m.rel(abs)) for abs in items]))
1037 1037 for abs in items:
1038 1038 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1039 1039 ui.write("%s\n" % line.rstrip())
1040 1040
1041 1041 def diff(ui, repo, *pats, **opts):
1042 1042 """diff repository (or selected files)
1043 1043
1044 1044 Show differences between revisions for the specified files.
1045 1045
1046 1046 Differences between files are shown using the unified diff format.
1047 1047
1048 1048 NOTE: diff may generate unexpected results for merges, as it will
1049 1049 default to comparing against the working directory's first parent
1050 1050 changeset if no revisions are specified.
1051 1051
1052 1052 When two revision arguments are given, then changes are shown
1053 1053 between those revisions. If only one revision is specified then
1054 1054 that revision is compared to the working directory, and, when no
1055 1055 revisions are specified, the working directory files are compared
1056 1056 to its parent.
1057 1057
1058 1058 Without the -a/--text option, diff will avoid generating diffs of
1059 1059 files it detects as binary. With -a, diff will generate a diff
1060 1060 anyway, probably with undesirable results.
1061 1061
1062 1062 Use the -g/--git option to generate diffs in the git extended diff
1063 1063 format. For more information, read 'hg help diffs'.
1064 1064 """
1065 1065
1066 1066 revs = opts.get('rev')
1067 1067 change = opts.get('change')
1068 1068
1069 1069 if revs and change:
1070 1070 msg = _('cannot specify --rev and --change at the same time')
1071 1071 raise util.Abort(msg)
1072 1072 elif change:
1073 1073 node2 = repo.lookup(change)
1074 1074 node1 = repo[node2].parents()[0].node()
1075 1075 else:
1076 1076 node1, node2 = cmdutil.revpair(repo, revs)
1077 1077
1078 1078 m = cmdutil.match(repo, pats, opts)
1079 1079 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1080 1080 for chunk in it:
1081 1081 repo.ui.write(chunk)
1082 1082
1083 1083 def export(ui, repo, *changesets, **opts):
1084 1084 """dump the header and diffs for one or more changesets
1085 1085
1086 1086 Print the changeset header and diffs for one or more revisions.
1087 1087
1088 1088 The information shown in the changeset header is: author,
1089 1089 changeset hash, parent(s) and commit comment.
1090 1090
1091 1091 NOTE: export may generate unexpected diff output for merge
1092 1092 changesets, as it will compare the merge changeset against its
1093 1093 first parent only.
1094 1094
1095 1095 Output may be to a file, in which case the name of the file is
1096 1096 given using a format string. The formatting rules are as follows:
1097 1097
1098 1098 %% literal "%" character
1099 1099 %H changeset hash (40 bytes of hexadecimal)
1100 1100 %N number of patches being generated
1101 1101 %R changeset revision number
1102 1102 %b basename of the exporting repository
1103 1103 %h short-form changeset hash (12 bytes of hexadecimal)
1104 1104 %n zero-padded sequence number, starting at 1
1105 1105 %r zero-padded changeset revision number
1106 1106
1107 1107 Without the -a/--text option, export will avoid generating diffs
1108 1108 of files it detects as binary. With -a, export will generate a
1109 1109 diff anyway, probably with undesirable results.
1110 1110
1111 1111 Use the -g/--git option to generate diffs in the git extended diff
1112 1112 format. Read the diffs help topic for more information.
1113 1113
1114 1114 With the --switch-parent option, the diff will be against the
1115 1115 second parent. It can be useful to review a merge.
1116 1116 """
1117 1117 if not changesets:
1118 1118 raise util.Abort(_("export requires at least one changeset"))
1119 1119 revs = cmdutil.revrange(repo, changesets)
1120 1120 if len(revs) > 1:
1121 1121 ui.note(_('exporting patches:\n'))
1122 1122 else:
1123 1123 ui.note(_('exporting patch:\n'))
1124 1124 patch.export(repo, revs, template=opts.get('output'),
1125 1125 switch_parent=opts.get('switch_parent'),
1126 1126 opts=patch.diffopts(ui, opts))
1127 1127
1128 1128 def grep(ui, repo, pattern, *pats, **opts):
1129 1129 """search for a pattern in specified files and revisions
1130 1130
1131 1131 Search revisions of files for a regular expression.
1132 1132
1133 1133 This command behaves differently than Unix grep. It only accepts
1134 1134 Python/Perl regexps. It searches repository history, not the
1135 1135 working directory. It always prints the revision number in which a
1136 1136 match appears.
1137 1137
1138 1138 By default, grep only prints output for the first revision of a
1139 1139 file in which it finds a match. To get it to print every revision
1140 1140 that contains a change in match status ("-" for a match that
1141 1141 becomes a non-match, or "+" for a non-match that becomes a match),
1142 1142 use the --all flag.
1143 1143 """
1144 1144 reflags = 0
1145 1145 if opts.get('ignore_case'):
1146 1146 reflags |= re.I
1147 1147 try:
1148 1148 regexp = re.compile(pattern, reflags)
1149 1149 except Exception, inst:
1150 1150 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1151 1151 return None
1152 1152 sep, eol = ':', '\n'
1153 1153 if opts.get('print0'):
1154 1154 sep = eol = '\0'
1155 1155
1156 1156 fcache = {}
1157 1157 forder = []
1158 1158 def getfile(fn):
1159 1159 if fn not in fcache:
1160 1160 if len(fcache) > 20:
1161 1161 del fcache[forder.pop(0)]
1162 1162 fcache[fn] = repo.file(fn)
1163 1163 else:
1164 1164 forder.remove(fn)
1165 1165
1166 1166 forder.append(fn)
1167 1167 return fcache[fn]
1168 1168
1169 1169 def matchlines(body):
1170 1170 begin = 0
1171 1171 linenum = 0
1172 1172 while True:
1173 1173 match = regexp.search(body, begin)
1174 1174 if not match:
1175 1175 break
1176 1176 mstart, mend = match.span()
1177 1177 linenum += body.count('\n', begin, mstart) + 1
1178 1178 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1179 1179 begin = body.find('\n', mend) + 1 or len(body)
1180 1180 lend = begin - 1
1181 1181 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1182 1182
1183 1183 class linestate(object):
1184 1184 def __init__(self, line, linenum, colstart, colend):
1185 1185 self.line = line
1186 1186 self.linenum = linenum
1187 1187 self.colstart = colstart
1188 1188 self.colend = colend
1189 1189
1190 1190 def __hash__(self):
1191 1191 return hash((self.linenum, self.line))
1192 1192
1193 1193 def __eq__(self, other):
1194 1194 return self.line == other.line
1195 1195
1196 1196 matches = {}
1197 1197 copies = {}
1198 1198 def grepbody(fn, rev, body):
1199 1199 matches[rev].setdefault(fn, [])
1200 1200 m = matches[rev][fn]
1201 1201 for lnum, cstart, cend, line in matchlines(body):
1202 1202 s = linestate(line, lnum, cstart, cend)
1203 1203 m.append(s)
1204 1204
1205 1205 def difflinestates(a, b):
1206 1206 sm = difflib.SequenceMatcher(None, a, b)
1207 1207 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1208 1208 if tag == 'insert':
1209 1209 for i in xrange(blo, bhi):
1210 1210 yield ('+', b[i])
1211 1211 elif tag == 'delete':
1212 1212 for i in xrange(alo, ahi):
1213 1213 yield ('-', a[i])
1214 1214 elif tag == 'replace':
1215 1215 for i in xrange(alo, ahi):
1216 1216 yield ('-', a[i])
1217 1217 for i in xrange(blo, bhi):
1218 1218 yield ('+', b[i])
1219 1219
1220 1220 prev = {}
1221 1221 def display(fn, rev, states, prevstates):
1222 1222 datefunc = ui.quiet and util.shortdate or util.datestr
1223 1223 found = False
1224 1224 filerevmatches = {}
1225 1225 r = prev.get(fn, -1)
1226 1226 if opts.get('all'):
1227 1227 iter = difflinestates(states, prevstates)
1228 1228 else:
1229 1229 iter = [('', l) for l in prevstates]
1230 1230 for change, l in iter:
1231 1231 cols = [fn, str(r)]
1232 1232 if opts.get('line_number'):
1233 1233 cols.append(str(l.linenum))
1234 1234 if opts.get('all'):
1235 1235 cols.append(change)
1236 1236 if opts.get('user'):
1237 1237 cols.append(ui.shortuser(get(r)[1]))
1238 1238 if opts.get('date'):
1239 1239 cols.append(datefunc(get(r)[2]))
1240 1240 if opts.get('files_with_matches'):
1241 1241 c = (fn, r)
1242 1242 if c in filerevmatches:
1243 1243 continue
1244 1244 filerevmatches[c] = 1
1245 1245 else:
1246 1246 cols.append(l.line)
1247 1247 ui.write(sep.join(cols), eol)
1248 1248 found = True
1249 1249 return found
1250 1250
1251 1251 fstate = {}
1252 1252 skip = {}
1253 1253 get = util.cachefunc(lambda r: repo[r].changeset())
1254 1254 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1255 1255 found = False
1256 1256 follow = opts.get('follow')
1257 1257 for st, rev, fns in changeiter:
1258 1258 if st == 'window':
1259 1259 matches.clear()
1260 1260 elif st == 'add':
1261 1261 ctx = repo[rev]
1262 1262 matches[rev] = {}
1263 1263 for fn in fns:
1264 1264 if fn in skip:
1265 1265 continue
1266 1266 try:
1267 1267 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1268 1268 fstate.setdefault(fn, [])
1269 1269 if follow:
1270 1270 copied = getfile(fn).renamed(ctx.filenode(fn))
1271 1271 if copied:
1272 1272 copies.setdefault(rev, {})[fn] = copied[0]
1273 1273 except error.LookupError:
1274 1274 pass
1275 1275 elif st == 'iter':
1276 1276 for fn, m in sorted(matches[rev].items()):
1277 1277 copy = copies.get(rev, {}).get(fn)
1278 1278 if fn in skip:
1279 1279 if copy:
1280 1280 skip[copy] = True
1281 1281 continue
1282 1282 if fn in prev or fstate[fn]:
1283 1283 r = display(fn, rev, m, fstate[fn])
1284 1284 found = found or r
1285 1285 if r and not opts.get('all'):
1286 1286 skip[fn] = True
1287 1287 if copy:
1288 1288 skip[copy] = True
1289 1289 fstate[fn] = m
1290 1290 if copy:
1291 1291 fstate[copy] = m
1292 1292 prev[fn] = rev
1293 1293
1294 1294 for fn, state in sorted(fstate.items()):
1295 1295 if fn in skip:
1296 1296 continue
1297 1297 if fn not in copies.get(prev[fn], {}):
1298 1298 found = display(fn, rev, {}, state) or found
1299 1299 return (not found and 1) or 0
1300 1300
1301 1301 def heads(ui, repo, *branchrevs, **opts):
1302 1302 """show current repository heads or show branch heads
1303 1303
1304 1304 With no arguments, show all repository head changesets.
1305 1305
1306 1306 If branch or revisions names are given this will show the heads of
1307 1307 the specified branches or the branches those revisions are tagged
1308 1308 with.
1309 1309
1310 1310 Repository "heads" are changesets that don't have child
1311 1311 changesets. They are where development generally takes place and
1312 1312 are the usual targets for update and merge operations.
1313 1313
1314 1314 Branch heads are changesets that have a given branch tag, but have
1315 1315 no child changesets with that tag. They are usually where
1316 1316 development on the given branch takes place.
1317 1317 """
1318 1318 if opts.get('rev'):
1319 1319 start = repo.lookup(opts['rev'])
1320 1320 else:
1321 1321 start = None
1322 1322 closed = not opts.get('active')
1323 1323 if not branchrevs:
1324 1324 # Assume we're looking repo-wide heads if no revs were specified.
1325 1325 heads = repo.heads(start, closed=closed)
1326 1326 else:
1327 1327 heads = []
1328 1328 visitedset = set()
1329 1329 for branchrev in branchrevs:
1330 1330 branch = repo[branchrev].branch()
1331 1331 if branch in visitedset:
1332 1332 continue
1333 1333 visitedset.add(branch)
1334 1334 bheads = repo.branchheads(branch, start, closed=closed)
1335 1335 if not bheads:
1336 1336 if branch != branchrev:
1337 1337 ui.warn(_("no changes on branch %s containing %s are "
1338 1338 "reachable from %s\n")
1339 1339 % (branch, branchrev, opts.get('rev')))
1340 1340 else:
1341 1341 ui.warn(_("no changes on branch %s are reachable from %s\n")
1342 1342 % (branch, opts.get('rev')))
1343 1343 heads.extend(bheads)
1344 1344 if not heads:
1345 1345 return 1
1346 1346 displayer = cmdutil.show_changeset(ui, repo, opts)
1347 1347 for n in heads:
1348 1348 displayer.show(repo[n])
1349 1349
1350 1350 def help_(ui, name=None, with_version=False):
1351 1351 """show help for a given topic or a help overview
1352 1352
1353 1353 With no arguments, print a list of commands and short help.
1354 1354
1355 1355 Given a topic, extension, or command name, print help for that
1356 1356 topic."""
1357 1357 option_lists = []
1358 1358
1359 1359 def addglobalopts(aliases):
1360 1360 if ui.verbose:
1361 1361 option_lists.append((_("global options:"), globalopts))
1362 1362 if name == 'shortlist':
1363 1363 option_lists.append((_('use "hg help" for the full list '
1364 1364 'of commands'), ()))
1365 1365 else:
1366 1366 if name == 'shortlist':
1367 1367 msg = _('use "hg help" for the full list of commands '
1368 1368 'or "hg -v" for details')
1369 1369 elif aliases:
1370 1370 msg = _('use "hg -v help%s" to show aliases and '
1371 1371 'global options') % (name and " " + name or "")
1372 1372 else:
1373 1373 msg = _('use "hg -v help %s" to show global options') % name
1374 1374 option_lists.append((msg, ()))
1375 1375
1376 1376 def helpcmd(name):
1377 1377 if with_version:
1378 1378 version_(ui)
1379 1379 ui.write('\n')
1380 1380
1381 1381 try:
1382 1382 aliases, i = cmdutil.findcmd(name, table, False)
1383 1383 except error.AmbiguousCommand, inst:
1384 1384 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1385 1385 helplist(_('list of commands:\n\n'), select)
1386 1386 return
1387 1387
1388 1388 # synopsis
1389 1389 if len(i) > 2:
1390 1390 if i[2].startswith('hg'):
1391 1391 ui.write("%s\n" % i[2])
1392 1392 else:
1393 1393 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1394 1394 else:
1395 1395 ui.write('hg %s\n' % aliases[0])
1396 1396
1397 1397 # aliases
1398 1398 if not ui.quiet and len(aliases) > 1:
1399 1399 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1400 1400
1401 1401 # description
1402 1402 doc = gettext(i[0].__doc__)
1403 1403 if not doc:
1404 1404 doc = _("(no help text available)")
1405 1405 if ui.quiet:
1406 1406 doc = doc.splitlines(0)[0]
1407 1407 ui.write("\n%s\n" % doc.rstrip())
1408 1408
1409 1409 if not ui.quiet:
1410 1410 # options
1411 1411 if i[1]:
1412 1412 option_lists.append((_("options:\n"), i[1]))
1413 1413
1414 1414 addglobalopts(False)
1415 1415
1416 1416 def helplist(header, select=None):
1417 1417 h = {}
1418 1418 cmds = {}
1419 1419 for c, e in table.iteritems():
1420 1420 f = c.split("|", 1)[0]
1421 1421 if select and not select(f):
1422 1422 continue
1423 1423 if (not select and name != 'shortlist' and
1424 1424 e[0].__module__ != __name__):
1425 1425 continue
1426 1426 if name == "shortlist" and not f.startswith("^"):
1427 1427 continue
1428 1428 f = f.lstrip("^")
1429 1429 if not ui.debugflag and f.startswith("debug"):
1430 1430 continue
1431 1431 doc = gettext(e[0].__doc__)
1432 1432 if not doc:
1433 1433 doc = _("(no help text available)")
1434 1434 h[f] = doc.splitlines(0)[0].rstrip()
1435 1435 cmds[f] = c.lstrip("^")
1436 1436
1437 1437 if not h:
1438 1438 ui.status(_('no commands defined\n'))
1439 1439 return
1440 1440
1441 1441 ui.status(header)
1442 1442 fns = sorted(h)
1443 1443 m = max(map(len, fns))
1444 1444 for f in fns:
1445 1445 if ui.verbose:
1446 1446 commands = cmds[f].replace("|",", ")
1447 1447 ui.write(" %s:\n %s\n"%(commands, h[f]))
1448 1448 else:
1449 1449 ui.write(' %-*s %s\n' % (m, f, h[f]))
1450 1450
1451 1451 exts = list(extensions.extensions())
1452 1452 if exts and name != 'shortlist':
1453 1453 ui.write(_('\nenabled extensions:\n\n'))
1454 1454 maxlength = 0
1455 1455 exthelps = []
1456 1456 for ename, ext in exts:
1457 1457 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1458 1458 ename = ename.split('.')[-1]
1459 1459 maxlength = max(len(ename), maxlength)
1460 1460 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1461 1461 for ename, text in exthelps:
1462 1462 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1463 1463
1464 1464 if not ui.quiet:
1465 1465 addglobalopts(True)
1466 1466
1467 1467 def helptopic(name):
1468 1468 for names, header, doc in help.helptable:
1469 1469 if name in names:
1470 1470 break
1471 1471 else:
1472 1472 raise error.UnknownCommand(name)
1473 1473
1474 1474 # description
1475 1475 if not doc:
1476 1476 doc = _("(no help text available)")
1477 1477 if hasattr(doc, '__call__'):
1478 1478 doc = doc()
1479 1479
1480 1480 ui.write("%s\n" % header)
1481 1481 ui.write("%s\n" % doc.rstrip())
1482 1482
1483 1483 def helpext(name):
1484 1484 try:
1485 1485 mod = extensions.find(name)
1486 1486 except KeyError:
1487 1487 raise error.UnknownCommand(name)
1488 1488
1489 1489 doc = gettext(mod.__doc__) or _('no help text available')
1490 1490 doc = doc.splitlines(0)
1491 1491 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1492 1492 for d in doc[1:]:
1493 1493 ui.write(d, '\n')
1494 1494
1495 1495 ui.status('\n')
1496 1496
1497 1497 try:
1498 1498 ct = mod.cmdtable
1499 1499 except AttributeError:
1500 1500 ct = {}
1501 1501
1502 1502 modcmds = set([c.split('|', 1)[0] for c in ct])
1503 1503 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1504 1504
1505 1505 if name and name != 'shortlist':
1506 1506 i = None
1507 1507 for f in (helptopic, helpcmd, helpext):
1508 1508 try:
1509 1509 f(name)
1510 1510 i = None
1511 1511 break
1512 1512 except error.UnknownCommand, inst:
1513 1513 i = inst
1514 1514 if i:
1515 1515 raise i
1516 1516
1517 1517 else:
1518 1518 # program name
1519 1519 if ui.verbose or with_version:
1520 1520 version_(ui)
1521 1521 else:
1522 1522 ui.status(_("Mercurial Distributed SCM\n"))
1523 1523 ui.status('\n')
1524 1524
1525 1525 # list of commands
1526 1526 if name == "shortlist":
1527 1527 header = _('basic commands:\n\n')
1528 1528 else:
1529 1529 header = _('list of commands:\n\n')
1530 1530
1531 1531 helplist(header)
1532 1532
1533 1533 # list all option lists
1534 1534 opt_output = []
1535 1535 for title, options in option_lists:
1536 1536 opt_output.append(("\n%s" % title, None))
1537 1537 for shortopt, longopt, default, desc in options:
1538 1538 if "DEPRECATED" in desc and not ui.verbose: continue
1539 1539 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1540 1540 longopt and " --%s" % longopt),
1541 1541 "%s%s" % (desc,
1542 1542 default
1543 1543 and _(" (default: %s)") % default
1544 1544 or "")))
1545 1545
1546 1546 if not name:
1547 1547 ui.write(_("\nadditional help topics:\n\n"))
1548 1548 topics = []
1549 1549 for names, header, doc in help.helptable:
1550 1550 names = [(-len(name), name) for name in names]
1551 1551 names.sort()
1552 1552 topics.append((names[0][1], header))
1553 1553 topics_len = max([len(s[0]) for s in topics])
1554 1554 for t, desc in topics:
1555 1555 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1556 1556
1557 1557 if opt_output:
1558 1558 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1559 1559 for first, second in opt_output:
1560 1560 if second:
1561 1561 # wrap descriptions at 70 characters, just like the
1562 1562 # main help texts
1563 1563 second = textwrap.wrap(second, width=70 - opts_len - 3)
1564 1564 pad = '\n' + ' ' * (opts_len + 3)
1565 1565 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1566 1566 else:
1567 1567 ui.write("%s\n" % first)
1568 1568
1569 1569 def identify(ui, repo, source=None,
1570 1570 rev=None, num=None, id=None, branch=None, tags=None):
1571 1571 """identify the working copy or specified revision
1572 1572
1573 1573 With no revision, print a summary of the current state of the
1574 1574 repository.
1575 1575
1576 1576 With a path, do a lookup in another repository.
1577 1577
1578 1578 This summary identifies the repository state using one or two
1579 1579 parent hash identifiers, followed by a "+" if there are
1580 1580 uncommitted changes in the working directory, a list of tags for
1581 1581 this revision and a branch name for non-default branches.
1582 1582 """
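    # Hedged example of the summary assembled below (hash, tag and branch
    # names are hypothetical): output such as "b6c390eec739+ (stable) 1.0"
    # shows one parent hash, "+" for uncommitted changes, the non-default
    # branch in parentheses, then any tags joined by "/".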
1583 1583
1584 1584 if not repo and not source:
1585 1585 raise util.Abort(_("There is no Mercurial repository here "
1586 1586 "(.hg not found)"))
1587 1587
1588 1588 hexfunc = ui.debugflag and hex or short
1589 1589 default = not (num or id or branch or tags)
1590 1590 output = []
1591 1591
1592 1592 revs = []
1593 1593 if source:
1594 1594 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1595 1595 repo = hg.repository(ui, source)
1596 1596
1597 1597 if not repo.local():
1598 1598 if not rev and revs:
1599 1599 rev = revs[0]
1600 1600 if not rev:
1601 1601 rev = "tip"
1602 1602 if num or branch or tags:
1603 1603 raise util.Abort(
1604 1604 "can't query remote revision number, branch, or tags")
1605 1605 output = [hexfunc(repo.lookup(rev))]
1606 1606 elif not rev:
1607 1607 ctx = repo[None]
1608 1608 parents = ctx.parents()
1609 1609 changed = False
1610 1610 if default or id or num:
1611 1611 changed = ctx.files() + ctx.deleted()
1612 1612 if default or id:
1613 1613 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1614 1614 (changed) and "+" or "")]
1615 1615 if num:
1616 1616 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1617 1617 (changed) and "+" or ""))
1618 1618 else:
1619 1619 ctx = repo[rev]
1620 1620 if default or id:
1621 1621 output = [hexfunc(ctx.node())]
1622 1622 if num:
1623 1623 output.append(str(ctx.rev()))
1624 1624
1625 1625 if repo.local() and default and not ui.quiet:
1626 1626 b = encoding.tolocal(ctx.branch())
1627 1627 if b != 'default':
1628 1628 output.append("(%s)" % b)
1629 1629
1630 1630 # multiple tags for a single parent separated by '/'
1631 1631 t = "/".join(ctx.tags())
1632 1632 if t:
1633 1633 output.append(t)
1634 1634
1635 1635 if branch:
1636 1636 output.append(encoding.tolocal(ctx.branch()))
1637 1637
1638 1638 if tags:
1639 1639 output.extend(ctx.tags())
1640 1640
1641 1641 ui.write("%s\n" % ' '.join(output))
1642 1642
1643 1643 def import_(ui, repo, patch1, *patches, **opts):
1644 1644 """import an ordered set of patches
1645 1645
1646 1646 Import a list of patches and commit them individually.
1647 1647
1648 1648 If there are outstanding changes in the working directory, import
1649 1649 will abort unless given the -f/--force flag.
1650 1650
1651 1651 You can import a patch straight from a mail message. Even patches
1652 1652 as attachments work (body part must be type text/plain or
1653 1653 text/x-patch to be used). The From and Subject headers of the email
1654 1654 message are used as the default committer and commit message. All
1655 1655 text/plain body parts before the first diff are added to the commit
1656 1656 message.
1657 1657
1658 1658 If the imported patch was generated by hg export, user and
1659 1659 description from the patch override values from the message headers
1660 1660 and body. Values given on the command line with -m/--message and
1661 1661 -u/--user override these.
1662 1662
1663 1663 If --exact is specified, import will set the working directory to
1664 1664 the parent of each patch before applying it, and will abort if the
1665 1665 resulting changeset has a different ID than the one recorded in
1666 1666 the patch. This may happen due to character set problems or other
1667 1667 deficiencies in the text patch format.
1668 1668
1669 1669 With -s/--similarity, hg will attempt to discover renames and
1670 1670 copies in the patch in the same way as 'addremove'.
1671 1671
1672 1672 To read a patch from standard input, use patch name "-". See 'hg
1673 1673 help dates' for a list of formats valid for -d/--date.
1674 1674 """
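    # Hedged usage sketches (patch file names are illustrative):
    #   hg import fix-encoding.patch    # apply and commit a single patch
    #   hg export tip | hg import -     # read a patch from standard input
    #   hg import --exact export.patch  # update to the recorded parent and
    #                                   # abort on a changeset ID mismatch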
1675 1675 patches = (patch1,) + patches
1676 1676
1677 1677 date = opts.get('date')
1678 1678 if date:
1679 1679 opts['date'] = util.parsedate(date)
1680 1680
1681 1681 try:
1682 1682 sim = float(opts.get('similarity') or 0)
1683 1683 except ValueError:
1684 1684 raise util.Abort(_('similarity must be a number'))
1685 1685 if sim < 0 or sim > 100:
1686 1686 raise util.Abort(_('similarity must be between 0 and 100'))
1687 1687
1688 1688 if opts.get('exact') or not opts.get('force'):
1689 1689 cmdutil.bail_if_changed(repo)
1690 1690
1691 1691 d = opts["base"]
1692 1692 strip = opts["strip"]
1693 1693 wlock = lock = None
1694 1694 try:
1695 1695 wlock = repo.wlock()
1696 1696 lock = repo.lock()
1697 1697 for p in patches:
1698 1698 pf = os.path.join(d, p)
1699 1699
1700 1700 if pf == '-':
1701 1701 ui.status(_("applying patch from stdin\n"))
1702 1702 pf = sys.stdin
1703 1703 else:
1704 1704 ui.status(_("applying %s\n") % p)
1705 1705 pf = url.open(ui, pf)
1706 1706 data = patch.extract(ui, pf)
1707 1707 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1708 1708
1709 1709 if tmpname is None:
1710 1710 raise util.Abort(_('no diffs found'))
1711 1711
1712 1712 try:
1713 1713 cmdline_message = cmdutil.logmessage(opts)
1714 1714 if cmdline_message:
1715 1715 # pick up the command-line message
1716 1716 message = cmdline_message
1717 1717 elif message:
1718 1718 # pick up the patch message
1719 1719 message = message.strip()
1720 1720 else:
1721 1721 # launch the editor
1722 1722 message = None
1723 1723 ui.debug(_('message:\n%s\n') % message)
1724 1724
1725 1725 wp = repo.parents()
1726 1726 if opts.get('exact'):
1727 1727 if not nodeid or not p1:
1728 1728 raise util.Abort(_('not a mercurial patch'))
1729 1729 p1 = repo.lookup(p1)
1730 1730 p2 = repo.lookup(p2 or hex(nullid))
1731 1731
1732 1732 if p1 != wp[0].node():
1733 1733 hg.clean(repo, p1)
1734 1734 repo.dirstate.setparents(p1, p2)
1735 1735 elif p2:
1736 1736 try:
1737 1737 p1 = repo.lookup(p1)
1738 1738 p2 = repo.lookup(p2)
1739 1739 if p1 == wp[0].node():
1740 1740 repo.dirstate.setparents(p1, p2)
1741 1741 except error.RepoError:
1742 1742 pass
1743 1743 if opts.get('exact') or opts.get('import_branch'):
1744 1744 repo.dirstate.setbranch(branch or 'default')
1745 1745
1746 1746 files = {}
1747 1747 try:
1748 1748 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1749 1749 files=files)
1750 1750 finally:
1751 1751 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1752 1752 if not opts.get('no_commit'):
1753 1753 n = repo.commit(files, message, opts.get('user') or user,
1754 1754 opts.get('date') or date,
1755 1755 editor=cmdutil.commiteditor)
1756 1756 if opts.get('exact'):
1757 1757 if hex(n) != nodeid:
1758 1758 repo.rollback()
1759 1759 raise util.Abort(_('patch is damaged'
1760 1760 ' or loses information'))
1761 1761 # Force a dirstate write so that the next transaction
1762 1762 # backs up an up-to-date file.
1763 1763 repo.dirstate.write()
1764 1764 finally:
1765 1765 os.unlink(tmpname)
1766 1766 finally:
1767 1767 release(lock, wlock)
1768 1768
1769 1769 def incoming(ui, repo, source="default", **opts):
1770 1770 """show new changesets found in source
1771 1771
1772 1772 Show new changesets found in the specified path/URL or the default
1773 1773 pull location. These are the changesets that would be pulled if a
1774 1774 pull was requested.
1775 1775
1776 1776 For a remote repository, using --bundle avoids downloading the
1777 1777 changesets twice if the incoming is followed by a pull.
1778 1778
1779 1779 See pull for valid source format details.
1780 1780 """
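    # Hedged example: "hg incoming --bundle incoming.hg" keeps the fetched
    # changegroup on disk (via the writebundle() call below) so that a
    # follow-up "hg pull incoming.hg" does not download the data again.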
1781 1781 limit = cmdutil.loglimit(opts)
1782 1782 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1783 1783 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1784 1784 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1785 1785 if revs:
1786 1786 revs = [other.lookup(rev) for rev in revs]
1787 1787 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1788 1788 force=opts["force"])
1789 1789 if not incoming:
1790 1790 try:
1791 1791 os.unlink(opts["bundle"])
1792 1792 except:
1793 1793 pass
1794 1794 ui.status(_("no changes found\n"))
1795 1795 return 1
1796 1796
1797 1797 cleanup = None
1798 1798 try:
1799 1799 fname = opts["bundle"]
1800 1800 if fname or not other.local():
1801 1801 # create a bundle (uncompressed if other repo is not local)
1802 1802
1803 1803 if revs is None and other.capable('changegroupsubset'):
1804 1804 revs = rheads
1805 1805
1806 1806 if revs is None:
1807 1807 cg = other.changegroup(incoming, "incoming")
1808 1808 else:
1809 1809 cg = other.changegroupsubset(incoming, revs, 'incoming')
1810 1810 bundletype = other.local() and "HG10BZ" or "HG10UN"
1811 1811 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1812 1812 # keep written bundle?
1813 1813 if opts["bundle"]:
1814 1814 cleanup = None
1815 1815 if not other.local():
1816 1816 # use the created uncompressed bundlerepo
1817 1817 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1818 1818
1819 1819 o = other.changelog.nodesbetween(incoming, revs)[0]
1820 1820 if opts.get('newest_first'):
1821 1821 o.reverse()
1822 1822 displayer = cmdutil.show_changeset(ui, other, opts)
1823 1823 count = 0
1824 1824 for n in o:
1825 1825 if count >= limit:
1826 1826 break
1827 1827 parents = [p for p in other.changelog.parents(n) if p != nullid]
1828 1828 if opts.get('no_merges') and len(parents) == 2:
1829 1829 continue
1830 1830 count += 1
1831 1831 displayer.show(other[n])
1832 1832 finally:
1833 1833 if hasattr(other, 'close'):
1834 1834 other.close()
1835 1835 if cleanup:
1836 1836 os.unlink(cleanup)
1837 1837
1838 1838 def init(ui, dest=".", **opts):
1839 1839 """create a new repository in the given directory
1840 1840
1841 1841 Initialize a new repository in the given directory. If the given
1842 1842 directory does not exist, it is created.
1843 1843
1844 1844 If no directory is given, the current directory is used.
1845 1845
1846 1846 It is possible to specify an ssh:// URL as the destination.
1847 1847 See 'hg help urls' for more information.
1848 1848 """
1849 1849 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1850 1850
1851 1851 def locate(ui, repo, *pats, **opts):
1852 1852 """locate files matching specific patterns
1853 1853
1854 1854 Print all files under Mercurial control whose names match the
1855 1855 given patterns.
1856 1856
1857 1857 This command searches the entire repository by default. To search
1858 1858 just the current directory and its subdirectories, use
1859 1859 "--include .".
1860 1860
1861 1861 If no patterns are given to match, this command prints all file
1862 1862 names.
1863 1863
1864 1864 If you want to feed the output of this command into the "xargs"
1865 1865 command, use the -0 option to both this command and "xargs". This
1866 1866 will avoid the problem of "xargs" treating single filenames that
1867 1867 contain white space as multiple filenames.
1868 1868 """
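    # Hedged examples: "hg locate '*.py'" lists tracked Python files
    # repository-wide, "hg locate -I ." restricts the search to the current
    # directory, and "hg locate -0 | xargs -0 wc -l" pairs the -0/--print0
    # option with "xargs -0" as suggested above.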
1869 1869 end = opts.get('print0') and '\0' or '\n'
1870 1870 rev = opts.get('rev') or None
1871 1871
1872 1872 ret = 1
1873 1873 m = cmdutil.match(repo, pats, opts, default='relglob')
1874 1874 m.bad = lambda x,y: False
1875 1875 for abs in repo[rev].walk(m):
1876 1876 if not rev and abs not in repo.dirstate:
1877 1877 continue
1878 1878 if opts.get('fullpath'):
1879 1879 ui.write(repo.wjoin(abs), end)
1880 1880 else:
1881 1881 ui.write(((pats and m.rel(abs)) or abs), end)
1882 1882 ret = 0
1883 1883
1884 1884 return ret
1885 1885
1886 1886 def log(ui, repo, *pats, **opts):
1887 1887 """show revision history of entire repository or files
1888 1888
1889 1889 Print the revision history of the specified files or the entire
1890 1890 project.
1891 1891
1892 1892 File history is shown without following rename or copy history of
1893 1893 files. Use -f/--follow with a file name to follow history across
1894 1894 renames and copies. --follow without a file name will only show
1895 1895 ancestors or descendants of the starting revision. --follow-first
1896 1896 only follows the first parent of merge revisions.
1897 1897
1898 1898 If no revision range is specified, the default is tip:0 unless
1899 1899 --follow is set, in which case the working directory parent is
1900 1900 used as the starting revision.
1901 1901
1902 1902 See 'hg help dates' for a list of formats valid for -d/--date.
1903 1903
1904 1904 By default this command outputs: changeset id and hash, tags,
1905 1905 non-trivial parents, user, date and time, and a summary for each
1906 1906 commit. When the -v/--verbose switch is used, the list of changed
1907 1907 files and full commit message is shown.
1908 1908
1909 1909 NOTE: log -p/--patch may generate unexpected diff output for merge
1910 1910 changesets, as it will only compare the merge changeset against
1911 1911 its first parent. Also, the files: list will only reflect files
1912 1912 that are different from BOTH parents.
1913 1913
1914 1914 """
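    # Hedged examples (file and revision names are illustrative):
    # "hg log -f src/main.c" follows a file across renames,
    # "hg log -r 1.0:tip -k bug -u alice" combines a revision range with the
    # keyword and user filters applied in the loop below, and "hg log -p"
    # adds patches (with the merge caveat noted above).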
1915 1915
1916 1916 get = util.cachefunc(lambda r: repo[r].changeset())
1917 1917 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1918 1918
1919 1919 limit = cmdutil.loglimit(opts)
1920 1920 count = 0
1921 1921
1922 1922 if opts.get('copies') and opts.get('rev'):
1923 1923 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1924 1924 else:
1925 1925 endrev = len(repo)
1926 1926 rcache = {}
1927 1927 ncache = {}
1928 1928 def getrenamed(fn, rev):
1929 1929 '''looks up all renames for a file (up to endrev) the first
1930 1930 time the file is given. It indexes on the changerev and only
1931 1931 parses the manifest if linkrev != changerev.
1932 1932 Returns rename info for fn at changerev rev.'''
1933 1933 if fn not in rcache:
1934 1934 rcache[fn] = {}
1935 1935 ncache[fn] = {}
1936 1936 fl = repo.file(fn)
1937 1937 for i in fl:
1938 1938 node = fl.node(i)
1939 1939 lr = fl.linkrev(i)
1940 1940 renamed = fl.renamed(node)
1941 1941 rcache[fn][lr] = renamed
1942 1942 if renamed:
1943 1943 ncache[fn][node] = renamed
1944 1944 if lr >= endrev:
1945 1945 break
1946 1946 if rev in rcache[fn]:
1947 1947 return rcache[fn][rev]
1948 1948
1949 1949 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1950 1950 # filectx logic.
1951 1951
1952 1952 try:
1953 1953 return repo[rev][fn].renamed()
1954 1954 except error.LookupError:
1955 1955 pass
1956 1956 return None
1957 1957
1958 1958 df = False
1959 1959 if opts["date"]:
1960 1960 df = util.matchdate(opts["date"])
1961 1961
1962 1962 only_branches = opts.get('only_branch')
1963 1963
1964 1964 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1965 1965 for st, rev, fns in changeiter:
1966 1966 if st == 'add':
1967 1967 parents = [p for p in repo.changelog.parentrevs(rev)
1968 1968 if p != nullrev]
1969 1969 if opts.get('no_merges') and len(parents) == 2:
1970 1970 continue
1971 1971 if opts.get('only_merges') and len(parents) != 2:
1972 1972 continue
1973 1973
1974 1974 if only_branches:
1975 1975 revbranch = get(rev)[5]['branch']
1976 1976 if revbranch not in only_branches:
1977 1977 continue
1978 1978
1979 1979 if df:
1980 1980 changes = get(rev)
1981 1981 if not df(changes[2][0]):
1982 1982 continue
1983 1983
1984 1984 if opts.get('keyword'):
1985 1985 changes = get(rev)
1986 1986 miss = 0
1987 1987 for k in [kw.lower() for kw in opts['keyword']]:
1988 1988 if not (k in changes[1].lower() or
1989 1989 k in changes[4].lower() or
1990 1990 k in " ".join(changes[3]).lower()):
1991 1991 miss = 1
1992 1992 break
1993 1993 if miss:
1994 1994 continue
1995 1995
1996 1996 if opts['user']:
1997 1997 changes = get(rev)
1998 1998 if not [k for k in opts['user'] if k in changes[1]]:
1999 1999 continue
2000 2000
2001 2001 copies = []
2002 2002 if opts.get('copies') and rev:
2003 2003 for fn in get(rev)[3]:
2004 2004 rename = getrenamed(fn, rev)
2005 2005 if rename:
2006 2006 copies.append((fn, rename[0]))
2007 2007 displayer.show(context.changectx(repo, rev), copies=copies)
2008 2008 elif st == 'iter':
2009 2009 if count == limit: break
2010 2010 if displayer.flush(rev):
2011 2011 count += 1
2012 2012
2013 2013 def manifest(ui, repo, node=None, rev=None):
2014 2014 """output the current or given revision of the project manifest
2015 2015
2016 2016 Print a list of version controlled files for the given revision.
2017 2017 If no revision is given, the first parent of the working directory
2018 2018 is used, or the null revision if none is checked out.
2019 2019
2020 2020 With the -v flag, print file permissions, symlink and executable bits.
2021 2021 With the --debug flag, print file revision hashes.
2022 2022 """
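    # Hedged example: with -v the decor table below yields lines such as
    # "755 * tools/run.sh" for an executable file or "644 @ docs/link" for a
    # symlink (paths are hypothetical); --debug prepends the 40-character
    # file revision hash.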
2023 2023
2024 2024 if rev and node:
2025 2025 raise util.Abort(_("please specify just one revision"))
2026 2026
2027 2027 if not node:
2028 2028 node = rev
2029 2029
2030 2030 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2031 2031 ctx = repo[node]
2032 2032 for f in ctx:
2033 2033 if ui.debugflag:
2034 2034 ui.write("%40s " % hex(ctx.manifest()[f]))
2035 2035 if ui.verbose:
2036 2036 ui.write(decor[ctx.flags(f)])
2037 2037 ui.write("%s\n" % f)
2038 2038
2039 2039 def merge(ui, repo, node=None, **opts):
2040 2040 """merge working directory with another revision
2041 2041
2042 2042 The contents of the current working directory are updated with all
2043 2043 changes made in the requested revision since the last common
2044 2044 predecessor revision.
2045 2045
2046 2046 Files that changed between either parent are marked as changed for
2047 2047 the next commit and a commit must be performed before any further
2048 2048 updates are allowed. The next commit has two parents.
2049 2049
2050 2050 If no revision is specified, the working directory's parent is a
2051 2051 head revision, and the current branch contains exactly one other
2052 2052 head, then that other head is merged by default. Otherwise, an
2053 2053 explicit revision to merge with must be provided.
2054 2054 """
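    # Hedged example: on a branch with exactly two heads and the working
    # directory at one of them, plain "hg merge" picks the other head via the
    # bheads logic below; with more heads, an explicit "hg merge -r REV" must
    # name the head to merge with.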
2055 2055
2056 2056 if opts.get('rev') and node:
2057 2057 raise util.Abort(_("please specify just one revision"))
2058 2058 if not node:
2059 2059 node = opts.get('rev')
2060 2060
2061 2061 if not node:
2062 2062 branch = repo.changectx(None).branch()
2063 2063 bheads = repo.branchheads(branch)
2064 2064 if len(bheads) > 2:
2065 2065 raise util.Abort(_("branch '%s' has %d heads - "
2066 2066 "please merge with an explicit rev") %
2067 2067 (branch, len(bheads)))
2068 2068
2069 2069 parent = repo.dirstate.parents()[0]
2070 2070 if len(bheads) == 1:
2071 2071 if len(repo.heads()) > 1:
2072 2072 raise util.Abort(_("branch '%s' has one head - "
2073 2073 "please merge with an explicit rev") %
2074 2074 branch)
2075 2075 msg = _('there is nothing to merge')
2076 2076 if parent != repo.lookup(repo[None].branch()):
2077 2077 msg = _('%s - use "hg update" instead') % msg
2078 2078 raise util.Abort(msg)
2079 2079
2080 2080 if parent not in bheads:
2081 2081 raise util.Abort(_('working dir not at a head rev - '
2082 2082 'use "hg update" or merge with an explicit rev'))
2083 2083 node = parent == bheads[0] and bheads[-1] or bheads[0]
2084 2084
2085 2085 if opts.get('show'):
2086 2086 p1 = repo['.']
2087 2087 p2 = repo[node]
2088 2088 common = p1.ancestor(p2)
2089 2089 roots, heads = [common.node()], [p2.node()]
2090 2090 displayer = cmdutil.show_changeset(ui, repo, opts)
2091 2091 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2092 2092 displayer.show(repo[node])
2093 2093 return 0
2094 2094
2095 2095 return hg.merge(repo, node, force=opts.get('force'))
2096 2096
2097 2097 def outgoing(ui, repo, dest=None, **opts):
2098 2098 """show changesets not found in destination
2099 2099
2100 2100 Show changesets not found in the specified destination repository
2101 2101 or the default push location. These are the changesets that would
2102 2102 be pushed if a push was requested.
2103 2103
2104 2104 See pull for valid destination format details.
2105 2105 """
2106 2106 limit = cmdutil.loglimit(opts)
2107 2107 dest, revs, checkout = hg.parseurl(
2108 2108 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2109 2109 if revs:
2110 2110 revs = [repo.lookup(rev) for rev in revs]
2111 2111
2112 2112 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2113 2113 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2114 2114 o = repo.findoutgoing(other, force=opts.get('force'))
2115 2115 if not o:
2116 2116 ui.status(_("no changes found\n"))
2117 2117 return 1
2118 2118 o = repo.changelog.nodesbetween(o, revs)[0]
2119 2119 if opts.get('newest_first'):
2120 2120 o.reverse()
2121 2121 displayer = cmdutil.show_changeset(ui, repo, opts)
2122 2122 count = 0
2123 2123 for n in o:
2124 2124 if count >= limit:
2125 2125 break
2126 2126 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2127 2127 if opts.get('no_merges') and len(parents) == 2:
2128 2128 continue
2129 2129 count += 1
2130 2130 displayer.show(repo[n])
2131 2131
2132 2132 def parents(ui, repo, file_=None, **opts):
2133 2133 """show the parents of the working directory or revision
2134 2134
2135 2135 Print the working directory's parent revisions. If a revision is
2136 2136 given via -r/--rev, the parent of that revision will be printed.
2137 2137 If a file argument is given, the revision in which the file was last
2138 2138 changed (before the working directory revision or the argument to
2139 2139 --rev if given) is printed.
2140 2140 """
2141 2141 rev = opts.get('rev')
2142 2142 if rev:
2143 2143 ctx = repo[rev]
2144 2144 else:
2145 2145 ctx = repo[None]
2146 2146
2147 2147 if file_:
2148 2148 m = cmdutil.match(repo, (file_,), opts)
2149 2149 if m.anypats() or len(m.files()) != 1:
2150 2150 raise util.Abort(_('can only specify an explicit file name'))
2151 2151 file_ = m.files()[0]
2152 2152 filenodes = []
2153 2153 for cp in ctx.parents():
2154 2154 if not cp:
2155 2155 continue
2156 2156 try:
2157 2157 filenodes.append(cp.filenode(file_))
2158 2158 except error.LookupError:
2159 2159 pass
2160 2160 if not filenodes:
2161 2161 raise util.Abort(_("'%s' not found in manifest!") % file_)
2162 2162 fl = repo.file(file_)
2163 2163 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2164 2164 else:
2165 2165 p = [cp.node() for cp in ctx.parents()]
2166 2166
2167 2167 displayer = cmdutil.show_changeset(ui, repo, opts)
2168 2168 for n in p:
2169 2169 if n != nullid:
2170 2170 displayer.show(repo[n])
2171 2171
2172 2172 def paths(ui, repo, search=None):
2173 2173 """show aliases for remote repositories
2174 2174
2175 2175 Show the definition of the symbolic path name NAME. If no name is
2176 2176 given, show the definitions of all available names.
2177 2177
2178 2178 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2179 2179 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2180 2180
2181 2181 See 'hg help urls' for more information.
2182 2182 """
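    # Hedged example of the [paths] section this command reads (URLs are
    # illustrative), typically placed in .hg/hgrc or $HOME/.hgrc:
    #   [paths]
    #   default = http://hg.example.com/project
    #   default-push = ssh://user@hg.example.com//srv/hg/project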
2183 2183 if search:
2184 2184 for name, path in ui.configitems("paths"):
2185 2185 if name == search:
2186 2186 ui.write("%s\n" % url.hidepassword(path))
2187 2187 return
2188 2188 ui.warn(_("not found!\n"))
2189 2189 return 1
2190 2190 else:
2191 2191 for name, path in ui.configitems("paths"):
2192 2192 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2193 2193
2194 2194 def postincoming(ui, repo, modheads, optupdate, checkout):
2195 2195 if modheads == 0:
2196 2196 return
2197 2197 if optupdate:
2198 2198 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2199 2199 return hg.update(repo, checkout)
2200 2200 else:
2201 2201 ui.status(_("not updating, since new heads added\n"))
2202 2202 if modheads > 1:
2203 2203 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2204 2204 else:
2205 2205 ui.status(_("(run 'hg update' to get a working copy)\n"))
2206 2206
2207 2207 def pull(ui, repo, source="default", **opts):
2208 2208 """pull changes from the specified source
2209 2209
2210 2210 Pull changes from a remote repository to the local one.
2211 2211
2212 2212 This finds all changes from the repository at the specified path
2213 2213 or URL and adds them to the local repository. By default, this
2214 2214 does not update the copy of the project in the working directory.
2215 2215
2216 2216 Use hg incoming if you want to see what will be added by the next
2217 2217 pull without actually adding the changes to the repository.
2218 2218
2219 2219 If SOURCE is omitted, the 'default' path will be used.
2220 2220 See 'hg help urls' for more information.
2221 2221 """
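    # Hedged examples: "hg pull" uses the 'default' path, "hg pull -u" also
    # updates the working directory via postincoming(), and
    # "hg pull -r 1.0 http://hg.example.com/project" pulls only the named
    # revision and its ancestors.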
2222 2222 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2223 2223 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2224 2224 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2225 2225 if revs:
2226 2226 try:
2227 2227 revs = [other.lookup(rev) for rev in revs]
2228 2228 except error.CapabilityError:
2229 2229 err = _("Other repository doesn't support revision lookup, "
2230 2230 "so a rev cannot be specified.")
2231 2231 raise util.Abort(err)
2232 2232
2233 2233 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2234 2234 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2235 2235
2236 2236 def push(ui, repo, dest=None, **opts):
2237 2237 """push changes to the specified destination
2238 2238
2239 2239 Push changes from the local repository to the given destination.
2240 2240
2241 2241 This is the symmetrical operation for pull. It moves changes from
2242 2242 the current repository to a different one. If the destination is
2243 2243 local this is identical to a pull in that directory from the
2244 2244 current one.
2245 2245
2246 2246 By default, push will refuse to run if it detects the result would
2247 2247 increase the number of remote heads. This generally indicates that
2248 2248 the client has forgotten to pull and merge before pushing.
2249 2249
2250 2250 If -r/--rev is used, the named revision and all its ancestors will
2251 2251 be pushed to the remote repository.
2252 2252
2253 2253 Look at the help text for URLs for important details about ssh://
2254 2254 URLs. If DESTINATION is omitted, a default path will be used.
2255 2255 See 'hg help urls' for more information.
2256 2256 """
2257 2257 dest, revs, checkout = hg.parseurl(
2258 2258 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2259 2259 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2260 2260 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2261 2261 if revs:
2262 2262 revs = [repo.lookup(rev) for rev in revs]
2263 2263 r = repo.push(other, opts.get('force'), revs=revs)
2264 2264 return r == 0
2265 2265
2266 2266 def recover(ui, repo):
2267 2267 """roll back an interrupted transaction
2268 2268
2269 2269 Recover from an interrupted commit or pull.
2270 2270
2271 2271 This command tries to fix the repository status after an
2272 2272 interrupted operation. It should only be necessary when Mercurial
2273 2273 suggests it.
2274 2274 """
2275 2275 if repo.recover():
2276 2276 return hg.verify(repo)
2277 2277 return 1
2278 2278
2279 2279 def remove(ui, repo, *pats, **opts):
2280 2280 """remove the specified files on the next commit
2281 2281
2282 2282 Schedule the indicated files for removal from the repository.
2283 2283
2284 2284 This only removes files from the current branch, not from the
2285 2285 entire project history. -A/--after can be used to remove only
2286 2286 files that have already been deleted, -f/--force can be used to
2287 2287 force deletion, and -Af can be used to remove files from the next
2288 2288 revision without deleting them.
2289 2289
2290 2290 The following table details the behavior of remove for different
2291 2291 file states (columns) and option combinations (rows). The file
2292 2292 states are Added, Clean, Modified and Missing (as reported by hg
2293 2293 status). The actions are Warn, Remove (from branch) and Delete
2294 2294 (from disk).
2295 2295
2296 2296        A  C  M  !
2297 2297 none   W  RD W  R
2298 2298 -f     R  RD RD R
2299 2299 -A     W  W  W  R
2300 2300 -Af    R  R  R  R
2301 2301
2302 2302 This command schedules the files to be removed at the next commit.
2303 2303 To undo a remove before that, see hg revert.
2304 2304 """
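    # Hedged reading of the table above: for a Modified file, plain
    # "hg remove" only warns, "hg remove -f" removes it from the branch and
    # deletes it from disk, and "hg remove -Af" removes it from the next
    # revision while leaving the file on disk.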
2305 2305
2306 2306 after, force = opts.get('after'), opts.get('force')
2307 2307 if not pats and not after:
2308 2308 raise util.Abort(_('no files specified'))
2309 2309
2310 2310 m = cmdutil.match(repo, pats, opts)
2311 2311 s = repo.status(match=m, clean=True)
2312 2312 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2313 2313
2314 2314 def warn(files, reason):
2315 2315 for f in files:
2316 2316 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2317 2317 % (m.rel(f), reason))
2318 2318
2319 2319 if force:
2320 2320 remove, forget = modified + deleted + clean, added
2321 2321 elif after:
2322 2322 remove, forget = deleted, []
2323 2323 warn(modified + added + clean, _('still exists'))
2324 2324 else:
2325 2325 remove, forget = deleted + clean, []
2326 2326 warn(modified, _('is modified'))
2327 2327 warn(added, _('has been marked for add'))
2328 2328
2329 2329 for f in sorted(remove + forget):
2330 2330 if ui.verbose or not m.exact(f):
2331 2331 ui.status(_('removing %s\n') % m.rel(f))
2332 2332
2333 2333 repo.forget(forget)
2334 2334 repo.remove(remove, unlink=not after)
2335 2335
2336 2336 def rename(ui, repo, *pats, **opts):
2337 2337 """rename files; equivalent of copy + remove
2338 2338
2339 2339 Mark dest as copies of sources; mark sources for deletion. If dest
2340 2340 is a directory, copies are put in that directory. If dest is a
2341 2341 file, there can only be one source.
2342 2342
2343 2343 By default, this command copies the contents of files as they
2344 2344 exist in the working directory. If invoked with -A/--after, the
2345 2345 operation is recorded, but no copying is performed.
2346 2346
2347 2347 This command takes effect at the next commit. To undo a rename
2348 2348 before that, see hg revert.
2349 2349 """
2350 2350 wlock = repo.wlock(False)
2351 2351 try:
2352 2352 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2353 2353 finally:
2354 2354 wlock.release()
2355 2355
2356 2356 def resolve(ui, repo, *pats, **opts):
2357 2357 """retry file merges from a merge or update
2358 2358
2359 2359 This command will cleanly retry unresolved file merges using file
2360 2360 revisions preserved from the last update or merge. To attempt to
2361 2361 resolve all unresolved files, use the -a/--all switch.
2362 2362
2363 2363 If a conflict is resolved manually, please note that the changes
2364 2364 will be overwritten if the merge is retried with resolve. The
2365 2365 -m/--mark switch should be used to mark the file as resolved.
2366 2366
2367 2367 This command will also allow listing resolved files and manually
2368 2368 marking and unmarking files as resolved. All files must be marked
2369 2369 as resolved before new commits are permitted.
2370 2370
2371 2371 The codes used to show the status of files are:
2372 2372 U = unresolved
2373 2373 R = resolved
2374 2374 """
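    # Hedged examples: "hg resolve --list" shows the U/R states, "hg resolve
    # -m FILE" marks a hand-edited conflict as resolved, "hg resolve --unmark
    # FILE" reverses that, and "hg resolve -a" retries the merge for every
    # file recorded in the merge state below.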
2375 2375
2376 2376 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2377 2377
2378 2378 if (show and (mark or unmark)) or (mark and unmark):
2379 2379 raise util.Abort(_("too many options specified"))
2380 2380 if pats and all:
2381 2381 raise util.Abort(_("can't specify --all and patterns"))
2382 2382 if not (all or pats or show or mark or unmark):
2383 2383 raise util.Abort(_('no files or directories specified; '
2384 2384 'use --all to remerge all files'))
2385 2385
2386 2386 ms = merge_.mergestate(repo)
2387 2387 m = cmdutil.match(repo, pats, opts)
2388 2388
2389 2389 for f in ms:
2390 2390 if m(f):
2391 2391 if show:
2392 2392 ui.write("%s %s\n" % (ms[f].upper(), f))
2393 2393 elif mark:
2394 2394 ms.mark(f, "r")
2395 2395 elif unmark:
2396 2396 ms.mark(f, "u")
2397 2397 else:
2398 2398 wctx = repo[None]
2399 2399 mctx = wctx.parents()[-1]
2400 2400
2401 2401 # backup pre-resolve (merge uses .orig for its own purposes)
2402 2402 a = repo.wjoin(f)
2403 2403 util.copyfile(a, a + ".resolve")
2404 2404
2405 2405 # resolve file
2406 2406 ms.resolve(f, wctx, mctx)
2407 2407
2408 2408 # replace filemerge's .orig file with our resolve file
2409 2409 util.rename(a + ".resolve", a + ".orig")
2410 2410
2411 2411 def revert(ui, repo, *pats, **opts):
2412 2412 """restore individual files or directories to an earlier state
2413 2413
2414 2414 (Use update -r to check out earlier revisions; revert does not
2415 2415 change the working directory parents.)
2416 2416
2417 2417 With no revision specified, revert the named files or directories
2418 2418 to the contents they had in the parent of the working directory.
2419 2419 This restores the contents of the affected files to an unmodified
2420 2420 state and unschedules adds, removes, copies, and renames. If the
2421 2421 working directory has two parents, you must explicitly specify the
2422 2422 revision to revert to.
2423 2423
2424 2424 Using the -r/--rev option, revert the given files or directories
2425 2425 to their contents as of a specific revision. This can be helpful
2426 2426 to "roll back" some or all of an earlier change. See 'hg help
2427 2427 dates' for a list of formats valid for -d/--date.
2428 2428
2429 2429 Revert modifies the working directory. It does not commit any
2430 2430 changes, or change the parent of the working directory. If you
2431 2431 revert to a revision other than the parent of the working
2432 2432 directory, the reverted files will thus appear modified
2433 2433 afterwards.
2434 2434
2435 2435 If a file has been deleted, it is restored. If the executable mode
2436 2436 of a file was changed, it is reset.
2437 2437
2438 2438 If names are given, all files matching the names are reverted.
2439 2439 If no arguments are given, no files are reverted.
2440 2440
2441 2441 Modified files are saved with a .orig suffix before reverting.
2442 2442 To disable these backups, use --no-backup.
2443 2443 """
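    # Hedged examples (file names are illustrative): "hg revert foo.c"
    # restores foo.c to the working directory's parent, "hg revert -r 42
    # foo.c" restores it as of revision 42 (after which it shows up as
    # modified), and "hg revert --all" processes the whole working directory.
    # The previous contents are kept as foo.c.orig unless --no-backup is set.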
2444 2444
2445 2445 if opts["date"]:
2446 2446 if opts["rev"]:
2447 2447 raise util.Abort(_("you can't specify a revision and a date"))
2448 2448 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2449 2449
2450 2450 if not pats and not opts.get('all'):
2451 2451 raise util.Abort(_('no files or directories specified; '
2452 2452 'use --all to revert the whole repo'))
2453 2453
2454 2454 parent, p2 = repo.dirstate.parents()
2455 2455 if not opts.get('rev') and p2 != nullid:
2456 2456 raise util.Abort(_('uncommitted merge - please provide a '
2457 2457 'specific revision'))
2458 2458 ctx = repo[opts.get('rev')]
2459 2459 node = ctx.node()
2460 2460 mf = ctx.manifest()
2461 2461 if node == parent:
2462 2462 pmf = mf
2463 2463 else:
2464 2464 pmf = None
2465 2465
2466 2466 # We need all matching names in both the dirstate and the manifest of
2467 2467 # the target rev, so walk both. Do not print errors if files exist in
2468 2468 # one but not the other.
2469 2469
2470 2470 names = {}
2471 2471
2472 2472 wlock = repo.wlock()
2473 2473 try:
2474 2474 # walk dirstate.
2475 2475
2476 2476 m = cmdutil.match(repo, pats, opts)
2477 2477 m.bad = lambda x,y: False
2478 2478 for abs in repo.walk(m):
2479 2479 names[abs] = m.rel(abs), m.exact(abs)
2480 2480
2481 2481 # walk target manifest.
2482 2482
2483 2483 def badfn(path, msg):
2484 2484 if path in names:
2485 2485 return False
2486 2486 path_ = path + '/'
2487 2487 for f in names:
2488 2488 if f.startswith(path_):
2489 2489 return False
2490 2490 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2491 2491 return False
2492 2492
2493 2493 m = cmdutil.match(repo, pats, opts)
2494 2494 m.bad = badfn
2495 2495 for abs in repo[node].walk(m):
2496 2496 if abs not in names:
2497 2497 names[abs] = m.rel(abs), m.exact(abs)
2498 2498
2499 2499 m = cmdutil.matchfiles(repo, names)
2500 2500 changes = repo.status(match=m)[:4]
2501 2501 modified, added, removed, deleted = map(set, changes)
2502 2502
2503 2503 # if f is a rename, also revert the source
2504 2504 cwd = repo.getcwd()
2505 2505 for f in added:
2506 2506 src = repo.dirstate.copied(f)
2507 2507 if src and src not in names and repo.dirstate[src] == 'r':
2508 2508 removed.add(src)
2509 2509 names[src] = (repo.pathto(src, cwd), True)
2510 2510
2511 2511 def removeforget(abs):
2512 2512 if repo.dirstate[abs] == 'a':
2513 2513 return _('forgetting %s\n')
2514 2514 return _('removing %s\n')
2515 2515
2516 2516 revert = ([], _('reverting %s\n'))
2517 2517 add = ([], _('adding %s\n'))
2518 2518 remove = ([], removeforget)
2519 2519 undelete = ([], _('undeleting %s\n'))
2520 2520
2521 2521 disptable = (
2522 2522 # dispatch table:
2523 2523 # file state
2524 2524 # action if in target manifest
2525 2525 # action if not in target manifest
2526 2526 # make backup if in target manifest
2527 2527 # make backup if not in target manifest
2528 2528 (modified, revert, remove, True, True),
2529 2529 (added, revert, remove, True, False),
2530 2530 (removed, undelete, None, False, False),
2531 2531 (deleted, revert, remove, False, False),
2532 2532 )
2533 2533
2534 2534 for abs, (rel, exact) in sorted(names.items()):
2535 2535 mfentry = mf.get(abs)
2536 2536 target = repo.wjoin(abs)
2537 2537 def handle(xlist, dobackup):
2538 2538 xlist[0].append(abs)
2539 2539 if dobackup and not opts.get('no_backup') and util.lexists(target):
2540 2540 bakname = "%s.orig" % rel
2541 2541 ui.note(_('saving current version of %s as %s\n') %
2542 2542 (rel, bakname))
2543 2543 if not opts.get('dry_run'):
2544 2544 util.copyfile(target, bakname)
2545 2545 if ui.verbose or not exact:
2546 2546 msg = xlist[1]
2547 2547 if not isinstance(msg, basestring):
2548 2548 msg = msg(abs)
2549 2549 ui.status(msg % rel)
2550 2550 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2551 2551 if abs not in table: continue
2552 2552 # file has changed in dirstate
2553 2553 if mfentry:
2554 2554 handle(hitlist, backuphit)
2555 2555 elif misslist is not None:
2556 2556 handle(misslist, backupmiss)
2557 2557 break
2558 2558 else:
2559 2559 if abs not in repo.dirstate:
2560 2560 if mfentry:
2561 2561 handle(add, True)
2562 2562 elif exact:
2563 2563 ui.warn(_('file not managed: %s\n') % rel)
2564 2564 continue
2565 2565 # file has not changed in dirstate
2566 2566 if node == parent:
2567 2567 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2568 2568 continue
2569 2569 if pmf is None:
2570 2570 # only need parent manifest in this unlikely case,
2571 2571 # so do not read by default
2572 2572 pmf = repo[parent].manifest()
2573 2573 if abs in pmf:
2574 2574 if mfentry:
2575 2575 # if version of file is same in parent and target
2576 2576 # manifests, do nothing
2577 2577 if (pmf[abs] != mfentry or
2578 2578 pmf.flags(abs) != mf.flags(abs)):
2579 2579 handle(revert, False)
2580 2580 else:
2581 2581 handle(remove, False)
2582 2582
2583 2583 if not opts.get('dry_run'):
2584 2584 def checkout(f):
2585 2585 fc = ctx[f]
2586 2586 repo.wwrite(f, fc.data(), fc.flags())
2587 2587
2588 2588 audit_path = util.path_auditor(repo.root)
2589 2589 for f in remove[0]:
2590 2590 if repo.dirstate[f] == 'a':
2591 2591 repo.dirstate.forget(f)
2592 2592 continue
2593 2593 audit_path(f)
2594 2594 try:
2595 2595 util.unlink(repo.wjoin(f))
2596 2596 except OSError:
2597 2597 pass
2598 2598 repo.dirstate.remove(f)
2599 2599
2600 2600 normal = None
2601 2601 if node == parent:
2602 2602 # We're reverting to our parent. If possible, we'd like status
2603 2603 # to report the file as clean. We have to use normallookup for
2604 2604 # merges to avoid losing information about merged/dirty files.
2605 2605 if p2 != nullid:
2606 2606 normal = repo.dirstate.normallookup
2607 2607 else:
2608 2608 normal = repo.dirstate.normal
2609 2609 for f in revert[0]:
2610 2610 checkout(f)
2611 2611 if normal:
2612 2612 normal(f)
2613 2613
2614 2614 for f in add[0]:
2615 2615 checkout(f)
2616 2616 repo.dirstate.add(f)
2617 2617
2618 2618 normal = repo.dirstate.normallookup
2619 2619 if node == parent and p2 == nullid:
2620 2620 normal = repo.dirstate.normal
2621 2621 for f in undelete[0]:
2622 2622 checkout(f)
2623 2623 normal(f)
2624 2624
2625 2625 finally:
2626 2626 wlock.release()
2627 2627
2628 2628 def rollback(ui, repo):
2629 2629 """roll back the last transaction
2630 2630
2631 2631 This command should be used with care. There is only one level of
2632 2632 rollback, and there is no way to undo a rollback. It will also
2633 2633 restore the dirstate at the time of the last transaction, losing
2634 2634 any dirstate changes since that time.
2635 2635
2636 2636 Transactions are used to encapsulate the effects of all commands
2637 2637 that create new changesets or propagate existing changesets into a
2638 2638 repository. For example, the following commands are transactional,
2639 2639 and their effects can be rolled back:
2640 2640
2641 2641 commit
2642 2642 import
2643 2643 pull
2644 2644 push (with this repository as destination)
2645 2645 unbundle
2646 2646
2647 2647 This command is not intended for use on public repositories. Once
2648 2648 changes are visible for pull by other users, rolling a transaction
2649 2649 back locally is ineffective (someone else may already have pulled
2650 2650 the changes). Furthermore, a race is possible with readers of the
2651 2651 repository; for example an in-progress pull from the repository
2652 2652 may fail if a rollback is performed.
2653 2653 """
2654 2654 repo.rollback()
2655 2655
2656 2656 def root(ui, repo):
2657 2657 """print the root (top) of the current working directory
2658 2658
2659 2659 Print the root directory of the current repository.
2660 2660 """
2661 2661 ui.write(repo.root + "\n")
2662 2662
2663 2663 def serve(ui, repo, **opts):
2664 2664 """export the repository via HTTP
2665 2665
2666 2666 Start a local HTTP repository browser and pull server.
2667 2667
2668 2668 By default, the server logs accesses to stdout and errors to
2669 2669 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2670 2670 files.
2671 2671 """
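    # Hedged example: "hg serve -p 8080 -A access.log -E error.log" ends up
    # setting web.port, web.accesslog and web.errorlog through the optlist
    # loop below before the HTTP server is created (the -p/--port spelling is
    # assumed; -A and -E are named in the docstring above).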
2672 2672
2673 2673 if opts["stdio"]:
2674 2674 if repo is None:
2675 2675 raise error.RepoError(_("There is no Mercurial repository here"
2676 2676 " (.hg not found)"))
2677 2677 s = sshserver.sshserver(ui, repo)
2678 2678 s.serve_forever()
2679 2679
2680 2680 baseui = repo and repo.baseui or ui
2681 2681 optlist = ("name templates style address port prefix ipv6"
2682 2682 " accesslog errorlog webdir_conf certificate")
2683 2683 for o in optlist.split():
2684 2684 if opts[o]:
2685 2685 baseui.setconfig("web", o, str(opts[o]))
2686 2686 if (repo is not None) and (repo.ui != baseui):
2687 2687 repo.ui.setconfig("web", o, str(opts[o]))
2688 2688
2689 2689 if repo is None and not ui.config("web", "webdir_conf"):
2690 2690 raise error.RepoError(_("There is no Mercurial repository here"
2691 2691 " (.hg not found)"))
2692 2692
2693 2693 class service:
2694 2694 def init(self):
2695 2695 util.set_signal_handler()
2696 2696 self.httpd = server.create_server(baseui, repo)
2697 2697
2698 2698 if not ui.verbose: return
2699 2699
2700 2700 if self.httpd.prefix:
2701 2701 prefix = self.httpd.prefix.strip('/') + '/'
2702 2702 else:
2703 2703 prefix = ''
2704 2704
2705 2705 port = ':%d' % self.httpd.port
2706 2706 if port == ':80':
2707 2707 port = ''
2708 2708
2709 2709 bindaddr = self.httpd.addr
2710 2710 if bindaddr == '0.0.0.0':
2711 2711 bindaddr = '*'
2712 2712 elif ':' in bindaddr: # IPv6
2713 2713 bindaddr = '[%s]' % bindaddr
2714 2714
2715 2715 fqaddr = self.httpd.fqaddr
2716 2716 if ':' in fqaddr:
2717 2717 fqaddr = '[%s]' % fqaddr
2718 2718 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2719 2719 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2720 2720
2721 2721 def run(self):
2722 2722 self.httpd.serve_forever()
2723 2723
2724 2724 service = service()
2725 2725
2726 2726 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2727 2727
2728 2728 def status(ui, repo, *pats, **opts):
2729 2729 """show changed files in the working directory
2730 2730
2731 2731 Show status of files in the repository. If names are given, only
2732 2732 files that match are shown. Files that are clean or ignored or
2733 2733 the source of a copy/move operation are not listed unless -c/--clean,
2734 2734 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2735 2735 described with "show only ..." are given, the options -mardu are
2736 2736 used.
2737 2737
2738 2738 Option -q/--quiet hides untracked (unknown and ignored) files
2739 2739 unless explicitly requested with -u/--unknown or -i/--ignored.
2740 2740
2741 2741 NOTE: status may appear to disagree with diff if permissions have
2742 2742 changed or a merge has occurred. The standard diff format does not
2743 2743 report permission changes and diff only reports changes relative
2744 2744 to one merge parent.
2745 2745
2746 2746 If one revision is given, it is used as the base revision.
2747 2747 If two revisions are given, the difference between them is shown.
2748 2748
2749 2749 The codes used to show the status of files are:
2750 2750 M = modified
2751 2751 A = added
2752 2752 R = removed
2753 2753 C = clean
2754 2754 ! = missing (deleted by non-hg command, but still tracked)
2755 2755 ? = not tracked
2756 2756 I = ignored
2757 2757   = the previously added file was copied from here
2758 2758 """
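    # Hedged sketch of -C/--copies output as produced by the loop below
    # (paths are hypothetical):
    #   A doc/new.txt
    #    doc/old.txt
    #   R doc/old.txt
    # where the space-prefixed line after an added file names the source it
    # was copied or renamed from.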
2759 2759
2760 2760 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2761 2761 cwd = (pats and repo.getcwd()) or ''
2762 2762 end = opts.get('print0') and '\0' or '\n'
2763 2763 copy = {}
2764 2764 states = 'modified added removed deleted unknown ignored clean'.split()
2765 2765 show = [k for k in states if opts.get(k)]
2766 2766 if opts.get('all'):
2767 2767 show += ui.quiet and (states[:4] + ['clean']) or states
2768 2768 if not show:
2769 2769 show = ui.quiet and states[:4] or states[:5]
2770 2770
2771 2771 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2772 2772 'ignored' in show, 'clean' in show, 'unknown' in show)
2773 2773 changestates = zip(states, 'MAR!?IC', stat)
2774 2774
2775 2775 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2776 2776 ctxn = repo[nullid]
2777 2777 ctx1 = repo[node1]
2778 2778 ctx2 = repo[node2]
2779 2779 added = stat[1]
2780 2780 if node2 is None:
2781 2781 added = stat[0] + stat[1] # merged?
2782 2782
2783 2783 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2784 2784 if k in added:
2785 2785 copy[k] = v
2786 2786 elif v in added:
2787 2787 copy[v] = k
2788 2788
2789 2789 for state, char, files in changestates:
2790 2790 if state in show:
2791 2791 format = "%s %%s%s" % (char, end)
2792 2792 if opts.get('no_status'):
2793 2793 format = "%%s%s" % end
2794 2794
2795 2795 for f in files:
2796 2796 ui.write(format % repo.pathto(f, cwd))
2797 2797 if f in copy:
2798 2798 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2799 2799
2800 2800 def tag(ui, repo, name1, *names, **opts):
2801 2801 """add one or more tags for the current or given revision
2802 2802
2803 2803 Name a particular revision using <name>.
2804 2804
2805 2805 Tags are used to name particular revisions of the repository and are
2806 2806 very useful for comparing different revisions, going back to
2807 2807 significant earlier versions or marking branch points as releases.
2808 2808
2809 2809 If no revision is given, the parent of the working directory is
2810 2810 used, or tip if no revision is checked out.
2811 2811
2812 2812 To facilitate version control, distribution, and merging of tags,
2813 2813 they are stored as a file named ".hgtags" which is managed
2814 2814 similarly to other project files and can be hand-edited if
2815 2815 necessary. The file '.hg/localtags' is used for local tags (not
2816 2816 shared among repositories).
2817 2817
2818 2818 See 'hg help dates' for a list of formats valid for -d/--date.
2819 2819 """
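    # Hedged examples: "hg tag 1.0" tags the working directory's parent,
    # "hg tag -r 42 1.0" tags revision 42, "hg tag --local wip" records an
    # unshared tag in .hg/localtags, and "hg tag --remove 1.0" retires an
    # existing global tag (both paths are handled below).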
2820 2820
2821 2821 rev_ = "."
2822 2822 names = (name1,) + names
2823 2823 if len(names) != len(set(names)):
2824 2824 raise util.Abort(_('tag names must be unique'))
2825 2825 for n in names:
2826 2826 if n in ['tip', '.', 'null']:
2827 2827 raise util.Abort(_('the name \'%s\' is reserved') % n)
2828 2828 if opts.get('rev') and opts.get('remove'):
2829 2829 raise util.Abort(_("--rev and --remove are incompatible"))
2830 2830 if opts.get('rev'):
2831 2831 rev_ = opts['rev']
2832 2832 message = opts.get('message')
2833 2833 if opts.get('remove'):
2834 2834 expectedtype = opts.get('local') and 'local' or 'global'
2835 2835 for n in names:
2836 2836 if not repo.tagtype(n):
2837 2837 raise util.Abort(_('tag \'%s\' does not exist') % n)
2838 2838 if repo.tagtype(n) != expectedtype:
2839 2839 if expectedtype == 'global':
2840 2840 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2841 2841 else:
2842 2842 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2843 2843 rev_ = nullid
2844 2844 if not message:
2845 2845 message = _('Removed tag %s') % ', '.join(names)
2846 2846 elif not opts.get('force'):
2847 2847 for n in names:
2848 2848 if n in repo.tags():
2849 2849 raise util.Abort(_('tag \'%s\' already exists '
2850 2850 '(use -f to force)') % n)
2851 2851 if not rev_ and repo.dirstate.parents()[1] != nullid:
2852 2852 raise util.Abort(_('uncommitted merge - please provide a '
2853 2853 'specific revision'))
2854 2854 r = repo[rev_].node()
2855 2855
2856 2856 if not message:
2857 2857 message = (_('Added tag %s for changeset %s') %
2858 2858 (', '.join(names), short(r)))
2859 2859
2860 2860 date = opts.get('date')
2861 2861 if date:
2862 2862 date = util.parsedate(date)
2863 2863
2864 2864 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2865 2865
2866 2866 def tags(ui, repo):
2867 2867 """list repository tags
2868 2868
2869 2869 This lists both regular and local tags. When the -v/--verbose
2870 2870 switch is used, a third column "local" is printed for local tags.
2871 2871 """
2872 2872
2873 2873 hexfunc = ui.debugflag and hex or short
2874 2874 tagtype = ""
2875 2875
2876 2876 for t, n in reversed(repo.tagslist()):
2877 2877 if ui.quiet:
2878 2878 ui.write("%s\n" % t)
2879 2879 continue
2880 2880
2881 2881 try:
2882 2882 hn = hexfunc(n)
2883 2883 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2884 2884 except error.LookupError:
2885 2885 r = " ?:%s" % hn
2886 2886 else:
2887 2887 spaces = " " * (30 - encoding.colwidth(t))
2888 2888 if ui.verbose:
2889 2889 if repo.tagtype(t) == 'local':
2890 2890 tagtype = " local"
2891 2891 else:
2892 2892 tagtype = ""
2893 2893 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2894 2894
2895 2895 def tip(ui, repo, **opts):
2896 2896 """show the tip revision
2897 2897
2898 2898 The tip revision (usually just called the tip) is the most
2899 2899 recently added changeset in the repository, the most recently
2900 2900 changed head.
2901 2901
2902 2902 If you have just made a commit, that commit will be the tip. If
2903 2903 you have just pulled changes from another repository, the tip of
2904 2904 that repository becomes the current tip. The "tip" tag is special
2905 2905 and cannot be renamed or assigned to a different changeset.
2906 2906 """
2907 2907 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2908 2908
2909 2909 def unbundle(ui, repo, fname1, *fnames, **opts):
2910 2910 """apply one or more changegroup files
2911 2911
2912 2912 Apply one or more compressed changegroup files generated by the
2913 2913 bundle command.
2914 2914 """
2915 2915 fnames = (fname1,) + fnames
2916 2916
2917 2917 lock = repo.lock()
2918 2918 try:
2919 2919 for fname in fnames:
2920 2920 f = url.open(ui, fname)
2921 2921 gen = changegroup.readbundle(f, fname)
2922 2922 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2923 2923 finally:
2924 2924 lock.release()
2925 2925
2926 2926 return postincoming(ui, repo, modheads, opts.get('update'), None)
2927 2927
2928 2928 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2929 2929 """update working directory
2930 2930
2931 2931 Update the repository's working directory to the specified
2932 2932 revision, or the tip of the current branch if none is specified.
2933 2933 Use null as the revision to remove the working copy (like 'hg
2934 2934 clone -U').
2935 2935
2936 2936 When the working directory contains no uncommitted changes, it
2937 2937 will be replaced by the state of the requested revision from the
2938 2938 repository. When the requested revision is on a different branch,
2939 2939 the working directory will additionally be switched to that
2940 2940 branch.
2941 2941
2942 2942 When there are uncommitted changes, use option -C/--clean to
2943 2943 discard them, forcibly replacing the state of the working
2944 2944 directory with the requested revision.
2945 2945
2946 2946 When there are uncommitted changes and option -C/--clean is not
2947 2947 used, and the parent revision and requested revision are on the
2948 2948 same branch, and one of them is an ancestor of the other, then the
2949 2949 new working directory will contain the requested revision merged
2950 2950 with the uncommitted changes. Otherwise, the update will fail with
2951 2951 a suggestion to use 'merge' or 'update -C' instead.
2952 2952
2953 2953 If you want to update just one file to an older revision, use
2954 2954 revert.
2955 2955
2956 2956 See 'hg help dates' for a list of formats valid for -d/--date.
2957 2957 """
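    # Hedged examples: "hg update" moves to the tip of the current branch,
    # "hg update -r 1.0" (or "hg update 1.0") checks out a tagged revision,
    # "hg update -C" discards uncommitted changes via hg.clean(), and
    # "hg update null" empties the working copy as described above.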
2958 2958 if rev and node:
2959 2959 raise util.Abort(_("please specify just one revision"))
2960 2960
2961 2961 if not rev:
2962 2962 rev = node
2963 2963
2964 2964 if date:
2965 2965 if rev:
2966 2966 raise util.Abort(_("you can't specify a revision and a date"))
2967 2967 rev = cmdutil.finddate(ui, repo, date)
2968 2968
2969 2969 if clean:
2970 2970 return hg.clean(repo, rev)
2971 2971 else:
2972 2972 return hg.update(repo, rev)
2973 2973
2974 2974 def verify(ui, repo):
2975 2975 """verify the integrity of the repository
2976 2976
2977 2977 Verify the integrity of the current repository.
2978 2978
2979 2979 This will perform an extensive check of the repository's
2980 2980 integrity, validating the hashes and checksums of each entry in
2981 2981 the changelog, manifest, and tracked files, as well as the
2982 2982 integrity of their crosslinks and indices.
2983 2983 """
2984 2984 return hg.verify(repo)
2985 2985
2986 2986 def version_(ui):
2987 2987 """output version and copyright information"""
2988 2988 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2989 2989 % util.version())
2990 2990 ui.status(_(
2991 2991 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2992 2992 "This is free software; see the source for copying conditions. "
2993 2993 "There is NO\nwarranty; "
2994 2994 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2995 2995 ))
2996 2996
2997 2997 # Command options and aliases are listed here, alphabetically
2998 2998
2999 2999 globalopts = [
3000 3000 ('R', 'repository', '',
3001 3001 _('repository root directory or symbolic path name')),
3002 3002 ('', 'cwd', '', _('change working directory')),
3003 3003 ('y', 'noninteractive', None,
3004 3004 _('do not prompt, assume \'yes\' for any required answers')),
3005 3005 ('q', 'quiet', None, _('suppress output')),
3006 3006 ('v', 'verbose', None, _('enable additional output')),
3007 3007 ('', 'config', [], _('set/override config option')),
3008 3008 ('', 'debug', None, _('enable debugging output')),
3009 3009 ('', 'debugger', None, _('start debugger')),
3010 3010 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3011 3011 ('', 'encodingmode', encoding.encodingmode,
3012 3012 _('set the charset encoding mode')),
3013 3013 ('', 'traceback', None, _('print traceback on exception')),
3014 3014 ('', 'time', None, _('time how long the command takes')),
3015 3015 ('', 'profile', None, _('print command execution profile')),
3016 3016 ('', 'version', None, _('output version information and exit')),
3017 3017 ('h', 'help', None, _('display help and exit')),
3018 3018 ]
3019 3019
3020 3020 dryrunopts = [('n', 'dry-run', None,
3021 3021 _('do not perform actions, just print output'))]
3022 3022
3023 3023 remoteopts = [
3024 3024 ('e', 'ssh', '', _('specify ssh command to use')),
3025 3025 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3026 3026 ]
3027 3027
3028 3028 walkopts = [
3029 3029 ('I', 'include', [], _('include names matching the given patterns')),
3030 3030 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3031 3031 ]
3032 3032
3033 3033 commitopts = [
3034 3034 ('m', 'message', '', _('use <text> as commit message')),
3035 3035 ('l', 'logfile', '', _('read commit message from <file>')),
3036 3036 ]
3037 3037
3038 3038 commitopts2 = [
3039 3039 ('d', 'date', '', _('record datecode as commit date')),
3040 3040 ('u', 'user', '', _('record the specified user as committer')),
3041 3041 ]
3042 3042
3043 3043 templateopts = [
3044 3044 ('', 'style', '', _('display using template map file')),
3045 3045 ('', 'template', '', _('display with template')),
3046 3046 ]
3047 3047
3048 3048 logopts = [
3049 3049 ('p', 'patch', None, _('show patch')),
3050 3050 ('g', 'git', None, _('use git extended diff format')),
3051 3051 ('l', 'limit', '', _('limit number of changes displayed')),
3052 3052 ('M', 'no-merges', None, _('do not show merges')),
3053 3053 ] + templateopts
3054 3054
3055 3055 diffopts = [
3056 3056 ('a', 'text', None, _('treat all files as text')),
3057 3057 ('g', 'git', None, _('use git extended diff format')),
3058 3058 ('', 'nodates', None, _("don't include dates in diff headers"))
3059 3059 ]
3060 3060
3061 3061 diffopts2 = [
3062 3062 ('p', 'show-function', None, _('show which function each change is in')),
3063 3063 ('w', 'ignore-all-space', None,
3064 3064 _('ignore white space when comparing lines')),
3065 3065 ('b', 'ignore-space-change', None,
3066 3066 _('ignore changes in the amount of white space')),
3067 3067 ('B', 'ignore-blank-lines', None,
3068 3068 _('ignore changes whose lines are all blank')),
3069 3069 ('U', 'unified', '', _('number of lines of context to show'))
3070 3070 ]
3071 3071
3072 3072 similarityopts = [
3073 3073 ('s', 'similarity', '',
3074 3074 _('guess renamed files by similarity (0<=s<=100)'))
3075 3075 ]
3076 3076
3077 3077 table = {
3078 3078 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3079 3079 "addremove":
3080 3080 (addremove, similarityopts + walkopts + dryrunopts,
3081 3081 _('[OPTION]... [FILE]...')),
3082 3082 "^annotate|blame":
3083 3083 (annotate,
3084 3084 [('r', 'rev', '', _('annotate the specified revision')),
3085 3085 ('f', 'follow', None, _('follow file copies and renames')),
3086 3086 ('a', 'text', None, _('treat all files as text')),
3087 3087 ('u', 'user', None, _('list the author (long with -v)')),
3088 3088 ('d', 'date', None, _('list the date (short with -q)')),
3089 3089 ('n', 'number', None, _('list the revision number (default)')),
3090 3090 ('c', 'changeset', None, _('list the changeset')),
3091 3091 ('l', 'line-number', None,
3092 3092 _('show line number at the first appearance'))
3093 3093 ] + walkopts,
3094 3094 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3095 3095 "archive":
3096 3096 (archive,
3097 3097 [('', 'no-decode', None, _('do not pass files through decoders')),
3098 3098 ('p', 'prefix', '', _('directory prefix for files in archive')),
3099 3099 ('r', 'rev', '', _('revision to distribute')),
3100 3100 ('t', 'type', '', _('type of distribution to create')),
3101 3101 ] + walkopts,
3102 3102 _('[OPTION]... DEST')),
3103 3103 "backout":
3104 3104 (backout,
3105 3105 [('', 'merge', None,
3106 3106 _('merge with old dirstate parent after backout')),
3107 3107 ('', 'parent', '', _('parent to choose when backing out merge')),
3108 3108 ('r', 'rev', '', _('revision to backout')),
3109 3109 ] + walkopts + commitopts + commitopts2,
3110 3110 _('[OPTION]... [-r] REV')),
3111 3111 "bisect":
3112 3112 (bisect,
3113 3113 [('r', 'reset', False, _('reset bisect state')),
3114 3114 ('g', 'good', False, _('mark changeset good')),
3115 3115 ('b', 'bad', False, _('mark changeset bad')),
3116 3116 ('s', 'skip', False, _('skip testing changeset')),
3117 3117 ('c', 'command', '', _('use command to check changeset state')),
3118 3118 ('U', 'noupdate', False, _('do not update to target'))],
3119 3119 _("[-gbsr] [-c CMD] [REV]")),
3120 3120 "branch":
3121 3121 (branch,
3122 3122 [('f', 'force', None,
3123 3123 _('set branch name even if it shadows an existing branch')),
3124 3124 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3125 3125 _('[-fC] [NAME]')),
3126 3126 "branches":
3127 3127 (branches,
3128 3128 [('a', 'active', False,
3129 3129 _('show only branches that have unmerged heads'))],
3130 3130 _('[-a]')),
3131 3131 "bundle":
3132 3132 (bundle,
3133 3133 [('f', 'force', None,
3134 3134 _('run even when remote repository is unrelated')),
3135 3135 ('r', 'rev', [],
3136 3136 _('a changeset up to which you would like to bundle')),
3137 3137 ('', 'base', [],
3138 3138 _('a base changeset to specify instead of a destination')),
3139 3139 ('a', 'all', None, _('bundle all changesets in the repository')),
3140 3140 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3141 3141 ] + remoteopts,
3142 3142 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3143 3143 "cat":
3144 3144 (cat,
3145 3145 [('o', 'output', '', _('print output to file with formatted name')),
3146 3146 ('r', 'rev', '', _('print the given revision')),
3147 3147 ('', 'decode', None, _('apply any matching decode filter')),
3148 3148 ] + walkopts,
3149 3149 _('[OPTION]... FILE...')),
3150 3150 "^clone":
3151 3151 (clone,
3152 3152 [('U', 'noupdate', None,
3153 3153 _('the clone will only contain a repository (no working copy)')),
3154 3154 ('r', 'rev', [],
3155 3155 _('a changeset you would like to have after cloning')),
3156 3156 ('', 'pull', None, _('use pull protocol to copy metadata')),
3157 3157 ('', 'uncompressed', None,
3158 3158 _('use uncompressed transfer (fast over LAN)')),
3159 3159 ] + remoteopts,
3160 3160 _('[OPTION]... SOURCE [DEST]')),
3161 3161 "^commit|ci":
3162 3162 (commit,
3163 3163 [('A', 'addremove', None,
3164 3164 _('mark new/missing files as added/removed before committing')),
3165 3165 ('', 'close-branch', None,
3166 3166 _('mark a branch as closed, hiding it from the branch list')),
3167 3167 ] + walkopts + commitopts + commitopts2,
3168 3168 _('[OPTION]... [FILE]...')),
3169 3169 "copy|cp":
3170 3170 (copy,
3171 3171 [('A', 'after', None, _('record a copy that has already occurred')),
3172 3172 ('f', 'force', None,
3173 3173 _('forcibly copy over an existing managed file')),
3174 3174 ] + walkopts + dryrunopts,
3175 3175 _('[OPTION]... [SOURCE]... DEST')),
3176 3176 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3177 3177 "debugcheckstate": (debugcheckstate, []),
3178 3178 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3179 3179 "debugcomplete":
3180 3180 (debugcomplete,
3181 3181 [('o', 'options', None, _('show the command options'))],
3182 3182 _('[-o] CMD')),
3183 3183 "debugdate":
3184 3184 (debugdate,
3185 3185 [('e', 'extended', None, _('try extended date formats'))],
3186 3186 _('[-e] DATE [RANGE]')),
3187 3187 "debugdata": (debugdata, [], _('FILE REV')),
3188 3188 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3189 3189 "debugindex": (debugindex, [], _('FILE')),
3190 3190 "debugindexdot": (debugindexdot, [], _('FILE')),
3191 3191 "debuginstall": (debuginstall, []),
3192 3192 "debugrebuildstate":
3193 3193 (debugrebuildstate,
3194 3194 [('r', 'rev', '', _('revision to rebuild to'))],
3195 3195 _('[-r REV] [REV]')),
3196 3196 "debugrename":
3197 3197 (debugrename,
3198 3198 [('r', 'rev', '', _('revision to debug'))],
3199 3199 _('[-r REV] FILE')),
3200 3200 "debugsetparents":
3201 3201 (debugsetparents, [], _('REV1 [REV2]')),
3202 3202 "debugstate":
3203 3203 (debugstate,
3204 3204 [('', 'nodates', None, _('do not display the saved mtime'))],
3205 3205 _('[OPTION]...')),
3206 3206 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3207 3207 "^diff":
3208 3208 (diff,
3209 3209 [('r', 'rev', [], _('revision')),
3210 3210 ('c', 'change', '', _('change made by revision'))
3211 3211 ] + diffopts + diffopts2 + walkopts,
3212 3212 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3213 3213 "^export":
3214 3214 (export,
3215 3215 [('o', 'output', '', _('print output to file with formatted name')),
3216 3216 ('', 'switch-parent', None, _('diff against the second parent'))
3217 3217 ] + diffopts,
3218 3218 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3219 3219 "grep":
3220 3220 (grep,
3221 3221 [('0', 'print0', None, _('end fields with NUL')),
3222 3222 ('', 'all', None, _('print all revisions that match')),
3223 3223 ('f', 'follow', None,
3224 3224 _('follow changeset history, or file history across copies and renames')),
3225 3225 ('i', 'ignore-case', None, _('ignore case when matching')),
3226 3226 ('l', 'files-with-matches', None,
3227 3227 _('print only filenames and revisions that match')),
3228 3228 ('n', 'line-number', None, _('print matching line numbers')),
3229 3229 ('r', 'rev', [], _('search in given revision range')),
3230 3230 ('u', 'user', None, _('list the author (long with -v)')),
3231 3231 ('d', 'date', None, _('list the date (short with -q)')),
3232 3232 ] + walkopts,
3233 3233 _('[OPTION]... PATTERN [FILE]...')),
3234 3234 "heads":
3235 3235 (heads,
3236 3236 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3237 3237 ('a', 'active', False,
3238 3238 _('show only the active heads from open branches')),
3239 3239 ] + templateopts,
3240 3240 _('[-r REV] [REV]...')),
3241 3241 "help": (help_, [], _('[TOPIC]')),
3242 3242 "identify|id":
3243 3243 (identify,
3244 3244 [('r', 'rev', '', _('identify the specified revision')),
3245 3245 ('n', 'num', None, _('show local revision number')),
3246 3246 ('i', 'id', None, _('show global revision id')),
3247 3247 ('b', 'branch', None, _('show branch')),
3248 3248 ('t', 'tags', None, _('show tags'))],
3249 3249 _('[-nibt] [-r REV] [SOURCE]')),
3250 3250 "import|patch":
3251 3251 (import_,
3252 3252 [('p', 'strip', 1,
3253 3253 _('directory strip option for patch. This has the same '
3254 3254 'meaning as the corresponding patch option')),
3255 3255 ('b', 'base', '', _('base path')),
3256 3256 ('f', 'force', None,
3257 3257 _('skip check for outstanding uncommitted changes')),
3258 3258 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3259 3259 ('', 'exact', None,
3260 3260 _('apply patch to the nodes from which it was generated')),
3261 3261 ('', 'import-branch', None,
3262 3262 _('use any branch information in patch (implied by --exact)'))] +
3263 3263 commitopts + commitopts2 + similarityopts,
3264 3264 _('[OPTION]... PATCH...')),
3265 3265 "incoming|in":
3266 3266 (incoming,
3267 3267 [('f', 'force', None,
3268 3268 _('run even when remote repository is unrelated')),
3269 3269 ('n', 'newest-first', None, _('show newest record first')),
3270 3270 ('', 'bundle', '', _('file to store the bundles into')),
3271 3271 ('r', 'rev', [],
3272 3272 _('a specific revision up to which you would like to pull')),
3273 3273 ] + logopts + remoteopts,
3274 3274 _('[-p] [-n] [-M] [-f] [-r REV]...'
3275 3275 ' [--bundle FILENAME] [SOURCE]')),
3276 3276 "^init":
3277 3277 (init,
3278 3278 remoteopts,
3279 3279 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3280 3280 "locate":
3281 3281 (locate,
3282 3282 [('r', 'rev', '', _('search the repository as it stood at REV')),
3283 3283 ('0', 'print0', None,
3284 3284 _('end filenames with NUL, for use with xargs')),
3285 3285 ('f', 'fullpath', None,
3286 3286 _('print complete paths from the filesystem root')),
3287 3287 ] + walkopts,
3288 3288 _('[OPTION]... [PATTERN]...')),
3289 3289 "^log|history":
3290 3290 (log,
3291 3291 [('f', 'follow', None,
3292 3292 _('follow changeset history, or file history across copies and renames')),
3293 3293 ('', 'follow-first', None,
3294 3294 _('only follow the first parent of merge changesets')),
3295 3295 ('d', 'date', '', _('show revisions matching date spec')),
3296 3296 ('C', 'copies', None, _('show copied files')),
3297 3297 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3298 3298 ('r', 'rev', [], _('show the specified revision or range')),
3299 3299 ('', 'removed', None, _('include revisions where files were removed')),
3300 3300 ('m', 'only-merges', None, _('show only merges')),
3301 3301 ('u', 'user', [], _('revisions committed by user')),
3302 3302 ('b', 'only-branch', [],
3303 3303 _('show only changesets within the given named branch')),
3304 3304 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3305 3305 ] + logopts + walkopts,
3306 3306 _('[OPTION]... [FILE]')),
3307 3307 "manifest":
3308 3308 (manifest,
3309 3309 [('r', 'rev', '', _('revision to display'))],
3310 3310 _('[-r REV]')),
3311 3311 "^merge":
3312 3312 (merge,
3313 3313 [('f', 'force', None, _('force a merge with outstanding changes')),
3314 3314 ('r', 'rev', '', _('revision to merge')),
3315 3315 ('S', 'show', None,
3316 3316 _('review revisions to merge (no merge is performed)'))],
3317 3317 _('[-f] [[-r] REV]')),
3318 3318 "outgoing|out":
3319 3319 (outgoing,
3320 3320 [('f', 'force', None,
3321 3321 _('run even when remote repository is unrelated')),
3322 3322 ('r', 'rev', [],
3323 3323 _('a specific revision up to which you would like to push')),
3324 3324 ('n', 'newest-first', None, _('show newest record first')),
3325 3325 ] + logopts + remoteopts,
3326 3326 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3327 3327 "^parents":
3328 3328 (parents,
3329 3329 [('r', 'rev', '', _('show parents from the specified revision')),
3330 3330 ] + templateopts,
3331 3331 _('hg parents [-r REV] [FILE]')),
3332 3332 "paths": (paths, [], _('[NAME]')),
3333 3333 "^pull":
3334 3334 (pull,
3335 3335 [('u', 'update', None,
3336 3336 _('update to new tip if changesets were pulled')),
3337 3337 ('f', 'force', None,
3338 3338 _('run even when remote repository is unrelated')),
3339 3339 ('r', 'rev', [],
3340 3340 _('a specific revision up to which you would like to pull')),
3341 3341 ] + remoteopts,
3342 3342 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3343 3343 "^push":
3344 3344 (push,
3345 3345 [('f', 'force', None, _('force push')),
3346 3346 ('r', 'rev', [],
3347 3347 _('a specific revision up to which you would like to push')),
3348 3348 ] + remoteopts,
3349 3349 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3350 3350 "recover": (recover, []),
3351 3351 "^remove|rm":
3352 3352 (remove,
3353 3353 [('A', 'after', None, _('record delete for missing files')),
3354 3354 ('f', 'force', None,
3355 3355 _('remove (and delete) file even if added or modified')),
3356 3356 ] + walkopts,
3357 3357 _('[OPTION]... FILE...')),
3358 3358 "rename|mv":
3359 3359 (rename,
3360 3360 [('A', 'after', None, _('record a rename that has already occurred')),
3361 3361 ('f', 'force', None,
3362 3362 _('forcibly copy over an existing managed file')),
3363 3363 ] + walkopts + dryrunopts,
3364 3364 _('[OPTION]... SOURCE... DEST')),
3365 3365 "resolve":
3366 3366 (resolve,
3367 3367 [('a', 'all', None, _('remerge all unresolved files')),
3368 3368 ('l', 'list', None, _('list state of files needing merge')),
3369 3369 ('m', 'mark', None, _('mark files as resolved')),
3370 3370 ('u', 'unmark', None, _('unmark files as resolved'))]
3371 3371 + walkopts,
3372 3372 _('[OPTION]... [FILE]...')),
3373 3373 "revert":
3374 3374 (revert,
3375 3375 [('a', 'all', None, _('revert all changes when no arguments given')),
3376 3376 ('d', 'date', '', _('tipmost revision matching date')),
3377 3377 ('r', 'rev', '', _('revision to revert to')),
3378 3378 ('', 'no-backup', None, _('do not save backup copies of files')),
3379 3379 ] + walkopts + dryrunopts,
3380 3380 _('[OPTION]... [-r REV] [NAME]...')),
3381 3381 "rollback": (rollback, []),
3382 3382 "root": (root, []),
3383 3383 "^serve":
3384 3384 (serve,
3385 3385 [('A', 'accesslog', '', _('name of access log file to write to')),
3386 3386 ('d', 'daemon', None, _('run server in background')),
3387 3387 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3388 3388 ('E', 'errorlog', '', _('name of error log file to write to')),
3389 3389 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3390 3390 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3391 3391 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3392 3392 ('n', 'name', '',
3393 3393 _('name to show in web pages (default: working directory)')),
3394 3394 ('', 'webdir-conf', '', _('name of the webdir config file'
3395 3395 ' (serve more than one repository)')),
3396 3396 ('', 'pid-file', '', _('name of file to write process ID to')),
3397 3397 ('', 'stdio', None, _('for remote clients')),
3398 3398 ('t', 'templates', '', _('web templates to use')),
3399 3399 ('', 'style', '', _('template style to use')),
3400 3400 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3401 3401 ('', 'certificate', '', _('SSL certificate file'))],
3402 3402 _('[OPTION]...')),
3403 3403 "showconfig|debugconfig":
3404 3404 (showconfig,
3405 3405 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3406 3406 _('[-u] [NAME]...')),
3407 3407 "^status|st":
3408 3408 (status,
3409 3409 [('A', 'all', None, _('show status of all files')),
3410 3410 ('m', 'modified', None, _('show only modified files')),
3411 3411 ('a', 'added', None, _('show only added files')),
3412 3412 ('r', 'removed', None, _('show only removed files')),
3413 3413 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3414 3414 ('c', 'clean', None, _('show only files without changes')),
3415 3415 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3416 3416 ('i', 'ignored', None, _('show only ignored files')),
3417 3417 ('n', 'no-status', None, _('hide status prefix')),
3418 3418 ('C', 'copies', None, _('show source of copied files')),
3419 3419 ('0', 'print0', None,
3420 3420 _('end filenames with NUL, for use with xargs')),
3421 3421 ('', 'rev', [], _('show difference from revision')),
3422 3422 ] + walkopts,
3423 3423 _('[OPTION]... [FILE]...')),
3424 3424 "tag":
3425 3425 (tag,
3426 3426 [('f', 'force', None, _('replace existing tag')),
3427 3427 ('l', 'local', None, _('make the tag local')),
3428 3428 ('r', 'rev', '', _('revision to tag')),
3429 3429 ('', 'remove', None, _('remove a tag')),
3430 3430 # -l/--local is already there, commitopts cannot be used
3431 3431 ('m', 'message', '', _('use <text> as commit message')),
3432 3432 ] + commitopts2,
3433 3433 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3434 3434 "tags": (tags, []),
3435 3435 "tip":
3436 3436 (tip,
3437 3437 [('p', 'patch', None, _('show patch')),
3438 3438 ('g', 'git', None, _('use git extended diff format')),
3439 3439 ] + templateopts,
3440 3440 _('[-p]')),
3441 3441 "unbundle":
3442 3442 (unbundle,
3443 3443 [('u', 'update', None,
3444 3444 _('update to new tip if changesets were unbundled'))],
3445 3445 _('[-u] FILE...')),
3446 3446 "^update|up|checkout|co":
3447 3447 (update,
3448 3448 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3449 3449 ('d', 'date', '', _('tipmost revision matching date')),
3450 3450 ('r', 'rev', '', _('revision'))],
3451 3451 _('[-C] [-d DATE] [[-r] REV]')),
3452 3452 "verify": (verify, []),
3453 3453 "version": (version_, []),
3454 3454 }
3455 3455
3456 3456 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3457 3457 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3458 3458 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,1390 +1,1389
1 1 # revlog.py - storage back-end for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 """Storage back-end for Mercurial.
9 9
10 10 This provides efficient delta storage with O(1) retrieve and append
11 11 and O(changes) merge between branches.
12 12 """
13 13
14 14 # import stuff from node for others to import from revlog
15 15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
16 16 from i18n import _
17 17 import changegroup, ancestor, mdiff, parsers, error, util
18 18 import struct, zlib, errno
19 19
20 20 _pack = struct.pack
21 21 _unpack = struct.unpack
22 22 _compress = zlib.compress
23 23 _decompress = zlib.decompress
24 24 _sha = util.sha1
25 25
26 26 # revlog flags
27 27 REVLOGV0 = 0
28 28 REVLOGNG = 1
29 29 REVLOGNGINLINEDATA = (1 << 16)
30 30 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
31 31 REVLOG_DEFAULT_FORMAT = REVLOGNG
32 32 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
33 33
34 34 _prereadsize = 1048576
35 35
36 36 RevlogError = error.RevlogError
37 37 LookupError = error.LookupError
38 38
39 39 def getoffset(q):
40 40 return int(q >> 16)
41 41
42 42 def gettype(q):
43 43 return int(q & 0xFFFF)
44 44
45 45 def offset_type(offset, type):
46 46 return long(long(offset) << 16 | type)
47 47
48 48 nullhash = _sha(nullid)
49 49
50 50 def hash(text, p1, p2):
51 51 """generate a hash from the given text and its parent hashes
52 52
53 53 This hash combines both the current file contents and its history
54 54 in a manner that makes it easy to distinguish nodes with the same
55 55 content in the revision graph.
56 56 """
57 57 # As of now, if one of the parent nodes is null, p2 is null
58 58 if p2 == nullid:
59 59 # deep copy of a hash is faster than creating one
60 60 s = nullhash.copy()
61 61 s.update(p1)
62 62 else:
63 63 # none of the parent nodes are nullid
64 64 l = [p1, p2]
65 65 l.sort()
66 66 s = _sha(l[0])
67 67 s.update(l[1])
68 68 s.update(text)
69 69 return s.digest()
70 70
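# Illustrative check, not part of the original source: it spells out what
# hash() above computes: sha1 over the two parent nodes (sorted, with a
# missing parent represented by nullid) followed by the text, so identical
# contents with different histories get different nodeids.
def _hash_reference(text, p1, p2):
    import hashlib
    if p2 == nullid:
        a, b = nullid, p1
    else:
        a, b = min(p1, p2), max(p1, p2)
    return hashlib.sha1(a + b + text).digest()

# e.g. _hash_reference("data", "\x11" * 20, nullid) == hash("data", "\x11" * 20, nullid)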
71 71 def compress(text):
72 72 """ generate a possibly-compressed representation of text """
73 73 if not text:
74 74 return ("", text)
75 75 l = len(text)
76 76 bin = None
77 77 if l < 44:
78 78 pass
79 79 elif l > 1000000:
80 80 # zlib makes an internal copy, thus doubling memory usage for
81 81 # large files, so let's do this in pieces
82 82 z = zlib.compressobj()
83 83 p = []
84 84 pos = 0
85 85 while pos < l:
86 86 pos2 = pos + 2**20
87 87 p.append(z.compress(text[pos:pos2]))
88 88 pos = pos2
89 89 p.append(z.flush())
90 90 if sum(map(len, p)) < l:
91 91 bin = "".join(p)
92 92 else:
93 93 bin = _compress(text)
94 94 if bin is None or len(bin) > l:
95 95 if text[0] == '\0':
96 96 return ("", text)
97 97 return ('u', text)
98 98 return ("", bin)
99 99
100 100 def decompress(bin):
101 101 """ decompress the given input """
102 102 if not bin:
103 103 return bin
104 104 t = bin[0]
105 105 if t == '\0':
106 106 return bin
107 107 if t == 'x':
108 108 return _decompress(bin)
109 109 if t == 'u':
110 110 return bin[1:]
111 111 raise RevlogError(_("unknown compression type %r") % t)
112 112
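# Illustrative round-trip, not part of the original source: compress()
# returns a (header, data) pair whose concatenation is what gets written to
# the revlog, and decompress() dispatches on the first byte: 'x' is the
# leading byte of a zlib stream, 'u' marks text stored verbatim, and an
# empty or '\0'-prefixed chunk is returned unchanged.
def _roundtrip(text):
    stored = "".join(compress(text))
    assert decompress(stored) == text
    return stored

# e.g. _roundtrip("short text")   -> 'u' + text (too small to be worth zlib)
#      _roundtrip("abc" * 10000)  -> a zlib stream beginning with 'x'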
113 113 class lazyparser(object):
114 114 """
115 115 this class avoids the need to parse the entirety of large indices
116 116 """
117 117
118 118 # lazyparser is not safe to use on windows if win32 extensions are not
119 119 # available. it keeps a file handle open, which makes it impossible
120 120 # to break hardlinks on local cloned repos.
121 121
122 122 def __init__(self, dataf, size):
123 123 self.dataf = dataf
124 124 self.s = struct.calcsize(indexformatng)
125 125 self.datasize = size
126 126 self.l = size/self.s
127 127 self.index = [None] * self.l
128 128 self.map = {nullid: nullrev}
129 129 self.allmap = 0
130 130 self.all = 0
131 131 self.mapfind_count = 0
132 132
133 133 def loadmap(self):
134 134 """
135 135 during a commit, we need to make sure the rev being added is
136 136 not a duplicate. This requires loading the entire index,
137 137 which is fairly slow. loadmap can load up just the node map,
138 138 which takes much less time.
139 139 """
140 140 if self.allmap:
141 141 return
142 142 end = self.datasize
143 143 self.allmap = 1
144 144 cur = 0
145 145 count = 0
146 146 blocksize = self.s * 256
147 147 self.dataf.seek(0)
148 148 while cur < end:
149 149 data = self.dataf.read(blocksize)
150 150 off = 0
151 151 for x in xrange(256):
152 152 n = data[off + ngshaoffset:off + ngshaoffset + 20]
153 153 self.map[n] = count
154 154 count += 1
155 155 if count >= self.l:
156 156 break
157 157 off += self.s
158 158 cur += blocksize
159 159
160 160 def loadblock(self, blockstart, blocksize, data=None):
161 161 if self.all:
162 162 return
163 163 if data is None:
164 164 self.dataf.seek(blockstart)
165 165 if blockstart + blocksize > self.datasize:
166 166 # the revlog may have grown since we've started running,
167 167 # but we don't have space in self.index for more entries.
168 168 # limit blocksize so that we don't get too much data.
169 169 blocksize = max(self.datasize - blockstart, 0)
170 170 data = self.dataf.read(blocksize)
171 171 lend = len(data) / self.s
172 172 i = blockstart / self.s
173 173 off = 0
174 174 # lazyindex supports __delitem__
175 175 if lend > len(self.index) - i:
176 176 lend = len(self.index) - i
177 177 for x in xrange(lend):
178 178 if self.index[i + x] == None:
179 179 b = data[off : off + self.s]
180 180 self.index[i + x] = b
181 181 n = b[ngshaoffset:ngshaoffset + 20]
182 182 self.map[n] = i + x
183 183 off += self.s
184 184
185 185 def findnode(self, node):
186 186 """search backwards through the index file for a specific node"""
187 187 if self.allmap:
188 188 return None
189 189
190 190 # hg log will cause many many searches for the manifest
191 191 # nodes. After we get called a few times, just load the whole
192 192 # thing.
193 193 if self.mapfind_count > 8:
194 194 self.loadmap()
195 195 if node in self.map:
196 196 return node
197 197 return None
198 198 self.mapfind_count += 1
199 199 last = self.l - 1
200 200 while self.index[last] != None:
201 201 if last == 0:
202 202 self.all = 1
203 203 self.allmap = 1
204 204 return None
205 205 last -= 1
206 206 end = (last + 1) * self.s
207 207 blocksize = self.s * 256
208 208 while end >= 0:
209 209 start = max(end - blocksize, 0)
210 210 self.dataf.seek(start)
211 211 data = self.dataf.read(end - start)
212 212 findend = end - start
213 213 while True:
214 214 # we're searching backwards, so we have to make sure
215 215 # we don't find a changeset where this node is a parent
216 216 off = data.find(node, 0, findend)
217 217 findend = off
218 218 if off >= 0:
219 219 i = off / self.s
220 220 off = i * self.s
221 221 n = data[off + ngshaoffset:off + ngshaoffset + 20]
222 222 if n == node:
223 223 self.map[n] = i + start / self.s
224 224 return node
225 225 else:
226 226 break
227 227 end -= blocksize
228 228 return None
229 229
230 230 def loadindex(self, i=None, end=None):
231 231 if self.all:
232 232 return
233 233 all = False
234 234 if i == None:
235 235 blockstart = 0
236 236 blocksize = (65536 / self.s) * self.s
237 237 end = self.datasize
238 238 all = True
239 239 else:
240 240 if end:
241 241 blockstart = i * self.s
242 242 end = end * self.s
243 243 blocksize = end - blockstart
244 244 else:
245 245 blockstart = (i & ~1023) * self.s
246 246 blocksize = self.s * 1024
247 247 end = blockstart + blocksize
248 248 while blockstart < end:
249 249 self.loadblock(blockstart, blocksize)
250 250 blockstart += blocksize
251 251 if all:
252 252 self.all = True
253 253
254 254 class lazyindex(object):
255 255 """a lazy version of the index array"""
256 256 def __init__(self, parser):
257 257 self.p = parser
258 258 def __len__(self):
259 259 return len(self.p.index)
260 260 def load(self, pos):
261 261 if pos < 0:
262 262 pos += len(self.p.index)
263 263 self.p.loadindex(pos)
264 264 return self.p.index[pos]
265 265 def __getitem__(self, pos):
266 266 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
267 267 def __setitem__(self, pos, item):
268 268 self.p.index[pos] = _pack(indexformatng, *item)
269 269 def __delitem__(self, pos):
270 270 del self.p.index[pos]
271 271 def insert(self, pos, e):
272 272 self.p.index.insert(pos, _pack(indexformatng, *e))
273 273 def append(self, e):
274 274 self.p.index.append(_pack(indexformatng, *e))
275 275
276 276 class lazymap(object):
277 277 """a lazy version of the node map"""
278 278 def __init__(self, parser):
279 279 self.p = parser
280 280 def load(self, key):
281 281 n = self.p.findnode(key)
282 282 if n == None:
283 283 raise KeyError(key)
284 284 def __contains__(self, key):
285 285 if key in self.p.map:
286 286 return True
287 287 self.p.loadmap()
288 288 return key in self.p.map
289 289 def __iter__(self):
290 290 yield nullid
291 291 for i in xrange(self.p.l):
292 292 ret = self.p.index[i]
293 293 if not ret:
294 294 self.p.loadindex(i)
295 295 ret = self.p.index[i]
296 296 if isinstance(ret, str):
297 297 ret = _unpack(indexformatng, ret)
298 298 yield ret[7]
299 299 def __getitem__(self, key):
300 300 try:
301 301 return self.p.map[key]
302 302 except KeyError:
303 303 try:
304 304 self.load(key)
305 305 return self.p.map[key]
306 306 except KeyError:
307 307 raise KeyError("node " + hex(key))
308 308 def __setitem__(self, key, val):
309 309 self.p.map[key] = val
310 310 def __delitem__(self, key):
311 311 del self.p.map[key]
312 312
313 313 indexformatv0 = ">4l20s20s20s"
314 314 v0shaoffset = 56
315 315
316 316 class revlogoldio(object):
317 317 def __init__(self):
318 318 self.size = struct.calcsize(indexformatv0)
319 319
320 320 def parseindex(self, fp, data, inline):
321 321 s = self.size
322 322 index = []
323 323 nodemap = {nullid: nullrev}
324 324 n = off = 0
325 325 if len(data) < _prereadsize:
326 326 data += fp.read() # read the rest
327 327 l = len(data)
328 328 while off + s <= l:
329 329 cur = data[off:off + s]
330 330 off += s
331 331 e = _unpack(indexformatv0, cur)
332 332 # transform to revlogv1 format
333 333 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
334 334 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
335 335 index.append(e2)
336 336 nodemap[e[6]] = n
337 337 n += 1
338 338
339 339 return index, nodemap, None
340 340
341 341 def packentry(self, entry, node, version, rev):
342 342 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
343 343 node(entry[5]), node(entry[6]), entry[7])
344 344 return _pack(indexformatv0, *e2)
345 345
346 346 # index ng:
347 347 # 6 bytes offset
348 348 # 2 bytes flags
349 349 # 4 bytes compressed length
350 350 # 4 bytes uncompressed length
351 351 # 4 bytes: base rev
352 352 # 4 bytes link rev
353 353 # 4 bytes parent 1 rev
354 354 # 4 bytes parent 2 rev
355 355 # 32 bytes: nodeid
356 356 indexformatng = ">Qiiiiii20s12x"
357 357 ngshaoffset = 32
358 358 versionformat = ">I"
359 359
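# Illustrative breakdown, not part of the original source: a v1 ("ng") index
# entry is 64 bytes: one 64-bit field packing data offset and flags (see
# offset_type/getoffset/gettype above), four 32-bit fields for compressed
# length, uncompressed length, base rev and link rev, two 32-bit parent revs,
# the 20-byte nodeid and 12 bytes of padding, which is why the sha starts at
# byte 32 (ngshaoffset).
def _unpackentry(record):
    assert struct.calcsize(indexformatng) == 64
    (offset_flags, complen, rawlen,
     baserev, linkrev, p1rev, p2rev, nodeid) = _unpack(indexformatng, record)
    return {'offset': getoffset(offset_flags), 'flags': gettype(offset_flags),
            'complen': complen, 'rawlen': rawlen, 'base': baserev,
            'link': linkrev, 'p1': p1rev, 'p2': p2rev, 'node': nodeid}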
360 360 class revlogio(object):
361 361 def __init__(self):
362 362 self.size = struct.calcsize(indexformatng)
363 363
364 364 def parseindex(self, fp, data, inline):
365 365 try:
366 366 size = len(data)
367 367 if size == _prereadsize:
368 368 size = util.fstat(fp).st_size
369 369 except AttributeError:
370 370 size = 0
371 371
372 372 if util.openhardlinks() and not inline and size > _prereadsize:
373 373 # big index, let's parse it on demand
374 374 parser = lazyparser(fp, size)
375 375 index = lazyindex(parser)
376 376 nodemap = lazymap(parser)
377 377 e = list(index[0])
378 378 type = gettype(e[0])
379 379 e[0] = offset_type(0, type)
380 380 index[0] = e
381 381 return index, nodemap, None
382 382
383 383 # call the C implementation to parse the index data
384 384 index, nodemap, cache = parsers.parse_index(data, inline)
385 385 return index, nodemap, cache
386 386
387 387 def packentry(self, entry, node, version, rev):
388 388 p = _pack(indexformatng, *entry)
389 389 if rev == 0:
390 390 p = _pack(versionformat, version) + p[4:]
391 391 return p
392 392
393 393 class revlog(object):
394 394 """
395 395 the underlying revision storage object
396 396
397 397 A revlog consists of two parts, an index and the revision data.
398 398
399 399 The index is a file with a fixed record size containing
400 400 information on each revision, including its nodeid (hash), the
401 401 nodeids of its parents, the position and offset of its data within
402 402 the data file, and the revision it's based on. Finally, each entry
403 403 contains a linkrev entry that can serve as a pointer to external
404 404 data.
405 405
406 406 The revision data itself is a linear collection of data chunks.
407 407 Each chunk represents a revision and is usually represented as a
408 408 delta against the previous chunk. To bound lookup time, runs of
409 409 deltas are limited to about 2 times the length of the original
410 410 version data. This makes retrieval of a version proportional to
411 411 its size, or O(1) relative to the number of revisions.
412 412
413 413 Both pieces of the revlog are written to in an append-only
414 414 fashion, which means we never need to rewrite a file to insert or
415 415 remove data, and can use some simple techniques to avoid the need
416 416 for locking while reading.
417 417 """
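# Illustration, not part of the original source: the "about 2 times" bound
# above comes from _addrevision() below, which stores a fresh full version
# once the chunk data accumulated since the last full version (including
# the new delta) would exceed twice the uncompressed size of the new text:
#
#     len(text) = 100 KB, chunks since base = 210 KB  ->  store full text
#     len(text) = 100 KB, chunks since base = 150 KB  ->  store a delta
#
# so rebuilding a revision never reads much more than twice its own size.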
418 418 def __init__(self, opener, indexfile):
419 419 """
420 420 create a revlog object
421 421
422 422 opener is a function that abstracts the file opening operation
423 423 and can be used to implement COW semantics or the like.
424 424 """
425 425 self.indexfile = indexfile
426 426 self.datafile = indexfile[:-2] + ".d"
427 427 self.opener = opener
428 428 self._cache = None
429 429 self._chunkcache = (0, '')
430 430 self.nodemap = {nullid: nullrev}
431 431 self.index = []
432 432
433 433 v = REVLOG_DEFAULT_VERSION
434 434 if hasattr(opener, "defversion"):
435 435 v = opener.defversion
436 436 if v & REVLOGNG:
437 437 v |= REVLOGNGINLINEDATA
438 438
439 439 i = ''
440 440 try:
441 441 f = self.opener(self.indexfile)
442 442 i = f.read(_prereadsize)
443 443 if len(i) > 0:
444 444 v = struct.unpack(versionformat, i[:4])[0]
445 445 except IOError, inst:
446 446 if inst.errno != errno.ENOENT:
447 447 raise
448 448
449 449 self.version = v
450 450 self._inline = v & REVLOGNGINLINEDATA
451 451 flags = v & ~0xFFFF
452 452 fmt = v & 0xFFFF
453 453 if fmt == REVLOGV0 and flags:
454 454 raise RevlogError(_("index %s unknown flags %#04x for format v0")
455 455 % (self.indexfile, flags >> 16))
456 456 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
457 457 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
458 458 % (self.indexfile, flags >> 16))
459 459 elif fmt > REVLOGNG:
460 460 raise RevlogError(_("index %s unknown format %d")
461 461 % (self.indexfile, fmt))
462 462
463 463 self._io = revlogio()
464 464 if self.version == REVLOGV0:
465 465 self._io = revlogoldio()
466 466 if i:
467 467 try:
468 468 d = self._io.parseindex(f, i, self._inline)
469 469 except (ValueError, IndexError), e:
470 470 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
471 471 self.index, self.nodemap, self._chunkcache = d
472 472 if not self._chunkcache:
473 473 self._chunkcache = (0, '')
474 474
475 475 # add the magic null revision at -1 (if it hasn't been done already)
476 476 if (self.index == [] or isinstance(self.index, lazyindex) or
477 477 self.index[-1][7] != nullid) :
478 478 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
479 479
480 480 def _loadindex(self, start, end):
481 481 """load a block of indexes all at once from the lazy parser"""
482 482 if isinstance(self.index, lazyindex):
483 483 self.index.p.loadindex(start, end)
484 484
485 485 def _loadindexmap(self):
486 486 """loads both the map and the index from the lazy parser"""
487 487 if isinstance(self.index, lazyindex):
488 488 p = self.index.p
489 489 p.loadindex()
490 490 self.nodemap = p.map
491 491
492 492 def _loadmap(self):
493 493 """loads the map from the lazy parser"""
494 494 if isinstance(self.nodemap, lazymap):
495 495 self.nodemap.p.loadmap()
496 496 self.nodemap = self.nodemap.p.map
497 497
498 498 def tip(self):
499 499 return self.node(len(self.index) - 2)
500 500 def __len__(self):
501 501 return len(self.index) - 1
502 502 def __iter__(self):
503 503 for i in xrange(len(self)):
504 504 yield i
505 505 def rev(self, node):
506 506 try:
507 507 return self.nodemap[node]
508 508 except KeyError:
509 509 raise LookupError(node, self.indexfile, _('no node'))
510 510 def node(self, rev):
511 511 return self.index[rev][7]
512 512 def linkrev(self, rev):
513 513 return self.index[rev][4]
514 514 def parents(self, node):
515 515 i = self.index
516 516 d = i[self.rev(node)]
517 517 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
518 518 def parentrevs(self, rev):
519 519 return self.index[rev][5:7]
520 520 def start(self, rev):
521 521 return int(self.index[rev][0] >> 16)
522 522 def end(self, rev):
523 523 return self.start(rev) + self.length(rev)
524 524 def length(self, rev):
525 525 return self.index[rev][1]
526 526 def base(self, rev):
527 527 return self.index[rev][3]
528 528
529 529 def size(self, rev):
530 530 """return the length of the uncompressed text for a given revision"""
531 531 l = self.index[rev][2]
532 532 if l >= 0:
533 533 return l
534 534
535 535 t = self.revision(self.node(rev))
536 536 return len(t)
537 537
538 538 # alternate implementation; the advantage of this code is that it
539 539 # will be faster for a single revision. But the results are not
540 540 # cached, so finding the size of every revision will be slower.
541 541 """
542 542 if self.cache and self.cache[1] == rev:
543 543 return len(self.cache[2])
544 544
545 545 base = self.base(rev)
546 546 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
547 547 base = self.cache[1]
548 548 text = self.cache[2]
549 549 else:
550 550 text = self.revision(self.node(base))
551 551
552 552 l = len(text)
553 553 for x in xrange(base + 1, rev + 1):
554 554 l = mdiff.patchedsize(l, self.chunk(x))
555 555 return l
556 556 """
557 557
558 558 def reachable(self, node, stop=None):
559 """return a hash of all nodes ancestral to a given node, including
559 """return the set of all nodes ancestral to a given node, including
560 560 the node itself, stopping when stop is matched"""
561 reachable = {}
561 reachable = set((node,))
562 562 visit = [node]
563 reachable[node] = 1
564 563 if stop:
565 564 stopn = self.rev(stop)
566 565 else:
567 566 stopn = 0
568 567 while visit:
569 568 n = visit.pop(0)
570 569 if n == stop:
571 570 continue
572 571 if n == nullid:
573 572 continue
574 573 for p in self.parents(n):
575 574 if self.rev(p) < stopn:
576 575 continue
577 576 if p not in reachable:
578 reachable[p] = 1
577 reachable.add(p)
579 578 visit.append(p)
580 579 return reachable
581 580
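# Illustration, not part of the original source: the pattern this changeset
# converts to: a set instead of a dict whose values were always 1.  The
# same walk over a plain parent map:
#
#     parents = {'d': ['b', 'c'], 'c': ['a'], 'b': ['a'], 'a': []}
#     reachable, visit = set(['d']), ['d']
#     while visit:
#         for p in parents[visit.pop(0)]:
#             if p not in reachable:
#                 reachable.add(p)
#                 visit.append(p)
#     # reachable == set(['a', 'b', 'c', 'd'])
#
# Membership tests stay the same; reachable.add(p) replaces reachable[p] = 1,
# and heads() below trades del heads[p] for heads.remove(p) the same way.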
582 581 def ancestors(self, *revs):
583 582 'Generate the ancestors of revs using a breadth-first visit'
584 583 visit = list(revs)
585 584 seen = set([nullrev])
586 585 while visit:
587 586 for parent in self.parentrevs(visit.pop(0)):
588 587 if parent not in seen:
589 588 visit.append(parent)
590 589 seen.add(parent)
591 590 yield parent
592 591
593 592 def descendants(self, *revs):
594 593 'Generate the descendants of revs in topological order'
595 594 seen = set(revs)
596 595 for i in xrange(min(revs) + 1, len(self)):
597 596 for x in self.parentrevs(i):
598 597 if x != nullrev and x in seen:
599 598 seen.add(i)
600 599 yield i
601 600 break
602 601
603 602 def findmissing(self, common=None, heads=None):
604 603 '''
605 604 returns the topologically sorted list of nodes from the set:
606 605 missing = (ancestors(heads) \ ancestors(common))
607 606
608 607 where ancestors() is the set of ancestors from heads, heads included
609 608
610 609 if heads is None, the heads of the revlog are used
611 610 if common is None, nullid is assumed to be a common node
612 611 '''
613 612 if common is None:
614 613 common = [nullid]
615 614 if heads is None:
616 615 heads = self.heads()
617 616
618 617 common = [self.rev(n) for n in common]
619 618 heads = [self.rev(n) for n in heads]
620 619
621 620 # we want the ancestors, but inclusive
622 621 has = set(self.ancestors(*common))
623 622 has.add(nullrev)
624 623 has.update(common)
625 624
626 625 # take all ancestors from heads that aren't in has
627 626 missing = set()
628 627 visit = [r for r in heads if r not in has]
629 628 while visit:
630 629 r = visit.pop(0)
631 630 if r in missing:
632 631 continue
633 632 else:
634 633 missing.add(r)
635 634 for p in self.parentrevs(r):
636 635 if p not in has:
637 636 visit.append(p)
638 637 missing = list(missing)
639 638 missing.sort()
640 639 return [self.node(r) for r in missing]
641 640
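# Illustration, not part of the original source: findmissing() above is set
# difference on inclusive ancestor sets.  On a toy graph (parents point left):
#
#     0 -- 1 -- 2 -- 4
#           \
#            3
#
#     ancestors(head 4)    = {0, 1, 2, 4}
#     ancestors(common 3)  = {0, 1, 3}
#     missing              = {2, 4}, returned oldest-first as node ids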
642 641 def nodesbetween(self, roots=None, heads=None):
643 642 """Return a tuple containing three elements. Elements 1 and 2 contain
644 643 the final lists of bases and heads after all the unreachable ones have been
645 644 pruned. Element 0 contains a topologically sorted list of all
646 645
647 646 nodes that satisfy these constraints:
648 647 1. All nodes must be descended from a node in roots (the nodes on
649 648 roots are considered descended from themselves).
650 649 2. All nodes must also be ancestors of a node in heads (the nodes in
651 650 heads are considered to be their own ancestors).
652 651
653 652 If roots is unspecified, nullid is assumed as the only root.
654 653 If heads is unspecified, it is taken to be the output of the
655 654 heads method (i.e. a list of all nodes in the repository that
656 655 have no children)."""
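# Illustration, not part of the original source: on a linear history
# n0 -> n1 -> n2 -> n3, nodesbetween(roots=[n1], heads=[n2]) returns
# ([n1, n2], [n1], [n2]): the nodes that are both descendents of a
# root and ancestors of a head, then the surviving roots and heads.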
657 656 nonodes = ([], [], [])
658 657 if roots is not None:
659 658 roots = list(roots)
660 659 if not roots:
661 660 return nonodes
662 661 lowestrev = min([self.rev(n) for n in roots])
663 662 else:
664 663 roots = [nullid] # Everybody's a descendent of nullid
665 664 lowestrev = nullrev
666 665 if (lowestrev == nullrev) and (heads is None):
667 666 # We want _all_ the nodes!
668 667 return ([self.node(r) for r in self], [nullid], list(self.heads()))
669 668 if heads is None:
670 669 # All nodes are ancestors, so the latest ancestor is the last
671 670 # node.
672 671 highestrev = len(self) - 1
673 672 # Set ancestors to None to signal that every node is an ancestor.
674 673 ancestors = None
675 674 # Set heads to an empty dictionary for later discovery of heads
676 675 heads = {}
677 676 else:
678 677 heads = list(heads)
679 678 if not heads:
680 679 return nonodes
681 ancestors = {}
680 ancestors = set()
682 681 # Turn heads into a dictionary so we can remove 'fake' heads.
683 682 # Also, later we will be using it to filter out the heads we can't
684 683 # find from roots.
685 684 heads = dict.fromkeys(heads, 0)
686 685 # Start at the top and keep marking parents until we're done.
687 686 nodestotag = set(heads)
688 687 # Remember where the top was so we can use it as a limit later.
689 688 highestrev = max([self.rev(n) for n in nodestotag])
690 689 while nodestotag:
691 690 # grab a node to tag
692 691 n = nodestotag.pop()
693 692 # Never tag nullid
694 693 if n == nullid:
695 694 continue
696 695 # A node's revision number represents its place in a
697 696 # topologically sorted list of nodes.
698 697 r = self.rev(n)
699 698 if r >= lowestrev:
700 699 if n not in ancestors:
701 700 # If we are possibly a descendent of one of the roots
702 701 # and we haven't already been marked as an ancestor
703 ancestors[n] = 1 # Mark as ancestor
702 ancestors.add(n) # Mark as ancestor
704 703 # Add non-nullid parents to list of nodes to tag.
705 704 nodestotag.update([p for p in self.parents(n) if
706 705 p != nullid])
707 706 elif n in heads: # We've seen it before, is it a fake head?
708 707 # So it is, real heads should not be the ancestors of
709 708 # any other heads.
710 709 heads.pop(n)
711 710 if not ancestors:
712 711 return nonodes
713 712 # Now that we have our set of ancestors, we want to remove any
714 713 # roots that are not ancestors.
715 714
716 715 # If one of the roots was nullid, everything is included anyway.
717 716 if lowestrev > nullrev:
718 717 # But, since we weren't, let's recompute the lowest rev to not
719 718 # include roots that aren't ancestors.
720 719
721 720 # Filter out roots that aren't ancestors of heads
722 721 roots = [n for n in roots if n in ancestors]
723 722 # Recompute the lowest revision
724 723 if roots:
725 724 lowestrev = min([self.rev(n) for n in roots])
726 725 else:
727 726 # No more roots? Return empty list
728 727 return nonodes
729 728 else:
730 729 # We are descending from nullid, and don't need to care about
731 730 # any other roots.
732 731 lowestrev = nullrev
733 732 roots = [nullid]
734 733 # Transform our roots list into a set.
735 734 descendents = set(roots)
736 735 # Also, keep the original roots so we can filter out roots that aren't
737 736 # 'real' roots (i.e. are descended from other roots).
738 737 roots = descendents.copy()
739 738 # Our topologically sorted list of output nodes.
740 739 orderedout = []
741 740 # Don't start at nullid since we don't want nullid in our output list,
742 741 # and if nullid shows up in descendents, empty parents will look like
743 742 # they're descendents.
744 743 for r in xrange(max(lowestrev, 0), highestrev + 1):
745 744 n = self.node(r)
746 745 isdescendent = False
747 746 if lowestrev == nullrev: # Everybody is a descendent of nullid
748 747 isdescendent = True
749 748 elif n in descendents:
750 749 # n is already a descendent
751 750 isdescendent = True
752 751 # This check only needs to be done here because all the roots
753 752 # will start being marked as descendents before the loop.
754 753 if n in roots:
755 754 # If n was a root, check if it's a 'real' root.
756 755 p = tuple(self.parents(n))
757 756 # If any of its parents are descendents, it's not a root.
758 757 if (p[0] in descendents) or (p[1] in descendents):
759 758 roots.remove(n)
760 759 else:
761 760 p = tuple(self.parents(n))
762 761 # A node is a descendent if either of its parents is a
763 762 # descendent. (We seeded the descendents set with the roots
764 763 # up there, remember?)
765 764 if (p[0] in descendents) or (p[1] in descendents):
766 765 descendents.add(n)
767 766 isdescendent = True
768 767 if isdescendent and ((ancestors is None) or (n in ancestors)):
769 768 # Only include nodes that are both descendents and ancestors.
770 769 orderedout.append(n)
771 770 if (ancestors is not None) and (n in heads):
772 771 # We're trying to figure out which heads are reachable
773 772 # from roots.
774 773 # Mark this head as having been reached
775 774 heads[n] = 1
776 775 elif ancestors is None:
777 776 # Otherwise, we're trying to discover the heads.
778 777 # Assume this is a head because if it isn't, the next step
779 778 # will eventually remove it.
780 779 heads[n] = 1
781 780 # But, obviously its parents aren't.
782 781 for p in self.parents(n):
783 782 heads.pop(p, None)
784 783 heads = [n for n in heads.iterkeys() if heads[n] != 0]
785 784 roots = list(roots)
786 785 assert orderedout
787 786 assert roots
788 787 assert heads
789 788 return (orderedout, roots, heads)
790 789
791 790 def heads(self, start=None, stop=None):
792 791 """return the list of all nodes that have no children
793 792
794 793 if start is specified, only heads that are descendants of
795 794 start will be returned
796 795 if stop is specified, it will consider all the revs from stop
797 796 as if they had no children
798 797 """
799 798 if start is None and stop is None:
800 799 count = len(self)
801 800 if not count:
802 801 return [nullid]
803 802 ishead = [1] * (count + 1)
804 803 index = self.index
805 804 for r in xrange(count):
806 805 e = index[r]
807 806 ishead[e[5]] = ishead[e[6]] = 0
808 807 return [self.node(r) for r in xrange(count) if ishead[r]]
809 808
810 809 if start is None:
811 810 start = nullid
812 811 if stop is None:
813 812 stop = []
814 813 stoprevs = set([self.rev(n) for n in stop])
815 814 startrev = self.rev(start)
816 reachable = {startrev: 1}
817 heads = {startrev: 1}
815 reachable = set((startrev,))
816 heads = set((startrev,))
818 817
819 818 parentrevs = self.parentrevs
820 819 for r in xrange(startrev + 1, len(self)):
821 820 for p in parentrevs(r):
822 821 if p in reachable:
823 822 if r not in stoprevs:
824 reachable[r] = 1
825 heads[r] = 1
823 reachable.add(r)
824 heads.add(r)
826 825 if p in heads and p not in stoprevs:
827 del heads[p]
826 heads.remove(p)
828 827
829 828 return [self.node(r) for r in heads]
830 829
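# Illustration, not part of the original source: the no-argument branch of
# heads() above marks every rev as a head, then clears the mark on anything
# that appears as a parent.  With parent revs
#
#     rev:     0   1   2   3
#     parents: -   0   0   2
#
# revs 0 and 2 lose their mark (0 is a parent of 1 and 2, 2 is a parent
# of 3), leaving heads {1, 3}.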
831 830 def children(self, node):
832 831 """find the children of a given node"""
833 832 c = []
834 833 p = self.rev(node)
835 834 for r in range(p + 1, len(self)):
836 835 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
837 836 if prevs:
838 837 for pr in prevs:
839 838 if pr == p:
840 839 c.append(self.node(r))
841 840 elif p == nullrev:
842 841 c.append(self.node(r))
843 842 return c
844 843
845 844 def _match(self, id):
846 845 if isinstance(id, (long, int)):
847 846 # rev
848 847 return self.node(id)
849 848 if len(id) == 20:
850 849 # possibly a binary node
851 850 # odds of a binary node being all hex in ASCII are 1 in 10**25
852 851 try:
853 852 node = id
854 853 self.rev(node) # quick search the index
855 854 return node
856 855 except LookupError:
857 856 pass # may be partial hex id
858 857 try:
859 858 # str(rev)
860 859 rev = int(id)
861 860 if str(rev) != id:
862 861 raise ValueError
863 862 if rev < 0:
864 863 rev = len(self) + rev
865 864 if rev < 0 or rev >= len(self):
866 865 raise ValueError
867 866 return self.node(rev)
868 867 except (ValueError, OverflowError):
869 868 pass
870 869 if len(id) == 40:
871 870 try:
872 871 # a full hex nodeid?
873 872 node = bin(id)
874 873 self.rev(node)
875 874 return node
876 875 except (TypeError, LookupError):
877 876 pass
878 877
879 878 def _partialmatch(self, id):
880 879 if len(id) < 40:
881 880 try:
882 881 # hex(node)[:...]
883 882 l = len(id) / 2 # grab an even number of digits
884 883 bin_id = bin(id[:l*2])
885 884 nl = [n for n in self.nodemap if n[:l] == bin_id]
886 885 nl = [n for n in nl if hex(n).startswith(id)]
887 886 if len(nl) > 0:
888 887 if len(nl) == 1:
889 888 return nl[0]
890 889 raise LookupError(id, self.indexfile,
891 890 _('ambiguous identifier'))
892 891 return None
893 892 except TypeError:
894 893 pass
895 894
896 895 def lookup(self, id):
897 896 """locate a node based on:
898 897 - revision number or str(revision number)
899 898 - nodeid or subset of hex nodeid
900 899 """
901 900 n = self._match(id)
902 901 if n is not None:
903 902 return n
904 903 n = self._partialmatch(id)
905 904 if n:
906 905 return n
907 906
908 907 raise LookupError(id, self.indexfile, _('no match found'))
909 908
910 909 def cmp(self, node, text):
911 910 """compare text with a given file revision"""
912 911 p1, p2 = self.parents(node)
913 912 return hash(text, p1, p2) != node
914 913
915 914 def _addchunk(self, offset, data):
916 915 o, d = self._chunkcache
917 916 # try to add to existing cache
918 917 if o + len(d) == offset and len(d) + len(data) < _prereadsize:
919 918 self._chunkcache = o, d + data
920 919 else:
921 920 self._chunkcache = offset, data
922 921
923 922 def _loadchunk(self, offset, length, df=None):
924 923 if not df:
925 924 if self._inline:
926 925 df = self.opener(self.indexfile)
927 926 else:
928 927 df = self.opener(self.datafile)
929 928
930 929 readahead = max(65536, length)
931 930 df.seek(offset)
932 931 d = df.read(readahead)
933 932 self._addchunk(offset, d)
934 933 if readahead > length:
935 934 return d[:length]
936 935 return d
937 936
938 937 def _getchunk(self, offset, length, df=None):
939 938 o, d = self._chunkcache
940 939 l = len(d)
941 940
942 941 # is it in the cache?
943 942 cachestart = offset - o
944 943 cacheend = cachestart + length
945 944 if cachestart >= 0 and cacheend <= l:
946 945 if cachestart == 0 and cacheend == l:
947 946 return d # avoid a copy
948 947 return d[cachestart:cacheend]
949 948
950 949 return self._loadchunk(offset, length, df)
951 950
952 951 def _prime(self, startrev, endrev, df):
953 952 start = self.start(startrev)
954 953 end = self.end(endrev)
955 954 if self._inline:
956 955 start += (startrev + 1) * self._io.size
957 956 end += (startrev + 1) * self._io.size
958 957 self._loadchunk(start, end - start, df)
959 958
960 959 def chunk(self, rev, df=None):
961 960 start, length = self.start(rev), self.length(rev)
962 961 if self._inline:
963 962 start += (rev + 1) * self._io.size
964 963 return decompress(self._getchunk(start, length, df))
965 964
966 965 def revdiff(self, rev1, rev2):
967 966 """return or calculate a delta between two revisions"""
968 967 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
969 968 return self.chunk(rev2)
970 969
971 970 return mdiff.textdiff(self.revision(self.node(rev1)),
972 971 self.revision(self.node(rev2)))
973 972
974 973 def revision(self, node):
975 974 """return an uncompressed revision of a given node"""
976 975 if node == nullid:
977 976 return ""
978 977 if self._cache and self._cache[0] == node:
979 978 return str(self._cache[2])
980 979
981 980 # look up what we need to read
982 981 text = None
983 982 rev = self.rev(node)
984 983 base = self.base(rev)
985 984
986 985 # check rev flags
987 986 if self.index[rev][0] & 0xFFFF:
988 987 raise RevlogError(_('incompatible revision flag %x') %
989 988 (self.index[rev][0] & 0xFFFF))
990 989
991 990 df = None
992 991
993 992 # do we have useful data cached?
994 993 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
995 994 base = self._cache[1]
996 995 text = str(self._cache[2])
997 996 self._loadindex(base, rev + 1)
998 997 if not self._inline and rev > base + 1:
999 998 df = self.opener(self.datafile)
1000 999 self._prime(base, rev, df)
1001 1000 else:
1002 1001 self._loadindex(base, rev + 1)
1003 1002 if not self._inline and rev > base:
1004 1003 df = self.opener(self.datafile)
1005 1004 self._prime(base, rev, df)
1006 1005 text = self.chunk(base, df=df)
1007 1006
1008 1007 bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
1009 1008 text = mdiff.patches(text, bins)
1010 1009 p1, p2 = self.parents(node)
1011 1010 if node != hash(text, p1, p2):
1012 1011 raise RevlogError(_("integrity check failed on %s:%d")
1013 1012 % (self.datafile, rev))
1014 1013
1015 1014 self._cache = (node, rev, text)
1016 1015 return text
1017 1016
    def checkinlinesize(self, tr, fp=None):
        if not self._inline or (self.start(-2) + self.length(-2)) < 131072:
            return

        trinfo = tr.find(self.indexfile)
        if trinfo == None:
            raise RevlogError(_("%s not found in the transaction")
                              % self.indexfile)

        trindex = trinfo[2]
        dataoff = self.start(trindex)

        tr.add(self.datafile, dataoff)

        if fp:
            fp.flush()
            fp.close()

        df = self.opener(self.datafile, 'w')
        try:
            calc = self._io.size
            for r in self:
                start = self.start(r) + (r + 1) * calc
                length = self.length(r)
                d = self._getchunk(start, length)
                df.write(d)
        finally:
            df.close()

        fp = self.opener(self.indexfile, 'w', atomictemp=True)
        self.version &= ~(REVLOGNGINLINEDATA)
        self._inline = False
        for i in self:
            e = self._io.packentry(self.index[i], self.node, self.version, i)
            fp.write(e)

        # if we don't call rename, the temp file will never replace the
        # real index
        fp.rename()

        tr.replace(self.indexfile, trindex * calc)
        self._chunkcache = (0, '')

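    # Illustrative use only (the names "rl" and "tr" are hypothetical
    # handles for a revlog and an open transaction, not defined here):
    #
    #   node = rl.addrevision(text, tr, linkrev, p1, p2)
    #
    # addrevision() opens the data and index files in append mode and hands
    # the actual work to _addrevision() below.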
    def addrevision(self, text, transaction, link, p1, p2, d=None):
        """add a revision to the log

        text - the revision data to add
        transaction - the transaction object used for rollback
        link - the linkrev data to add
        p1, p2 - the parent nodeids of the revision
        d - an optional precomputed delta
        """
        dfh = None
        if not self._inline:
            dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a+")
        try:
            return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
        finally:
            if dfh:
                dfh.close()
            ifh.close()

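    # Descriptive note: _addrevision() chooses the storage form for the new
    # revision: normally a compressed delta against the previous revision,
    # but a full compressed snapshot when the revlog is empty or when the
    # span from the chain base would exceed twice the uncompressed text
    # size.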
    def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
        node = hash(text, p1, p2)
        if node in self.nodemap:
            return node

        curr = len(self)
        prev = curr - 1
        base = self.base(prev)
        offset = self.end(prev)

        if curr:
            if not d:
                ptext = self.revision(self.node(prev))
                d = mdiff.textdiff(ptext, text)
            data = compress(d)
            l = len(data[1]) + len(data[0])
            dist = l + offset - self.start(base)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not curr or dist > len(text) * 2:
            data = compress(text)
            l = len(data[1]) + len(data[0])
            base = curr

        e = (offset_type(offset, 0), l, len(text),
             base, link, self.rev(p1), self.rev(p2), node)
        self.index.insert(-1, e)
        self.nodemap[node] = curr

        entry = self._io.packentry(e, self.node, self.version, curr)
        if not self._inline:
            transaction.add(self.datafile, offset)
            transaction.add(self.indexfile, curr * len(entry))
            if data[0]:
                dfh.write(data[0])
            dfh.write(data[1])
            dfh.flush()
            ifh.write(entry)
        else:
            offset += curr * self._io.size
            transaction.add(self.indexfile, offset, curr)
            ifh.write(entry)
            ifh.write(data[0])
            ifh.write(data[1])
            self.checkinlinesize(transaction, ifh)

        self._cache = (node, curr, text)
        return node

    def ancestor(self, a, b):
        """calculate the least common ancestor of nodes a and b"""

        def parents(rev):
            return [p for p in self.parentrevs(rev) if p != nullrev]

        c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
        if c is None:
            return nullid

        return self.node(c)

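    # Descriptive note: group() produces the changegroup stream consumed by
    # addgroup() below: for every revision a chunk made of a length header,
    # 80 bytes of metadata (node, both parents and the linked node returned
    # by lookup) and the delta data, with deltas larger than 1MB emitted in
    # 256KB slices. A zero-length chunk terminates the stream.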
    def group(self, nodelist, lookup, infocollect=None):
        """calculate a delta group

        Given a list of changeset revs, return a set of deltas and
        metadata corresponding to nodes. The first delta is
        parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
        have this parent, as it has all history before these
        changesets. The parent used is the first parent (parents[0]).
        """

        # if we don't have any revisions touched by these changesets, bail
        if not nodelist:
            yield changegroup.closechunk()
            return

        revs = [self.rev(n) for n in nodelist]

        # add the parent of the first rev
        p = self.parentrevs(revs[0])[0]
        revs.insert(0, p)

        # build deltas
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            nb = self.node(b)

            if infocollect is not None:
                infocollect(nb)

            p = self.parents(nb)
            meta = nb + p[0] + p[1] + lookup(nb)
            if a == -1:
                d = self.revision(nb)
                meta += mdiff.trivialdiffheader(len(d))
            else:
                d = self.revdiff(a, b)
            yield changegroup.chunkheader(len(meta) + len(d))
            yield meta
            if len(d) > 2**20:
                pos = 0
                while pos < len(d):
                    pos2 = pos + 2 ** 18
                    yield d[pos:pos2]
                    pos = pos2
            else:
                yield d

        yield changegroup.closechunk()

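    # Descriptive note: addgroup() is the receiving side of group(). An
    # incoming delta is appended as-is when it extends the chain being
    # built and the chain stays short enough; otherwise the full text is
    # reconstructed and stored again through _addrevision().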
    def addgroup(self, revs, linkmapper, transaction):
        """
        add a delta group

        Given a set of deltas, add them to the revision log. The first
        delta is against its parent, which should already be in our log;
        the rest are against the previous delta.
        """

        # track the base of the current delta log
        r = len(self)
        t = r - 1
        node = None

        base = prev = nullrev
        start = end = textlen = 0
        if r:
            end = self.end(t)

        ifh = self.opener(self.indexfile, "a+")
        isize = r * self._io.size
        if self._inline:
            transaction.add(self.indexfile, end + isize, r)
            dfh = None
        else:
            transaction.add(self.indexfile, isize, r)
            transaction.add(self.datafile, end)
            dfh = self.opener(self.datafile, "a")

        try:
            # loop through our set of deltas
            chain = None
            for chunk in revs:
                node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
                link = linkmapper(cs)
                if node in self.nodemap:
                    # this can happen if two branches make the same change
                    chain = node
                    continue
                delta = buffer(chunk, 80)
                del chunk

                for p in (p1, p2):
                    if not p in self.nodemap:
                        raise LookupError(p, self.indexfile, _('unknown parent'))

                if not chain:
                    # retrieve the parent revision of the delta chain
                    chain = p1
                    if not chain in self.nodemap:
                        raise LookupError(chain, self.indexfile, _('unknown base'))

                # full versions are inserted when the needed deltas become
                # comparable to the uncompressed text or when the previous
                # version is not the one we have a delta against. We use
                # the size of the previous full rev as a proxy for the
                # current size.

                if chain == prev:
                    cdelta = compress(delta)
                    cdeltalen = len(cdelta[0]) + len(cdelta[1])
                    textlen = mdiff.patchedsize(textlen, delta)

                if chain != prev or (end - start + cdeltalen) > textlen * 2:
                    # flush our writes here so we can read it in revision
                    if dfh:
                        dfh.flush()
                    ifh.flush()
                    text = self.revision(chain)
                    if len(text) == 0:
                        # skip over trivial delta header
                        text = buffer(delta, 12)
                    else:
                        text = mdiff.patches(text, [delta])
                    del delta
                    chk = self._addrevision(text, transaction, link, p1, p2, None,
                                            ifh, dfh)
                    if not dfh and not self._inline:
                        # addrevision switched from inline to conventional
                        # reopen the index
                        dfh = self.opener(self.datafile, "a")
                        ifh = self.opener(self.indexfile, "a")
                    if chk != node:
                        raise RevlogError(_("consistency error adding group"))
                    textlen = len(text)
                else:
                    e = (offset_type(end, 0), cdeltalen, textlen, base,
                         link, self.rev(p1), self.rev(p2), node)
                    self.index.insert(-1, e)
                    self.nodemap[node] = r
                    entry = self._io.packentry(e, self.node, self.version, r)
                    if self._inline:
                        ifh.write(entry)
                        ifh.write(cdelta[0])
                        ifh.write(cdelta[1])
                        self.checkinlinesize(transaction, ifh)
                        if not self._inline:
                            dfh = self.opener(self.datafile, "a")
                            ifh = self.opener(self.indexfile, "a")
                    else:
                        dfh.write(cdelta[0])
                        dfh.write(cdelta[1])
                        ifh.write(entry)

                t, r, chain, prev = r, r + 1, node, node
                base = self.base(t)
                start = self.start(base)
                end = self.end(t)
        finally:
            if dfh:
                dfh.close()
            ifh.close()

        return node

    def strip(self, minlink, transaction):
        """truncate the revlog on the first revision with a linkrev >= minlink

        This function is called when we're stripping revision minlink and
        its descendants from the repository.

        We have to remove all revisions with linkrev >= minlink, because
        the equivalent changelog revisions will be renumbered after the
        strip.

        So we truncate the revlog on the first of these revisions, and
        trust that the caller has saved the revisions that shouldn't be
        removed and that it'll re-add them after this truncation.
        """
        if len(self) == 0:
            return

        if isinstance(self.index, lazyindex):
            self._loadindexmap()

        for rev in self:
            if self.index[rev][4] >= minlink:
                break
        else:
            return

        # first truncate the files on disk
        end = self.start(rev)
        if not self._inline:
            transaction.add(self.datafile, end)
            end = rev * self._io.size
        else:
            end += rev * self._io.size

        transaction.add(self.indexfile, end)

        # then reset internal state in memory to forget those revisions
        self._cache = None
        self._chunkcache = (0, '')
        for x in xrange(rev, len(self)):
            del self.nodemap[self.node(x)]

        del self.index[rev:-1]

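    # Descriptive note: checksize() reports how far the files on disk
    # diverge from what the index describes: dd is the difference between
    # the data file's real size and the size the index implies, di is
    # whatever the index file contains beyond whole entries (plus inline
    # data, for inline revlogs). Nonzero values indicate a truncated or
    # overlong revlog.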
    def checksize(self):
        expected = 0
        if len(self):
            expected = max(0, self.end(len(self) - 1))

        try:
            f = self.opener(self.datafile)
            f.seek(0, 2)
            actual = f.tell()
            dd = actual - expected
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            dd = 0

        try:
            f = self.opener(self.indexfile)
            f.seek(0, 2)
            actual = f.tell()
            s = self._io.size
            i = max(0, actual / s)
            di = actual - (i * s)
            if self._inline:
                databytes = 0
                for r in self:
                    databytes += max(0, self.length(r))
                dd = 0
                di = actual - len(self) * s - databytes
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            di = 0

        return (dd, di)

    def files(self):
        res = [ self.indexfile ]
        if not self._inline:
            res.append(self.datafile)
        return res