localrepo/branchcache: remove lbranchmap(), convert users to use utf-8 names...
Benoit Boissinot
r9675:ee913987 default
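The hunks below convert callers of the branch cache to the convention this changeset introduces: branch names are kept as UTF-8 inside the repository, and encoding.tolocal()/encoding.fromlocal() are applied only at the user-interface boundary (output and user input, respectively), as branches() and heads() now do. A minimal sketch of that boundary pattern, assuming a Mercurial installation so that mercurial.encoding is importable; showbranch and storebranch are illustrative helpers, not part of this changeset:

    import sys
    from mercurial import encoding

    def showbranch(write, utf8name):
        # internal UTF-8 branch name -> local encoding, only when printing
        write("%s\n" % encoding.tolocal(utf8name))

    def storebranch(repo, localname):
        # user-supplied (locally encoded) name -> UTF-8 before it is stored
        repo.dirstate.setbranch(encoding.fromlocal(localname))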
@@ -1,3676 +1,3677 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, subprocess, difflib, time, tempfile
12 12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 13 import patch, help, mdiff, url, encoding
14 14 import archival, changegroup, cmdutil, sshserver, hbisect
15 15 from hgweb import server
16 16 import merge as merge_
17 17 import minirst
18 18
19 19 # Commands start here, listed alphabetically
20 20
21 21 def add(ui, repo, *pats, **opts):
22 22 """add the specified files on the next commit
23 23
24 24 Schedule files to be version controlled and added to the
25 25 repository.
26 26
27 27 The files will be added to the repository at the next commit. To
28 28 undo an add before that, see hg forget.
29 29
30 30 If no names are given, add all files to the repository.
31 31 """
32 32
33 33 bad = []
34 34 exacts = {}
35 35 names = []
36 36 m = cmdutil.match(repo, pats, opts)
37 37 oldbad = m.bad
38 38 m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
39 39
40 40 for f in repo.walk(m):
41 41 exact = m.exact(f)
42 42 if exact or f not in repo.dirstate:
43 43 names.append(f)
44 44 if ui.verbose or not exact:
45 45 ui.status(_('adding %s\n') % m.rel(f))
46 46 if not opts.get('dry_run'):
47 47 bad += [f for f in repo.add(names) if f in m.files()]
48 48 return bad and 1 or 0
49 49
50 50 def addremove(ui, repo, *pats, **opts):
51 51 """add all new files, delete all missing files
52 52
53 53 Add all new files and remove all missing files from the
54 54 repository.
55 55
56 56 New files are ignored if they match any of the patterns in
57 57 .hgignore. As with add, these changes take effect at the next
58 58 commit.
59 59
60 60 Use the -s/--similarity option to detect renamed files. With a
61 61 parameter greater than 0, this compares every removed file with
62 62 every added file and records those similar enough as renames. This
63 63 option takes a percentage between 0 (disabled) and 100 (files must
64 64 be identical) as its parameter. Detecting renamed files this way
65 65 can be expensive.
66 66 """
67 67 try:
68 68 sim = float(opts.get('similarity') or 0)
69 69 except ValueError:
70 70 raise util.Abort(_('similarity must be a number'))
71 71 if sim < 0 or sim > 100:
72 72 raise util.Abort(_('similarity must be between 0 and 100'))
73 73 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74 74
75 75 def annotate(ui, repo, *pats, **opts):
76 76 """show changeset information by line for each file
77 77
78 78 List changes in files, showing the revision id responsible for
79 79 each line
80 80
81 81 This command is useful for discovering when a change was made and
82 82 by whom.
83 83
84 84 Without the -a/--text option, annotate will avoid processing files
85 85 it detects as binary. With -a, annotate will annotate the file
86 86 anyway, although the results will probably be neither useful
87 87 nor desirable.
88 88 """
89 89 datefunc = ui.quiet and util.shortdate or util.datestr
90 90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
91 91
92 92 if not pats:
93 93 raise util.Abort(_('at least one filename or pattern is required'))
94 94
95 95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
96 96 ('number', lambda x: str(x[0].rev())),
97 97 ('changeset', lambda x: short(x[0].node())),
98 98 ('date', getdate),
99 99 ('follow', lambda x: x[0].path()),
100 100 ]
101 101
102 102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
103 103 and not opts.get('follow')):
104 104 opts['number'] = 1
105 105
106 106 linenumber = opts.get('line_number') is not None
107 107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
108 108 raise util.Abort(_('at least one of -n/-c is required for -l'))
109 109
110 110 funcmap = [func for op, func in opmap if opts.get(op)]
111 111 if linenumber:
112 112 lastfunc = funcmap[-1]
113 113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
114 114
115 115 ctx = repo[opts.get('rev')]
116 116
117 117 m = cmdutil.match(repo, pats, opts)
118 118 for abs in ctx.walk(m):
119 119 fctx = ctx[abs]
120 120 if not opts.get('text') and util.binary(fctx.data()):
121 121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
122 122 continue
123 123
124 124 lines = fctx.annotate(follow=opts.get('follow'),
125 125 linenumber=linenumber)
126 126 pieces = []
127 127
128 128 for f in funcmap:
129 129 l = [f(n) for n, dummy in lines]
130 130 if l:
131 131 ml = max(map(len, l))
132 132 pieces.append(["%*s" % (ml, x) for x in l])
133 133
134 134 if pieces:
135 135 for p, l in zip(zip(*pieces), lines):
136 136 ui.write("%s: %s" % (" ".join(p), l[1]))
137 137
138 138 def archive(ui, repo, dest, **opts):
139 139 '''create an unversioned archive of a repository revision
140 140
141 141 By default, the revision used is the parent of the working
142 142 directory; use -r/--rev to specify a different revision.
143 143
144 144 To specify the type of archive to create, use -t/--type. Valid
145 145 types are::
146 146
147 147 "files" (default): a directory full of files
148 148 "tar": tar archive, uncompressed
149 149 "tbz2": tar archive, compressed using bzip2
150 150 "tgz": tar archive, compressed using gzip
151 151 "uzip": zip archive, uncompressed
152 152 "zip": zip archive, compressed using deflate
153 153
154 154 The exact name of the destination archive or directory is given
155 155 using a format string; see 'hg help export' for details.
156 156
157 157 Each member added to an archive file has a directory prefix
158 158 prepended. Use -p/--prefix to specify a format string for the
159 159 prefix. The default is the basename of the archive, with suffixes
160 160 removed.
161 161 '''
162 162
163 163 ctx = repo[opts.get('rev')]
164 164 if not ctx:
165 165 raise util.Abort(_('no working directory: please specify a revision'))
166 166 node = ctx.node()
167 167 dest = cmdutil.make_filename(repo, dest, node)
168 168 if os.path.realpath(dest) == repo.root:
169 169 raise util.Abort(_('repository root cannot be destination'))
170 170 matchfn = cmdutil.match(repo, [], opts)
171 171 kind = opts.get('type') or 'files'
172 172 prefix = opts.get('prefix')
173 173 if dest == '-':
174 174 if kind == 'files':
175 175 raise util.Abort(_('cannot archive plain files to stdout'))
176 176 dest = sys.stdout
177 177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
178 178 prefix = cmdutil.make_filename(repo, prefix, node)
179 179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
180 180 matchfn, prefix)
181 181
182 182 def backout(ui, repo, node=None, rev=None, **opts):
183 183 '''reverse effect of earlier changeset
184 184
185 185 Commit the backed out changes as a new changeset. The new
186 186 changeset is a child of the backed out changeset.
187 187
188 188 If you backout a changeset other than the tip, a new head is
189 189 created. This head will be the new tip and you should merge this
190 190 backout changeset with another head.
191 191
192 192 The --merge option remembers the parent of the working directory
193 193 before starting the backout, then merges the new head with that
194 194 changeset afterwards. This saves you from doing the merge by hand.
195 195 The result of this merge is not committed, as with a normal merge.
196 196
197 197 See 'hg help dates' for a list of formats valid for -d/--date.
198 198 '''
199 199 if rev and node:
200 200 raise util.Abort(_("please specify just one revision"))
201 201
202 202 if not rev:
203 203 rev = node
204 204
205 205 if not rev:
206 206 raise util.Abort(_("please specify a revision to backout"))
207 207
208 208 date = opts.get('date')
209 209 if date:
210 210 opts['date'] = util.parsedate(date)
211 211
212 212 cmdutil.bail_if_changed(repo)
213 213 node = repo.lookup(rev)
214 214
215 215 op1, op2 = repo.dirstate.parents()
216 216 a = repo.changelog.ancestor(op1, node)
217 217 if a != node:
218 218 raise util.Abort(_('cannot backout change on a different branch'))
219 219
220 220 p1, p2 = repo.changelog.parents(node)
221 221 if p1 == nullid:
222 222 raise util.Abort(_('cannot backout a change with no parents'))
223 223 if p2 != nullid:
224 224 if not opts.get('parent'):
225 225 raise util.Abort(_('cannot backout a merge changeset without '
226 226 '--parent'))
227 227 p = repo.lookup(opts['parent'])
228 228 if p not in (p1, p2):
229 229 raise util.Abort(_('%s is not a parent of %s') %
230 230 (short(p), short(node)))
231 231 parent = p
232 232 else:
233 233 if opts.get('parent'):
234 234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
235 235 parent = p1
236 236
237 237 # the backout should appear on the same branch
238 238 branch = repo.dirstate.branch()
239 239 hg.clean(repo, node, show_stats=False)
240 240 repo.dirstate.setbranch(branch)
241 241 revert_opts = opts.copy()
242 242 revert_opts['date'] = None
243 243 revert_opts['all'] = True
244 244 revert_opts['rev'] = hex(parent)
245 245 revert_opts['no_backup'] = None
246 246 revert(ui, repo, **revert_opts)
247 247 commit_opts = opts.copy()
248 248 commit_opts['addremove'] = False
249 249 if not commit_opts['message'] and not commit_opts['logfile']:
250 250 # we don't translate commit messages
251 251 commit_opts['message'] = "Backed out changeset %s" % short(node)
252 252 commit_opts['force_editor'] = True
253 253 commit(ui, repo, **commit_opts)
254 254 def nice(node):
255 255 return '%d:%s' % (repo.changelog.rev(node), short(node))
256 256 ui.status(_('changeset %s backs out changeset %s\n') %
257 257 (nice(repo.changelog.tip()), nice(node)))
258 258 if op1 != node:
259 259 hg.clean(repo, op1, show_stats=False)
260 260 if opts.get('merge'):
261 261 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
262 262 hg.merge(repo, hex(repo.changelog.tip()))
263 263 else:
264 264 ui.status(_('the backout changeset is a new head - '
265 265 'do not forget to merge\n'))
266 266 ui.status(_('(use "backout --merge" '
267 267 'if you want to auto-merge)\n'))
268 268
269 269 def bisect(ui, repo, rev=None, extra=None, command=None,
270 270 reset=None, good=None, bad=None, skip=None, noupdate=None):
271 271 """subdivision search of changesets
272 272
273 273 This command helps to find changesets which introduce problems. To
274 274 use, mark the earliest changeset you know exhibits the problem as
275 275 bad, then mark the latest changeset which is free from the problem
276 276 as good. Bisect will update your working directory to a revision
277 277 for testing (unless the -U/--noupdate option is specified). Once
278 278 you have performed tests, mark the working directory as good or
279 279 bad, and bisect will either update to another candidate changeset
280 280 or announce that it has found the bad revision.
281 281
282 282 As a shortcut, you can also use the revision argument to mark a
283 283 revision as good or bad without checking it out first.
284 284
285 285 If you supply a command, it will be used for automatic bisection.
286 286 Its exit status will be used to mark revisions as good or bad:
287 287 status 0 means good, 125 means to skip the revision, 127
288 288 (command not found) will abort the bisection, and any other
289 289 non-zero exit status means the revision is bad.
290 290 """
291 291 def print_result(nodes, good):
292 292 displayer = cmdutil.show_changeset(ui, repo, {})
293 293 if len(nodes) == 1:
294 294 # narrowed it down to a single revision
295 295 if good:
296 296 ui.write(_("The first good revision is:\n"))
297 297 else:
298 298 ui.write(_("The first bad revision is:\n"))
299 299 displayer.show(repo[nodes[0]])
300 300 else:
301 301 # multiple possible revisions
302 302 if good:
303 303 ui.write(_("Due to skipped revisions, the first "
304 304 "good revision could be any of:\n"))
305 305 else:
306 306 ui.write(_("Due to skipped revisions, the first "
307 307 "bad revision could be any of:\n"))
308 308 for n in nodes:
309 309 displayer.show(repo[n])
310 310
311 311 def check_state(state, interactive=True):
312 312 if not state['good'] or not state['bad']:
313 313 if (good or bad or skip or reset) and interactive:
314 314 return
315 315 if not state['good']:
316 316 raise util.Abort(_('cannot bisect (no known good revisions)'))
317 317 else:
318 318 raise util.Abort(_('cannot bisect (no known bad revisions)'))
319 319 return True
320 320
321 321 # backward compatibility
322 322 if rev in "good bad reset init".split():
323 323 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
324 324 cmd, rev, extra = rev, extra, None
325 325 if cmd == "good":
326 326 good = True
327 327 elif cmd == "bad":
328 328 bad = True
329 329 else:
330 330 reset = True
331 331 elif extra or good + bad + skip + reset + bool(command) > 1:
332 332 raise util.Abort(_('incompatible arguments'))
333 333
334 334 if reset:
335 335 p = repo.join("bisect.state")
336 336 if os.path.exists(p):
337 337 os.unlink(p)
338 338 return
339 339
340 340 state = hbisect.load_state(repo)
341 341
342 342 if command:
343 343 changesets = 1
344 344 try:
345 345 while changesets:
346 346 # update state
347 347 status = util.system(command)
348 348 if status == 125:
349 349 transition = "skip"
350 350 elif status == 0:
351 351 transition = "good"
352 352 # status < 0 means process was killed
353 353 elif status == 127:
354 354 raise util.Abort(_("failed to execute %s") % command)
355 355 elif status < 0:
356 356 raise util.Abort(_("%s killed") % command)
357 357 else:
358 358 transition = "bad"
359 359 ctx = repo[rev or '.']
360 360 state[transition].append(ctx.node())
361 361 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
362 362 check_state(state, interactive=False)
363 363 # bisect
364 364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 365 # update to next check
366 366 cmdutil.bail_if_changed(repo)
367 367 hg.clean(repo, nodes[0], show_stats=False)
368 368 finally:
369 369 hbisect.save_state(repo, state)
370 370 return print_result(nodes, good)
371 371
372 372 # update state
373 373 node = repo.lookup(rev or '.')
374 374 if good:
375 375 state['good'].append(node)
376 376 elif bad:
377 377 state['bad'].append(node)
378 378 elif skip:
379 379 state['skip'].append(node)
380 380
381 381 hbisect.save_state(repo, state)
382 382
383 383 if not check_state(state):
384 384 return
385 385
386 386 # actually bisect
387 387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 388 if changesets == 0:
389 389 print_result(nodes, good)
390 390 else:
391 391 assert len(nodes) == 1 # only a single node can be tested next
392 392 node = nodes[0]
393 393 # compute the approximate number of remaining tests
394 394 tests, size = 0, 2
395 395 while size <= changesets:
396 396 tests, size = tests + 1, size * 2
397 397 rev = repo.changelog.rev(node)
398 398 ui.write(_("Testing changeset %d:%s "
399 399 "(%d changesets remaining, ~%d tests)\n")
400 400 % (rev, short(node), changesets, tests))
401 401 if not noupdate:
402 402 cmdutil.bail_if_changed(repo)
403 403 return hg.clean(repo, node)
404 404
405 405 def branch(ui, repo, label=None, **opts):
406 406 """set or show the current branch name
407 407
408 408 With no argument, show the current branch name. With one argument,
409 409 set the working directory branch name (the branch will not exist
410 410 in the repository until the next commit). Standard practice
411 411 recommends that primary development take place on the 'default'
412 412 branch.
413 413
414 414 Unless -f/--force is specified, branch will not let you set a
415 415 branch name that already exists, even if it's inactive.
416 416
417 417 Use -C/--clean to reset the working directory branch to that of
418 418 the parent of the working directory, negating a previous branch
419 419 change.
420 420
421 421 Use the command 'hg update' to switch to an existing branch. Use
422 422 'hg commit --close-branch' to mark this branch as closed.
423 423 """
424 424
425 425 if opts.get('clean'):
426 426 label = repo[None].parents()[0].branch()
427 427 repo.dirstate.setbranch(label)
428 428 ui.status(_('reset working directory to branch %s\n') % label)
429 429 elif label:
430 430 if not opts.get('force') and label in repo.branchtags():
431 431 if label not in [p.branch() for p in repo.parents()]:
432 432 raise util.Abort(_('a branch of the same name already exists'
433 433 ' (use --force to override)'))
434 434 repo.dirstate.setbranch(encoding.fromlocal(label))
435 435 ui.status(_('marked working directory as branch %s\n') % label)
436 436 else:
437 437 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
438 438
439 439 def branches(ui, repo, active=False, closed=False):
440 440 """list repository named branches
441 441
442 442 List the repository's named branches, indicating which ones are
443 443 inactive. If -c/--closed is specified, also list branches which have
444 444 been marked closed (see hg commit --close-branch).
445 445
446 446 If -a/--active is specified, only show active branches. A branch
447 447 is considered active if it contains repository heads.
448 448
449 449 Use the command 'hg update' to switch to an existing branch.
450 450 """
451 451
452 452 hexfunc = ui.debugflag and hex or short
453 activebranches = [encoding.tolocal(repo[n].branch())
454 for n in repo.heads()]
453 activebranches = [repo[n].branch() for n in repo.heads()]
455 454 def testactive(tag, node):
456 455 realhead = tag in activebranches
457 456 open = node in repo.branchheads(tag, closed=False)
458 457 return realhead and open
459 458 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
460 459 for tag, node in repo.branchtags().items()],
461 460 reverse=True)
462 461
463 462 for isactive, node, tag in branches:
464 463 if (not active) or isactive:
464 encodedtag = encoding.tolocal(tag)
465 465 if ui.quiet:
466 ui.write("%s\n" % tag)
466 ui.write("%s\n" % encodedtag)
467 467 else:
468 468 hn = repo.lookup(node)
469 469 if isactive:
470 470 notice = ''
471 471 elif hn not in repo.branchheads(tag, closed=False):
472 472 if not closed:
473 473 continue
474 474 notice = ' (closed)'
475 475 else:
476 476 notice = ' (inactive)'
477 rev = str(node).rjust(31 - encoding.colwidth(tag))
478 data = tag, rev, hexfunc(hn), notice
477 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
478 data = encodedtag, rev, hexfunc(hn), notice
479 479 ui.write("%s %s:%s%s\n" % data)
480 480
481 481 def bundle(ui, repo, fname, dest=None, **opts):
482 482 """create a changegroup file
483 483
484 484 Generate a compressed changegroup file collecting changesets not
485 485 known to be in another repository.
486 486
487 487 If no destination repository is specified the destination is
488 488 assumed to have all the nodes specified by one or more --base
489 489 parameters. To create a bundle containing all changesets, use
490 490 -a/--all (or --base null).
491 491
492 492 You can change compression method with the -t/--type option.
493 493 The available compression methods are: none, bzip2, and
494 494 gzip (by default, bundles are compressed using bzip2).
495 495
496 496 The bundle file can then be transferred using conventional means
497 497 and applied to another repository with the unbundle or pull
498 498 command. This is useful when direct push and pull are not
499 499 available or when exporting an entire repository is undesirable.
500 500
501 501 Applying bundles preserves all changeset contents including
502 502 permissions, copy/rename information, and revision history.
503 503 """
504 504 revs = opts.get('rev') or None
505 505 if revs:
506 506 revs = [repo.lookup(rev) for rev in revs]
507 507 if opts.get('all'):
508 508 base = ['null']
509 509 else:
510 510 base = opts.get('base')
511 511 if base:
512 512 if dest:
513 513 raise util.Abort(_("--base is incompatible with specifying "
514 514 "a destination"))
515 515 base = [repo.lookup(rev) for rev in base]
516 516 # create the right base
517 517 # XXX: nodesbetween / changegroup* should be "fixed" instead
518 518 o = []
519 519 has = set((nullid,))
520 520 for n in base:
521 521 has.update(repo.changelog.reachable(n))
522 522 if revs:
523 523 visit = list(revs)
524 524 else:
525 525 visit = repo.changelog.heads()
526 526 seen = {}
527 527 while visit:
528 528 n = visit.pop(0)
529 529 parents = [p for p in repo.changelog.parents(n) if p not in has]
530 530 if len(parents) == 0:
531 531 o.insert(0, n)
532 532 else:
533 533 for p in parents:
534 534 if p not in seen:
535 535 seen[p] = 1
536 536 visit.append(p)
537 537 else:
538 538 dest, revs, checkout = hg.parseurl(
539 539 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
540 540 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
541 541 o = repo.findoutgoing(other, force=opts.get('force'))
542 542
543 543 if revs:
544 544 cg = repo.changegroupsubset(o, revs, 'bundle')
545 545 else:
546 546 cg = repo.changegroup(o, 'bundle')
547 547
548 548 bundletype = opts.get('type', 'bzip2').lower()
549 549 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
550 550 bundletype = btypes.get(bundletype)
551 551 if bundletype not in changegroup.bundletypes:
552 552 raise util.Abort(_('unknown bundle type specified with --type'))
553 553
554 554 changegroup.writebundle(cg, fname, bundletype)
555 555
556 556 def cat(ui, repo, file1, *pats, **opts):
557 557 """output the current or given revision of files
558 558
559 559 Print the specified files as they were at the given revision. If
560 560 no revision is given, the parent of the working directory is used,
561 561 or tip if no revision is checked out.
562 562
563 563 Output may be to a file, in which case the name of the file is
564 564 given using a format string. The formatting rules are the same as
565 565 for the export command, with the following additions::
566 566
567 567 %s basename of file being printed
568 568 %d dirname of file being printed, or '.' if in repository root
569 569 %p root-relative path name of file being printed
570 570 """
571 571 ctx = repo[opts.get('rev')]
572 572 err = 1
573 573 m = cmdutil.match(repo, (file1,) + pats, opts)
574 574 for abs in ctx.walk(m):
575 575 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
576 576 data = ctx[abs].data()
577 577 if opts.get('decode'):
578 578 data = repo.wwritedata(abs, data)
579 579 fp.write(data)
580 580 err = 0
581 581 return err
582 582
583 583 def clone(ui, source, dest=None, **opts):
584 584 """make a copy of an existing repository
585 585
586 586 Create a copy of an existing repository in a new directory.
587 587
588 588 If no destination directory name is specified, it defaults to the
589 589 basename of the source.
590 590
591 591 The location of the source is added to the new repository's
592 592 .hg/hgrc file, as the default to be used for future pulls.
593 593
594 594 If you use the -r/--rev option to clone up to a specific revision,
595 595 no subsequent revisions (including subsequent tags) will be
596 596 present in the cloned repository. This option implies --pull, even
597 597 on local repositories.
598 598
599 599 By default, clone will check out the head of the 'default' branch.
600 600 If the -U/--noupdate option is used, the new clone will contain
601 601 only a repository (.hg) and no working copy (the working copy
602 602 parent is the null revision).
603 603
604 604 See 'hg help urls' for valid source format details.
605 605
606 606 It is possible to specify an ssh:// URL as the destination, but no
607 607 .hg/hgrc and working directory will be created on the remote side.
608 608 Please see 'hg help urls' for important details about ssh:// URLs.
609 609
610 610 For efficiency, hardlinks are used for cloning whenever the source
611 611 and destination are on the same filesystem (note this applies only
612 612 to the repository data, not to the checked out files). Some
613 613 filesystems, such as AFS, implement hardlinking incorrectly, but
614 614 do not report errors. In these cases, use the --pull option to
615 615 avoid hardlinking.
616 616
617 617 In some cases, you can clone repositories and checked out files
618 618 using full hardlinks with ::
619 619
620 620 $ cp -al REPO REPOCLONE
621 621
622 622 This is the fastest way to clone, but it is not always safe. The
623 623 operation is not atomic (making sure REPO is not modified during
624 624 the operation is up to you) and you have to make sure your editor
625 625 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
626 626 this is not compatible with certain extensions that place their
627 627 metadata under the .hg directory, such as mq.
628 628 """
629 629 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
630 630 pull=opts.get('pull'),
631 631 stream=opts.get('uncompressed'),
632 632 rev=opts.get('rev'),
633 633 update=not opts.get('noupdate'))
634 634
635 635 def commit(ui, repo, *pats, **opts):
636 636 """commit the specified files or all outstanding changes
637 637
638 638 Commit changes to the given files into the repository. Unlike a
639 639 centralized RCS, this operation is a local operation. See hg push
640 640 for a way to actively distribute your changes.
641 641
642 642 If a list of files is omitted, all changes reported by "hg status"
643 643 will be committed.
644 644
645 645 If you are committing the result of a merge, do not provide any
646 646 filenames or -I/-X filters.
647 647
648 648 If no commit message is specified, the configured editor is
649 649 started to prompt you for a message.
650 650
651 651 See 'hg help dates' for a list of formats valid for -d/--date.
652 652 """
653 653 extra = {}
654 654 if opts.get('close_branch'):
655 655 extra['close'] = 1
656 656 e = cmdutil.commiteditor
657 657 if opts.get('force_editor'):
658 658 e = cmdutil.commitforceeditor
659 659
660 660 def commitfunc(ui, repo, message, match, opts):
661 661 return repo.commit(message, opts.get('user'), opts.get('date'), match,
662 662 editor=e, extra=extra)
663 663
664 664 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
665 665 if not node:
666 666 ui.status(_("nothing changed\n"))
667 667 return
668 668 cl = repo.changelog
669 669 rev = cl.rev(node)
670 670 parents = cl.parentrevs(rev)
671 671 if rev - 1 in parents:
672 672 # one of the parents was the old tip
673 673 pass
674 674 elif (parents == (nullrev, nullrev) or
675 675 len(cl.heads(cl.node(parents[0]))) > 1 and
676 676 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
677 677 ui.status(_('created new head\n'))
678 678
679 679 if ui.debugflag:
680 680 ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
681 681 elif ui.verbose:
682 682 ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))
683 683
684 684 def copy(ui, repo, *pats, **opts):
685 685 """mark files as copied for the next commit
686 686
687 687 Mark dest as having copies of source files. If dest is a
688 688 directory, copies are put in that directory. If dest is a file,
689 689 the source must be a single file.
690 690
691 691 By default, this command copies the contents of files as they
692 692 exist in the working directory. If invoked with -A/--after, the
693 693 operation is recorded, but no copying is performed.
694 694
695 695 This command takes effect with the next commit. To undo a copy
696 696 before that, see hg revert.
697 697 """
698 698 wlock = repo.wlock(False)
699 699 try:
700 700 return cmdutil.copy(ui, repo, pats, opts)
701 701 finally:
702 702 wlock.release()
703 703
704 704 def debugancestor(ui, repo, *args):
705 705 """find the ancestor revision of two revisions in a given index"""
706 706 if len(args) == 3:
707 707 index, rev1, rev2 = args
708 708 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
709 709 lookup = r.lookup
710 710 elif len(args) == 2:
711 711 if not repo:
712 712 raise util.Abort(_("There is no Mercurial repository here "
713 713 "(.hg not found)"))
714 714 rev1, rev2 = args
715 715 r = repo.changelog
716 716 lookup = repo.lookup
717 717 else:
718 718 raise util.Abort(_('either two or three arguments required'))
719 719 a = r.ancestor(lookup(rev1), lookup(rev2))
720 720 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
721 721
722 722 def debugcommands(ui, cmd='', *args):
723 723 for cmd, vals in sorted(table.iteritems()):
724 724 cmd = cmd.split('|')[0].strip('^')
725 725 opts = ', '.join([i[1] for i in vals[1]])
726 726 ui.write('%s: %s\n' % (cmd, opts))
727 727
728 728 def debugcomplete(ui, cmd='', **opts):
729 729 """returns the completion list associated with the given command"""
730 730
731 731 if opts.get('options'):
732 732 options = []
733 733 otables = [globalopts]
734 734 if cmd:
735 735 aliases, entry = cmdutil.findcmd(cmd, table, False)
736 736 otables.append(entry[1])
737 737 for t in otables:
738 738 for o in t:
739 739 if o[0]:
740 740 options.append('-%s' % o[0])
741 741 options.append('--%s' % o[1])
742 742 ui.write("%s\n" % "\n".join(options))
743 743 return
744 744
745 745 cmdlist = cmdutil.findpossible(cmd, table)
746 746 if ui.verbose:
747 747 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
748 748 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
749 749
750 750 def debugfsinfo(ui, path = "."):
751 751 open('.debugfsinfo', 'w').write('')
752 752 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
753 753 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
754 754 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
755 755 and 'yes' or 'no'))
756 756 os.unlink('.debugfsinfo')
757 757
758 758 def debugrebuildstate(ui, repo, rev="tip"):
759 759 """rebuild the dirstate as it would look like for the given revision"""
760 760 ctx = repo[rev]
761 761 wlock = repo.wlock()
762 762 try:
763 763 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
764 764 finally:
765 765 wlock.release()
766 766
767 767 def debugcheckstate(ui, repo):
768 768 """validate the correctness of the current dirstate"""
769 769 parent1, parent2 = repo.dirstate.parents()
770 770 m1 = repo[parent1].manifest()
771 771 m2 = repo[parent2].manifest()
772 772 errors = 0
773 773 for f in repo.dirstate:
774 774 state = repo.dirstate[f]
775 775 if state in "nr" and f not in m1:
776 776 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
777 777 errors += 1
778 778 if state in "a" and f in m1:
779 779 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
780 780 errors += 1
781 781 if state in "m" and f not in m1 and f not in m2:
782 782 ui.warn(_("%s in state %s, but not in either manifest\n") %
783 783 (f, state))
784 784 errors += 1
785 785 for f in m1:
786 786 state = repo.dirstate[f]
787 787 if state not in "nrm":
788 788 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
789 789 errors += 1
790 790 if errors:
791 791 error = _(".hg/dirstate inconsistent with current parent's manifest")
792 792 raise util.Abort(error)
793 793
794 794 def showconfig(ui, repo, *values, **opts):
795 795 """show combined config settings from all hgrc files
796 796
797 797 With no arguments, print names and values of all config items.
798 798
799 799 With one argument of the form section.name, print just the value
800 800 of that config item.
801 801
802 802 With multiple arguments, print names and values of all config
803 803 items with matching section names.
804 804
805 805 With --debug, the source (filename and line number) is printed
806 806 for each config item.
807 807 """
808 808
809 809 untrusted = bool(opts.get('untrusted'))
810 810 if values:
811 811 if len([v for v in values if '.' in v]) > 1:
812 812 raise util.Abort(_('only one config item permitted'))
813 813 for section, name, value in ui.walkconfig(untrusted=untrusted):
814 814 sectname = section + '.' + name
815 815 if values:
816 816 for v in values:
817 817 if v == section:
818 818 ui.debug('%s: ' %
819 819 ui.configsource(section, name, untrusted))
820 820 ui.write('%s=%s\n' % (sectname, value))
821 821 elif v == sectname:
822 822 ui.debug('%s: ' %
823 823 ui.configsource(section, name, untrusted))
824 824 ui.write(value, '\n')
825 825 else:
826 826 ui.debug('%s: ' %
827 827 ui.configsource(section, name, untrusted))
828 828 ui.write('%s=%s\n' % (sectname, value))
829 829
830 830 def debugsetparents(ui, repo, rev1, rev2=None):
831 831 """manually set the parents of the current working directory
832 832
833 833 This is useful for writing repository conversion tools, but should
834 834 be used with care.
835 835 """
836 836
837 837 if not rev2:
838 838 rev2 = hex(nullid)
839 839
840 840 wlock = repo.wlock()
841 841 try:
842 842 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
843 843 finally:
844 844 wlock.release()
845 845
846 846 def debugstate(ui, repo, nodates=None):
847 847 """show the contents of the current dirstate"""
848 848 timestr = ""
849 849 showdate = not nodates
850 850 for file_, ent in sorted(repo.dirstate._map.iteritems()):
851 851 if showdate:
852 852 if ent[3] == -1:
853 853 # Pad or slice to locale representation
854 854 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
855 855 timestr = 'unset'
856 856 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
857 857 else:
858 858 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
859 859 if ent[1] & 020000:
860 860 mode = 'lnk'
861 861 else:
862 862 mode = '%3o' % (ent[1] & 0777)
863 863 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
864 864 for f in repo.dirstate.copies():
865 865 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
866 866
867 867 def debugsub(ui, repo, rev=None):
868 868 if rev == '':
869 869 rev = None
870 870 for k,v in sorted(repo[rev].substate.items()):
871 871 ui.write('path %s\n' % k)
872 872 ui.write(' source %s\n' % v[0])
873 873 ui.write(' revision %s\n' % v[1])
874 874
875 875 def debugdata(ui, file_, rev):
876 876 """dump the contents of a data file revision"""
877 877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
878 878 try:
879 879 ui.write(r.revision(r.lookup(rev)))
880 880 except KeyError:
881 881 raise util.Abort(_('invalid revision identifier %s') % rev)
882 882
883 883 def debugdate(ui, date, range=None, **opts):
884 884 """parse and display a date"""
885 885 if opts["extended"]:
886 886 d = util.parsedate(date, util.extendeddateformats)
887 887 else:
888 888 d = util.parsedate(date)
889 889 ui.write("internal: %s %s\n" % d)
890 890 ui.write("standard: %s\n" % util.datestr(d))
891 891 if range:
892 892 m = util.matchdate(range)
893 893 ui.write("match: %s\n" % m(d[0]))
894 894
895 895 def debugindex(ui, file_):
896 896 """dump the contents of an index file"""
897 897 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
898 898 ui.write(" rev offset length base linkrev"
899 899 " nodeid p1 p2\n")
900 900 for i in r:
901 901 node = r.node(i)
902 902 try:
903 903 pp = r.parents(node)
904 904 except:
905 905 pp = [nullid, nullid]
906 906 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
907 907 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
908 908 short(node), short(pp[0]), short(pp[1])))
909 909
910 910 def debugindexdot(ui, file_):
911 911 """dump an index DAG as a graphviz dot file"""
912 912 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
913 913 ui.write("digraph G {\n")
914 914 for i in r:
915 915 node = r.node(i)
916 916 pp = r.parents(node)
917 917 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
918 918 if pp[1] != nullid:
919 919 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
920 920 ui.write("}\n")
921 921
922 922 def debuginstall(ui):
923 923 '''test Mercurial installation'''
924 924
925 925 def writetemp(contents):
926 926 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
927 927 f = os.fdopen(fd, "wb")
928 928 f.write(contents)
929 929 f.close()
930 930 return name
931 931
932 932 problems = 0
933 933
934 934 # encoding
935 935 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
936 936 try:
937 937 encoding.fromlocal("test")
938 938 except util.Abort, inst:
939 939 ui.write(" %s\n" % inst)
940 940 ui.write(_(" (check that your locale is properly set)\n"))
941 941 problems += 1
942 942
943 943 # compiled modules
944 944 ui.status(_("Checking extensions...\n"))
945 945 try:
946 946 import bdiff, mpatch, base85
947 947 except Exception, inst:
948 948 ui.write(" %s\n" % inst)
949 949 ui.write(_(" One or more extensions could not be found"))
950 950 ui.write(_(" (check that you compiled the extensions)\n"))
951 951 problems += 1
952 952
953 953 # templates
954 954 ui.status(_("Checking templates...\n"))
955 955 try:
956 956 import templater
957 957 templater.templater(templater.templatepath("map-cmdline.default"))
958 958 except Exception, inst:
959 959 ui.write(" %s\n" % inst)
960 960 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
961 961 problems += 1
962 962
963 963 # patch
964 964 ui.status(_("Checking patch...\n"))
965 965 patchproblems = 0
966 966 a = "1\n2\n3\n4\n"
967 967 b = "1\n2\n3\ninsert\n4\n"
968 968 fa = writetemp(a)
969 969 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
970 970 os.path.basename(fa))
971 971 fd = writetemp(d)
972 972
973 973 files = {}
974 974 try:
975 975 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
976 976 except util.Abort, e:
977 977 ui.write(_(" patch call failed:\n"))
978 978 ui.write(" " + str(e) + "\n")
979 979 patchproblems += 1
980 980 else:
981 981 if list(files) != [os.path.basename(fa)]:
982 982 ui.write(_(" unexpected patch output!\n"))
983 983 patchproblems += 1
984 984 a = open(fa).read()
985 985 if a != b:
986 986 ui.write(_(" patch test failed!\n"))
987 987 patchproblems += 1
988 988
989 989 if patchproblems:
990 990 if ui.config('ui', 'patch'):
991 991 ui.write(_(" (Current patch tool may be incompatible with patch,"
992 992 " or misconfigured. Please check your .hgrc file)\n"))
993 993 else:
994 994 ui.write(_(" Internal patcher failure, please report this error"
995 995 " to http://mercurial.selenic.com/bts/\n"))
996 996 problems += patchproblems
997 997
998 998 os.unlink(fa)
999 999 os.unlink(fd)
1000 1000
1001 1001 # editor
1002 1002 ui.status(_("Checking commit editor...\n"))
1003 1003 editor = ui.geteditor()
1004 1004 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1005 1005 if not cmdpath:
1006 1006 if editor == 'vi':
1007 1007 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1008 1008 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1009 1009 else:
1010 1010 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1011 1011 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1012 1012 problems += 1
1013 1013
1014 1014 # check username
1015 1015 ui.status(_("Checking username...\n"))
1016 1016 user = os.environ.get("HGUSER")
1017 1017 if user is None:
1018 1018 user = ui.config("ui", "username")
1019 1019 if user is None:
1020 1020 user = os.environ.get("EMAIL")
1021 1021 if not user:
1022 1022 ui.warn(" ")
1023 1023 ui.username()
1024 1024 ui.write(_(" (specify a username in your .hgrc file)\n"))
1025 1025
1026 1026 if not problems:
1027 1027 ui.status(_("No problems detected\n"))
1028 1028 else:
1029 1029 ui.write(_("%s problems detected,"
1030 1030 " please check your install!\n") % problems)
1031 1031
1032 1032 return problems
1033 1033
1034 1034 def debugrename(ui, repo, file1, *pats, **opts):
1035 1035 """dump rename information"""
1036 1036
1037 1037 ctx = repo[opts.get('rev')]
1038 1038 m = cmdutil.match(repo, (file1,) + pats, opts)
1039 1039 for abs in ctx.walk(m):
1040 1040 fctx = ctx[abs]
1041 1041 o = fctx.filelog().renamed(fctx.filenode())
1042 1042 rel = m.rel(abs)
1043 1043 if o:
1044 1044 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1045 1045 else:
1046 1046 ui.write(_("%s not renamed\n") % rel)
1047 1047
1048 1048 def debugwalk(ui, repo, *pats, **opts):
1049 1049 """show how files match on given patterns"""
1050 1050 m = cmdutil.match(repo, pats, opts)
1051 1051 items = list(repo.walk(m))
1052 1052 if not items:
1053 1053 return
1054 1054 fmt = 'f %%-%ds %%-%ds %%s' % (
1055 1055 max([len(abs) for abs in items]),
1056 1056 max([len(m.rel(abs)) for abs in items]))
1057 1057 for abs in items:
1058 1058 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1059 1059 ui.write("%s\n" % line.rstrip())
1060 1060
1061 1061 def diff(ui, repo, *pats, **opts):
1062 1062 """diff repository (or selected files)
1063 1063
1064 1064 Show differences between revisions for the specified files.
1065 1065
1066 1066 Differences between files are shown using the unified diff format.
1067 1067
1068 1068 NOTE: diff may generate unexpected results for merges, as it will
1069 1069 default to comparing against the working directory's first parent
1070 1070 changeset if no revisions are specified.
1071 1071
1072 1072 When two revision arguments are given, then changes are shown
1073 1073 between those revisions. If only one revision is specified then
1074 1074 that revision is compared to the working directory, and, when no
1075 1075 revisions are specified, the working directory files are compared
1076 1076 to its parent.
1077 1077
1078 1078 Without the -a/--text option, diff will avoid generating diffs of
1079 1079 files it detects as binary. With -a, diff will generate a diff
1080 1080 anyway, probably with undesirable results.
1081 1081
1082 1082 Use the -g/--git option to generate diffs in the git extended diff
1083 1083 format. For more information, read 'hg help diffs'.
1084 1084 """
1085 1085
1086 1086 revs = opts.get('rev')
1087 1087 change = opts.get('change')
1088 1088 stat = opts.get('stat')
1089 1089
1090 1090 if revs and change:
1091 1091 msg = _('cannot specify --rev and --change at the same time')
1092 1092 raise util.Abort(msg)
1093 1093 elif change:
1094 1094 node2 = repo.lookup(change)
1095 1095 node1 = repo[node2].parents()[0].node()
1096 1096 else:
1097 1097 node1, node2 = cmdutil.revpair(repo, revs)
1098 1098
1099 1099 if stat:
1100 1100 opts['unified'] = '0'
1101 1101 diffopts = patch.diffopts(ui, opts)
1102 1102
1103 1103 m = cmdutil.match(repo, pats, opts)
1104 1104 it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
1105 1105 if stat:
1106 1106 width = ui.interactive() and util.termwidth() or 80
1107 1107 ui.write(patch.diffstat(util.iterlines(it), width=width,
1108 1108 git=diffopts.git))
1109 1109 else:
1110 1110 for chunk in it:
1111 1111 ui.write(chunk)
1112 1112
1113 1113 def export(ui, repo, *changesets, **opts):
1114 1114 """dump the header and diffs for one or more changesets
1115 1115
1116 1116 Print the changeset header and diffs for one or more revisions.
1117 1117
1118 1118 The information shown in the changeset header is: author,
1119 1119 changeset hash, parent(s) and commit comment.
1120 1120
1121 1121 NOTE: export may generate unexpected diff output for merge
1122 1122 changesets, as it will compare the merge changeset against its
1123 1123 first parent only.
1124 1124
1125 1125 Output may be to a file, in which case the name of the file is
1126 1126 given using a format string. The formatting rules are as follows::
1127 1127
1128 1128 %% literal "%" character
1129 1129 %H changeset hash (40 bytes of hexadecimal)
1130 1130 %N number of patches being generated
1131 1131 %R changeset revision number
1132 1132 %b basename of the exporting repository
1133 1133 %h short-form changeset hash (12 bytes of hexadecimal)
1134 1134 %n zero-padded sequence number, starting at 1
1135 1135 %r zero-padded changeset revision number
1136 1136
1137 1137 Without the -a/--text option, export will avoid generating diffs
1138 1138 of files it detects as binary. With -a, export will generate a
1139 1139 diff anyway, probably with undesirable results.
1140 1140
1141 1141 Use the -g/--git option to generate diffs in the git extended diff
1142 1142 format. See 'hg help diffs' for more information.
1143 1143
1144 1144 With the --switch-parent option, the diff will be against the
1145 1145 second parent. It can be useful to review a merge.
1146 1146 """
1147 1147 if not changesets:
1148 1148 raise util.Abort(_("export requires at least one changeset"))
1149 1149 revs = cmdutil.revrange(repo, changesets)
1150 1150 if len(revs) > 1:
1151 1151 ui.note(_('exporting patches:\n'))
1152 1152 else:
1153 1153 ui.note(_('exporting patch:\n'))
1154 1154 patch.export(repo, revs, template=opts.get('output'),
1155 1155 switch_parent=opts.get('switch_parent'),
1156 1156 opts=patch.diffopts(ui, opts))
1157 1157
1158 1158 def forget(ui, repo, *pats, **opts):
1159 1159 """forget the specified files on the next commit
1160 1160
1161 1161 Mark the specified files so they will no longer be tracked
1162 1162 after the next commit.
1163 1163
1164 1164 This only removes files from the current branch, not from the
1165 1165 entire project history, and it does not delete them from the
1166 1166 working directory.
1167 1167
1168 1168 To undo a forget before the next commit, see hg add.
1169 1169 """
1170 1170
1171 1171 if not pats:
1172 1172 raise util.Abort(_('no files specified'))
1173 1173
1174 1174 m = cmdutil.match(repo, pats, opts)
1175 1175 s = repo.status(match=m, clean=True)
1176 1176 forget = sorted(s[0] + s[1] + s[3] + s[6])
1177 1177
1178 1178 for f in m.files():
1179 1179 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1180 1180 ui.warn(_('not removing %s: file is already untracked\n')
1181 1181 % m.rel(f))
1182 1182
1183 1183 for f in forget:
1184 1184 if ui.verbose or not m.exact(f):
1185 1185 ui.status(_('removing %s\n') % m.rel(f))
1186 1186
1187 1187 repo.remove(forget, unlink=False)
1188 1188
1189 1189 def grep(ui, repo, pattern, *pats, **opts):
1190 1190 """search for a pattern in specified files and revisions
1191 1191
1192 1192 Search revisions of files for a regular expression.
1193 1193
1194 1194 This command behaves differently than Unix grep. It only accepts
1195 1195 Python/Perl regexps. It searches repository history, not the
1196 1196 working directory. It always prints the revision number in which a
1197 1197 match appears.
1198 1198
1199 1199 By default, grep only prints output for the first revision of a
1200 1200 file in which it finds a match. To get it to print every revision
1201 1201 that contains a change in match status ("-" for a match that
1202 1202 becomes a non-match, or "+" for a non-match that becomes a match),
1203 1203 use the --all flag.
1204 1204 """
1205 1205 reflags = 0
1206 1206 if opts.get('ignore_case'):
1207 1207 reflags |= re.I
1208 1208 try:
1209 1209 regexp = re.compile(pattern, reflags)
1210 1210 except Exception, inst:
1211 1211 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1212 1212 return None
1213 1213 sep, eol = ':', '\n'
1214 1214 if opts.get('print0'):
1215 1215 sep = eol = '\0'
1216 1216
1217 1217 getfile = util.lrucachefunc(repo.file)
1218 1218
1219 1219 def matchlines(body):
1220 1220 begin = 0
1221 1221 linenum = 0
1222 1222 while True:
1223 1223 match = regexp.search(body, begin)
1224 1224 if not match:
1225 1225 break
1226 1226 mstart, mend = match.span()
1227 1227 linenum += body.count('\n', begin, mstart) + 1
1228 1228 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1229 1229 begin = body.find('\n', mend) + 1 or len(body)
1230 1230 lend = begin - 1
1231 1231 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1232 1232
1233 1233 class linestate(object):
1234 1234 def __init__(self, line, linenum, colstart, colend):
1235 1235 self.line = line
1236 1236 self.linenum = linenum
1237 1237 self.colstart = colstart
1238 1238 self.colend = colend
1239 1239
1240 1240 def __hash__(self):
1241 1241 return hash((self.linenum, self.line))
1242 1242
1243 1243 def __eq__(self, other):
1244 1244 return self.line == other.line
1245 1245
1246 1246 matches = {}
1247 1247 copies = {}
1248 1248 def grepbody(fn, rev, body):
1249 1249 matches[rev].setdefault(fn, [])
1250 1250 m = matches[rev][fn]
1251 1251 for lnum, cstart, cend, line in matchlines(body):
1252 1252 s = linestate(line, lnum, cstart, cend)
1253 1253 m.append(s)
1254 1254
1255 1255 def difflinestates(a, b):
1256 1256 sm = difflib.SequenceMatcher(None, a, b)
1257 1257 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1258 1258 if tag == 'insert':
1259 1259 for i in xrange(blo, bhi):
1260 1260 yield ('+', b[i])
1261 1261 elif tag == 'delete':
1262 1262 for i in xrange(alo, ahi):
1263 1263 yield ('-', a[i])
1264 1264 elif tag == 'replace':
1265 1265 for i in xrange(alo, ahi):
1266 1266 yield ('-', a[i])
1267 1267 for i in xrange(blo, bhi):
1268 1268 yield ('+', b[i])
1269 1269
1270 1270 def display(fn, ctx, pstates, states):
1271 1271 rev = ctx.rev()
1272 1272 datefunc = ui.quiet and util.shortdate or util.datestr
1273 1273 found = False
1274 1274 filerevmatches = {}
1275 1275 if opts.get('all'):
1276 1276 iter = difflinestates(pstates, states)
1277 1277 else:
1278 1278 iter = [('', l) for l in states]
1279 1279 for change, l in iter:
1280 1280 cols = [fn, str(rev)]
1281 1281 if opts.get('line_number'):
1282 1282 cols.append(str(l.linenum))
1283 1283 if opts.get('all'):
1284 1284 cols.append(change)
1285 1285 if opts.get('user'):
1286 1286 cols.append(ui.shortuser(ctx.user()))
1287 1287 if opts.get('date'):
1288 1288 cols.append(datefunc(ctx.date()))
1289 1289 if opts.get('files_with_matches'):
1290 1290 c = (fn, rev)
1291 1291 if c in filerevmatches:
1292 1292 continue
1293 1293 filerevmatches[c] = 1
1294 1294 else:
1295 1295 cols.append(l.line)
1296 1296 ui.write(sep.join(cols), eol)
1297 1297 found = True
1298 1298 return found
1299 1299
1300 1300 skip = {}
1301 1301 revfiles = {}
1302 1302 matchfn = cmdutil.match(repo, pats, opts)
1303 1303 found = False
1304 1304 follow = opts.get('follow')
1305 1305
1306 1306 def prep(ctx, fns):
1307 1307 rev = ctx.rev()
1308 1308 pctx = ctx.parents()[0]
1309 1309 parent = pctx.rev()
1310 1310 matches.setdefault(rev, {})
1311 1311 matches.setdefault(parent, {})
1312 1312 files = revfiles.setdefault(rev, [])
1313 1313 for fn in fns:
1314 1314 flog = getfile(fn)
1315 1315 try:
1316 1316 fnode = ctx.filenode(fn)
1317 1317 except error.LookupError:
1318 1318 continue
1319 1319
1320 1320 copied = flog.renamed(fnode)
1321 1321 copy = follow and copied and copied[0]
1322 1322 if copy:
1323 1323 copies.setdefault(rev, {})[fn] = copy
1324 1324 if fn in skip:
1325 1325 if copy:
1326 1326 skip[copy] = True
1327 1327 continue
1328 1328 files.append(fn)
1329 1329
1330 1330 if fn not in matches[rev]:
1331 1331 grepbody(fn, rev, flog.read(fnode))
1332 1332
1333 1333 pfn = copy or fn
1334 1334 if pfn not in matches[parent]:
1335 1335 try:
1336 1336 fnode = pctx.filenode(pfn)
1337 1337 grepbody(pfn, parent, flog.read(fnode))
1338 1338 except error.LookupError:
1339 1339 pass
1340 1340
1341 1341 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1342 1342 rev = ctx.rev()
1343 1343 parent = ctx.parents()[0].rev()
1344 1344 for fn in sorted(revfiles.get(rev, [])):
1345 1345 states = matches[rev][fn]
1346 1346 copy = copies.get(rev, {}).get(fn)
1347 1347 if fn in skip:
1348 1348 if copy:
1349 1349 skip[copy] = True
1350 1350 continue
1351 1351 pstates = matches.get(parent, {}).get(copy or fn, [])
1352 1352 if pstates or states:
1353 1353 r = display(fn, ctx, pstates, states)
1354 1354 found = found or r
1355 1355 if r and not opts.get('all'):
1356 1356 skip[fn] = True
1357 1357 if copy:
1358 1358 skip[copy] = True
1359 1359 del matches[rev]
1360 1360 del revfiles[rev]
1361 1361
1362 1362 def heads(ui, repo, *branchrevs, **opts):
1363 1363 """show current repository heads or show branch heads
1364 1364
1365 1365 With no arguments, show all repository head changesets.
1366 1366
1367 1367 Repository "heads" are changesets with no child changesets. They are
1368 1368 where development generally takes place and are the usual targets
1369 1369 for update and merge operations.
1370 1370
1371 1371 If one or more REV is given, the "branch heads" will be shown for
1372 1372 the named branch associated with the specified changeset(s).
1373 1373
1374 1374 Branch heads are changesets on a named branch with no descendants on
1375 1375 the same branch. A branch head could be a "true" (repository) head,
1376 1376 or it could be the last changeset on that branch before it was
1377 1377 merged into another branch, or it could be the last changeset on the
1378 1378 branch before a new branch was created. If none of the branch heads
1379 1379 are true heads, the branch is considered inactive.
1380 1380
1381 1381 If -c/--closed is specified, also show branch heads marked closed
1382 1382 (see hg commit --close-branch).
1383 1383
1384 1384 If STARTREV is specified, only those heads that are descendants of
1385 1385 STARTREV will be displayed.
1386 1386 """
1387 1387 if opts.get('rev'):
1388 1388 start = repo.lookup(opts['rev'])
1389 1389 else:
1390 1390 start = None
1391 1391 closed = opts.get('closed')
1392 1392 hideinactive, _heads = opts.get('active'), None
1393 1393 if not branchrevs:
1394 1394 if closed:
1395 1395 raise error.Abort(_('you must specify a branch to use --closed'))
1396 1396 # Assume we're looking repo-wide heads if no revs were specified.
1397 1397 heads = repo.heads(start)
1398 1398 else:
1399 1399 if hideinactive:
1400 1400 _heads = repo.heads(start)
1401 1401 heads = []
1402 1402 visitedset = set()
1403 1403 for branchrev in branchrevs:
1404 branch = repo[branchrev].branch()
1404 branch = repo[encoding.fromlocal(branchrev)].branch()
1405 encodedbranch = encoding.tolocal(branch)
1405 1406 if branch in visitedset:
1406 1407 continue
1407 1408 visitedset.add(branch)
1408 1409 bheads = repo.branchheads(branch, start, closed=closed)
1409 1410 if not bheads:
1410 1411 if not opts.get('rev'):
1411 ui.warn(_("no open branch heads on branch %s\n") % branch)
1412 ui.warn(_("no open branch heads on branch %s\n") % encodedbranch)
1412 1413 elif branch != branchrev:
1413 1414 ui.warn(_("no changes on branch %s containing %s are "
1414 1415 "reachable from %s\n")
1415 % (branch, branchrev, opts.get('rev')))
1416 % (encodedbranch, branchrev, opts.get('rev')))
1416 1417 else:
1417 1418 ui.warn(_("no changes on branch %s are reachable from %s\n")
1418 % (branch, opts.get('rev')))
1419 % (encodedbranch, opts.get('rev')))
1419 1420 if hideinactive:
1420 1421 bheads = [bhead for bhead in bheads if bhead in _heads]
1421 1422 heads.extend(bheads)
1422 1423 if not heads:
1423 1424 return 1
1424 1425 displayer = cmdutil.show_changeset(ui, repo, opts)
1425 1426 for n in heads:
1426 1427 displayer.show(repo[n])
1427 1428
1428 1429 def help_(ui, name=None, with_version=False):
1429 1430 """show help for a given topic or a help overview
1430 1431
1431 1432 With no arguments, print a list of commands with short help messages.
1432 1433
1433 1434 Given a topic, extension, or command name, print help for that
1434 1435 topic."""
1435 1436 option_lists = []
1436 1437 textwidth = util.termwidth() - 2
1437 1438
1438 1439 def addglobalopts(aliases):
1439 1440 if ui.verbose:
1440 1441 option_lists.append((_("global options:"), globalopts))
1441 1442 if name == 'shortlist':
1442 1443 option_lists.append((_('use "hg help" for the full list '
1443 1444 'of commands'), ()))
1444 1445 else:
1445 1446 if name == 'shortlist':
1446 1447 msg = _('use "hg help" for the full list of commands '
1447 1448 'or "hg -v" for details')
1448 1449 elif aliases:
1449 1450 msg = _('use "hg -v help%s" to show aliases and '
1450 1451 'global options') % (name and " " + name or "")
1451 1452 else:
1452 1453 msg = _('use "hg -v help %s" to show global options') % name
1453 1454 option_lists.append((msg, ()))
1454 1455
1455 1456 def helpcmd(name):
1456 1457 if with_version:
1457 1458 version_(ui)
1458 1459 ui.write('\n')
1459 1460
1460 1461 try:
1461 1462 aliases, i = cmdutil.findcmd(name, table, False)
1462 1463 except error.AmbiguousCommand, inst:
1463 1464 # py3k fix: except vars can't be used outside the scope of the
1464 1465 # except block, nor can be used inside a lambda. python issue4617
1465 1466 prefix = inst.args[0]
1466 1467 select = lambda c: c.lstrip('^').startswith(prefix)
1467 1468 helplist(_('list of commands:\n\n'), select)
1468 1469 return
1469 1470
1470 1471 # synopsis
1471 1472 if len(i) > 2:
1472 1473 if i[2].startswith('hg'):
1473 1474 ui.write("%s\n" % i[2])
1474 1475 else:
1475 1476 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1476 1477 else:
1477 1478 ui.write('hg %s\n' % aliases[0])
1478 1479
1479 1480 # aliases
1480 1481 if not ui.quiet and len(aliases) > 1:
1481 1482 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1482 1483
1483 1484 # description
1484 1485 doc = gettext(i[0].__doc__)
1485 1486 if not doc:
1486 1487 doc = _("(no help text available)")
1487 1488 if ui.quiet:
1488 1489 doc = doc.splitlines()[0]
1489 1490 ui.write("\n%s\n" % minirst.format(doc, textwidth))
1490 1491
1491 1492 if not ui.quiet:
1492 1493 # options
1493 1494 if i[1]:
1494 1495 option_lists.append((_("options:\n"), i[1]))
1495 1496
1496 1497 addglobalopts(False)
1497 1498
1498 1499 def helplist(header, select=None):
1499 1500 h = {}
1500 1501 cmds = {}
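# filter the command table: honor an explicit selection, hide extension
# commands from the default listing, keep only '^'-marked commands in the
# short list, and skip debug/deprecated commands unless requested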
1501 1502 for c, e in table.iteritems():
1502 1503 f = c.split("|", 1)[0]
1503 1504 if select and not select(f):
1504 1505 continue
1505 1506 if (not select and name != 'shortlist' and
1506 1507 e[0].__module__ != __name__):
1507 1508 continue
1508 1509 if name == "shortlist" and not f.startswith("^"):
1509 1510 continue
1510 1511 f = f.lstrip("^")
1511 1512 if not ui.debugflag and f.startswith("debug"):
1512 1513 continue
1513 1514 doc = e[0].__doc__
1514 1515 if doc and 'DEPRECATED' in doc and not ui.verbose:
1515 1516 continue
1516 1517 doc = gettext(doc)
1517 1518 if not doc:
1518 1519 doc = _("(no help text available)")
1519 1520 h[f] = doc.splitlines()[0].rstrip()
1520 1521 cmds[f] = c.lstrip("^")
1521 1522
1522 1523 if not h:
1523 1524 ui.status(_('no commands defined\n'))
1524 1525 return
1525 1526
1526 1527 ui.status(header)
1527 1528 fns = sorted(h)
1528 1529 m = max(map(len, fns))
1529 1530 for f in fns:
1530 1531 if ui.verbose:
1531 1532 commands = cmds[f].replace("|",", ")
1532 1533 ui.write(" %s:\n %s\n"%(commands, h[f]))
1533 1534 else:
1534 1535 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1535 1536
1536 1537 if name != 'shortlist':
1537 1538 exts, maxlength = extensions.enabled()
1538 1539 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1539 1540 if text:
1540 1541 ui.write("\n%s\n" % minirst.format(text, textwidth))
1541 1542
1542 1543 if not ui.quiet:
1543 1544 addglobalopts(True)
1544 1545
1545 1546 def helptopic(name):
1546 1547 for names, header, doc in help.helptable:
1547 1548 if name in names:
1548 1549 break
1549 1550 else:
1550 1551 raise error.UnknownCommand(name)
1551 1552
1552 1553 # description
1553 1554 if not doc:
1554 1555 doc = _("(no help text available)")
1555 1556 if hasattr(doc, '__call__'):
1556 1557 doc = doc()
1557 1558
1558 1559 ui.write("%s\n\n" % header)
1559 1560 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1560 1561
1561 1562 def helpext(name):
1562 1563 try:
1563 1564 mod = extensions.find(name)
1564 1565 except KeyError:
1565 1566 raise error.UnknownCommand(name)
1566 1567
1567 1568 doc = gettext(mod.__doc__) or _('no help text available')
1568 1569 if '\n' not in doc:
1569 1570 head, tail = doc, ""
1570 1571 else:
1571 1572 head, tail = doc.split('\n', 1)
1572 1573 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1573 1574 if tail:
1574 1575 ui.write(minirst.format(tail, textwidth))
1575 1576 ui.status('\n\n')
1576 1577
1577 1578 try:
1578 1579 ct = mod.cmdtable
1579 1580 except AttributeError:
1580 1581 ct = {}
1581 1582
1582 1583 modcmds = set([c.split('|', 1)[0] for c in ct])
1583 1584 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1584 1585
1585 1586 if name and name != 'shortlist':
1586 1587 i = None
1587 1588 for f in (helptopic, helpcmd, helpext):
1588 1589 try:
1589 1590 f(name)
1590 1591 i = None
1591 1592 break
1592 1593 except error.UnknownCommand, inst:
1593 1594 i = inst
1594 1595 if i:
1595 1596 raise i
1596 1597
1597 1598 else:
1598 1599 # program name
1599 1600 if ui.verbose or with_version:
1600 1601 version_(ui)
1601 1602 else:
1602 1603 ui.status(_("Mercurial Distributed SCM\n"))
1603 1604 ui.status('\n')
1604 1605
1605 1606 # list of commands
1606 1607 if name == "shortlist":
1607 1608 header = _('basic commands:\n\n')
1608 1609 else:
1609 1610 header = _('list of commands:\n\n')
1610 1611
1611 1612 helplist(header)
1612 1613
1613 1614 # list all option lists
1614 1615 opt_output = []
1615 1616 for title, options in option_lists:
1616 1617 opt_output.append(("\n%s" % title, None))
1617 1618 for shortopt, longopt, default, desc in options:
1618 1619 if _("DEPRECATED") in desc and not ui.verbose: continue
1619 1620 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1620 1621 longopt and " --%s" % longopt),
1621 1622 "%s%s" % (desc,
1622 1623 default
1623 1624 and _(" (default: %s)") % default
1624 1625 or "")))
1625 1626
1626 1627 if not name:
1627 1628 ui.write(_("\nadditional help topics:\n\n"))
1628 1629 topics = []
1629 1630 for names, header, doc in help.helptable:
1630 1631 topics.append((sorted(names, key=len, reverse=True)[0], header))
1631 1632 topics_len = max([len(s[0]) for s in topics])
1632 1633 for t, desc in topics:
1633 1634 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1634 1635
1635 1636 if opt_output:
1636 1637 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1637 1638 for first, second in opt_output:
1638 1639 if second:
1639 1640 second = util.wrap(second, opts_len + 3)
1640 1641 ui.write(" %-*s %s\n" % (opts_len, first, second))
1641 1642 else:
1642 1643 ui.write("%s\n" % first)
1643 1644
1644 1645 def identify(ui, repo, source=None,
1645 1646 rev=None, num=None, id=None, branch=None, tags=None):
1646 1647 """identify the working copy or specified revision
1647 1648
1648 1649 With no revision, print a summary of the current state of the
1649 1650 repository.
1650 1651
1651 1652 Specifying a path to a repository root or Mercurial bundle will
1652 1653 cause lookup to operate on that repository/bundle.
1653 1654
1654 1655 This summary identifies the repository state using one or two
1655 1656 parent hash identifiers, followed by a "+" if there are
1656 1657 uncommitted changes in the working directory, a list of tags for
1657 1658 this revision and a branch name for non-default branches.
1658 1659 """
1659 1660
1660 1661 if not repo and not source:
1661 1662 raise util.Abort(_("There is no Mercurial repository here "
1662 1663 "(.hg not found)"))
1663 1664
1664 1665 hexfunc = ui.debugflag and hex or short
1665 1666 default = not (num or id or branch or tags)
1666 1667 output = []
1667 1668
1668 1669 revs = []
1669 1670 if source:
1670 1671 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1671 1672 repo = hg.repository(ui, source)
1672 1673
1673 1674 if not repo.local():
1674 1675 if not rev and revs:
1675 1676 rev = revs[0]
1676 1677 if not rev:
1677 1678 rev = "tip"
1678 1679 if num or branch or tags:
1679 1680 raise util.Abort(
1680 1681 "can't query remote revision number, branch, or tags")
1681 1682 output = [hexfunc(repo.lookup(rev))]
1682 1683 elif not rev:
1683 1684 ctx = repo[None]
1684 1685 parents = ctx.parents()
1685 1686 changed = False
1686 1687 if default or id or num:
1687 1688 changed = ctx.files() + ctx.deleted()
1688 1689 if default or id:
1689 1690 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1690 1691 (changed) and "+" or "")]
1691 1692 if num:
1692 1693 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1693 1694 (changed) and "+" or ""))
1694 1695 else:
1695 1696 ctx = repo[rev]
1696 1697 if default or id:
1697 1698 output = [hexfunc(ctx.node())]
1698 1699 if num:
1699 1700 output.append(str(ctx.rev()))
1700 1701
1701 1702 if repo.local() and default and not ui.quiet:
1702 1703 b = encoding.tolocal(ctx.branch())
1703 1704 if b != 'default':
1704 1705 output.append("(%s)" % b)
1705 1706
1706 1707 # multiple tags for a single parent separated by '/'
1707 1708 t = "/".join(ctx.tags())
1708 1709 if t:
1709 1710 output.append(t)
1710 1711
1711 1712 if branch:
1712 1713 output.append(encoding.tolocal(ctx.branch()))
1713 1714
1714 1715 if tags:
1715 1716 output.extend(ctx.tags())
1716 1717
1717 1718 ui.write("%s\n" % ' '.join(output))
1718 1719
1719 1720 def import_(ui, repo, patch1, *patches, **opts):
1720 1721 """import an ordered set of patches
1721 1722
1722 1723 Import a list of patches and commit them individually (unless
1723 1724 --no-commit is specified).
1724 1725
1725 1726 If there are outstanding changes in the working directory, import
1726 1727 will abort unless given the -f/--force flag.
1727 1728
1728 1729 You can import a patch straight from a mail message. Even patches
1729 1730 as attachments work (to use the body part, it must have type
1730 1731 text/plain or text/x-patch). The From and Subject headers of the email
1731 1732 message are used as the default committer and commit message. All
1732 1733 text/plain body parts before the first diff are added to the commit
1733 1734 message.
1734 1735
1735 1736 If the imported patch was generated by hg export, user and
1736 1737 description from patch override values from message headers and
1737 1738 body. Values given on command line with -m/--message and -u/--user
1738 1739 override these.
1739 1740
1740 1741 If --exact is specified, import will set the working directory to
1741 1742 the parent of each patch before applying it, and will abort if the
1742 1743 resulting changeset has a different ID than the one recorded in
1743 1744 the patch. This may happen due to character set problems or other
1744 1745 deficiencies in the text patch format.
1745 1746
1746 1747 With -s/--similarity, hg will attempt to discover renames and
1747 1748 copies in the patch in the same way as 'addremove'.
1748 1749
1749 1750 To read a patch from standard input, use "-" as the patch name. If
1750 1751 a URL is specified, the patch will be downloaded from it.
1751 1752 See 'hg help dates' for a list of formats valid for -d/--date.
1752 1753 """
1753 1754 patches = (patch1,) + patches
1754 1755
1755 1756 date = opts.get('date')
1756 1757 if date:
1757 1758 opts['date'] = util.parsedate(date)
1758 1759
1759 1760 try:
1760 1761 sim = float(opts.get('similarity') or 0)
1761 1762 except ValueError:
1762 1763 raise util.Abort(_('similarity must be a number'))
1763 1764 if sim < 0 or sim > 100:
1764 1765 raise util.Abort(_('similarity must be between 0 and 100'))
1765 1766
1766 1767 if opts.get('exact') or not opts.get('force'):
1767 1768 cmdutil.bail_if_changed(repo)
1768 1769
1769 1770 d = opts["base"]
1770 1771 strip = opts["strip"]
1771 1772 wlock = lock = None
1772 1773 try:
1773 1774 wlock = repo.wlock()
1774 1775 lock = repo.lock()
1775 1776 for p in patches:
1776 1777 pf = os.path.join(d, p)
1777 1778
1778 1779 if pf == '-':
1779 1780 ui.status(_("applying patch from stdin\n"))
1780 1781 pf = sys.stdin
1781 1782 else:
1782 1783 ui.status(_("applying %s\n") % p)
1783 1784 pf = url.open(ui, pf)
1784 1785 data = patch.extract(ui, pf)
1785 1786 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1786 1787
1787 1788 if tmpname is None:
1788 1789 raise util.Abort(_('no diffs found'))
1789 1790
1790 1791 try:
1791 1792 cmdline_message = cmdutil.logmessage(opts)
1792 1793 if cmdline_message:
1793 1794 # pick up the cmdline msg
1794 1795 message = cmdline_message
1795 1796 elif message:
1796 1797 # pick up the patch msg
1797 1798 message = message.strip()
1798 1799 else:
1799 1800 # launch the editor
1800 1801 message = None
1801 1802 ui.debug('message:\n%s\n' % message)
1802 1803
1803 1804 wp = repo.parents()
1804 1805 if opts.get('exact'):
1805 1806 if not nodeid or not p1:
1806 1807 raise util.Abort(_('not a Mercurial patch'))
1807 1808 p1 = repo.lookup(p1)
1808 1809 p2 = repo.lookup(p2 or hex(nullid))
1809 1810
1810 1811 if p1 != wp[0].node():
1811 1812 hg.clean(repo, p1)
1812 1813 repo.dirstate.setparents(p1, p2)
1813 1814 elif p2:
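# the patch records a second parent; if the first one matches the working
# directory parent, record both so that the commit becomes a merge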
1814 1815 try:
1815 1816 p1 = repo.lookup(p1)
1816 1817 p2 = repo.lookup(p2)
1817 1818 if p1 == wp[0].node():
1818 1819 repo.dirstate.setparents(p1, p2)
1819 1820 except error.RepoError:
1820 1821 pass
1821 1822 if opts.get('exact') or opts.get('import_branch'):
1822 1823 repo.dirstate.setbranch(branch or 'default')
1823 1824
1824 1825 files = {}
1825 1826 try:
1826 1827 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1827 1828 files=files, eolmode=None)
1828 1829 finally:
1829 1830 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1830 1831 if not opts.get('no_commit'):
1831 1832 m = cmdutil.matchfiles(repo, files or [])
1832 1833 n = repo.commit(message, opts.get('user') or user,
1833 1834 opts.get('date') or date, match=m,
1834 1835 editor=cmdutil.commiteditor)
1835 1836 if opts.get('exact'):
1836 1837 if hex(n) != nodeid:
1837 1838 repo.rollback()
1838 1839 raise util.Abort(_('patch is damaged'
1839 1840 ' or loses information'))
1840 1841 # Force a dirstate write so that the next transaction
1841 1842 # backs up an up-to-date file.
1842 1843 repo.dirstate.write()
1843 1844 finally:
1844 1845 os.unlink(tmpname)
1845 1846 finally:
1846 1847 release(lock, wlock)
1847 1848
1848 1849 def incoming(ui, repo, source="default", **opts):
1849 1850 """show new changesets found in source
1850 1851
1851 1852 Show new changesets found in the specified path/URL or the default
1852 1853 pull location. These are the changesets that would have been pulled
1853 1854 if a pull had been requested at the time you issued this command.
1854 1855
1855 1856 For a remote repository, using --bundle avoids downloading the
1856 1857 changesets twice if this command is followed by a pull.
1857 1858
1858 1859 See pull for valid source format details.
1859 1860 """
1860 1861 limit = cmdutil.loglimit(opts)
1861 1862 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1862 1863 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1863 1864 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1864 1865 if revs:
1865 1866 revs = [other.lookup(rev) for rev in revs]
1866 1867 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1867 1868 force=opts["force"])
1868 1869 if not incoming:
1869 1870 try:
1870 1871 os.unlink(opts["bundle"])
1871 1872 except:
1872 1873 pass
1873 1874 ui.status(_("no changes found\n"))
1874 1875 return 1
1875 1876
1876 1877 cleanup = None
1877 1878 try:
1878 1879 fname = opts["bundle"]
1879 1880 if fname or not other.local():
1880 1881 # create a bundle (uncompressed if other repo is not local)
1881 1882
1882 1883 if revs is None and other.capable('changegroupsubset'):
1883 1884 revs = rheads
1884 1885
1885 1886 if revs is None:
1886 1887 cg = other.changegroup(incoming, "incoming")
1887 1888 else:
1888 1889 cg = other.changegroupsubset(incoming, revs, 'incoming')
1889 1890 bundletype = other.local() and "HG10BZ" or "HG10UN"
1890 1891 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1891 1892 # keep written bundle?
1892 1893 if opts["bundle"]:
1893 1894 cleanup = None
1894 1895 if not other.local():
1895 1896 # use the created uncompressed bundlerepo
1896 1897 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1897 1898
1898 1899 o = other.changelog.nodesbetween(incoming, revs)[0]
1899 1900 if opts.get('newest_first'):
1900 1901 o.reverse()
1901 1902 displayer = cmdutil.show_changeset(ui, other, opts)
1902 1903 count = 0
1903 1904 for n in o:
1904 1905 if count >= limit:
1905 1906 break
1906 1907 parents = [p for p in other.changelog.parents(n) if p != nullid]
1907 1908 if opts.get('no_merges') and len(parents) == 2:
1908 1909 continue
1909 1910 count += 1
1910 1911 displayer.show(other[n])
1911 1912 finally:
1912 1913 if hasattr(other, 'close'):
1913 1914 other.close()
1914 1915 if cleanup:
1915 1916 os.unlink(cleanup)
1916 1917
1917 1918 def init(ui, dest=".", **opts):
1918 1919 """create a new repository in the given directory
1919 1920
1920 1921 Initialize a new repository in the given directory. If the given
1921 1922 directory does not exist, it will be created.
1922 1923
1923 1924 If no directory is given, the current directory is used.
1924 1925
1925 1926 It is possible to specify an ssh:// URL as the destination.
1926 1927 See 'hg help urls' for more information.
1927 1928 """
1928 1929 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1929 1930
1930 1931 def locate(ui, repo, *pats, **opts):
1931 1932 """locate files matching specific patterns
1932 1933
1933 1934 Print files under Mercurial control in the working directory whose
1934 1935 names match the given patterns.
1935 1936
1936 1937 By default, this command searches all directories in the working
1937 1938 directory. To search just the current directory and its
1938 1939 subdirectories, use "--include .".
1939 1940
1940 1941 If no patterns are given to match, this command prints the names
1941 1942 of all files under Mercurial control in the working directory.
1942 1943
1943 1944 If you want to feed the output of this command into the "xargs"
1944 1945 command, use the -0 option to both this command and "xargs". This
1945 1946 will avoid the problem of "xargs" treating single filenames that
1946 1947 contain whitespace as multiple filenames.
1947 1948 """
1948 1949 end = opts.get('print0') and '\0' or '\n'
1949 1950 rev = opts.get('rev') or None
1950 1951
1951 1952 ret = 1
1952 1953 m = cmdutil.match(repo, pats, opts, default='relglob')
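# suppress warnings for explicitly listed files that do not exist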
1953 1954 m.bad = lambda x,y: False
1954 1955 for abs in repo[rev].walk(m):
1955 1956 if not rev and abs not in repo.dirstate:
1956 1957 continue
1957 1958 if opts.get('fullpath'):
1958 1959 ui.write(repo.wjoin(abs), end)
1959 1960 else:
1960 1961 ui.write(((pats and m.rel(abs)) or abs), end)
1961 1962 ret = 0
1962 1963
1963 1964 return ret
1964 1965
1965 1966 def log(ui, repo, *pats, **opts):
1966 1967 """show revision history of entire repository or files
1967 1968
1968 1969 Print the revision history of the specified files or the entire
1969 1970 project.
1970 1971
1971 1972 File history is shown without following rename or copy history of
1972 1973 files. Use -f/--follow with a filename to follow history across
1973 1974 renames and copies. --follow without a filename will only show
1974 1975 ancestors or descendants of the starting revision. --follow-first
1975 1976 only follows the first parent of merge revisions.
1976 1977
1977 1978 If no revision range is specified, the default is tip:0 unless
1978 1979 --follow is set, in which case the working directory parent is
1979 1980 used as the starting revision.
1980 1981
1981 1982 See 'hg help dates' for a list of formats valid for -d/--date.
1982 1983
1983 1984 By default this command prints revision number and changeset id,
1984 1985 tags, non-trivial parents, user, date and time, and a summary for
1985 1986 each commit. When the -v/--verbose switch is used, the list of
1986 1987 changed files and full commit message are shown.
1987 1988
1988 1989 NOTE: log -p/--patch may generate unexpected diff output for merge
1989 1990 changesets, as it will only compare the merge changeset against
1990 1991 its first parent. Also, only files different from BOTH parents
1991 1992 will appear in files:.
1992 1993 """
1993 1994
1994 1995 matchfn = cmdutil.match(repo, pats, opts)
1995 1996 limit = cmdutil.loglimit(opts)
1996 1997 count = 0
1997 1998
1998 1999 if opts.get('copies') and opts.get('rev'):
1999 2000 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2000 2001 else:
2001 2002 endrev = len(repo)
2002 2003 rcache = {}
2003 2004 ncache = {}
2004 2005 def getrenamed(fn, rev):
2005 2006 '''looks up all renames for a file (up to endrev) the first
2006 2007 time the file is given. It indexes on the changerev and only
2007 2008 parses the manifest if linkrev != changerev.
2008 2009 Returns rename info for fn at changerev rev.'''
2009 2010 if fn not in rcache:
2010 2011 rcache[fn] = {}
2011 2012 ncache[fn] = {}
2012 2013 fl = repo.file(fn)
2013 2014 for i in fl:
2014 2015 node = fl.node(i)
2015 2016 lr = fl.linkrev(i)
2016 2017 renamed = fl.renamed(node)
2017 2018 rcache[fn][lr] = renamed
2018 2019 if renamed:
2019 2020 ncache[fn][node] = renamed
2020 2021 if lr >= endrev:
2021 2022 break
2022 2023 if rev in rcache[fn]:
2023 2024 return rcache[fn][rev]
2024 2025
2025 2026 # If linkrev != rev (i.e. rev not found in rcache) fallback to
2026 2027 # filectx logic.
2027 2028
2028 2029 try:
2029 2030 return repo[rev][fn].renamed()
2030 2031 except error.LookupError:
2031 2032 pass
2032 2033 return None
2033 2034
2034 2035 df = False
2035 2036 if opts["date"]:
2036 2037 df = util.matchdate(opts["date"])
2037 2038
2038 2039 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2039 2040 def prep(ctx, fns):
2040 2041 rev = ctx.rev()
2041 2042 parents = [p for p in repo.changelog.parentrevs(rev)
2042 2043 if p != nullrev]
2043 2044 if opts.get('no_merges') and len(parents) == 2:
2044 2045 return
2045 2046 if opts.get('only_merges') and len(parents) != 2:
2046 2047 return
2047 2048 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2048 2049 return
2049 2050 if df and not df(ctx.date()[0]):
2050 2051 return
2051 2052 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2052 2053 return
2053 2054 if opts.get('keyword'):
2054 2055 for k in [kw.lower() for kw in opts['keyword']]:
2055 2056 if (k in ctx.user().lower() or
2056 2057 k in ctx.description().lower() or
2057 2058 k in " ".join(ctx.files()).lower()):
2058 2059 break
2059 2060 else:
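# for/else: no keyword matched this changeset, so skip it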
2060 2061 return
2061 2062
2062 2063 copies = []
2063 2064 if opts.get('copies') and rev:
2064 2065 for fn in ctx.files():
2065 2066 rename = getrenamed(fn, rev)
2066 2067 if rename:
2067 2068 copies.append((fn, rename[0]))
2068 2069
2069 2070 displayer.show(ctx, copies=copies)
2070 2071
2071 2072 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2072 2073 if count != limit:
2073 2074 if displayer.flush(ctx.rev()):
2074 2075 count += 1
2075 2076
2076 2077 def manifest(ui, repo, node=None, rev=None):
2077 2078 """output the current or given revision of the project manifest
2078 2079
2079 2080 Print a list of version controlled files for the given revision.
2080 2081 If no revision is given, the first parent of the working directory
2081 2082 is used, or the null revision if no revision is checked out.
2082 2083
2083 2084 With -v, print file permissions, symlink and executable bits.
2084 2085 With --debug, print file revision hashes.
2085 2086 """
2086 2087
2087 2088 if rev and node:
2088 2089 raise util.Abort(_("please specify just one revision"))
2089 2090
2090 2091 if not node:
2091 2092 node = rev
2092 2093
2093 2094 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2094 2095 ctx = repo[node]
2095 2096 for f in ctx:
2096 2097 if ui.debugflag:
2097 2098 ui.write("%40s " % hex(ctx.manifest()[f]))
2098 2099 if ui.verbose:
2099 2100 ui.write(decor[ctx.flags(f)])
2100 2101 ui.write("%s\n" % f)
2101 2102
2102 2103 def merge(ui, repo, node=None, **opts):
2103 2104 """merge working directory with another revision
2104 2105
2105 2106 The current working directory is updated with all changes made in
2106 2107 the requested revision since the last common predecessor revision.
2107 2108
2108 2109 Files that changed between either parent are marked as changed for
2109 2110 the next commit and a commit must be performed before any further
2110 2111 updates to the repository are allowed. The next commit will have
2111 2112 two parents.
2112 2113
2113 2114 If no revision is specified, the working directory's parent is a
2114 2115 head revision, and the current branch contains exactly one other
2115 2116 head, that other head is merged by default. Otherwise, an
2116 2117 explicit revision with which to merge must be provided.
2117 2118 """
2118 2119
2119 2120 if opts.get('rev') and node:
2120 2121 raise util.Abort(_("please specify just one revision"))
2121 2122 if not node:
2122 2123 node = opts.get('rev')
2123 2124
2124 2125 if not node:
2125 2126 branch = repo.changectx(None).branch()
2126 2127 bheads = repo.branchheads(branch)
2127 2128 if len(bheads) > 2:
2128 2129 raise util.Abort(_("branch '%s' has %d heads - "
2129 2130 "please merge with an explicit rev") %
2130 2131 (branch, len(bheads)))
2131 2132
2132 2133 parent = repo.dirstate.parents()[0]
2133 2134 if len(bheads) == 1:
2134 2135 if len(repo.heads()) > 1:
2135 2136 raise util.Abort(_("branch '%s' has one head - "
2136 2137 "please merge with an explicit rev") %
2137 2138 branch)
2138 2139 msg = _('there is nothing to merge')
2139 2140 if parent != repo.lookup(repo[None].branch()):
2140 2141 msg = _('%s - use "hg update" instead') % msg
2141 2142 raise util.Abort(msg)
2142 2143
2143 2144 if parent not in bheads:
2144 2145 raise util.Abort(_('working dir not at a head rev - '
2145 2146 'use "hg update" or merge with an explicit rev'))
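# exactly two heads remain and the working directory is at one of them:
# merge with the other one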
2146 2147 node = parent == bheads[0] and bheads[-1] or bheads[0]
2147 2148
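# --preview: list the changesets the merge would bring in (everything between
# the common ancestor and the other head, excluding the ancestor itself)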
2148 2149 if opts.get('preview'):
2149 2150 p1 = repo['.']
2150 2151 p2 = repo[node]
2151 2152 common = p1.ancestor(p2)
2152 2153 roots, heads = [common.node()], [p2.node()]
2153 2154 displayer = cmdutil.show_changeset(ui, repo, opts)
2154 2155 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2155 2156 if node not in roots:
2156 2157 displayer.show(repo[node])
2157 2158 return 0
2158 2159
2159 2160 return hg.merge(repo, node, force=opts.get('force'))
2160 2161
2161 2162 def outgoing(ui, repo, dest=None, **opts):
2162 2163 """show changesets not found in destination
2163 2164
2164 2165 Show changesets not found in the specified destination repository
2165 2166 or the default push location. These are the changesets that would
2166 2167 be pushed if a push was requested.
2167 2168
2168 2169 See pull for valid destination format details.
2169 2170 """
2170 2171 limit = cmdutil.loglimit(opts)
2171 2172 dest, revs, checkout = hg.parseurl(
2172 2173 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2173 2174 if revs:
2174 2175 revs = [repo.lookup(rev) for rev in revs]
2175 2176
2176 2177 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2177 2178 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2178 2179 o = repo.findoutgoing(other, force=opts.get('force'))
2179 2180 if not o:
2180 2181 ui.status(_("no changes found\n"))
2181 2182 return 1
2182 2183 o = repo.changelog.nodesbetween(o, revs)[0]
2183 2184 if opts.get('newest_first'):
2184 2185 o.reverse()
2185 2186 displayer = cmdutil.show_changeset(ui, repo, opts)
2186 2187 count = 0
2187 2188 for n in o:
2188 2189 if count >= limit:
2189 2190 break
2190 2191 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2191 2192 if opts.get('no_merges') and len(parents) == 2:
2192 2193 continue
2193 2194 count += 1
2194 2195 displayer.show(repo[n])
2195 2196
2196 2197 def parents(ui, repo, file_=None, **opts):
2197 2198 """show the parents of the working directory or revision
2198 2199
2199 2200 Print the working directory's parent revisions. If a revision is
2200 2201 given via -r/--rev, the parent of that revision will be printed.
2201 2202 If a file argument is given, the revision in which the file was
2202 2203 last changed (before the working directory revision or the
2203 2204 argument to --rev if given) is printed.
2204 2205 """
2205 2206 rev = opts.get('rev')
2206 2207 if rev:
2207 2208 ctx = repo[rev]
2208 2209 else:
2209 2210 ctx = repo[None]
2210 2211
2211 2212 if file_:
2212 2213 m = cmdutil.match(repo, (file_,), opts)
2213 2214 if m.anypats() or len(m.files()) != 1:
2214 2215 raise util.Abort(_('can only specify an explicit filename'))
2215 2216 file_ = m.files()[0]
2216 2217 filenodes = []
2217 2218 for cp in ctx.parents():
2218 2219 if not cp:
2219 2220 continue
2220 2221 try:
2221 2222 filenodes.append(cp.filenode(file_))
2222 2223 except error.LookupError:
2223 2224 pass
2224 2225 if not filenodes:
2225 2226 raise util.Abort(_("'%s' not found in manifest!") % file_)
2226 2227 fl = repo.file(file_)
2227 2228 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2228 2229 else:
2229 2230 p = [cp.node() for cp in ctx.parents()]
2230 2231
2231 2232 displayer = cmdutil.show_changeset(ui, repo, opts)
2232 2233 for n in p:
2233 2234 if n != nullid:
2234 2235 displayer.show(repo[n])
2235 2236
2236 2237 def paths(ui, repo, search=None):
2237 2238 """show aliases for remote repositories
2238 2239
2239 2240 Show definition of symbolic path name NAME. If no name is given,
2240 2241 show definition of all available names.
2241 2242
2242 2243 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2243 2244 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2244 2245
2245 2246 See 'hg help urls' for more information.
2246 2247 """
2247 2248 if search:
2248 2249 for name, path in ui.configitems("paths"):
2249 2250 if name == search:
2250 2251 ui.write("%s\n" % url.hidepassword(path))
2251 2252 return
2252 2253 ui.warn(_("not found!\n"))
2253 2254 return 1
2254 2255 else:
2255 2256 for name, path in ui.configitems("paths"):
2256 2257 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2257 2258
2258 2259 def postincoming(ui, repo, modheads, optupdate, checkout):
2259 2260 if modheads == 0:
2260 2261 return
2261 2262 if optupdate:
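# only update automatically when it is unambiguous: the pull did not add
# extra heads, the current branch has a single head, or an explicit
# checkout revision was given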
2262 2263 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2263 2264 return hg.update(repo, checkout)
2264 2265 else:
2265 2266 ui.status(_("not updating, since new heads added\n"))
2266 2267 if modheads > 1:
2267 2268 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2268 2269 else:
2269 2270 ui.status(_("(run 'hg update' to get a working copy)\n"))
2270 2271
2271 2272 def pull(ui, repo, source="default", **opts):
2272 2273 """pull changes from the specified source
2273 2274
2274 2275 Pull changes from a remote repository to a local one.
2275 2276
2276 2277 This finds all changes from the repository at the specified path
2277 2278 or URL and adds them to a local repository (the current one unless
2278 2279 -R is specified). By default, this does not update the copy of the
2279 2280 project in the working directory.
2280 2281
2281 2282 Use hg incoming if you want to see what would have been added by a
2282 2283 pull at the time you issued this command. If you then decide to
2283 2284 add those changes to the repository, you should use pull -r X
2284 2285 where X is the last changeset listed by hg incoming.
2285 2286
2286 2287 If SOURCE is omitted, the 'default' path will be used.
2287 2288 See 'hg help urls' for more information.
2288 2289 """
2289 2290 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2290 2291 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2291 2292 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2292 2293 if revs:
2293 2294 try:
2294 2295 revs = [other.lookup(rev) for rev in revs]
2295 2296 except error.CapabilityError:
2296 2297 err = _("Other repository doesn't support revision lookup, "
2297 2298 "so a rev cannot be specified.")
2298 2299 raise util.Abort(err)
2299 2300
2300 2301 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2301 2302 if checkout:
2302 2303 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2303 2304 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2304 2305
2305 2306 def push(ui, repo, dest=None, **opts):
2306 2307 """push changes to the specified destination
2307 2308
2308 2309 Push changes from the local repository to the given destination.
2309 2310
2310 2311 This is the symmetrical operation for pull. It moves changes from
2311 2312 the current repository to a different one. If the destination is
2312 2313 local this is identical to a pull in that directory from the
2313 2314 current one.
2314 2315
2315 2316 By default, push will refuse to run if it detects the result would
2316 2317 increase the number of remote heads. This generally indicates the
2317 2318 user forgot to pull and merge before pushing.
2318 2319
2319 2320 If -r/--rev is used, the named revision and all its ancestors will
2320 2321 be pushed to the remote repository.
2321 2322
2322 2323 Please see 'hg help urls' for important details about ssh://
2323 2324 URLs. If DESTINATION is omitted, a default path will be used.
2324 2325 """
2325 2326 dest, revs, checkout = hg.parseurl(
2326 2327 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2327 2328 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2328 2329 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2329 2330 if revs:
2330 2331 revs = [repo.lookup(rev) for rev in revs]
2331 2332
2332 2333 # push subrepos depth-first for coherent ordering
2333 2334 c = repo['']
2334 2335 subs = c.substate # only repos that are committed
2335 2336 for s in sorted(subs):
2336 2337 c.sub(s).push(opts.get('force'))
2337 2338
2338 2339 r = repo.push(other, opts.get('force'), revs=revs)
2339 2340 return r == 0
2340 2341
2341 2342 def recover(ui, repo):
2342 2343 """roll back an interrupted transaction
2343 2344
2344 2345 Recover from an interrupted commit or pull.
2345 2346
2346 2347 This command tries to fix the repository status after an
2347 2348 interrupted operation. It should only be necessary when Mercurial
2348 2349 suggests it.
2349 2350 """
2350 2351 if repo.recover():
2351 2352 return hg.verify(repo)
2352 2353 return 1
2353 2354
2354 2355 def remove(ui, repo, *pats, **opts):
2355 2356 """remove the specified files on the next commit
2356 2357
2357 2358 Schedule the indicated files for removal from the repository.
2358 2359
2359 2360 This only removes files from the current branch, not from the
2360 2361 entire project history. -A/--after can be used to remove only
2361 2362 files that have already been deleted, -f/--force can be used to
2362 2363 force deletion, and -Af can be used to remove files from the next
2363 2364 revision without deleting them from the working directory.
2364 2365
2365 2366 The following table details the behavior of remove for different
2366 2367 file states (columns) and option combinations (rows). The file
2367 2368 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2368 2369 reported by hg status). The actions are Warn, Remove (from branch)
2369 2370 and Delete (from disk)::
2370 2371
2371 2372 A C M !
2372 2373 none W RD W R
2373 2374 -f R RD RD R
2374 2375 -A W W W R
2375 2376 -Af R R R R
2376 2377
2377 2378 This command schedules the files to be removed at the next commit.
2378 2379 To undo a remove before that, see hg revert.
2379 2380 """
2380 2381
2381 2382 after, force = opts.get('after'), opts.get('force')
2382 2383 if not pats and not after:
2383 2384 raise util.Abort(_('no files specified'))
2384 2385
2385 2386 m = cmdutil.match(repo, pats, opts)
2386 2387 s = repo.status(match=m, clean=True)
2387 2388 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2388 2389
2389 2390 for f in m.files():
2390 2391 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2391 2392 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2392 2393
2393 2394 def warn(files, reason):
2394 2395 for f in files:
2395 2396 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2396 2397 % (m.rel(f), reason))
2397 2398
2398 2399 if force:
2399 2400 remove, forget = modified + deleted + clean, added
2400 2401 elif after:
2401 2402 remove, forget = deleted, []
2402 2403 warn(modified + added + clean, _('still exists'))
2403 2404 else:
2404 2405 remove, forget = deleted + clean, []
2405 2406 warn(modified, _('is modified'))
2406 2407 warn(added, _('has been marked for add'))
2407 2408
2408 2409 for f in sorted(remove + forget):
2409 2410 if ui.verbose or not m.exact(f):
2410 2411 ui.status(_('removing %s\n') % m.rel(f))
2411 2412
2412 2413 repo.forget(forget)
2413 2414 repo.remove(remove, unlink=not after)
2414 2415
2415 2416 def rename(ui, repo, *pats, **opts):
2416 2417 """rename files; equivalent of copy + remove
2417 2418
2418 2419 Mark dest as copies of sources; mark sources for deletion. If dest
2419 2420 is a directory, copies are put in that directory. If dest is a
2420 2421 file, there can only be one source.
2421 2422
2422 2423 By default, this command copies the contents of files as they
2423 2424 exist in the working directory. If invoked with -A/--after, the
2424 2425 operation is recorded, but no copying is performed.
2425 2426
2426 2427 This command takes effect at the next commit. To undo a rename
2427 2428 before that, see hg revert.
2428 2429 """
2429 2430 wlock = repo.wlock(False)
2430 2431 try:
2431 2432 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2432 2433 finally:
2433 2434 wlock.release()
2434 2435
2435 2436 def resolve(ui, repo, *pats, **opts):
2436 2437 """retry file merges from a merge or update
2437 2438
2438 2439 This command can cleanly retry unresolved file merges using file
2439 2440 revisions preserved from the last update or merge. To attempt to
2440 2441 resolve all unresolved files, use the -a/--all switch.
2441 2442
2442 2443 If a conflict is resolved manually, please note that the changes
2443 2444 will be overwritten if the merge is retried with resolve. The
2444 2445 -m/--mark switch should be used to mark the file as resolved.
2445 2446
2446 2447 This command also allows listing resolved files and manually
2447 2448 indicating whether or not files are resolved. All files must be
2448 2449 marked as resolved before a commit is permitted.
2449 2450
2450 2451 The codes used to show the status of files are::
2451 2452
2452 2453 U = unresolved
2453 2454 R = resolved
2454 2455 """
2455 2456
2456 2457 all, mark, unmark, show, nostatus = \
2457 2458 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2458 2459
2459 2460 if (show and (mark or unmark)) or (mark and unmark):
2460 2461 raise util.Abort(_("too many options specified"))
2461 2462 if pats and all:
2462 2463 raise util.Abort(_("can't specify --all and patterns"))
2463 2464 if not (all or pats or show or mark or unmark):
2464 2465 raise util.Abort(_('no files or directories specified; '
2465 2466 'use --all to remerge all files'))
2466 2467
2467 2468 ms = merge_.mergestate(repo)
2468 2469 m = cmdutil.match(repo, pats, opts)
2469 2470
2470 2471 for f in ms:
2471 2472 if m(f):
2472 2473 if show:
2473 2474 if nostatus:
2474 2475 ui.write("%s\n" % f)
2475 2476 else:
2476 2477 ui.write("%s %s\n" % (ms[f].upper(), f))
2477 2478 elif mark:
2478 2479 ms.mark(f, "r")
2479 2480 elif unmark:
2480 2481 ms.mark(f, "u")
2481 2482 else:
2482 2483 wctx = repo[None]
2483 2484 mctx = wctx.parents()[-1]
2484 2485
2485 2486 # backup pre-resolve (merge uses .orig for its own purposes)
2486 2487 a = repo.wjoin(f)
2487 2488 util.copyfile(a, a + ".resolve")
2488 2489
2489 2490 # resolve file
2490 2491 ms.resolve(f, wctx, mctx)
2491 2492
2492 2493 # replace filemerge's .orig file with our resolve file
2493 2494 util.rename(a + ".resolve", a + ".orig")
2494 2495
2495 2496 def revert(ui, repo, *pats, **opts):
2496 2497 """restore individual files or directories to an earlier state
2497 2498
2498 2499 (Use update -r to check out earlier revisions, revert does not
2499 2500 change the working directory parents.)
2500 2501
2501 2502 With no revision specified, revert the named files or directories
2502 2503 to the contents they had in the parent of the working directory.
2503 2504 This restores the contents of the affected files to an unmodified
2504 2505 state and unschedules adds, removes, copies, and renames. If the
2505 2506 working directory has two parents, you must explicitly specify the
2506 2507 revision to revert to.
2507 2508
2508 2509 Using the -r/--rev option, revert the given files or directories
2509 2510 to their contents as of a specific revision. This can be helpful
2510 2511 to "roll back" some or all of an earlier change. See 'hg help
2511 2512 dates' for a list of formats valid for -d/--date.
2512 2513
2513 2514 Revert modifies the working directory. It does not commit any
2514 2515 changes, or change the parent of the working directory. If you
2515 2516 revert to a revision other than the parent of the working
2516 2517 directory, the reverted files will thus appear modified
2517 2518 afterwards.
2518 2519
2519 2520 If a file has been deleted, it is restored. If the executable mode
2520 2521 of a file was changed, it is reset.
2521 2522
2522 2523 If names are given, all files matching the names are reverted.
2523 2524 If no arguments are given, no files are reverted.
2524 2525
2525 2526 Modified files are saved with a .orig suffix before reverting.
2526 2527 To disable these backups, use --no-backup.
2527 2528 """
2528 2529
2529 2530 if opts["date"]:
2530 2531 if opts["rev"]:
2531 2532 raise util.Abort(_("you can't specify a revision and a date"))
2532 2533 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2533 2534
2534 2535 if not pats and not opts.get('all'):
2535 2536 raise util.Abort(_('no files or directories specified; '
2536 2537 'use --all to revert the whole repo'))
2537 2538
2538 2539 parent, p2 = repo.dirstate.parents()
2539 2540 if not opts.get('rev') and p2 != nullid:
2540 2541 raise util.Abort(_('uncommitted merge - please provide a '
2541 2542 'specific revision'))
2542 2543 ctx = repo[opts.get('rev')]
2543 2544 node = ctx.node()
2544 2545 mf = ctx.manifest()
2545 2546 if node == parent:
2546 2547 pmf = mf
2547 2548 else:
2548 2549 pmf = None
2549 2550
2550 2551 # need all matching names in dirstate and manifest of target rev,
2551 2552 # so have to walk both. do not print errors if files exist in one
2552 2553 # but not other.
2553 2554
2554 2555 names = {}
2555 2556
2556 2557 wlock = repo.wlock()
2557 2558 try:
2558 2559 # walk dirstate.
2559 2560
2560 2561 m = cmdutil.match(repo, pats, opts)
2561 2562 m.bad = lambda x,y: False
2562 2563 for abs in repo.walk(m):
2563 2564 names[abs] = m.rel(abs), m.exact(abs)
2564 2565
2565 2566 # walk target manifest.
2566 2567
2567 2568 def badfn(path, msg):
2568 2569 if path in names:
2569 2570 return
2570 2571 path_ = path + '/'
2571 2572 for f in names:
2572 2573 if f.startswith(path_):
2573 2574 return
2574 2575 ui.warn("%s: %s\n" % (m.rel(path), msg))
2575 2576
2576 2577 m = cmdutil.match(repo, pats, opts)
2577 2578 m.bad = badfn
2578 2579 for abs in repo[node].walk(m):
2579 2580 if abs not in names:
2580 2581 names[abs] = m.rel(abs), m.exact(abs)
2581 2582
2582 2583 m = cmdutil.matchfiles(repo, names)
2583 2584 changes = repo.status(match=m)[:4]
2584 2585 modified, added, removed, deleted = map(set, changes)
2585 2586
2586 2587 # if f is a rename, also revert the source
2587 2588 cwd = repo.getcwd()
2588 2589 for f in added:
2589 2590 src = repo.dirstate.copied(f)
2590 2591 if src and src not in names and repo.dirstate[src] == 'r':
2591 2592 removed.add(src)
2592 2593 names[src] = (repo.pathto(src, cwd), True)
2593 2594
2594 2595 def removeforget(abs):
2595 2596 if repo.dirstate[abs] == 'a':
2596 2597 return _('forgetting %s\n')
2597 2598 return _('removing %s\n')
2598 2599
2599 2600 revert = ([], _('reverting %s\n'))
2600 2601 add = ([], _('adding %s\n'))
2601 2602 remove = ([], removeforget)
2602 2603 undelete = ([], _('undeleting %s\n'))
2603 2604
2604 2605 disptable = (
2605 2606 # dispatch table:
2606 2607 # file state
2607 2608 # action if in target manifest
2608 2609 # action if not in target manifest
2609 2610 # make backup if in target manifest
2610 2611 # make backup if not in target manifest
2611 2612 (modified, revert, remove, True, True),
2612 2613 (added, revert, remove, True, False),
2613 2614 (removed, undelete, None, False, False),
2614 2615 (deleted, revert, remove, False, False),
2615 2616 )
2616 2617
2617 2618 for abs, (rel, exact) in sorted(names.items()):
2618 2619 mfentry = mf.get(abs)
2619 2620 target = repo.wjoin(abs)
2620 2621 def handle(xlist, dobackup):
2621 2622 xlist[0].append(abs)
2622 2623 if dobackup and not opts.get('no_backup') and util.lexists(target):
2623 2624 bakname = "%s.orig" % rel
2624 2625 ui.note(_('saving current version of %s as %s\n') %
2625 2626 (rel, bakname))
2626 2627 if not opts.get('dry_run'):
2627 2628 util.copyfile(target, bakname)
2628 2629 if ui.verbose or not exact:
2629 2630 msg = xlist[1]
2630 2631 if not isinstance(msg, basestring):
2631 2632 msg = msg(abs)
2632 2633 ui.status(msg % rel)
2633 2634 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2634 2635 if abs not in table: continue
2635 2636 # file has changed in dirstate
2636 2637 if mfentry:
2637 2638 handle(hitlist, backuphit)
2638 2639 elif misslist is not None:
2639 2640 handle(misslist, backupmiss)
2640 2641 break
2641 2642 else:
2642 2643 if abs not in repo.dirstate:
2643 2644 if mfentry:
2644 2645 handle(add, True)
2645 2646 elif exact:
2646 2647 ui.warn(_('file not managed: %s\n') % rel)
2647 2648 continue
2648 2649 # file has not changed in dirstate
2649 2650 if node == parent:
2650 2651 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2651 2652 continue
2652 2653 if pmf is None:
2653 2654 # only need parent manifest in this unlikely case,
2654 2655 # so do not read by default
2655 2656 pmf = repo[parent].manifest()
2656 2657 if abs in pmf:
2657 2658 if mfentry:
2658 2659 # if version of file is same in parent and target
2659 2660 # manifests, do nothing
2660 2661 if (pmf[abs] != mfentry or
2661 2662 pmf.flags(abs) != mf.flags(abs)):
2662 2663 handle(revert, False)
2663 2664 else:
2664 2665 handle(remove, False)
2665 2666
2666 2667 if not opts.get('dry_run'):
2667 2668 def checkout(f):
2668 2669 fc = ctx[f]
2669 2670 repo.wwrite(f, fc.data(), fc.flags())
2670 2671
2671 2672 audit_path = util.path_auditor(repo.root)
2672 2673 for f in remove[0]:
2673 2674 if repo.dirstate[f] == 'a':
2674 2675 repo.dirstate.forget(f)
2675 2676 continue
2676 2677 audit_path(f)
2677 2678 try:
2678 2679 util.unlink(repo.wjoin(f))
2679 2680 except OSError:
2680 2681 pass
2681 2682 repo.dirstate.remove(f)
2682 2683
2683 2684 normal = None
2684 2685 if node == parent:
2685 2686 # We're reverting to our parent. If possible, we'd like status
2686 2687 # to report the file as clean. We have to use normallookup for
2687 2688 # merges to avoid losing information about merged/dirty files.
2688 2689 if p2 != nullid:
2689 2690 normal = repo.dirstate.normallookup
2690 2691 else:
2691 2692 normal = repo.dirstate.normal
2692 2693 for f in revert[0]:
2693 2694 checkout(f)
2694 2695 if normal:
2695 2696 normal(f)
2696 2697
2697 2698 for f in add[0]:
2698 2699 checkout(f)
2699 2700 repo.dirstate.add(f)
2700 2701
2701 2702 normal = repo.dirstate.normallookup
2702 2703 if node == parent and p2 == nullid:
2703 2704 normal = repo.dirstate.normal
2704 2705 for f in undelete[0]:
2705 2706 checkout(f)
2706 2707 normal(f)
2707 2708
2708 2709 finally:
2709 2710 wlock.release()
2710 2711
2711 2712 def rollback(ui, repo):
2712 2713 """roll back the last transaction
2713 2714
2714 2715 This command should be used with care. There is only one level of
2715 2716 rollback, and there is no way to undo a rollback. It will also
2716 2717 restore the dirstate at the time of the last transaction, losing
2717 2718 any dirstate changes since that time. This command does not alter
2718 2719 the working directory.
2719 2720
2720 2721 Transactions are used to encapsulate the effects of all commands
2721 2722 that create new changesets or propagate existing changesets into a
2722 2723 repository. For example, the following commands are transactional,
2723 2724 and their effects can be rolled back::
2724 2725
2725 2726 commit
2726 2727 import
2727 2728 pull
2728 2729 push (with this repository as destination)
2729 2730 unbundle
2730 2731
2731 2732 This command is not intended for use on public repositories. Once
2732 2733 changes are visible for pull by other users, rolling a transaction
2733 2734 back locally is ineffective (someone else may already have pulled
2734 2735 the changes). Furthermore, a race is possible with readers of the
2735 2736 repository; for example an in-progress pull from the repository
2736 2737 may fail if a rollback is performed.
2737 2738 """
2738 2739 repo.rollback()
2739 2740
2740 2741 def root(ui, repo):
2741 2742 """print the root (top) of the current working directory
2742 2743
2743 2744 Print the root directory of the current repository.
2744 2745 """
2745 2746 ui.write(repo.root + "\n")
2746 2747
2747 2748 def serve(ui, repo, **opts):
2748 2749 """export the repository via HTTP
2749 2750
2750 2751 Start a local HTTP repository browser and pull server.
2751 2752
2752 2753 By default, the server logs accesses to stdout and errors to
2753 2754 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2754 2755 files.
2755 2756 """
2756 2757
2757 2758 if opts["stdio"]:
2758 2759 if repo is None:
2759 2760 raise error.RepoError(_("There is no Mercurial repository here"
2760 2761 " (.hg not found)"))
2761 2762 s = sshserver.sshserver(ui, repo)
2762 2763 s.serve_forever()
2763 2764
2764 2765 baseui = repo and repo.baseui or ui
2765 2766 optlist = ("name templates style address port prefix ipv6"
2766 2767 " accesslog errorlog webdir_conf certificate encoding")
2767 2768 for o in optlist.split():
2768 2769 if opts.get(o, None):
2769 2770 baseui.setconfig("web", o, str(opts[o]))
2770 2771 if (repo is not None) and (repo.ui != baseui):
2771 2772 repo.ui.setconfig("web", o, str(opts[o]))
2772 2773
2773 2774 if repo is None and not ui.config("web", "webdir_conf"):
2774 2775 raise error.RepoError(_("There is no Mercurial repository here"
2775 2776 " (.hg not found)"))
2776 2777
2777 2778 class service(object):
2778 2779 def init(self):
2779 2780 util.set_signal_handler()
2780 2781 self.httpd = server.create_server(baseui, repo)
2781 2782
2782 2783 if not ui.verbose: return
2783 2784
2784 2785 if self.httpd.prefix:
2785 2786 prefix = self.httpd.prefix.strip('/') + '/'
2786 2787 else:
2787 2788 prefix = ''
2788 2789
2789 2790 port = ':%d' % self.httpd.port
2790 2791 if port == ':80':
2791 2792 port = ''
2792 2793
2793 2794 bindaddr = self.httpd.addr
2794 2795 if bindaddr == '0.0.0.0':
2795 2796 bindaddr = '*'
2796 2797 elif ':' in bindaddr: # IPv6
2797 2798 bindaddr = '[%s]' % bindaddr
2798 2799
2799 2800 fqaddr = self.httpd.fqaddr
2800 2801 if ':' in fqaddr:
2801 2802 fqaddr = '[%s]' % fqaddr
2802 2803 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2803 2804 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2804 2805
2805 2806 def run(self):
2806 2807 self.httpd.serve_forever()
2807 2808
2808 2809 service = service()
2809 2810
2810 2811 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2811 2812
2812 2813 def status(ui, repo, *pats, **opts):
2813 2814 """show changed files in the working directory
2814 2815
2815 2816 Show status of files in the repository. If names are given, only
2816 2817 files that match are shown. Files that are clean or ignored or
2817 2818 the source of a copy/move operation are not listed unless
2818 2819 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2819 2820 Unless options described with "show only ..." are given, the
2820 2821 options -mardu are used.
2821 2822
2822 2823 Option -q/--quiet hides untracked (unknown and ignored) files
2823 2824 unless explicitly requested with -u/--unknown or -i/--ignored.
2824 2825
2825 2826 NOTE: status may appear to disagree with diff if permissions have
2826 2827 changed or a merge has occurred. The standard diff format does not
2827 2828 report permission changes and diff only reports changes relative
2828 2829 to one merge parent.
2829 2830
2830 2831 If one revision is given, it is used as the base revision.
2831 2832 If two revisions are given, the differences between them are
2832 2833 shown.
2833 2834
2834 2835 The codes used to show the status of files are::
2835 2836
2836 2837 M = modified
2837 2838 A = added
2838 2839 R = removed
2839 2840 C = clean
2840 2841 ! = missing (deleted by non-hg command, but still tracked)
2841 2842 ? = not tracked
2842 2843 I = ignored
2843 2844 = origin of the previous file listed as A (added)
2844 2845 """
2845 2846
2846 2847 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2847 2848 cwd = (pats and repo.getcwd()) or ''
2848 2849 end = opts.get('print0') and '\0' or '\n'
2849 2850 copy = {}
2850 2851 states = 'modified added removed deleted unknown ignored clean'.split()
2851 2852 show = [k for k in states if opts.get(k)]
2852 2853 if opts.get('all'):
2853 2854 show += ui.quiet and (states[:4] + ['clean']) or states
2854 2855 if not show:
2855 2856 show = ui.quiet and states[:4] or states[:5]
2856 2857
2857 2858 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2858 2859 'ignored' in show, 'clean' in show, 'unknown' in show)
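# pair each state name with its status letter and the matching file list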
2859 2860 changestates = zip(states, 'MAR!?IC', stat)
2860 2861
2861 2862 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2862 2863 ctxn = repo[nullid]
2863 2864 ctx1 = repo[node1]
2864 2865 ctx2 = repo[node2]
2865 2866 added = stat[1]
2866 2867 if node2 is None:
2867 2868 added = stat[0] + stat[1] # merged?
2868 2869
2869 2870 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2870 2871 if k in added:
2871 2872 copy[k] = v
2872 2873 elif v in added:
2873 2874 copy[v] = k
2874 2875
2875 2876 for state, char, files in changestates:
2876 2877 if state in show:
2877 2878 format = "%s %%s%s" % (char, end)
2878 2879 if opts.get('no_status'):
2879 2880 format = "%%s%s" % end
2880 2881
2881 2882 for f in files:
2882 2883 ui.write(format % repo.pathto(f, cwd))
2883 2884 if f in copy:
2884 2885 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2885 2886
2886 2887 def summary(ui, repo, **opts):
2887 2888 """summarize working directory state
2888 2889
2889 2890 This generates a brief summary of the working directory state,
2890 2891 including parents, branch, commit status, and available updates.
2891 2892
2892 2893 With the --remote option, this will check the default paths for
2893 2894 incoming and outgoing changes. This can be time-consuming.
2894 2895 """
2895 2896
2896 2897 ctx = repo[None]
2897 2898 parents = ctx.parents()
2898 2899 pnode = parents[0].node()
2899 2900 tags = repo.tags()
2900 2901
2901 2902 for p in parents:
2902 2903 t = ' '.join([t for t in tags if tags[t] == p.node()])
2903 2904 if p.rev() == -1:
2904 2905 if not len(repo):
2905 2906 t += _(' (empty repository)')
2906 2907 else:
2907 2908 t += _(' (no revision checked out)')
2908 2909 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
2909 2910 if p.description():
2910 2911 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
2911 2912
2912 2913 branch = ctx.branch()
2913 2914 bheads = repo.branchheads(branch)
2914 2915 ui.status(_('branch: %s\n') % branch)
2915 2916
2916 2917 st = list(repo.status(unknown=True))[:6]
2917 2918 ms = merge_.mergestate(repo)
2918 2919 st.append([f for f in ms if ms[f] == 'u'])
2919 2920 labels = [_('%d modified'), _('%d added'), _('%d removed'),
2920 2921 _('%d deleted'), _('%d unknown'), _('%d ignored'),
2921 2922 _('%d unresolved')]
2922 2923 t = []
2923 2924 for s,l in zip(st, labels):
2924 2925 if s:
2925 2926 t.append(l % len(s))
2926 2927
2927 2928 t = ', '.join(t)
2928 2929
2929 2930 if len(parents) > 1:
2930 2931 t += _(' (merge)')
2931 2932 elif branch != parents[0].branch():
2932 2933 t += _(' (new branch)')
2933 2934 elif (not st[0] and not st[1] and not st[2]):
2934 2935 t += _(' (clean)')
2935 2936 elif pnode not in bheads:
2936 2937 t += _(' (new branch head)')
2937 2938
2938 2939 if 'clean' in t:
2939 2940 ui.status(_('commit: %s\n') % t.strip())
2940 2941 else:
2941 2942 ui.write(_('commit: %s\n') % t.strip())
2942 2943
2943 2944 # all ancestors of branch heads - all ancestors of parent = new csets
2944 2945 new = [0] * len(repo)
2945 2946 cl = repo.changelog
2946 2947 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
2947 2948 new[a] = 1
2948 2949 for a in cl.ancestors(*[p.rev() for p in parents]):
2949 2950 new[a] = 0
2950 2951 new = sum(new)
2951 2952
2952 2953 if new == 0:
2953 2954 ui.status(_('update: (current)\n'))
2954 2955 elif pnode not in bheads:
2955 2956 ui.write(_('update: %d new changesets (update)\n') % new)
2956 2957 else:
2957 2958 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
2958 2959 (new, len(bheads)))
2959 2960
2960 2961 if opts.get('remote'):
2961 2962 t = []
2962 2963 source, revs, checkout = hg.parseurl(ui.expandpath('default'),
2963 2964 opts.get('rev'))
2964 2965 other = hg.repository(cmdutil.remoteui(repo, {}), source)
2965 2966 ui.debug('comparing with %s\n' % url.hidepassword(source))
2966 2967 repo.ui.pushbuffer()
2967 2968 common, incoming, rheads = repo.findcommonincoming(other)
2968 2969 repo.ui.popbuffer()
2969 2970 if incoming:
2970 2971 t.append(_('1 or more incoming'))
2971 2972
2972 2973 dest, revs, checkout = hg.parseurl(
2973 2974 ui.expandpath('default-push', 'default'))
2974 2975 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
2975 2976 ui.debug('comparing with %s\n' % url.hidepassword(dest))
2976 2977 repo.ui.pushbuffer()
2977 2978 o = repo.findoutgoing(other)
2978 2979 repo.ui.popbuffer()
2979 2980 o = repo.changelog.nodesbetween(o, revs)[0]
2980 2981 if o:
2981 2982 t.append(_('%d outgoing') % len(o))
2982 2983
2983 2984 if t:
2984 2985 ui.write(_('remote: %s\n') % (', '.join(t)))
2985 2986 else:
2986 2987 ui.status(_('remote: (synced)\n'))
2987 2988
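The update count computed above is a plain set difference over ancestor sets: everything reachable from the branch heads minus everything reachable from the working directory parents. A minimal standalone sketch of the same idea, using ordinary Python sets and made-up revision numbers instead of the changelog API (illustration only):

def new_changeset_count(head_ancestors, parent_ancestors):
    # both arguments are sets of revision numbers (illustration only)
    return len(set(head_ancestors) - set(parent_ancestors))

# heads reach revs 0-4, the checked-out parent reaches revs 0-2 -> 2 new
assert new_changeset_count(set([0, 1, 2, 3, 4]), set([0, 1, 2])) == 2
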
2988 2989 def tag(ui, repo, name1, *names, **opts):
2989 2990 """add one or more tags for the current or given revision
2990 2991
2991 2992 Name a particular revision using <name>.
2992 2993
2993 2994 Tags are used to name particular revisions of the repository and are
2994 2995 very useful for comparing different revisions, going back to
2995 2996 significant earlier versions, or marking branch points as releases.
2996 2997
2997 2998 If no revision is given, the parent of the working directory is
2998 2999 used, or tip if no revision is checked out.
2999 3000
3000 3001 To facilitate version control, distribution, and merging of tags,
3001 3002 they are stored as a file named ".hgtags" which is managed
3002 3003 similarly to other project files and can be hand-edited if
3003 3004 necessary. The file '.hg/localtags' is used for local tags (not
3004 3005 shared among repositories).
3005 3006
3006 3007 See 'hg help dates' for a list of formats valid for -d/--date.
3007 3008 """
3008 3009
3009 3010 rev_ = "."
3010 3011 names = (name1,) + names
3011 3012 if len(names) != len(set(names)):
3012 3013 raise util.Abort(_('tag names must be unique'))
3013 3014 for n in names:
3014 3015 if n in ['tip', '.', 'null']:
3015 3016 raise util.Abort(_('the name \'%s\' is reserved') % n)
3016 3017 if opts.get('rev') and opts.get('remove'):
3017 3018 raise util.Abort(_("--rev and --remove are incompatible"))
3018 3019 if opts.get('rev'):
3019 3020 rev_ = opts['rev']
3020 3021 message = opts.get('message')
3021 3022 if opts.get('remove'):
3022 3023 expectedtype = opts.get('local') and 'local' or 'global'
3023 3024 for n in names:
3024 3025 if not repo.tagtype(n):
3025 3026 raise util.Abort(_('tag \'%s\' does not exist') % n)
3026 3027 if repo.tagtype(n) != expectedtype:
3027 3028 if expectedtype == 'global':
3028 3029 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3029 3030 else:
3030 3031 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3031 3032 rev_ = nullid
3032 3033 if not message:
3033 3034 # we don't translate commit messages
3034 3035 message = 'Removed tag %s' % ', '.join(names)
3035 3036 elif not opts.get('force'):
3036 3037 for n in names:
3037 3038 if n in repo.tags():
3038 3039 raise util.Abort(_('tag \'%s\' already exists '
3039 3040 '(use -f to force)') % n)
3040 3041 if not rev_ and repo.dirstate.parents()[1] != nullid:
3041 3042 raise util.Abort(_('uncommitted merge - please provide a '
3042 3043 'specific revision'))
3043 3044 r = repo[rev_].node()
3044 3045
3045 3046 if not message:
3046 3047 # we don't translate commit messages
3047 3048 message = ('Added tag %s for changeset %s' %
3048 3049 (', '.join(names), short(r)))
3049 3050
3050 3051 date = opts.get('date')
3051 3052 if date:
3052 3053 date = util.parsedate(date)
3053 3054
3054 3055 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3055 3056
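For reference, the .hgtags file named in the docstring above is plain text with one '<40-hex-digit node> <tag name>' entry per line, and a later line for the same tag overrides an earlier one. A small illustrative parser under those assumptions (a sketch, not part of this module):

def parse_hgtags(data):
    tags = {}
    for line in data.splitlines():
        line = line.strip()
        if not line:
            continue
        node, name = line.split(' ', 1)
        tags[name] = node   # the last entry for a tag wins
    return tags
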
3056 3057 def tags(ui, repo):
3057 3058 """list repository tags
3058 3059
3059 3060 This lists both regular and local tags. When the -v/--verbose
3060 3061 switch is used, a third column "local" is printed for local tags.
3061 3062 """
3062 3063
3063 3064 hexfunc = ui.debugflag and hex or short
3064 3065 tagtype = ""
3065 3066
3066 3067 for t, n in reversed(repo.tagslist()):
3067 3068 if ui.quiet:
3068 3069 ui.write("%s\n" % t)
3069 3070 continue
3070 3071
3071 3072 try:
3072 3073 hn = hexfunc(n)
3073 3074 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3074 3075 except error.LookupError:
3075 3076 r = "    ?:%s" % hn
3076 3077 else:
3077 3078 spaces = " " * (30 - encoding.colwidth(t))
3078 3079 if ui.verbose:
3079 3080 if repo.tagtype(t) == 'local':
3080 3081 tagtype = " local"
3081 3082 else:
3082 3083 tagtype = ""
3083 3084 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3084 3085
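The padding above is computed with encoding.colwidth() rather than len() so that tag names containing multi-byte characters still line up in the 30-column layout. A hedged sketch of the same padding rule as a standalone helper (illustration only):

def pad_tagname(name, width=30):
    # colwidth() measures display columns, not bytes
    return name + " " * max(0, width - encoding.colwidth(name))
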
3085 3086 def tip(ui, repo, **opts):
3086 3087 """show the tip revision
3087 3088
3088 3089 The tip revision (usually just called the tip) is the changeset
3089 3090 most recently added to the repository (and therefore the most
3090 3091 recently changed head).
3091 3092
3092 3093 If you have just made a commit, that commit will be the tip. If
3093 3094 you have just pulled changes from another repository, the tip of
3094 3095 that repository becomes the current tip. The "tip" tag is special
3095 3096 and cannot be renamed or assigned to a different changeset.
3096 3097 """
3097 3098 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
3098 3099
3099 3100 def unbundle(ui, repo, fname1, *fnames, **opts):
3100 3101 """apply one or more changegroup files
3101 3102
3102 3103 Apply one or more compressed changegroup files generated by the
3103 3104 bundle command.
3104 3105 """
3105 3106 fnames = (fname1,) + fnames
3106 3107
3107 3108 lock = repo.lock()
3108 3109 try:
3109 3110 for fname in fnames:
3110 3111 f = url.open(ui, fname)
3111 3112 gen = changegroup.readbundle(f, fname)
3112 3113 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3113 3114 finally:
3114 3115 lock.release()
3115 3116
3116 3117 return postincoming(ui, repo, modheads, opts.get('update'), None)
3117 3118
3118 3119 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3119 3120 """update working directory
3120 3121
3121 3122 Update the repository's working directory to the specified
3122 3123 revision, or the tip of the current branch if none is specified.
3123 3124 Use null as the revision to remove the working copy (like 'hg
3124 3125 clone -U').
3125 3126
3126 3127 When the working directory contains no uncommitted changes, it
3127 3128 will be replaced by the state of the requested revision from the
3128 3129 repository. When the requested revision is on a different branch,
3129 3130 the working directory will additionally be switched to that
3130 3131 branch.
3131 3132
3132 3133 When there are uncommitted changes, use option -C/--clean to
3133 3134 discard them, forcibly replacing the state of the working
3134 3135 directory with the requested revision. Alternatively, use -c/--check
3135 3136 to abort the update if the working directory has uncommitted changes.
3136 3137
3137 3138 When there are uncommitted changes and option -C/--clean is not
3138 3139 used, and the parent revision and requested revision are on the
3139 3140 same branch, and one of them is an ancestor of the other, then the
3140 3141 new working directory will contain the requested revision merged
3141 3142 with the uncommitted changes. Otherwise, the update will fail with
3142 3143 a suggestion to use 'merge' or 'update -C' instead.
3143 3144
3144 3145 If you want to update just one file to an older revision, use
3145 3146 revert.
3146 3147
3147 3148 See 'hg help dates' for a list of formats valid for -d/--date.
3148 3149 """
3149 3150 if rev and node:
3150 3151 raise util.Abort(_("please specify just one revision"))
3151 3152
3152 3153 if not rev:
3153 3154 rev = node
3154 3155
3155 3156 if check and clean:
3156 3157 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3157 3158
3158 3159 if check:
3159 3160 # we could use dirty() but we can ignore merge and branch trivia
3160 3161 c = repo[None]
3161 3162 if c.modified() or c.added() or c.removed():
3162 3163 raise util.Abort(_("uncommitted local changes"))
3163 3164
3164 3165 if date:
3165 3166 if rev:
3166 3167 raise util.Abort(_("you can't specify a revision and a date"))
3167 3168 rev = cmdutil.finddate(ui, repo, date)
3168 3169
3169 3170 if clean or check:
3170 3171 return hg.clean(repo, rev)
3171 3172 else:
3172 3173 return hg.update(repo, rev)
3173 3174
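Condensing the docstring rules into one place, here is a hedged sketch of the option precedence implemented above; the real command works on a repository rather than plain booleans (illustration only):

def choose_update_action(clean, check, dirty):
    # mirrors the checks above: -c/--check and -C/--clean conflict,
    # -c refuses a dirty working directory, and either flag ends up
    # at hg.clean(); everything else goes through hg.update()
    if clean and check:
        raise ValueError("cannot specify both -c/--check and -C/--clean")
    if check and dirty:
        raise ValueError("uncommitted local changes")
    if clean or check:
        return "hg.clean"
    return "hg.update"
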
3174 3175 def verify(ui, repo):
3175 3176 """verify the integrity of the repository
3176 3177
3177 3178 Verify the integrity of the current repository.
3178 3179
3179 3180 This will perform an extensive check of the repository's
3180 3181 integrity, validating the hashes and checksums of each entry in
3181 3182 the changelog, manifest, and tracked files, as well as the
3182 3183 integrity of their crosslinks and indices.
3183 3184 """
3184 3185 return hg.verify(repo)
3185 3186
3186 3187 def version_(ui):
3187 3188 """output version and copyright information"""
3188 3189 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3189 3190 % util.version())
3190 3191 ui.status(_(
3191 3192 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
3192 3193 "This is free software; see the source for copying conditions. "
3193 3194 "There is NO\nwarranty; "
3194 3195 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3195 3196 ))
3196 3197
3197 3198 # Command options and aliases are listed here, alphabetically
3198 3199
3199 3200 globalopts = [
3200 3201 ('R', 'repository', '',
3201 3202 _('repository root directory or name of overlay bundle file')),
3202 3203 ('', 'cwd', '', _('change working directory')),
3203 3204 ('y', 'noninteractive', None,
3204 3205 _('do not prompt, assume \'yes\' for any required answers')),
3205 3206 ('q', 'quiet', None, _('suppress output')),
3206 3207 ('v', 'verbose', None, _('enable additional output')),
3207 3208 ('', 'config', [], _('set/override config option')),
3208 3209 ('', 'debug', None, _('enable debugging output')),
3209 3210 ('', 'debugger', None, _('start debugger')),
3210 3211 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3211 3212 ('', 'encodingmode', encoding.encodingmode,
3212 3213 _('set the charset encoding mode')),
3213 3214 ('', 'traceback', None, _('print traceback on exception')),
3214 3215 ('', 'time', None, _('time how long the command takes')),
3215 3216 ('', 'profile', None, _('print command execution profile')),
3216 3217 ('', 'version', None, _('output version information and exit')),
3217 3218 ('h', 'help', None, _('display help and exit')),
3218 3219 ]
3219 3220
3220 3221 dryrunopts = [('n', 'dry-run', None,
3221 3222 _('do not perform actions, just print output'))]
3222 3223
3223 3224 remoteopts = [
3224 3225 ('e', 'ssh', '', _('specify ssh command to use')),
3225 3226 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3226 3227 ]
3227 3228
3228 3229 walkopts = [
3229 3230 ('I', 'include', [], _('include names matching the given patterns')),
3230 3231 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3231 3232 ]
3232 3233
3233 3234 commitopts = [
3234 3235 ('m', 'message', '', _('use <text> as commit message')),
3235 3236 ('l', 'logfile', '', _('read commit message from <file>')),
3236 3237 ]
3237 3238
3238 3239 commitopts2 = [
3239 3240 ('d', 'date', '', _('record datecode as commit date')),
3240 3241 ('u', 'user', '', _('record the specified user as committer')),
3241 3242 ]
3242 3243
3243 3244 templateopts = [
3244 3245 ('', 'style', '', _('display using template map file')),
3245 3246 ('', 'template', '', _('display with template')),
3246 3247 ]
3247 3248
3248 3249 logopts = [
3249 3250 ('p', 'patch', None, _('show patch')),
3250 3251 ('g', 'git', None, _('use git extended diff format')),
3251 3252 ('l', 'limit', '', _('limit number of changes displayed')),
3252 3253 ('M', 'no-merges', None, _('do not show merges')),
3253 3254 ] + templateopts
3254 3255
3255 3256 diffopts = [
3256 3257 ('a', 'text', None, _('treat all files as text')),
3257 3258 ('g', 'git', None, _('use git extended diff format')),
3258 3259 ('', 'nodates', None, _("don't include dates in diff headers"))
3259 3260 ]
3260 3261
3261 3262 diffopts2 = [
3262 3263 ('p', 'show-function', None, _('show which function each change is in')),
3263 3264 ('w', 'ignore-all-space', None,
3264 3265 _('ignore white space when comparing lines')),
3265 3266 ('b', 'ignore-space-change', None,
3266 3267 _('ignore changes in the amount of white space')),
3267 3268 ('B', 'ignore-blank-lines', None,
3268 3269 _('ignore changes whose lines are all blank')),
3269 3270 ('U', 'unified', '', _('number of lines of context to show')),
3270 3271 ('', 'stat', None, _('output diffstat-style summary of changes')),
3271 3272 ]
3272 3273
3273 3274 similarityopts = [
3274 3275 ('s', 'similarity', '',
3275 3276 _('guess renamed files by similarity (0<=s<=100)'))
3276 3277 ]
3277 3278
3278 3279 table = {
3279 3280 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3280 3281 "addremove":
3281 3282 (addremove, similarityopts + walkopts + dryrunopts,
3282 3283 _('[OPTION]... [FILE]...')),
3283 3284 "^annotate|blame":
3284 3285 (annotate,
3285 3286 [('r', 'rev', '', _('annotate the specified revision')),
3286 3287 ('f', 'follow', None, _('follow file copies and renames')),
3287 3288 ('a', 'text', None, _('treat all files as text')),
3288 3289 ('u', 'user', None, _('list the author (long with -v)')),
3289 3290 ('d', 'date', None, _('list the date (short with -q)')),
3290 3291 ('n', 'number', None, _('list the revision number (default)')),
3291 3292 ('c', 'changeset', None, _('list the changeset')),
3292 3293 ('l', 'line-number', None,
3293 3294 _('show line number at the first appearance'))
3294 3295 ] + walkopts,
3295 3296 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3296 3297 "archive":
3297 3298 (archive,
3298 3299 [('', 'no-decode', None, _('do not pass files through decoders')),
3299 3300 ('p', 'prefix', '', _('directory prefix for files in archive')),
3300 3301 ('r', 'rev', '', _('revision to distribute')),
3301 3302 ('t', 'type', '', _('type of distribution to create')),
3302 3303 ] + walkopts,
3303 3304 _('[OPTION]... DEST')),
3304 3305 "backout":
3305 3306 (backout,
3306 3307 [('', 'merge', None,
3307 3308 _('merge with old dirstate parent after backout')),
3308 3309 ('', 'parent', '', _('parent to choose when backing out merge')),
3309 3310 ('r', 'rev', '', _('revision to backout')),
3310 3311 ] + walkopts + commitopts + commitopts2,
3311 3312 _('[OPTION]... [-r] REV')),
3312 3313 "bisect":
3313 3314 (bisect,
3314 3315 [('r', 'reset', False, _('reset bisect state')),
3315 3316 ('g', 'good', False, _('mark changeset good')),
3316 3317 ('b', 'bad', False, _('mark changeset bad')),
3317 3318 ('s', 'skip', False, _('skip testing changeset')),
3318 3319 ('c', 'command', '', _('use command to check changeset state')),
3319 3320 ('U', 'noupdate', False, _('do not update to target'))],
3320 3321 _("[-gbsr] [-c CMD] [REV]")),
3321 3322 "branch":
3322 3323 (branch,
3323 3324 [('f', 'force', None,
3324 3325 _('set branch name even if it shadows an existing branch')),
3325 3326 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3326 3327 _('[-fC] [NAME]')),
3327 3328 "branches":
3328 3329 (branches,
3329 3330 [('a', 'active', False,
3330 3331 _('show only branches that have unmerged heads')),
3331 3332 ('c', 'closed', False,
3332 3333 _('show normal and closed branches'))],
3333 3334 _('[-a]')),
3334 3335 "bundle":
3335 3336 (bundle,
3336 3337 [('f', 'force', None,
3337 3338 _('run even when remote repository is unrelated')),
3338 3339 ('r', 'rev', [],
3339 3340 _('a changeset up to which you would like to bundle')),
3340 3341 ('', 'base', [],
3341 3342 _('a base changeset to specify instead of a destination')),
3342 3343 ('a', 'all', None, _('bundle all changesets in the repository')),
3343 3344 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3344 3345 ] + remoteopts,
3345 3346 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3346 3347 "cat":
3347 3348 (cat,
3348 3349 [('o', 'output', '', _('print output to file with formatted name')),
3349 3350 ('r', 'rev', '', _('print the given revision')),
3350 3351 ('', 'decode', None, _('apply any matching decode filter')),
3351 3352 ] + walkopts,
3352 3353 _('[OPTION]... FILE...')),
3353 3354 "^clone":
3354 3355 (clone,
3355 3356 [('U', 'noupdate', None,
3356 3357 _('the clone will only contain a repository (no working copy)')),
3357 3358 ('r', 'rev', [],
3358 3359 _('a changeset you would like to have after cloning')),
3359 3360 ('', 'pull', None, _('use pull protocol to copy metadata')),
3360 3361 ('', 'uncompressed', None,
3361 3362 _('use uncompressed transfer (fast over LAN)')),
3362 3363 ] + remoteopts,
3363 3364 _('[OPTION]... SOURCE [DEST]')),
3364 3365 "^commit|ci":
3365 3366 (commit,
3366 3367 [('A', 'addremove', None,
3367 3368 _('mark new/missing files as added/removed before committing')),
3368 3369 ('', 'close-branch', None,
3369 3370 _('mark a branch as closed, hiding it from the branch list')),
3370 3371 ] + walkopts + commitopts + commitopts2,
3371 3372 _('[OPTION]... [FILE]...')),
3372 3373 "copy|cp":
3373 3374 (copy,
3374 3375 [('A', 'after', None, _('record a copy that has already occurred')),
3375 3376 ('f', 'force', None,
3376 3377 _('forcibly copy over an existing managed file')),
3377 3378 ] + walkopts + dryrunopts,
3378 3379 _('[OPTION]... [SOURCE]... DEST')),
3379 3380 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3380 3381 "debugcheckstate": (debugcheckstate, [], ''),
3381 3382 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3382 3383 "debugcomplete":
3383 3384 (debugcomplete,
3384 3385 [('o', 'options', None, _('show the command options'))],
3385 3386 _('[-o] CMD')),
3386 3387 "debugdate":
3387 3388 (debugdate,
3388 3389 [('e', 'extended', None, _('try extended date formats'))],
3389 3390 _('[-e] DATE [RANGE]')),
3390 3391 "debugdata": (debugdata, [], _('FILE REV')),
3391 3392 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3392 3393 "debugindex": (debugindex, [], _('FILE')),
3393 3394 "debugindexdot": (debugindexdot, [], _('FILE')),
3394 3395 "debuginstall": (debuginstall, [], ''),
3395 3396 "debugrebuildstate":
3396 3397 (debugrebuildstate,
3397 3398 [('r', 'rev', '', _('revision to rebuild to'))],
3398 3399 _('[-r REV] [REV]')),
3399 3400 "debugrename":
3400 3401 (debugrename,
3401 3402 [('r', 'rev', '', _('revision to debug'))],
3402 3403 _('[-r REV] FILE')),
3403 3404 "debugsetparents":
3404 3405 (debugsetparents, [], _('REV1 [REV2]')),
3405 3406 "debugstate":
3406 3407 (debugstate,
3407 3408 [('', 'nodates', None, _('do not display the saved mtime'))],
3408 3409 _('[OPTION]...')),
3409 3410 "debugsub":
3410 3411 (debugsub,
3411 3412 [('r', 'rev', '', _('revision to check'))],
3412 3413 _('[-r REV] [REV]')),
3413 3414 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3414 3415 "^diff":
3415 3416 (diff,
3416 3417 [('r', 'rev', [], _('revision')),
3417 3418 ('c', 'change', '', _('change made by revision'))
3418 3419 ] + diffopts + diffopts2 + walkopts,
3419 3420 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3420 3421 "^export":
3421 3422 (export,
3422 3423 [('o', 'output', '', _('print output to file with formatted name')),
3423 3424 ('', 'switch-parent', None, _('diff against the second parent'))
3424 3425 ] + diffopts,
3425 3426 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3426 3427 "^forget":
3427 3428 (forget,
3428 3429 [] + walkopts,
3429 3430 _('[OPTION]... FILE...')),
3430 3431 "grep":
3431 3432 (grep,
3432 3433 [('0', 'print0', None, _('end fields with NUL')),
3433 3434 ('', 'all', None, _('print all revisions that match')),
3434 3435 ('f', 'follow', None,
3435 3436 _('follow changeset history, or file history across copies and renames')),
3436 3437 ('i', 'ignore-case', None, _('ignore case when matching')),
3437 3438 ('l', 'files-with-matches', None,
3438 3439 _('print only filenames and revisions that match')),
3439 3440 ('n', 'line-number', None, _('print matching line numbers')),
3440 3441 ('r', 'rev', [], _('search in given revision range')),
3441 3442 ('u', 'user', None, _('list the author (long with -v)')),
3442 3443 ('d', 'date', None, _('list the date (short with -q)')),
3443 3444 ] + walkopts,
3444 3445 _('[OPTION]... PATTERN [FILE]...')),
3445 3446 "heads":
3446 3447 (heads,
3447 3448 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3448 3449 ('a', 'active', False,
3449 3450 _('show only the active branch heads from open branches')),
3450 3451 ('c', 'closed', False,
3451 3452 _('show normal and closed branch heads')),
3452 3453 ] + templateopts,
3453 3454 _('[-r STARTREV] [REV]...')),
3454 3455 "help": (help_, [], _('[TOPIC]')),
3455 3456 "identify|id":
3456 3457 (identify,
3457 3458 [('r', 'rev', '', _('identify the specified revision')),
3458 3459 ('n', 'num', None, _('show local revision number')),
3459 3460 ('i', 'id', None, _('show global revision id')),
3460 3461 ('b', 'branch', None, _('show branch')),
3461 3462 ('t', 'tags', None, _('show tags'))],
3462 3463 _('[-nibt] [-r REV] [SOURCE]')),
3463 3464 "import|patch":
3464 3465 (import_,
3465 3466 [('p', 'strip', 1,
3466 3467 _('directory strip option for patch. This has the same '
3467 3468 'meaning as the corresponding patch option')),
3468 3469 ('b', 'base', '', _('base path')),
3469 3470 ('f', 'force', None,
3470 3471 _('skip check for outstanding uncommitted changes')),
3471 3472 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3472 3473 ('', 'exact', None,
3473 3474 _('apply patch to the nodes from which it was generated')),
3474 3475 ('', 'import-branch', None,
3475 3476 _('use any branch information in patch (implied by --exact)'))] +
3476 3477 commitopts + commitopts2 + similarityopts,
3477 3478 _('[OPTION]... PATCH...')),
3478 3479 "incoming|in":
3479 3480 (incoming,
3480 3481 [('f', 'force', None,
3481 3482 _('run even when remote repository is unrelated')),
3482 3483 ('n', 'newest-first', None, _('show newest record first')),
3483 3484 ('', 'bundle', '', _('file to store the bundles into')),
3484 3485 ('r', 'rev', [],
3485 3486 _('a specific remote revision up to which you would like to pull')),
3486 3487 ] + logopts + remoteopts,
3487 3488 _('[-p] [-n] [-M] [-f] [-r REV]...'
3488 3489 ' [--bundle FILENAME] [SOURCE]')),
3489 3490 "^init":
3490 3491 (init,
3491 3492 remoteopts,
3492 3493 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3493 3494 "locate":
3494 3495 (locate,
3495 3496 [('r', 'rev', '', _('search the repository as it stood at REV')),
3496 3497 ('0', 'print0', None,
3497 3498 _('end filenames with NUL, for use with xargs')),
3498 3499 ('f', 'fullpath', None,
3499 3500 _('print complete paths from the filesystem root')),
3500 3501 ] + walkopts,
3501 3502 _('[OPTION]... [PATTERN]...')),
3502 3503 "^log|history":
3503 3504 (log,
3504 3505 [('f', 'follow', None,
3505 3506 _('follow changeset history, or file history across copies and renames')),
3506 3507 ('', 'follow-first', None,
3507 3508 _('only follow the first parent of merge changesets')),
3508 3509 ('d', 'date', '', _('show revisions matching date spec')),
3509 3510 ('C', 'copies', None, _('show copied files')),
3510 3511 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3511 3512 ('r', 'rev', [], _('show the specified revision or range')),
3512 3513 ('', 'removed', None, _('include revisions where files were removed')),
3513 3514 ('m', 'only-merges', None, _('show only merges')),
3514 3515 ('u', 'user', [], _('revisions committed by user')),
3515 3516 ('b', 'only-branch', [],
3516 3517 _('show only changesets within the given named branch')),
3517 3518 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3518 3519 ] + logopts + walkopts,
3519 3520 _('[OPTION]... [FILE]')),
3520 3521 "manifest":
3521 3522 (manifest,
3522 3523 [('r', 'rev', '', _('revision to display'))],
3523 3524 _('[-r REV]')),
3524 3525 "^merge":
3525 3526 (merge,
3526 3527 [('f', 'force', None, _('force a merge with outstanding changes')),
3527 3528 ('r', 'rev', '', _('revision to merge')),
3528 3529 ('P', 'preview', None,
3529 3530 _('review revisions to merge (no merge is performed)'))],
3530 3531 _('[-f] [[-r] REV]')),
3531 3532 "outgoing|out":
3532 3533 (outgoing,
3533 3534 [('f', 'force', None,
3534 3535 _('run even when remote repository is unrelated')),
3535 3536 ('r', 'rev', [],
3536 3537 _('a specific revision up to which you would like to push')),
3537 3538 ('n', 'newest-first', None, _('show newest record first')),
3538 3539 ] + logopts + remoteopts,
3539 3540 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3540 3541 "parents":
3541 3542 (parents,
3542 3543 [('r', 'rev', '', _('show parents from the specified revision')),
3543 3544 ] + templateopts,
3544 3545 _('[-r REV] [FILE]')),
3545 3546 "paths": (paths, [], _('[NAME]')),
3546 3547 "^pull":
3547 3548 (pull,
3548 3549 [('u', 'update', None,
3549 3550 _('update to new tip if changesets were pulled')),
3550 3551 ('f', 'force', None,
3551 3552 _('run even when remote repository is unrelated')),
3552 3553 ('r', 'rev', [],
3553 3554 _('a specific remote revision up to which you would like to pull')),
3554 3555 ] + remoteopts,
3555 3556 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3556 3557 "^push":
3557 3558 (push,
3558 3559 [('f', 'force', None, _('force push')),
3559 3560 ('r', 'rev', [],
3560 3561 _('a specific revision up to which you would like to push')),
3561 3562 ] + remoteopts,
3562 3563 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3563 3564 "recover": (recover, []),
3564 3565 "^remove|rm":
3565 3566 (remove,
3566 3567 [('A', 'after', None, _('record delete for missing files')),
3567 3568 ('f', 'force', None,
3568 3569 _('remove (and delete) file even if added or modified')),
3569 3570 ] + walkopts,
3570 3571 _('[OPTION]... FILE...')),
3571 3572 "rename|mv":
3572 3573 (rename,
3573 3574 [('A', 'after', None, _('record a rename that has already occurred')),
3574 3575 ('f', 'force', None,
3575 3576 _('forcibly copy over an existing managed file')),
3576 3577 ] + walkopts + dryrunopts,
3577 3578 _('[OPTION]... SOURCE... DEST')),
3578 3579 "resolve":
3579 3580 (resolve,
3580 3581 [('a', 'all', None, _('remerge all unresolved files')),
3581 3582 ('l', 'list', None, _('list state of files needing merge')),
3582 3583 ('m', 'mark', None, _('mark files as resolved')),
3583 3584 ('u', 'unmark', None, _('unmark files as resolved')),
3584 3585 ('n', 'no-status', None, _('hide status prefix'))]
3585 3586 + walkopts,
3586 3587 _('[OPTION]... [FILE]...')),
3587 3588 "revert":
3588 3589 (revert,
3589 3590 [('a', 'all', None, _('revert all changes when no arguments given')),
3590 3591 ('d', 'date', '', _('tipmost revision matching date')),
3591 3592 ('r', 'rev', '', _('revision to revert to')),
3592 3593 ('', 'no-backup', None, _('do not save backup copies of files')),
3593 3594 ] + walkopts + dryrunopts,
3594 3595 _('[OPTION]... [-r REV] [NAME]...')),
3595 3596 "rollback": (rollback, []),
3596 3597 "root": (root, []),
3597 3598 "^serve":
3598 3599 (serve,
3599 3600 [('A', 'accesslog', '', _('name of access log file to write to')),
3600 3601 ('d', 'daemon', None, _('run server in background')),
3601 3602 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3602 3603 ('E', 'errorlog', '', _('name of error log file to write to')),
3603 3604 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3604 3605 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3605 3606 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3606 3607 ('n', 'name', '',
3607 3608 _('name to show in web pages (default: working directory)')),
3608 3609 ('', 'webdir-conf', '', _('name of the webdir config file'
3609 3610 ' (serve more than one repository)')),
3610 3611 ('', 'pid-file', '', _('name of file to write process ID to')),
3611 3612 ('', 'stdio', None, _('for remote clients')),
3612 3613 ('t', 'templates', '', _('web templates to use')),
3613 3614 ('', 'style', '', _('template style to use')),
3614 3615 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3615 3616 ('', 'certificate', '', _('SSL certificate file'))],
3616 3617 _('[OPTION]...')),
3617 3618 "showconfig|debugconfig":
3618 3619 (showconfig,
3619 3620 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3620 3621 _('[-u] [NAME]...')),
3621 3622 "^summary|sum":
3622 3623 (summary,
3623 3624 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3624 3625 "^status|st":
3625 3626 (status,
3626 3627 [('A', 'all', None, _('show status of all files')),
3627 3628 ('m', 'modified', None, _('show only modified files')),
3628 3629 ('a', 'added', None, _('show only added files')),
3629 3630 ('r', 'removed', None, _('show only removed files')),
3630 3631 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3631 3632 ('c', 'clean', None, _('show only files without changes')),
3632 3633 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3633 3634 ('i', 'ignored', None, _('show only ignored files')),
3634 3635 ('n', 'no-status', None, _('hide status prefix')),
3635 3636 ('C', 'copies', None, _('show source of copied files')),
3636 3637 ('0', 'print0', None,
3637 3638 _('end filenames with NUL, for use with xargs')),
3638 3639 ('', 'rev', [], _('show difference from revision')),
3639 3640 ] + walkopts,
3640 3641 _('[OPTION]... [FILE]...')),
3641 3642 "tag":
3642 3643 (tag,
3643 3644 [('f', 'force', None, _('replace existing tag')),
3644 3645 ('l', 'local', None, _('make the tag local')),
3645 3646 ('r', 'rev', '', _('revision to tag')),
3646 3647 ('', 'remove', None, _('remove a tag')),
3647 3648 # -l/--local is already there, commitopts cannot be used
3648 3649 ('m', 'message', '', _('use <text> as commit message')),
3649 3650 ] + commitopts2,
3650 3651 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3651 3652 "tags": (tags, [], ''),
3652 3653 "tip":
3653 3654 (tip,
3654 3655 [('p', 'patch', None, _('show patch')),
3655 3656 ('g', 'git', None, _('use git extended diff format')),
3656 3657 ] + templateopts,
3657 3658 _('[-p]')),
3658 3659 "unbundle":
3659 3660 (unbundle,
3660 3661 [('u', 'update', None,
3661 3662 _('update to new tip if changesets were unbundled'))],
3662 3663 _('[-u] FILE...')),
3663 3664 "^update|up|checkout|co":
3664 3665 (update,
3665 3666 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3666 3667 ('c', 'check', None, _('check for uncommitted changes')),
3667 3668 ('d', 'date', '', _('tipmost revision matching date')),
3668 3669 ('r', 'rev', '', _('revision'))],
3669 3670 _('[-C] [-d DATE] [[-r] REV]')),
3670 3671 "verify": (verify, []),
3671 3672 "version": (version_, []),
3672 3673 }
3673 3674
3674 3675 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3675 3676 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3676 3677 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,2170 +1,2160 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as match_
15 15 import merge as merge_
16 16 import tags as tags_
17 17 from lock import release
18 18 import weakref, stat, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20
21 21 class localrepository(repo.repository):
22 22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
23 23 supported = set('revlogv1 store fncache shared'.split())
24 24
25 25 def __init__(self, baseui, path=None, create=0):
26 26 repo.repository.__init__(self)
27 27 self.root = os.path.realpath(path)
28 28 self.path = os.path.join(self.root, ".hg")
29 29 self.origroot = path
30 30 self.opener = util.opener(self.path)
31 31 self.wopener = util.opener(self.root)
32 32 self.baseui = baseui
33 33 self.ui = baseui.copy()
34 34
35 35 try:
36 36 self.ui.readconfig(self.join("hgrc"), self.root)
37 37 extensions.loadall(self.ui)
38 38 except IOError:
39 39 pass
40 40
41 41 if not os.path.isdir(self.path):
42 42 if create:
43 43 if not os.path.exists(path):
44 44 os.mkdir(path)
45 45 os.mkdir(self.path)
46 46 requirements = ["revlogv1"]
47 47 if self.ui.configbool('format', 'usestore', True):
48 48 os.mkdir(os.path.join(self.path, "store"))
49 49 requirements.append("store")
50 50 if self.ui.configbool('format', 'usefncache', True):
51 51 requirements.append("fncache")
52 52 # create an invalid changelog
53 53 self.opener("00changelog.i", "a").write(
54 54 '\0\0\0\2' # represents revlogv2
55 55 ' dummy changelog to prevent using the old repo layout'
56 56 )
57 57 reqfile = self.opener("requires", "w")
58 58 for r in requirements:
59 59 reqfile.write("%s\n" % r)
60 60 reqfile.close()
61 61 else:
62 62 raise error.RepoError(_("repository %s not found") % path)
63 63 elif create:
64 64 raise error.RepoError(_("repository %s already exists") % path)
65 65 else:
66 66 # find requirements
67 67 requirements = set()
68 68 try:
69 69 requirements = set(self.opener("requires").read().splitlines())
70 70 except IOError, inst:
71 71 if inst.errno != errno.ENOENT:
72 72 raise
73 73 for r in requirements - self.supported:
74 74 raise error.RepoError(_("requirement '%s' not supported") % r)
75 75
76 76 self.sharedpath = self.path
77 77 try:
78 78 s = os.path.realpath(self.opener("sharedpath").read())
79 79 if not os.path.exists(s):
80 80 raise error.RepoError(
81 81 _('.hg/sharedpath points to nonexistent directory %s') % s)
82 82 self.sharedpath = s
83 83 except IOError, inst:
84 84 if inst.errno != errno.ENOENT:
85 85 raise
86 86
87 87 self.store = store.store(requirements, self.sharedpath, util.opener)
88 88 self.spath = self.store.path
89 89 self.sopener = self.store.opener
90 90 self.sjoin = self.store.join
91 91 self.opener.createmode = self.store.createmode
92 92
93 93 # These two define the set of tags for this repository. _tags
94 94 # maps tag name to node; _tagtypes maps tag name to 'global' or
95 95 # 'local'. (Global tags are defined by .hgtags across all
96 96 # heads, and local tags are defined in .hg/localtags.) They
97 97 # constitute the in-memory cache of tags.
98 98 self._tags = None
99 99 self._tagtypes = None
100 100
101 101 self._branchcache = None # in UTF-8
102 102 self._branchcachetip = None
103 103 self.nodetagscache = None
104 104 self.filterpats = {}
105 105 self._datafilters = {}
106 106 self._transref = self._lockref = self._wlockref = None
107 107
108 108 @propertycache
109 109 def changelog(self):
110 110 c = changelog.changelog(self.sopener)
111 111 if 'HG_PENDING' in os.environ:
112 112 p = os.environ['HG_PENDING']
113 113 if p.startswith(self.root):
114 114 c.readpending('00changelog.i.a')
115 115 self.sopener.defversion = c.version
116 116 return c
117 117
118 118 @propertycache
119 119 def manifest(self):
120 120 return manifest.manifest(self.sopener)
121 121
122 122 @propertycache
123 123 def dirstate(self):
124 124 return dirstate.dirstate(self.opener, self.ui, self.root)
125 125
126 126 def __getitem__(self, changeid):
127 127 if changeid is None:
128 128 return context.workingctx(self)
129 129 return context.changectx(self, changeid)
130 130
131 131 def __nonzero__(self):
132 132 return True
133 133
134 134 def __len__(self):
135 135 return len(self.changelog)
136 136
137 137 def __iter__(self):
138 138 for i in xrange(len(self)):
139 139 yield i
140 140
141 141 def url(self):
142 142 return 'file:' + self.root
143 143
144 144 def hook(self, name, throw=False, **args):
145 145 return hook.hook(self.ui, self, name, throw, **args)
146 146
147 147 tag_disallowed = ':\r\n'
148 148
149 149 def _tag(self, names, node, message, local, user, date, extra={}):
150 150 if isinstance(names, str):
151 151 allchars = names
152 152 names = (names,)
153 153 else:
154 154 allchars = ''.join(names)
155 155 for c in self.tag_disallowed:
156 156 if c in allchars:
157 157 raise util.Abort(_('%r cannot be used in a tag name') % c)
158 158
159 159 for name in names:
160 160 self.hook('pretag', throw=True, node=hex(node), tag=name,
161 161 local=local)
162 162
163 163 def writetags(fp, names, munge, prevtags):
164 164 fp.seek(0, 2)
165 165 if prevtags and prevtags[-1] != '\n':
166 166 fp.write('\n')
167 167 for name in names:
168 168 m = munge and munge(name) or name
169 169 if self._tagtypes and name in self._tagtypes:
170 170 old = self._tags.get(name, nullid)
171 171 fp.write('%s %s\n' % (hex(old), m))
172 172 fp.write('%s %s\n' % (hex(node), m))
173 173 fp.close()
174 174
175 175 prevtags = ''
176 176 if local:
177 177 try:
178 178 fp = self.opener('localtags', 'r+')
179 179 except IOError:
180 180 fp = self.opener('localtags', 'a')
181 181 else:
182 182 prevtags = fp.read()
183 183
184 184 # local tags are stored in the current charset
185 185 writetags(fp, names, None, prevtags)
186 186 for name in names:
187 187 self.hook('tag', node=hex(node), tag=name, local=local)
188 188 return
189 189
190 190 try:
191 191 fp = self.wfile('.hgtags', 'rb+')
192 192 except IOError:
193 193 fp = self.wfile('.hgtags', 'ab')
194 194 else:
195 195 prevtags = fp.read()
196 196
197 197 # committed tags are stored in UTF-8
198 198 writetags(fp, names, encoding.fromlocal, prevtags)
199 199
200 200 if '.hgtags' not in self.dirstate:
201 201 self.add(['.hgtags'])
202 202
203 203 m = match_.exact(self.root, '', ['.hgtags'])
204 204 tagnode = self.commit(message, user, date, extra=extra, match=m)
205 205
206 206 for name in names:
207 207 self.hook('tag', node=hex(node), tag=name, local=local)
208 208
209 209 return tagnode
210 210
211 211 def tag(self, names, node, message, local, user, date):
212 212 '''tag a revision with one or more symbolic names.
213 213
214 214 names is a list of strings or, when adding a single tag, names may be a
215 215 string.
216 216
217 217 if local is True, the tags are stored in a per-repository file.
218 218 otherwise, they are stored in the .hgtags file, and a new
219 219 changeset is committed with the change.
220 220
221 221 keyword arguments:
222 222
223 223 local: whether to store tags in non-version-controlled file
224 224 (default False)
225 225
226 226 message: commit message to use if committing
227 227
228 228 user: name of user to use if committing
229 229
230 230 date: date tuple to use if committing'''
231 231
232 232 for x in self.status()[:5]:
233 233 if '.hgtags' in x:
234 234 raise util.Abort(_('working copy of .hgtags is changed '
235 235 '(please commit .hgtags manually)'))
236 236
237 237 self.tags() # instantiate the cache
238 238 self._tag(names, node, message, local, user, date)
239 239
240 240 def tags(self):
241 241 '''return a mapping of tag to node'''
242 242 if self._tags is None:
243 243 (self._tags, self._tagtypes) = self._findtags()
244 244
245 245 return self._tags
246 246
247 247 def _findtags(self):
248 248 '''Do the hard work of finding tags. Return a pair of dicts
249 249 (tags, tagtypes) where tags maps tag name to node, and tagtypes
250 250 maps tag name to a string like \'global\' or \'local\'.
251 251 Subclasses or extensions are free to add their own tags, but
252 252 should be aware that the returned dicts will be retained for the
253 253 duration of the localrepo object.'''
254 254
255 255 # XXX what tagtype should subclasses/extensions use? Currently
256 256 # mq and bookmarks add tags, but do not set the tagtype at all.
257 257 # Should each extension invent its own tag type? Should there
258 258 # be one tagtype for all such "virtual" tags? Or is the status
259 259 # quo fine?
260 260
261 261 alltags = {} # map tag name to (node, hist)
262 262 tagtypes = {}
263 263
264 264 tags_.findglobaltags(self.ui, self, alltags, tagtypes)
265 265 tags_.readlocaltags(self.ui, self, alltags, tagtypes)
266 266
267 267 # Build the return dicts. Have to re-encode tag names because
268 268 # the tags module always uses UTF-8 (in order not to lose info
269 269 # writing to the cache), but the rest of Mercurial wants them in
270 270 # local encoding.
271 271 tags = {}
272 272 for (name, (node, hist)) in alltags.iteritems():
273 273 if node != nullid:
274 274 tags[encoding.tolocal(name)] = node
275 275 tags['tip'] = self.changelog.tip()
276 276 tagtypes = dict([(encoding.tolocal(name), value)
277 277 for (name, value) in tagtypes.iteritems()])
278 278 return (tags, tagtypes)
279 279
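The re-encoding described in the comment above is what lets the tags layer keep UTF-8 internally while the rest of Mercurial sees names in the local charset. A toy stand-in for encoding.tolocal() under that assumption (Python 2 byte strings, illustration only, not the real implementation):

def to_local(utf8_name, local_charset='iso-8859-1'):
    try:
        return utf8_name.decode('utf-8').encode(local_charset, 'replace')
    except UnicodeDecodeError:
        # not valid UTF-8: hand the raw bytes through unchanged
        return utf8_name
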
280 280 def tagtype(self, tagname):
281 281 '''
282 282 return the type of the given tag. result can be:
283 283
284 284 'local' : a local tag
285 285 'global' : a global tag
286 286 None : tag does not exist
287 287 '''
288 288
289 289 self.tags()
290 290
291 291 return self._tagtypes.get(tagname)
292 292
293 293 def tagslist(self):
294 294 '''return a list of tags ordered by revision'''
295 295 l = []
296 296 for t, n in self.tags().iteritems():
297 297 try:
298 298 r = self.changelog.rev(n)
299 299 except:
300 300 r = -2 # sort to the beginning of the list if unknown
301 301 l.append((r, t, n))
302 302 return [(t, n) for r, t, n in sorted(l)]
303 303
304 304 def nodetags(self, node):
305 305 '''return the tags associated with a node'''
306 306 if not self.nodetagscache:
307 307 self.nodetagscache = {}
308 308 for t, n in self.tags().iteritems():
309 309 self.nodetagscache.setdefault(n, []).append(t)
310 310 return self.nodetagscache.get(node, [])
311 311
312 312 def _branchtags(self, partial, lrev):
313 313 # TODO: rename this function?
314 314 tiprev = len(self) - 1
315 315 if lrev != tiprev:
316 316 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 317 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318 318
319 319 return partial
320 320
321 def lbranchmap(self):
322 branchcache = {}
323 partial = self.branchmap()
324
325 # the branch cache is stored on disk as UTF-8, but in the local
326 # charset internally
327 for k, v in partial.iteritems():
328 branchcache[encoding.tolocal(k)] = v
329 return branchcache
330
331 321 def branchmap(self):
332 322 tip = self.changelog.tip()
333 323 if self._branchcache is not None and self._branchcachetip == tip:
334 324 return self._branchcache
335 325
336 326 oldtip = self._branchcachetip
337 327 self._branchcachetip = tip
338 328 if oldtip is None or oldtip not in self.changelog.nodemap:
339 329 partial, last, lrev = self._readbranchcache()
340 330 else:
341 331 lrev = self.changelog.rev(oldtip)
342 332 partial = self._branchcache
343 333
344 334 self._branchtags(partial, lrev)
345 335 # this private cache holds all heads (not just tips)
346 336 self._branchcache = partial
347 337
348 338 return self._branchcache
349 339
350 340 def branchtags(self):
351 341 '''return a dict where branch names map to the tipmost head of
352 342 the branch, open heads come before closed'''
353 343 bt = {}
354 for bn, heads in self.lbranchmap().iteritems():
344 for bn, heads in self.branchmap().iteritems():
355 345 head = None
356 346 for i in range(len(heads)-1, -1, -1):
357 347 h = heads[i]
358 348 if 'close' not in self.changelog.read(h)[5]:
359 349 head = h
360 350 break
361 351 # no open heads were found
362 352 if head is None:
363 353 head = heads[-1]
364 354 bt[bn] = head
365 355 return bt
366 356
367 357
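With lbranchmap() removed by this changeset, branchmap() (and therefore branchtags()) returns branch names as UTF-8, and callers that need the local charset convert the names themselves. A minimal sketch of that caller-side pattern, using only the encoding module already imported in this file:

def local_branch_names(repo):
    # branchmap() keys are UTF-8 byte strings; translate for display
    return [encoding.tolocal(name) for name in repo.branchmap()]
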
368 358 def _readbranchcache(self):
369 359 partial = {}
370 360 try:
371 361 f = self.opener("branchheads.cache")
372 362 lines = f.read().split('\n')
373 363 f.close()
374 364 except (IOError, OSError):
375 365 return {}, nullid, nullrev
376 366
377 367 try:
378 368 last, lrev = lines.pop(0).split(" ", 1)
379 369 last, lrev = bin(last), int(lrev)
380 370 if lrev >= len(self) or self[lrev].node() != last:
381 371 # invalidate the cache
382 372 raise ValueError('invalidating branch cache (tip differs)')
383 373 for l in lines:
384 374 if not l: continue
385 375 node, label = l.split(" ", 1)
386 376 partial.setdefault(label.strip(), []).append(bin(node))
387 377 except KeyboardInterrupt:
388 378 raise
389 379 except Exception, inst:
390 380 if self.ui.debugflag:
391 381 self.ui.warn(str(inst), '\n')
392 382 partial, last, lrev = {}, nullid, nullrev
393 383 return partial, last, lrev
394 384
395 385 def _writebranchcache(self, branches, tip, tiprev):
396 386 try:
397 387 f = self.opener("branchheads.cache", "w", atomictemp=True)
398 388 f.write("%s %s\n" % (hex(tip), tiprev))
399 389 for label, nodes in branches.iteritems():
400 390 for node in nodes:
401 391 f.write("%s %s\n" % (hex(node), label))
402 392 f.rename()
403 393 except (IOError, OSError):
404 394 pass
405 395
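For reference, the on-disk layout produced by _writebranchcache() above, reconstructed directly from its write calls (placeholder values, hashes abbreviated for illustration):

# branchheads.cache
#   <tip-node-hex> <tip-rev>          header: the tip the cache matches
#   <head-node-hex> <branch name>     one line per branch head
#   <head-node-hex> <branch name>
#
# Branch names are written in the same encoding as the in-memory
# _branchcache (noted above as UTF-8), and _readbranchcache() discards
# the whole file if the recorded tip no longer matches the changelog.
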
406 396 def _updatebranchcache(self, partial, start, end):
407 397 # collect new branch entries
408 398 newbranches = {}
409 399 for r in xrange(start, end):
410 400 c = self[r]
411 401 newbranches.setdefault(c.branch(), []).append(c.node())
412 402 # if older branchheads are reachable from new ones, they aren't
413 403 # really branchheads. Note checking parents is insufficient:
414 404 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
415 405 for branch, newnodes in newbranches.iteritems():
416 406 bheads = partial.setdefault(branch, [])
417 407 bheads.extend(newnodes)
418 408 if len(bheads) < 2:
419 409 continue
420 410 newbheads = []
421 411 # starting from tip means fewer passes over reachable
422 412 while newnodes:
423 413 latest = newnodes.pop()
424 414 if latest not in bheads:
425 415 continue
426 416 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
427 417 reachable = self.changelog.reachable(latest, minbhrev)
428 418 bheads = [b for b in bheads if b not in reachable]
429 419 newbheads.insert(0, latest)
430 420 bheads.extend(newbheads)
431 421 partial[branch] = bheads
432 422
433 423 def lookup(self, key):
434 424 if isinstance(key, int):
435 425 return self.changelog.node(key)
436 426 elif key == '.':
437 427 return self.dirstate.parents()[0]
438 428 elif key == 'null':
439 429 return nullid
440 430 elif key == 'tip':
441 431 return self.changelog.tip()
442 432 n = self.changelog._match(key)
443 433 if n:
444 434 return n
445 435 if key in self.tags():
446 436 return self.tags()[key]
447 437 if key in self.branchtags():
448 438 return self.branchtags()[key]
449 439 n = self.changelog._partialmatch(key)
450 440 if n:
451 441 return n
452 442
453 443 # can't find key, check if it might have come from damaged dirstate
454 444 if key in self.dirstate.parents():
455 445 raise error.Abort(_("working directory has unknown parent '%s'!")
456 446 % short(key))
457 447 try:
458 448 if len(key) == 20:
459 449 key = hex(key)
460 450 except:
461 451 pass
462 452 raise error.RepoLookupError(_("unknown revision '%s'") % key)
463 453
464 454 def local(self):
465 455 return True
466 456
467 457 def join(self, f):
468 458 return os.path.join(self.path, f)
469 459
470 460 def wjoin(self, f):
471 461 return os.path.join(self.root, f)
472 462
473 463 def rjoin(self, f):
474 464 return os.path.join(self.root, util.pconvert(f))
475 465
476 466 def file(self, f):
477 467 if f[0] == '/':
478 468 f = f[1:]
479 469 return filelog.filelog(self.sopener, f)
480 470
481 471 def changectx(self, changeid):
482 472 return self[changeid]
483 473
484 474 def parents(self, changeid=None):
485 475 '''get list of changectxs for parents of changeid'''
486 476 return self[changeid].parents()
487 477
488 478 def filectx(self, path, changeid=None, fileid=None):
489 479 """changeid can be a changeset revision, node, or tag.
490 480 fileid can be a file revision or node."""
491 481 return context.filectx(self, path, changeid, fileid)
492 482
493 483 def getcwd(self):
494 484 return self.dirstate.getcwd()
495 485
496 486 def pathto(self, f, cwd=None):
497 487 return self.dirstate.pathto(f, cwd)
498 488
499 489 def wfile(self, f, mode='r'):
500 490 return self.wopener(f, mode)
501 491
502 492 def _link(self, f):
503 493 return os.path.islink(self.wjoin(f))
504 494
505 495 def _filter(self, filter, filename, data):
506 496 if filter not in self.filterpats:
507 497 l = []
508 498 for pat, cmd in self.ui.configitems(filter):
509 499 if cmd == '!':
510 500 continue
511 501 mf = match_.match(self.root, '', [pat])
512 502 fn = None
513 503 params = cmd
514 504 for name, filterfn in self._datafilters.iteritems():
515 505 if cmd.startswith(name):
516 506 fn = filterfn
517 507 params = cmd[len(name):].lstrip()
518 508 break
519 509 if not fn:
520 510 fn = lambda s, c, **kwargs: util.filter(s, c)
521 511 # Wrap old filters not supporting keyword arguments
522 512 if not inspect.getargspec(fn)[2]:
523 513 oldfn = fn
524 514 fn = lambda s, c, **kwargs: oldfn(s, c)
525 515 l.append((mf, fn, params))
526 516 self.filterpats[filter] = l
527 517
528 518 for mf, fn, cmd in self.filterpats[filter]:
529 519 if mf(filename):
530 520 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
531 521 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
532 522 break
533 523
534 524 return data
535 525
536 526 def adddatafilter(self, name, filter):
537 527 self._datafilters[name] = filter
538 528
539 529 def wread(self, filename):
540 530 if self._link(filename):
541 531 data = os.readlink(self.wjoin(filename))
542 532 else:
543 533 data = self.wopener(filename, 'r').read()
544 534 return self._filter("encode", filename, data)
545 535
546 536 def wwrite(self, filename, data, flags):
547 537 data = self._filter("decode", filename, data)
548 538 try:
549 539 os.unlink(self.wjoin(filename))
550 540 except OSError:
551 541 pass
552 542 if 'l' in flags:
553 543 self.wopener.symlink(data, filename)
554 544 else:
555 545 self.wopener(filename, 'w').write(data)
556 546 if 'x' in flags:
557 547 util.set_flags(self.wjoin(filename), False, True)
558 548
559 549 def wwritedata(self, filename, data):
560 550 return self._filter("decode", filename, data)
561 551
562 552 def transaction(self):
563 553 tr = self._transref and self._transref() or None
564 554 if tr and tr.running():
565 555 return tr.nest()
566 556
567 557 # abort here if the journal already exists
568 558 if os.path.exists(self.sjoin("journal")):
569 559 raise error.RepoError(_("journal already exists - run hg recover"))
570 560
571 561 # save dirstate for rollback
572 562 try:
573 563 ds = self.opener("dirstate").read()
574 564 except IOError:
575 565 ds = ""
576 566 self.opener("journal.dirstate", "w").write(ds)
577 567 self.opener("journal.branch", "w").write(self.dirstate.branch())
578 568
579 569 renames = [(self.sjoin("journal"), self.sjoin("undo")),
580 570 (self.join("journal.dirstate"), self.join("undo.dirstate")),
581 571 (self.join("journal.branch"), self.join("undo.branch"))]
582 572 tr = transaction.transaction(self.ui.warn, self.sopener,
583 573 self.sjoin("journal"),
584 574 aftertrans(renames),
585 575 self.store.createmode)
586 576 self._transref = weakref.ref(tr)
587 577 return tr
588 578
589 579 def recover(self):
590 580 lock = self.lock()
591 581 try:
592 582 if os.path.exists(self.sjoin("journal")):
593 583 self.ui.status(_("rolling back interrupted transaction\n"))
594 584 transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
595 585 self.invalidate()
596 586 return True
597 587 else:
598 588 self.ui.warn(_("no interrupted transaction available\n"))
599 589 return False
600 590 finally:
601 591 lock.release()
602 592
603 593 def rollback(self):
604 594 wlock = lock = None
605 595 try:
606 596 wlock = self.wlock()
607 597 lock = self.lock()
608 598 if os.path.exists(self.sjoin("undo")):
609 599 self.ui.status(_("rolling back last transaction\n"))
610 600 transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
611 601 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
612 602 try:
613 603 branch = self.opener("undo.branch").read()
614 604 self.dirstate.setbranch(branch)
615 605 except IOError:
616 606 self.ui.warn(_("Named branch could not be reset, "
617 607 "current branch still is: %s\n")
618 608 % encoding.tolocal(self.dirstate.branch()))
619 609 self.invalidate()
620 610 self.dirstate.invalidate()
621 611 self.destroyed()
622 612 else:
623 613 self.ui.warn(_("no rollback information available\n"))
624 614 finally:
625 615 release(lock, wlock)
626 616
627 617 def invalidate(self):
628 618 for a in "changelog manifest".split():
629 619 if a in self.__dict__:
630 620 delattr(self, a)
631 621 self._tags = None
632 622 self._tagtypes = None
633 623 self.nodetagscache = None
634 624 self._branchcache = None # in UTF-8
635 625 self._branchcachetip = None
636 626
637 627 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 628 try:
639 629 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 630 except error.LockHeld, inst:
641 631 if not wait:
642 632 raise
643 633 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 634 (desc, inst.locker))
645 635 # default to 600 seconds timeout
646 636 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 637 releasefn, desc=desc)
648 638 if acquirefn:
649 639 acquirefn()
650 640 return l
651 641
652 642 def lock(self, wait=True):
653 643 '''Lock the repository store (.hg/store) and return a weak reference
654 644 to the lock. Use this before modifying the store (e.g. committing or
655 645 stripping). If you are opening a transaction, get a lock as well.'''
656 646 l = self._lockref and self._lockref()
657 647 if l is not None and l.held:
658 648 l.lock()
659 649 return l
660 650
661 651 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
662 652 _('repository %s') % self.origroot)
663 653 self._lockref = weakref.ref(l)
664 654 return l
665 655
666 656 def wlock(self, wait=True):
667 657 '''Lock the non-store parts of the repository (everything under
668 658 .hg except .hg/store) and return a weak reference to the lock.
669 659 Use this before modifying files in .hg.'''
670 660 l = self._wlockref and self._wlockref()
671 661 if l is not None and l.held:
672 662 l.lock()
673 663 return l
674 664
675 665 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
676 666 self.dirstate.invalidate, _('working directory of %s') %
677 667 self.origroot)
678 668 self._wlockref = weakref.ref(l)
679 669 return l
680 670
681 671 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
682 672 """
683 673 commit an individual file as part of a larger transaction
684 674 """
685 675
686 676 fname = fctx.path()
687 677 text = fctx.data()
688 678 flog = self.file(fname)
689 679 fparent1 = manifest1.get(fname, nullid)
690 680 fparent2 = fparent2o = manifest2.get(fname, nullid)
691 681
692 682 meta = {}
693 683 copy = fctx.renamed()
694 684 if copy and copy[0] != fname:
695 685 # Mark the new revision of this file as a copy of another
696 686 # file. This copy data will effectively act as a parent
697 687 # of this new revision. If this is a merge, the first
698 688 # parent will be the nullid (meaning "look up the copy data")
699 689 # and the second one will be the other parent. For example:
700 690 #
701 691 # 0 --- 1 --- 3 rev1 changes file foo
702 692 # \ / rev2 renames foo to bar and changes it
703 693 # \- 2 -/ rev3 should have bar with all changes and
704 694 # should record that bar descends from
705 695 # bar in rev2 and foo in rev1
706 696 #
707 697 # this allows this merge to succeed:
708 698 #
709 699 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
710 700 # \ / merging rev3 and rev4 should use bar@rev2
711 701 # \- 2 --- 4 as the merge base
712 702 #
713 703
714 704 cfname = copy[0]
715 705 crev = manifest1.get(cfname)
716 706 newfparent = fparent2
717 707
718 708 if manifest2: # branch merge
719 709 if fparent2 == nullid or crev is None: # copied on remote side
720 710 if cfname in manifest2:
721 711 crev = manifest2[cfname]
722 712 newfparent = fparent1
723 713
724 714 # find source in nearest ancestor if we've lost track
725 715 if not crev:
726 716 self.ui.debug(" %s: searching for copy revision for %s\n" %
727 717 (fname, cfname))
728 718 for ancestor in self['.'].ancestors():
729 719 if cfname in ancestor:
730 720 crev = ancestor[cfname].filenode()
731 721 break
732 722
733 723 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
734 724 meta["copy"] = cfname
735 725 meta["copyrev"] = hex(crev)
736 726 fparent1, fparent2 = nullid, newfparent
737 727 elif fparent2 != nullid:
738 728 # is one parent an ancestor of the other?
739 729 fparentancestor = flog.ancestor(fparent1, fparent2)
740 730 if fparentancestor == fparent1:
741 731 fparent1, fparent2 = fparent2, nullid
742 732 elif fparentancestor == fparent2:
743 733 fparent2 = nullid
744 734
745 735 # is the file changed?
746 736 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
747 737 changelist.append(fname)
748 738 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
749 739
750 740 # are just the flags changed during merge?
751 741 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
752 742 changelist.append(fname)
753 743
754 744 return fparent1
755 745
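# Editor's note: a minimal sketch, not part of this changeset, of the copy
# metadata that _filecommit() above records when a rename is detected. The
# source file name and the node value are invented placeholders.
def _example_copy_meta():
    crev = "\0" * 20              # placeholder filenode of the copy source
    meta = {
        "copy": "foo",            # path the file was copied or renamed from
        "copyrev": hex(crev),     # hex of the source filenode
    }
    # fparent1 is set to nullid in this case so readers know to consult the
    # copy data instead of the first parent
    return meta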
756 746 def commit(self, text="", user=None, date=None, match=None, force=False,
757 747 editor=False, extra={}):
758 748 """Add a new revision to current repository.
759 749
760 750 Revision information is gathered from the working directory;
761 751 match can be used to filter the committed files. If editor is
762 752 supplied, it is called to get a commit message.
763 753 """
764 754
765 755 def fail(f, msg):
766 756 raise util.Abort('%s: %s' % (f, msg))
767 757
768 758 if not match:
769 759 match = match_.always(self.root, '')
770 760
771 761 if not force:
772 762 vdirs = []
773 763 match.dir = vdirs.append
774 764 match.bad = fail
775 765
776 766 wlock = self.wlock()
777 767 try:
778 768 p1, p2 = self.dirstate.parents()
779 769 wctx = self[None]
780 770
781 771 if (not force and p2 != nullid and match and
782 772 (match.files() or match.anypats())):
783 773 raise util.Abort(_('cannot partially commit a merge '
784 774 '(do not specify files or patterns)'))
785 775
786 776 changes = self.status(match=match, clean=force)
787 777 if force:
788 778 changes[0].extend(changes[6]) # mq may commit unchanged files
789 779
790 780 # check subrepos
791 781 subs = []
792 782 for s in wctx.substate:
793 783 if match(s) and wctx.sub(s).dirty():
794 784 subs.append(s)
795 785 if subs and '.hgsubstate' not in changes[0]:
796 786 changes[0].insert(0, '.hgsubstate')
797 787
798 788 # make sure all explicit patterns are matched
799 789 if not force and match.files():
800 790 matched = set(changes[0] + changes[1] + changes[2])
801 791
802 792 for f in match.files():
803 793 if f == '.' or f in matched or f in wctx.substate:
804 794 continue
805 795 if f in changes[3]: # missing
806 796 fail(f, _('file not found!'))
807 797 if f in vdirs: # visited directory
808 798 d = f + '/'
809 799 for mf in matched:
810 800 if mf.startswith(d):
811 801 break
812 802 else:
813 803 fail(f, _("no match under directory!"))
814 804 elif f not in self.dirstate:
815 805 fail(f, _("file not tracked!"))
816 806
817 807 if (not force and not extra.get("close") and p2 == nullid
818 808 and not (changes[0] or changes[1] or changes[2])
819 809 and self[None].branch() == self['.'].branch()):
820 810 return None
821 811
822 812 ms = merge_.mergestate(self)
823 813 for f in changes[0]:
824 814 if f in ms and ms[f] == 'u':
825 815 raise util.Abort(_("unresolved merge conflicts "
826 816 "(see hg resolve)"))
827 817
828 818 cctx = context.workingctx(self, (p1, p2), text, user, date,
829 819 extra, changes)
830 820 if editor:
831 821 cctx._text = editor(self, cctx, subs)
832 822
833 823 # commit subs
834 824 if subs:
835 825 state = wctx.substate.copy()
836 826 for s in subs:
837 827 self.ui.status(_('committing subrepository %s\n') % s)
838 828 sr = wctx.sub(s).commit(cctx._text, user, date)
839 829 state[s] = (state[s][0], sr)
840 830 subrepo.writestate(self, state)
841 831
842 832 ret = self.commitctx(cctx, True)
843 833
844 834 # update dirstate and mergestate
845 835 for f in changes[0] + changes[1]:
846 836 self.dirstate.normal(f)
847 837 for f in changes[2]:
848 838 self.dirstate.forget(f)
849 839 self.dirstate.setparents(ret)
850 840 ms.reset()
851 841
852 842 return ret
853 843
854 844 finally:
855 845 wlock.release()
856 846
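# Editor's note: an illustrative sketch, not part of this changeset, of a
# partial commit through commit() above, filtered by a match object. The
# pattern, message and user are invented, and the match_.match() signature is
# assumed from this era of the API.
def _example_partial_commit(repo):
    m = match_.match(repo.root, repo.getcwd(), ['docs/readme.txt'])
    return repo.commit(text="update readme", user="example <ex@example.org>",
                       match=m)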
857 847 def commitctx(self, ctx, error=False):
858 848 """Add a new revision to current repository.
859 849
860 850 Revision information is passed via the context argument.
861 851 """
862 852
863 853 tr = lock = None
864 854 removed = ctx.removed()
865 855 p1, p2 = ctx.p1(), ctx.p2()
866 856 m1 = p1.manifest().copy()
867 857 m2 = p2.manifest()
868 858 user = ctx.user()
869 859
870 860 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
871 861 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
872 862
873 863 lock = self.lock()
874 864 try:
875 865 tr = self.transaction()
876 866 trp = weakref.proxy(tr)
877 867
878 868 # check in files
879 869 new = {}
880 870 changed = []
881 871 linkrev = len(self)
882 872 for f in sorted(ctx.modified() + ctx.added()):
883 873 self.ui.note(f + "\n")
884 874 try:
885 875 fctx = ctx[f]
886 876 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
887 877 changed)
888 878 m1.set(f, fctx.flags())
889 879 except (OSError, IOError):
890 880 if error:
891 881 self.ui.warn(_("trouble committing %s!\n") % f)
892 882 raise
893 883 else:
894 884 removed.append(f)
895 885
896 886 # update manifest
897 887 m1.update(new)
898 888 removed = [f for f in sorted(removed) if f in m1 or f in m2]
899 889 drop = [f for f in removed if f in m1]
900 890 for f in drop:
901 891 del m1[f]
902 892 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
903 893 p2.manifestnode(), (new, drop))
904 894
905 895 # update changelog
906 896 self.changelog.delayupdate()
907 897 n = self.changelog.add(mn, changed + removed, ctx.description(),
908 898 trp, p1.node(), p2.node(),
909 899 user, ctx.date(), ctx.extra().copy())
910 900 p = lambda: self.changelog.writepending() and self.root or ""
911 901 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
912 902 parent2=xp2, pending=p)
913 903 self.changelog.finalize(trp)
914 904 tr.close()
915 905
916 906 if self._branchcache:
917 907 self.branchtags()
918 908
919 909 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
920 910 return n
921 911 finally:
922 912 del tr
923 913 lock.release()
924 914
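# Editor's note: a sketch, not part of this changeset, of feeding commitctx()
# an in-memory changeset the way import tools do. The memctx and memfilectx
# signatures are assumptions based on this era of the context module, and the
# file name and contents are invented.
def _example_commitctx(repo):
    def getfilectx(repo, memctx, path):
        return context.memfilectx(path, "example contents\n",
                                   islink=False, isexec=False, copied=None)
    p1 = repo['.'].node()
    ctx = context.memctx(repo, (p1, nullid), "example commit",
                         ['example.txt'], getfilectx,
                         user="example <ex@example.org>")
    return repo.commitctx(ctx)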
925 915 def destroyed(self):
926 916 '''Inform the repository that nodes have been destroyed.
927 917 Intended for use by strip and rollback, so there's a common
928 918 place for anything that has to be done after destroying history.'''
929 919 # XXX it might be nice if we could take the list of destroyed
930 920 # nodes, but I don't see an easy way for rollback() to do that
931 921
932 922 # Ensure the persistent tag cache is updated. Doing it now
933 923 # means that the tag cache only has to worry about destroyed
934 924 # heads immediately after a strip/rollback. That in turn
935 925 # guarantees that "cachetip == currenttip" (comparing both rev
936 926 # and node) always means no nodes have been added or destroyed.
937 927
938 928 # XXX this is suboptimal when qrefresh'ing: we strip the current
939 929 # head, refresh the tag cache, then immediately add a new head.
940 930 # But I think doing it this way is necessary for the "instant
941 931 # tag cache retrieval" case to work.
942 932 tags_.findglobaltags(self.ui, self, {}, {})
943 933
944 934 def walk(self, match, node=None):
945 935 '''
946 936 walk recursively through the directory tree or a given
947 937 changeset, finding all files matched by the match
948 938 function
949 939 '''
950 940 return self[node].walk(match)
951 941
952 942 def status(self, node1='.', node2=None, match=None,
953 943 ignored=False, clean=False, unknown=False):
954 944 """return status of files between two nodes or node and working directory
955 945
956 946 If node1 is None, use the first dirstate parent instead.
957 947 If node2 is None, compare node1 with working directory.
958 948 """
959 949
960 950 def mfmatches(ctx):
961 951 mf = ctx.manifest().copy()
962 952 for fn in mf.keys():
963 953 if not match(fn):
964 954 del mf[fn]
965 955 return mf
966 956
967 957 if isinstance(node1, context.changectx):
968 958 ctx1 = node1
969 959 else:
970 960 ctx1 = self[node1]
971 961 if isinstance(node2, context.changectx):
972 962 ctx2 = node2
973 963 else:
974 964 ctx2 = self[node2]
975 965
976 966 working = ctx2.rev() is None
977 967 parentworking = working and ctx1 == self['.']
978 968 match = match or match_.always(self.root, self.getcwd())
979 969 listignored, listclean, listunknown = ignored, clean, unknown
980 970
981 971 # load earliest manifest first for caching reasons
982 972 if not working and ctx2.rev() < ctx1.rev():
983 973 ctx2.manifest()
984 974
985 975 if not parentworking:
986 976 def bad(f, msg):
987 977 if f not in ctx1:
988 978 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
989 979 match.bad = bad
990 980
991 981 if working: # we need to scan the working dir
992 982 s = self.dirstate.status(match, listignored, listclean, listunknown)
993 983 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
994 984
995 985 # check for any possibly clean files
996 986 if parentworking and cmp:
997 987 fixup = []
998 988 # do a full compare of any files that might have changed
999 989 for f in sorted(cmp):
1000 990 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1001 991 or ctx1[f].cmp(ctx2[f].data())):
1002 992 modified.append(f)
1003 993 else:
1004 994 fixup.append(f)
1005 995
1006 996 if listclean:
1007 997 clean += fixup
1008 998
1009 999 # update dirstate for files that are actually clean
1010 1000 if fixup:
1011 1001 try:
1012 1002 # updating the dirstate is optional
1013 1003 # so we don't wait on the lock
1014 1004 wlock = self.wlock(False)
1015 1005 try:
1016 1006 for f in fixup:
1017 1007 self.dirstate.normal(f)
1018 1008 finally:
1019 1009 wlock.release()
1020 1010 except error.LockError:
1021 1011 pass
1022 1012
1023 1013 if not parentworking:
1024 1014 mf1 = mfmatches(ctx1)
1025 1015 if working:
1026 1016 # we are comparing working dir against non-parent
1027 1017 # generate a pseudo-manifest for the working dir
1028 1018 mf2 = mfmatches(self['.'])
1029 1019 for f in cmp + modified + added:
1030 1020 mf2[f] = None
1031 1021 mf2.set(f, ctx2.flags(f))
1032 1022 for f in removed:
1033 1023 if f in mf2:
1034 1024 del mf2[f]
1035 1025 else:
1036 1026 # we are comparing two revisions
1037 1027 deleted, unknown, ignored = [], [], []
1038 1028 mf2 = mfmatches(ctx2)
1039 1029
1040 1030 modified, added, clean = [], [], []
1041 1031 for fn in mf2:
1042 1032 if fn in mf1:
1043 1033 if (mf1.flags(fn) != mf2.flags(fn) or
1044 1034 (mf1[fn] != mf2[fn] and
1045 1035 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1046 1036 modified.append(fn)
1047 1037 elif listclean:
1048 1038 clean.append(fn)
1049 1039 del mf1[fn]
1050 1040 else:
1051 1041 added.append(fn)
1052 1042 removed = mf1.keys()
1053 1043
1054 1044 r = modified, added, removed, deleted, unknown, ignored, clean
1055 1045 [l.sort() for l in r]
1056 1046 return r
1057 1047
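# Editor's note: a small sketch, not part of this changeset, showing how
# callers usually unpack the 7-tuple returned by status() above. The variable
# names and the one-letter codes are only the usual convention, not part of
# the API.
def _example_show_status(repo):
    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(ignored=True, clean=True, unknown=True)
    for code, files in zip("MAR!?IC", (modified, added, removed, deleted,
                                       unknown, ignored, clean)):
        for f in files:
            repo.ui.write("%s %s\n" % (code, f))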
1058 1048 def add(self, list):
1059 1049 wlock = self.wlock()
1060 1050 try:
1061 1051 rejected = []
1062 1052 for f in list:
1063 1053 p = self.wjoin(f)
1064 1054 try:
1065 1055 st = os.lstat(p)
1066 1056 except OSError:
1067 1057 self.ui.warn(_("%s does not exist!\n") % f)
1068 1058 rejected.append(f)
1069 1059 continue
1070 1060 if st.st_size > 10000000:
1071 1061 self.ui.warn(_("%s: files over 10MB may cause memory and"
1072 1062 " performance problems\n"
1073 1063 "(use 'hg revert %s' to unadd the file)\n")
1074 1064 % (f, f))
1075 1065 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1076 1066 self.ui.warn(_("%s not added: only files and symlinks "
1077 1067 "supported currently\n") % f)
1078 1068 rejected.append(p)
1079 1069 elif self.dirstate[f] in 'amn':
1080 1070 self.ui.warn(_("%s already tracked!\n") % f)
1081 1071 elif self.dirstate[f] == 'r':
1082 1072 self.dirstate.normallookup(f)
1083 1073 else:
1084 1074 self.dirstate.add(f)
1085 1075 return rejected
1086 1076 finally:
1087 1077 wlock.release()
1088 1078
1089 1079 def forget(self, list):
1090 1080 wlock = self.wlock()
1091 1081 try:
1092 1082 for f in list:
1093 1083 if self.dirstate[f] != 'a':
1094 1084 self.ui.warn(_("%s not added!\n") % f)
1095 1085 else:
1096 1086 self.dirstate.forget(f)
1097 1087 finally:
1098 1088 wlock.release()
1099 1089
1100 1090 def remove(self, list, unlink=False):
1101 1091 if unlink:
1102 1092 for f in list:
1103 1093 try:
1104 1094 util.unlink(self.wjoin(f))
1105 1095 except OSError, inst:
1106 1096 if inst.errno != errno.ENOENT:
1107 1097 raise
1108 1098 wlock = self.wlock()
1109 1099 try:
1110 1100 for f in list:
1111 1101 if unlink and os.path.exists(self.wjoin(f)):
1112 1102 self.ui.warn(_("%s still exists!\n") % f)
1113 1103 elif self.dirstate[f] == 'a':
1114 1104 self.dirstate.forget(f)
1115 1105 elif f not in self.dirstate:
1116 1106 self.ui.warn(_("%s not tracked!\n") % f)
1117 1107 else:
1118 1108 self.dirstate.remove(f)
1119 1109 finally:
1120 1110 wlock.release()
1121 1111
1122 1112 def undelete(self, list):
1123 1113 manifests = [self.manifest.read(self.changelog.read(p)[0])
1124 1114 for p in self.dirstate.parents() if p != nullid]
1125 1115 wlock = self.wlock()
1126 1116 try:
1127 1117 for f in list:
1128 1118 if self.dirstate[f] != 'r':
1129 1119 self.ui.warn(_("%s not removed!\n") % f)
1130 1120 else:
1131 1121 m = f in manifests[0] and manifests[0] or manifests[1]
1132 1122 t = self.file(f).read(m[f])
1133 1123 self.wwrite(f, t, m.flags(f))
1134 1124 self.dirstate.normal(f)
1135 1125 finally:
1136 1126 wlock.release()
1137 1127
1138 1128 def copy(self, source, dest):
1139 1129 p = self.wjoin(dest)
1140 1130 if not (os.path.exists(p) or os.path.islink(p)):
1141 1131 self.ui.warn(_("%s does not exist!\n") % dest)
1142 1132 elif not (os.path.isfile(p) or os.path.islink(p)):
1143 1133 self.ui.warn(_("copy failed: %s is not a file or a "
1144 1134 "symbolic link\n") % dest)
1145 1135 else:
1146 1136 wlock = self.wlock()
1147 1137 try:
1148 1138 if self.dirstate[dest] in '?r':
1149 1139 self.dirstate.add(dest)
1150 1140 self.dirstate.copy(source, dest)
1151 1141 finally:
1152 1142 wlock.release()
1153 1143
1154 1144 def heads(self, start=None):
1155 1145 heads = self.changelog.heads(start)
1156 1146 # sort the output in rev descending order
1157 1147 heads = [(-self.changelog.rev(h), h) for h in heads]
1158 1148 return [n for (r, n) in sorted(heads)]
1159 1149
1160 1150 def branchheads(self, branch=None, start=None, closed=False):
1161 1151 '''return a (possibly filtered) list of heads for the given branch
1162 1152
1163 1153 Heads are returned in topological order, from newest to oldest.
1164 1154 If branch is None, use the dirstate branch.
1165 1155 If start is not None, return only heads reachable from start.
1166 1156 If closed is True, return heads that are marked as closed as well.
1167 1157 '''
1168 1158 if branch is None:
1169 1159 branch = self[None].branch()
1170 branches = self.lbranchmap()
1160 branches = self.branchmap()
1171 1161 if branch not in branches:
1172 1162 return []
1173 1163 # the cache returns heads ordered lowest to highest
1174 1164 bheads = list(reversed(branches[branch]))
1175 1165 if start is not None:
1176 1166 # filter out the heads that cannot be reached from startrev
1177 1167 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1178 1168 bheads = [h for h in bheads if h in fbheads]
1179 1169 if not closed:
1180 1170 bheads = [h for h in bheads if
1181 1171 ('close' not in self.changelog.read(h)[5])]
1182 1172 return bheads
1183 1173
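# Editor's note: an illustrative sketch, not part of this changeset, tying
# branchheads() to branchmap(). With this change the keys of branchmap() are
# utf-8 encoded branch names, so code that displays them should convert back
# with encoding.tolocal() (the mercurial.encoding module); the loop and the
# message format are assumptions for the example.
def _example_print_branches(repo):
    for branch, heads in repo.branchmap().iteritems():
        repo.ui.write("%s: %d head(s)\n"
                      % (encoding.tolocal(branch), len(heads)))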
1184 1174 def branches(self, nodes):
1185 1175 if not nodes:
1186 1176 nodes = [self.changelog.tip()]
1187 1177 b = []
1188 1178 for n in nodes:
1189 1179 t = n
1190 1180 while 1:
1191 1181 p = self.changelog.parents(n)
1192 1182 if p[1] != nullid or p[0] == nullid:
1193 1183 b.append((t, n, p[0], p[1]))
1194 1184 break
1195 1185 n = p[0]
1196 1186 return b
1197 1187
1198 1188 def between(self, pairs):
1199 1189 r = []
1200 1190
1201 1191 for top, bottom in pairs:
1202 1192 n, l, i = top, [], 0
1203 1193 f = 1
1204 1194
1205 1195 while n != bottom and n != nullid:
1206 1196 p = self.changelog.parents(n)[0]
1207 1197 if i == f:
1208 1198 l.append(n)
1209 1199 f = f * 2
1210 1200 n = p
1211 1201 i += 1
1212 1202
1213 1203 r.append(l)
1214 1204
1215 1205 return r
1216 1206
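# Editor's note: a self-contained sketch, not part of this changeset, of the
# sampling that between() above performs while walking from top towards
# bottom: it keeps the nodes at distances 1, 2, 4, 8, ... so the peers can
# binary-search the unknown range in few round trips. The list-based walk is
# an assumption used only to show the pattern.
def _example_between_sampling(chain):
    """chain is ordered top -> bottom; return the nodes between() would sample."""
    sampled, f = [], 1
    for i in xrange(1, len(chain)):    # i is the distance from the top
        if i == f:
            sampled.append(chain[i])
            f *= 2
    return sampled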
1217 1207 def findincoming(self, remote, base=None, heads=None, force=False):
1218 1208 """Return list of roots of the subsets of missing nodes from remote
1219 1209
1220 1210 If base dict is specified, assume that these nodes and their parents
1221 1211 exist on the remote side and that no child of a node of base exists
1222 1212 in both remote and self.
1223 1213 Furthermore, base will be updated to include the nodes that exist
1224 1214 in both self and remote but whose children do not exist in both.
1225 1215 If a list of heads is specified, return only nodes which are heads
1226 1216 or ancestors of these heads.
1227 1217
1228 1218 All the ancestors of base are in self and in remote.
1229 1219 All the descendants of the list returned are missing in self.
1230 1220 (and so we know that the rest of the nodes are missing in remote, see
1231 1221 outgoing)
1232 1222 """
1233 1223 return self.findcommonincoming(remote, base, heads, force)[1]
1234 1224
1235 1225 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1236 1226 """Return a tuple (common, missing roots, heads) used to identify
1237 1227 missing nodes from remote.
1238 1228
1239 1229 If base dict is specified, assume that these nodes and their parents
1240 1230 exist on the remote side and that no child of a node of base exists
1241 1231 in both remote and self.
1242 1232 Furthermore, base will be updated to include the nodes that exist
1243 1233 in both self and remote but whose children do not exist in both.
1244 1234 If a list of heads is specified, return only nodes which are heads
1245 1235 or ancestors of these heads.
1246 1236
1247 1237 All the ancestors of base are in self and in remote.
1248 1238 """
1249 1239 m = self.changelog.nodemap
1250 1240 search = []
1251 1241 fetch = set()
1252 1242 seen = set()
1253 1243 seenbranch = set()
1254 1244 if base is None:
1255 1245 base = {}
1256 1246
1257 1247 if not heads:
1258 1248 heads = remote.heads()
1259 1249
1260 1250 if self.changelog.tip() == nullid:
1261 1251 base[nullid] = 1
1262 1252 if heads != [nullid]:
1263 1253 return [nullid], [nullid], list(heads)
1264 1254 return [nullid], [], []
1265 1255
1266 1256 # assume we're closer to the tip than the root
1267 1257 # and start by examining the heads
1268 1258 self.ui.status(_("searching for changes\n"))
1269 1259
1270 1260 unknown = []
1271 1261 for h in heads:
1272 1262 if h not in m:
1273 1263 unknown.append(h)
1274 1264 else:
1275 1265 base[h] = 1
1276 1266
1277 1267 heads = unknown
1278 1268 if not unknown:
1279 1269 return base.keys(), [], []
1280 1270
1281 1271 req = set(unknown)
1282 1272 reqcnt = 0
1283 1273
1284 1274 # search through remote branches
1285 1275 # a 'branch' here is a linear segment of history, with four parts:
1286 1276 # head, root, first parent, second parent
1287 1277 # (a branch always has two parents (or none) by definition)
1288 1278 unknown = remote.branches(unknown)
1289 1279 while unknown:
1290 1280 r = []
1291 1281 while unknown:
1292 1282 n = unknown.pop(0)
1293 1283 if n[0] in seen:
1294 1284 continue
1295 1285
1296 1286 self.ui.debug("examining %s:%s\n"
1297 1287 % (short(n[0]), short(n[1])))
1298 1288 if n[0] == nullid: # found the end of the branch
1299 1289 pass
1300 1290 elif n in seenbranch:
1301 1291 self.ui.debug("branch already found\n")
1302 1292 continue
1303 1293 elif n[1] and n[1] in m: # do we know the base?
1304 1294 self.ui.debug("found incomplete branch %s:%s\n"
1305 1295 % (short(n[0]), short(n[1])))
1306 1296 search.append(n[0:2]) # schedule branch range for scanning
1307 1297 seenbranch.add(n)
1308 1298 else:
1309 1299 if n[1] not in seen and n[1] not in fetch:
1310 1300 if n[2] in m and n[3] in m:
1311 1301 self.ui.debug("found new changeset %s\n" %
1312 1302 short(n[1]))
1313 1303 fetch.add(n[1]) # earliest unknown
1314 1304 for p in n[2:4]:
1315 1305 if p in m:
1316 1306 base[p] = 1 # latest known
1317 1307
1318 1308 for p in n[2:4]:
1319 1309 if p not in req and p not in m:
1320 1310 r.append(p)
1321 1311 req.add(p)
1322 1312 seen.add(n[0])
1323 1313
1324 1314 if r:
1325 1315 reqcnt += 1
1326 1316 self.ui.debug("request %d: %s\n" %
1327 1317 (reqcnt, " ".join(map(short, r))))
1328 1318 for p in xrange(0, len(r), 10):
1329 1319 for b in remote.branches(r[p:p+10]):
1330 1320 self.ui.debug("received %s:%s\n" %
1331 1321 (short(b[0]), short(b[1])))
1332 1322 unknown.append(b)
1333 1323
1334 1324 # do binary search on the branches we found
1335 1325 while search:
1336 1326 newsearch = []
1337 1327 reqcnt += 1
1338 1328 for n, l in zip(search, remote.between(search)):
1339 1329 l.append(n[1])
1340 1330 p = n[0]
1341 1331 f = 1
1342 1332 for i in l:
1343 1333 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1344 1334 if i in m:
1345 1335 if f <= 2:
1346 1336 self.ui.debug("found new branch changeset %s\n" %
1347 1337 short(p))
1348 1338 fetch.add(p)
1349 1339 base[i] = 1
1350 1340 else:
1351 1341 self.ui.debug("narrowed branch search to %s:%s\n"
1352 1342 % (short(p), short(i)))
1353 1343 newsearch.append((p, i))
1354 1344 break
1355 1345 p, f = i, f * 2
1356 1346 search = newsearch
1357 1347
1358 1348 # sanity check our fetch list
1359 1349 for f in fetch:
1360 1350 if f in m:
1361 1351 raise error.RepoError(_("already have changeset ")
1362 1352 + short(f[:4]))
1363 1353
1364 1354 if base.keys() == [nullid]:
1365 1355 if force:
1366 1356 self.ui.warn(_("warning: repository is unrelated\n"))
1367 1357 else:
1368 1358 raise util.Abort(_("repository is unrelated"))
1369 1359
1370 1360 self.ui.debug("found new changesets starting at " +
1371 1361 " ".join([short(f) for f in fetch]) + "\n")
1372 1362
1373 1363 self.ui.debug("%d total queries\n" % reqcnt)
1374 1364
1375 1365 return base.keys(), list(fetch), heads
1376 1366
1377 1367 def findoutgoing(self, remote, base=None, heads=None, force=False):
1378 1368 """Return list of nodes that are roots of subsets not in remote
1379 1369
1380 1370 If base dict is specified, assume that these nodes and their parents
1381 1371 exist on the remote side.
1382 1372 If a list of heads is specified, return only nodes which are heads
1383 1373 or ancestors of these heads, and return a second element which
1384 1374 contains all remote heads which get new children.
1385 1375 """
1386 1376 if base is None:
1387 1377 base = {}
1388 1378 self.findincoming(remote, base, heads, force=force)
1389 1379
1390 1380 self.ui.debug("common changesets up to "
1391 1381 + " ".join(map(short, base.keys())) + "\n")
1392 1382
1393 1383 remain = set(self.changelog.nodemap)
1394 1384
1395 1385 # prune everything remote has from the tree
1396 1386 remain.remove(nullid)
1397 1387 remove = base.keys()
1398 1388 while remove:
1399 1389 n = remove.pop(0)
1400 1390 if n in remain:
1401 1391 remain.remove(n)
1402 1392 for p in self.changelog.parents(n):
1403 1393 remove.append(p)
1404 1394
1405 1395 # find every node whose parents have been pruned
1406 1396 subset = []
1407 1397 # find every remote head that will get new children
1408 1398 updated_heads = set()
1409 1399 for n in remain:
1410 1400 p1, p2 = self.changelog.parents(n)
1411 1401 if p1 not in remain and p2 not in remain:
1412 1402 subset.append(n)
1413 1403 if heads:
1414 1404 if p1 in heads:
1415 1405 updated_heads.add(p1)
1416 1406 if p2 in heads:
1417 1407 updated_heads.add(p2)
1418 1408
1419 1409 # this is the set of all roots we have to push
1420 1410 if heads:
1421 1411 return subset, list(updated_heads)
1422 1412 else:
1423 1413 return subset
1424 1414
1425 1415 def pull(self, remote, heads=None, force=False):
1426 1416 lock = self.lock()
1427 1417 try:
1428 1418 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1429 1419 force=force)
1430 1420 if fetch == [nullid]:
1431 1421 self.ui.status(_("requesting all changes\n"))
1432 1422
1433 1423 if not fetch:
1434 1424 self.ui.status(_("no changes found\n"))
1435 1425 return 0
1436 1426
1437 1427 if heads is None and remote.capable('changegroupsubset'):
1438 1428 heads = rheads
1439 1429
1440 1430 if heads is None:
1441 1431 cg = remote.changegroup(fetch, 'pull')
1442 1432 else:
1443 1433 if not remote.capable('changegroupsubset'):
1444 1434 raise util.Abort(_("Partial pull cannot be done because "
1445 1435 "other repository doesn't support "
1446 1436 "changegroupsubset."))
1447 1437 cg = remote.changegroupsubset(fetch, heads, 'pull')
1448 1438 return self.addchangegroup(cg, 'pull', remote.url())
1449 1439 finally:
1450 1440 lock.release()
1451 1441
1452 1442 def push(self, remote, force=False, revs=None):
1453 1443 # there are two ways to push to remote repo:
1454 1444 #
1455 1445 # addchangegroup assumes local user can lock remote
1456 1446 # repo (local filesystem, old ssh servers).
1457 1447 #
1458 1448 # unbundle assumes local user cannot lock remote repo (new ssh
1459 1449 # servers, http servers).
1460 1450
1461 1451 if remote.capable('unbundle'):
1462 1452 return self.push_unbundle(remote, force, revs)
1463 1453 return self.push_addchangegroup(remote, force, revs)
1464 1454
1465 1455 def prepush(self, remote, force, revs):
1466 1456 '''Analyze the local and remote repositories and determine which
1467 1457 changesets need to be pushed to the remote. Return a tuple
1468 1458 (changegroup, remoteheads). changegroup is a readable file-like
1469 1459 object whose read() returns successive changegroup chunks ready to
1470 1460 be sent over the wire. remoteheads is the list of remote heads.
1471 1461 '''
1472 1462 common = {}
1473 1463 remote_heads = remote.heads()
1474 1464 inc = self.findincoming(remote, common, remote_heads, force=force)
1475 1465
1476 1466 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1477 1467 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1478 1468
1479 1469 def checkbranch(lheads, rheads, updatelb):
1480 1470 '''
1481 1471 check whether there are more local heads than remote heads on
1482 1472 a specific branch.
1483 1473
1484 1474 lheads: local branch heads
1485 1475 rheads: remote branch heads
1486 1476 updatelb: outgoing local branch bases
1487 1477 '''
1488 1478
1489 1479 warn = 0
1490 1480
1491 1481 if not revs and len(lheads) > len(rheads):
1492 1482 warn = 1
1493 1483 else:
1494 1484 # add local heads involved in the push
1495 1485 updatelheads = [self.changelog.heads(x, lheads)
1496 1486 for x in updatelb]
1497 1487 newheads = set(sum(updatelheads, [])) & set(lheads)
1498 1488
1499 1489 if not newheads:
1500 1490 return True
1501 1491
1502 1492 # add heads we don't have or that are not involved in the push
1503 1493 for r in rheads:
1504 1494 if r in self.changelog.nodemap:
1505 1495 desc = self.changelog.heads(r, heads)
1506 1496 l = [h for h in heads if h in desc]
1507 1497 if not l:
1508 1498 newheads.add(r)
1509 1499 else:
1510 1500 newheads.add(r)
1511 1501 if len(newheads) > len(rheads):
1512 1502 warn = 1
1513 1503
1514 1504 if warn:
1515 1505 if not rheads: # new branch requires --force
1516 1506 self.ui.warn(_("abort: push creates new"
1517 1507 " remote branch '%s'!\n") %
1518 1508 self[updatelb[0]].branch())
1519 1509 else:
1520 1510 self.ui.warn(_("abort: push creates new remote heads!\n"))
1521 1511
1522 1512 self.ui.status(_("(did you forget to merge?"
1523 1513 " use push -f to force)\n"))
1524 1514 return False
1525 1515 return True
1526 1516
1527 1517 if not bases:
1528 1518 self.ui.status(_("no changes found\n"))
1529 1519 return None, 1
1530 1520 elif not force:
1531 1521 # Check for each named branch if we're creating new remote heads.
1532 1522 # To be a remote head after push, node must be either:
1533 1523 # - unknown locally
1534 1524 # - a local outgoing head descended from update
1535 1525 # - a remote head that's known locally and not
1536 1526 # ancestral to an outgoing head
1537 1527 #
1538 1528 # New named branches cannot be created without --force.
1539 1529
1540 1530 if remote_heads != [nullid]:
1541 1531 if remote.capable('branchmap'):
1542 1532 localhds = {}
1543 1533 if not revs:
1544 1534 localhds = self.branchmap()
1545 1535 else:
1546 1536 for n in heads:
1547 1537 branch = self[n].branch()
1548 1538 if branch in localhds:
1549 1539 localhds[branch].append(n)
1550 1540 else:
1551 1541 localhds[branch] = [n]
1552 1542
1553 1543 remotehds = remote.branchmap()
1554 1544
1555 1545 for lh in localhds:
1556 1546 if lh in remotehds:
1557 1547 rheads = remotehds[lh]
1558 1548 else:
1559 1549 rheads = []
1560 1550 lheads = localhds[lh]
1561 1551 updatelb = [upd for upd in update
1562 1552 if self[upd].branch() == lh]
1563 1553 if not updatelb:
1564 1554 continue
1565 1555 if not checkbranch(lheads, rheads, updatelb):
1566 1556 return None, 0
1567 1557 else:
1568 1558 if not checkbranch(heads, remote_heads, update):
1569 1559 return None, 0
1570 1560
1571 1561 if inc:
1572 1562 self.ui.warn(_("note: unsynced remote changes!\n"))
1573 1563
1574 1564
1575 1565 if revs is None:
1576 1566 # use the fast path, no race possible on push
1577 1567 cg = self._changegroup(common.keys(), 'push')
1578 1568 else:
1579 1569 cg = self.changegroupsubset(update, revs, 'push')
1580 1570 return cg, remote_heads
1581 1571
1582 1572 def push_addchangegroup(self, remote, force, revs):
1583 1573 lock = remote.lock()
1584 1574 try:
1585 1575 ret = self.prepush(remote, force, revs)
1586 1576 if ret[0] is not None:
1587 1577 cg, remote_heads = ret
1588 1578 return remote.addchangegroup(cg, 'push', self.url())
1589 1579 return ret[1]
1590 1580 finally:
1591 1581 lock.release()
1592 1582
1593 1583 def push_unbundle(self, remote, force, revs):
1594 1584 # local repo finds heads on server, finds out what revs it
1595 1585 # must push. once revs transferred, if server finds it has
1596 1586 # different heads (someone else won commit/push race), server
1597 1587 # aborts.
1598 1588
1599 1589 ret = self.prepush(remote, force, revs)
1600 1590 if ret[0] is not None:
1601 1591 cg, remote_heads = ret
1602 1592 if force: remote_heads = ['force']
1603 1593 return remote.unbundle(cg, remote_heads, 'push')
1604 1594 return ret[1]
1605 1595
1606 1596 def changegroupinfo(self, nodes, source):
1607 1597 if self.ui.verbose or source == 'bundle':
1608 1598 self.ui.status(_("%d changesets found\n") % len(nodes))
1609 1599 if self.ui.debugflag:
1610 1600 self.ui.debug("list of changesets:\n")
1611 1601 for node in nodes:
1612 1602 self.ui.debug("%s\n" % hex(node))
1613 1603
1614 1604 def changegroupsubset(self, bases, heads, source, extranodes=None):
1615 1605 """Compute a changegroup consisting of all the nodes that are
1616 1606 descendants of any of the bases and ancestors of any of the heads.
1617 1607 Return a chunkbuffer object whose read() method will return
1618 1608 successive changegroup chunks.
1619 1609
1620 1610 It is fairly complex as determining which filenodes and which
1621 1611 manifest nodes need to be included for the changeset to be complete
1622 1612 is non-trivial.
1623 1613
1624 1614 Another wrinkle is doing the reverse, figuring out which changeset in
1625 1615 the changegroup a particular filenode or manifestnode belongs to.
1626 1616
1627 1617 The caller can specify some nodes that must be included in the
1628 1618 changegroup using the extranodes argument. It should be a dict
1629 1619 where the keys are the filenames (or 1 for the manifest), and the
1630 1620 values are lists of (node, linknode) tuples, where node is a wanted
1631 1621 node and linknode is the changelog node that should be transmitted as
1632 1622 the linkrev (the sketch after this method shows the expected shape).
1633 1623 """
1634 1624
1635 1625 if extranodes is None:
1636 1626 # can we go through the fast path ?
1637 1627 heads.sort()
1638 1628 allheads = self.heads()
1639 1629 allheads.sort()
1640 1630 if heads == allheads:
1641 1631 common = []
1642 1632 # parents of bases are known from both sides
1643 1633 for n in bases:
1644 1634 for p in self.changelog.parents(n):
1645 1635 if p != nullid:
1646 1636 common.append(p)
1647 1637 return self._changegroup(common, source)
1648 1638
1649 1639 self.hook('preoutgoing', throw=True, source=source)
1650 1640
1651 1641 # Set up some initial variables
1652 1642 # Make it easy to refer to self.changelog
1653 1643 cl = self.changelog
1654 1644 # msng is short for missing - compute the list of changesets in this
1655 1645 # changegroup.
1656 1646 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1657 1647 self.changegroupinfo(msng_cl_lst, source)
1658 1648 # Some bases may turn out to be superfluous, and some heads may be
1659 1649 # too. nodesbetween will return the minimal set of bases and heads
1660 1650 # necessary to re-create the changegroup.
1661 1651
1662 1652 # Known heads are the list of heads that it is assumed the recipient
1663 1653 # of this changegroup will know about.
1664 1654 knownheads = set()
1665 1655 # We assume that all parents of bases are known heads.
1666 1656 for n in bases:
1667 1657 knownheads.update(cl.parents(n))
1668 1658 knownheads.discard(nullid)
1669 1659 knownheads = list(knownheads)
1670 1660 if knownheads:
1671 1661 # Now that we know what heads are known, we can compute which
1672 1662 # changesets are known. The recipient must know about all
1673 1663 # changesets required to reach the known heads from the null
1674 1664 # changeset.
1675 1665 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1676 1666 junk = None
1677 1667 # Transform the list into a set.
1678 1668 has_cl_set = set(has_cl_set)
1679 1669 else:
1680 1670 # If there were no known heads, the recipient cannot be assumed to
1681 1671 # know about any changesets.
1682 1672 has_cl_set = set()
1683 1673
1684 1674 # Make it easy to refer to self.manifest
1685 1675 mnfst = self.manifest
1686 1676 # We don't know which manifests are missing yet
1687 1677 msng_mnfst_set = {}
1688 1678 # Nor do we know which filenodes are missing.
1689 1679 msng_filenode_set = {}
1690 1680
1691 1681 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1692 1682 junk = None
1693 1683
1694 1684 # A changeset always belongs to itself, so the changenode lookup
1695 1685 # function for a changenode is identity.
1696 1686 def identity(x):
1697 1687 return x
1698 1688
1699 1689 # If we determine that a particular file or manifest node must be a
1700 1690 # node that the recipient of the changegroup will already have, we can
1701 1691 # also assume the recipient will have all the parents. This function
1702 1692 # prunes them from the set of missing nodes.
1703 1693 def prune_parents(revlog, hasset, msngset):
1704 1694 haslst = list(hasset)
1705 1695 haslst.sort(key=revlog.rev)
1706 1696 for node in haslst:
1707 1697 parentlst = [p for p in revlog.parents(node) if p != nullid]
1708 1698 while parentlst:
1709 1699 n = parentlst.pop()
1710 1700 if n not in hasset:
1711 1701 hasset.add(n)
1712 1702 p = [p for p in revlog.parents(n) if p != nullid]
1713 1703 parentlst.extend(p)
1714 1704 for n in hasset:
1715 1705 msngset.pop(n, None)
1716 1706
1717 1707 # This is a function generating function used to set up an environment
1718 1708 # for the inner function to execute in.
1719 1709 def manifest_and_file_collector(changedfileset):
1720 1710 # This is an information gathering function that gathers
1721 1711 # information from each changeset node that goes out as part of
1722 1712 # the changegroup. The information gathered is a list of which
1723 1713 # manifest nodes are potentially required (the recipient may
1724 1714 # already have them) and the total list of all files which were
1725 1715 # changed in any changeset in the changegroup.
1726 1716 #
1727 1717 # We also remember the first changenode we saw any manifest
1728 1718 # referenced by so we can later determine which changenode 'owns'
1729 1719 # the manifest.
1730 1720 def collect_manifests_and_files(clnode):
1731 1721 c = cl.read(clnode)
1732 1722 for f in c[3]:
1733 1723 # This is to make sure we only have one instance of each
1734 1724 # filename string for each filename.
1735 1725 changedfileset.setdefault(f, f)
1736 1726 msng_mnfst_set.setdefault(c[0], clnode)
1737 1727 return collect_manifests_and_files
1738 1728
1739 1729 # Figure out which manifest nodes (of the ones we think might be part
1740 1730 # of the changegroup) the recipient must know about and remove them
1741 1731 # from the changegroup.
1742 1732 def prune_manifests():
1743 1733 has_mnfst_set = set()
1744 1734 for n in msng_mnfst_set:
1745 1735 # If a 'missing' manifest thinks it belongs to a changenode
1746 1736 # the recipient is assumed to have, obviously the recipient
1747 1737 # must have that manifest.
1748 1738 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1749 1739 if linknode in has_cl_set:
1750 1740 has_mnfst_set.add(n)
1751 1741 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1752 1742
1753 1743 # Use the information collected in collect_manifests_and_files to say
1754 1744 # which changenode any manifestnode belongs to.
1755 1745 def lookup_manifest_link(mnfstnode):
1756 1746 return msng_mnfst_set[mnfstnode]
1757 1747
1758 1748 # A function-generating function that sets up the initial environment
1759 1749 # for the inner function.
1760 1750 def filenode_collector(changedfiles):
1761 1751 next_rev = [0]
1762 1752 # This gathers information from each manifestnode included in the
1763 1753 # changegroup about which filenodes the manifest node references
1764 1754 # so we can include those in the changegroup too.
1765 1755 #
1766 1756 # It also remembers which changenode each filenode belongs to. It
1767 1757 # does this by assuming that a filenode belongs to the changenode
1768 1758 # that the first manifest referencing it belongs to.
1769 1759 def collect_msng_filenodes(mnfstnode):
1770 1760 r = mnfst.rev(mnfstnode)
1771 1761 if r == next_rev[0]:
1772 1762 # If the last rev we looked at was the one just previous,
1773 1763 # we only need to see a diff.
1774 1764 deltamf = mnfst.readdelta(mnfstnode)
1775 1765 # For each line in the delta
1776 1766 for f, fnode in deltamf.iteritems():
1777 1767 f = changedfiles.get(f, None)
1778 1768 # And if the file is in the list of files we care
1779 1769 # about.
1780 1770 if f is not None:
1781 1771 # Get the changenode this manifest belongs to
1782 1772 clnode = msng_mnfst_set[mnfstnode]
1783 1773 # Create the set of filenodes for the file if
1784 1774 # there isn't one already.
1785 1775 ndset = msng_filenode_set.setdefault(f, {})
1786 1776 # And set the filenode's changelog node to the
1787 1777 # manifest's if it hasn't been set already.
1788 1778 ndset.setdefault(fnode, clnode)
1789 1779 else:
1790 1780 # Otherwise we need a full manifest.
1791 1781 m = mnfst.read(mnfstnode)
1792 1782 # For every file we care about.
1793 1783 for f in changedfiles:
1794 1784 fnode = m.get(f, None)
1795 1785 # If it's in the manifest
1796 1786 if fnode is not None:
1797 1787 # See comments above.
1798 1788 clnode = msng_mnfst_set[mnfstnode]
1799 1789 ndset = msng_filenode_set.setdefault(f, {})
1800 1790 ndset.setdefault(fnode, clnode)
1801 1791 # Remember the revision we hope to see next.
1802 1792 next_rev[0] = r + 1
1803 1793 return collect_msng_filenodes
1804 1794
1805 1795 # We have a list of filenodes we think we need for a file; let's remove
1806 1796 # all those we know the recipient must have.
1807 1797 def prune_filenodes(f, filerevlog):
1808 1798 msngset = msng_filenode_set[f]
1809 1799 hasset = set()
1810 1800 # If a 'missing' filenode thinks it belongs to a changenode we
1811 1801 # assume the recipient must have, then the recipient must have
1812 1802 # that filenode.
1813 1803 for n in msngset:
1814 1804 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1815 1805 if clnode in has_cl_set:
1816 1806 hasset.add(n)
1817 1807 prune_parents(filerevlog, hasset, msngset)
1818 1808
1819 1809 # A function-generating function that sets up a context for the
1820 1810 # inner function.
1821 1811 def lookup_filenode_link_func(fname):
1822 1812 msngset = msng_filenode_set[fname]
1823 1813 # Lookup the changenode the filenode belongs to.
1824 1814 def lookup_filenode_link(fnode):
1825 1815 return msngset[fnode]
1826 1816 return lookup_filenode_link
1827 1817
1828 1818 # Add the nodes that were explicitly requested.
1829 1819 def add_extra_nodes(name, nodes):
1830 1820 if not extranodes or name not in extranodes:
1831 1821 return
1832 1822
1833 1823 for node, linknode in extranodes[name]:
1834 1824 if node not in nodes:
1835 1825 nodes[node] = linknode
1836 1826
1837 1827 # Now that we have all these utility functions to help out and
1838 1828 # logically divide up the task, generate the group.
1839 1829 def gengroup():
1840 1830 # The set of changed files starts empty.
1841 1831 changedfiles = {}
1842 1832 # Create a changenode group generator that will call our functions
1843 1833 # back to lookup the owning changenode and collect information.
1844 1834 group = cl.group(msng_cl_lst, identity,
1845 1835 manifest_and_file_collector(changedfiles))
1846 1836 for chnk in group:
1847 1837 yield chnk
1848 1838
1849 1839 # The list of manifests has been collected by the generator
1850 1840 # calling our functions back.
1851 1841 prune_manifests()
1852 1842 add_extra_nodes(1, msng_mnfst_set)
1853 1843 msng_mnfst_lst = msng_mnfst_set.keys()
1854 1844 # Sort the manifestnodes by revision number.
1855 1845 msng_mnfst_lst.sort(key=mnfst.rev)
1856 1846 # Create a generator for the manifestnodes that calls our lookup
1857 1847 # and data collection functions back.
1858 1848 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1859 1849 filenode_collector(changedfiles))
1860 1850 for chnk in group:
1861 1851 yield chnk
1862 1852
1863 1853 # These are no longer needed, dereference and toss the memory for
1864 1854 # them.
1865 1855 msng_mnfst_lst = None
1866 1856 msng_mnfst_set.clear()
1867 1857
1868 1858 if extranodes:
1869 1859 for fname in extranodes:
1870 1860 if isinstance(fname, int):
1871 1861 continue
1872 1862 msng_filenode_set.setdefault(fname, {})
1873 1863 changedfiles[fname] = 1
1874 1864 # Go through all our files in order sorted by name.
1875 1865 for fname in sorted(changedfiles):
1876 1866 filerevlog = self.file(fname)
1877 1867 if not len(filerevlog):
1878 1868 raise util.Abort(_("empty or missing revlog for %s") % fname)
1879 1869 # Toss out the filenodes that the recipient isn't really
1880 1870 # missing.
1881 1871 if fname in msng_filenode_set:
1882 1872 prune_filenodes(fname, filerevlog)
1883 1873 add_extra_nodes(fname, msng_filenode_set[fname])
1884 1874 msng_filenode_lst = msng_filenode_set[fname].keys()
1885 1875 else:
1886 1876 msng_filenode_lst = []
1887 1877 # If any filenodes are left, generate the group for them,
1888 1878 # otherwise don't bother.
1889 1879 if len(msng_filenode_lst) > 0:
1890 1880 yield changegroup.chunkheader(len(fname))
1891 1881 yield fname
1892 1882 # Sort the filenodes by their revision #
1893 1883 msng_filenode_lst.sort(key=filerevlog.rev)
1894 1884 # Create a group generator and only pass in a changenode
1895 1885 # lookup function as we need to collect no information
1896 1886 # from filenodes.
1897 1887 group = filerevlog.group(msng_filenode_lst,
1898 1888 lookup_filenode_link_func(fname))
1899 1889 for chnk in group:
1900 1890 yield chnk
1901 1891 if fname in msng_filenode_set:
1902 1892 # Don't need this anymore, toss it to free memory.
1903 1893 del msng_filenode_set[fname]
1904 1894 # Signal that no more groups are left.
1905 1895 yield changegroup.closechunk()
1906 1896
1907 1897 if msng_cl_lst:
1908 1898 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1909 1899
1910 1900 return util.chunkbuffer(gengroup())
1911 1901
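# Editor's note: a minimal sketch, not part of this changeset, of the shape of
# the extranodes argument documented in changegroupsubset() above. The file
# name and the node values are invented placeholders.
def _example_extranodes():
    wanted = "\0" * 20       # placeholder 20-byte node to force into the group
    linknode = "\0" * 20     # placeholder changelog node to transmit as its linkrev
    return {
        'path/to/file.txt': [(wanted, linknode)],
        1: [(wanted, linknode)],    # the key 1 addresses the manifest
    }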
1912 1902 def changegroup(self, basenodes, source):
1913 1903 # to avoid a race we use changegroupsubset() (issue1320)
1914 1904 return self.changegroupsubset(basenodes, self.heads(), source)
1915 1905
1916 1906 def _changegroup(self, common, source):
1917 1907 """Compute the changegroup of all nodes that we have that a recipient
1918 1908 doesn't. Return a chunkbuffer object whose read() method will return
1919 1909 successive changegroup chunks.
1920 1910
1921 1911 This is much easier than the previous function as we can assume that
1922 1912 the recipient has any changenode we aren't sending them.
1923 1913
1924 1914 common is the set of common nodes between remote and self"""
1925 1915
1926 1916 self.hook('preoutgoing', throw=True, source=source)
1927 1917
1928 1918 cl = self.changelog
1929 1919 nodes = cl.findmissing(common)
1930 1920 revset = set([cl.rev(n) for n in nodes])
1931 1921 self.changegroupinfo(nodes, source)
1932 1922
1933 1923 def identity(x):
1934 1924 return x
1935 1925
1936 1926 def gennodelst(log):
1937 1927 for r in log:
1938 1928 if log.linkrev(r) in revset:
1939 1929 yield log.node(r)
1940 1930
1941 1931 def changed_file_collector(changedfileset):
1942 1932 def collect_changed_files(clnode):
1943 1933 c = cl.read(clnode)
1944 1934 changedfileset.update(c[3])
1945 1935 return collect_changed_files
1946 1936
1947 1937 def lookuprevlink_func(revlog):
1948 1938 def lookuprevlink(n):
1949 1939 return cl.node(revlog.linkrev(revlog.rev(n)))
1950 1940 return lookuprevlink
1951 1941
1952 1942 def gengroup():
1953 1943 '''yield a sequence of changegroup chunks (strings)'''
1954 1944 # construct a list of all changed files
1955 1945 changedfiles = set()
1956 1946
1957 1947 for chnk in cl.group(nodes, identity,
1958 1948 changed_file_collector(changedfiles)):
1959 1949 yield chnk
1960 1950
1961 1951 mnfst = self.manifest
1962 1952 nodeiter = gennodelst(mnfst)
1963 1953 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1964 1954 yield chnk
1965 1955
1966 1956 for fname in sorted(changedfiles):
1967 1957 filerevlog = self.file(fname)
1968 1958 if not len(filerevlog):
1969 1959 raise util.Abort(_("empty or missing revlog for %s") % fname)
1970 1960 nodeiter = gennodelst(filerevlog)
1971 1961 nodeiter = list(nodeiter)
1972 1962 if nodeiter:
1973 1963 yield changegroup.chunkheader(len(fname))
1974 1964 yield fname
1975 1965 lookup = lookuprevlink_func(filerevlog)
1976 1966 for chnk in filerevlog.group(nodeiter, lookup):
1977 1967 yield chnk
1978 1968
1979 1969 yield changegroup.closechunk()
1980 1970
1981 1971 if nodes:
1982 1972 self.hook('outgoing', node=hex(nodes[0]), source=source)
1983 1973
1984 1974 return util.chunkbuffer(gengroup())
1985 1975
1986 1976 def addchangegroup(self, source, srctype, url, emptyok=False):
1987 1977 """add changegroup to repo.
1988 1978
1989 1979 return values:
1990 1980 - nothing changed or no source: 0
1991 1981 - more heads than before: 1+added heads (2..n)
1992 1982 - fewer heads than before: -1-removed heads (-2..-n)
1993 1983 - number of heads stays the same: 1
1994 1984 """
1995 1985 def csmap(x):
1996 1986 self.ui.debug("add changeset %s\n" % short(x))
1997 1987 return len(cl)
1998 1988
1999 1989 def revmap(x):
2000 1990 return cl.rev(x)
2001 1991
2002 1992 if not source:
2003 1993 return 0
2004 1994
2005 1995 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2006 1996
2007 1997 changesets = files = revisions = 0
2008 1998
2009 1999 # write changelog data to temp files so concurrent readers will not see
2010 2000 # an inconsistent view
2011 2001 cl = self.changelog
2012 2002 cl.delayupdate()
2013 2003 oldheads = len(cl.heads())
2014 2004
2015 2005 tr = self.transaction()
2016 2006 try:
2017 2007 trp = weakref.proxy(tr)
2018 2008 # pull off the changeset group
2019 2009 self.ui.status(_("adding changesets\n"))
2020 2010 clstart = len(cl)
2021 2011 chunkiter = changegroup.chunkiter(source)
2022 2012 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2023 2013 raise util.Abort(_("received changelog group is empty"))
2024 2014 clend = len(cl)
2025 2015 changesets = clend - clstart
2026 2016
2027 2017 # pull off the manifest group
2028 2018 self.ui.status(_("adding manifests\n"))
2029 2019 chunkiter = changegroup.chunkiter(source)
2030 2020 # no need to check for empty manifest group here:
2031 2021 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2032 2022 # no new manifest will be created and the manifest group will
2033 2023 # be empty during the pull
2034 2024 self.manifest.addgroup(chunkiter, revmap, trp)
2035 2025
2036 2026 # process the files
2037 2027 self.ui.status(_("adding file changes\n"))
2038 2028 while 1:
2039 2029 f = changegroup.getchunk(source)
2040 2030 if not f:
2041 2031 break
2042 2032 self.ui.debug("adding %s revisions\n" % f)
2043 2033 fl = self.file(f)
2044 2034 o = len(fl)
2045 2035 chunkiter = changegroup.chunkiter(source)
2046 2036 if fl.addgroup(chunkiter, revmap, trp) is None:
2047 2037 raise util.Abort(_("received file revlog group is empty"))
2048 2038 revisions += len(fl) - o
2049 2039 files += 1
2050 2040
2051 2041 newheads = len(cl.heads())
2052 2042 heads = ""
2053 2043 if oldheads and newheads != oldheads:
2054 2044 heads = _(" (%+d heads)") % (newheads - oldheads)
2055 2045
2056 2046 self.ui.status(_("added %d changesets"
2057 2047 " with %d changes to %d files%s\n")
2058 2048 % (changesets, revisions, files, heads))
2059 2049
2060 2050 if changesets > 0:
2061 2051 p = lambda: cl.writepending() and self.root or ""
2062 2052 self.hook('pretxnchangegroup', throw=True,
2063 2053 node=hex(cl.node(clstart)), source=srctype,
2064 2054 url=url, pending=p)
2065 2055
2066 2056 # make changelog see real files again
2067 2057 cl.finalize(trp)
2068 2058
2069 2059 tr.close()
2070 2060 finally:
2071 2061 del tr
2072 2062
2073 2063 if changesets > 0:
2074 2064 # forcefully update the on-disk branch cache
2075 2065 self.ui.debug("updating the branch cache\n")
2076 2066 self.branchtags()
2077 2067 self.hook("changegroup", node=hex(cl.node(clstart)),
2078 2068 source=srctype, url=url)
2079 2069
2080 2070 for i in xrange(clstart, clend):
2081 2071 self.hook("incoming", node=hex(cl.node(i)),
2082 2072 source=srctype, url=url)
2083 2073
2084 2074 # never return 0 here:
2085 2075 if newheads < oldheads:
2086 2076 return newheads - oldheads - 1
2087 2077 else:
2088 2078 return newheads - oldheads + 1
2089 2079
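# Editor's note: an illustrative sketch, not part of this changeset, of how a
# caller can translate the return value documented in addchangegroup() above.
# The messages are invented for the example.
def _example_report_heads(repo, modheads):
    if modheads == 0:
        repo.ui.status("no changes added\n")
    elif modheads == 1:
        repo.ui.status("changesets added, head count unchanged\n")
    elif modheads > 1:
        repo.ui.status("%d new heads\n" % (modheads - 1))
    else:
        repo.ui.status("%d heads removed\n" % (-modheads - 1))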
2090 2080
2091 2081 def stream_in(self, remote):
2092 2082 fp = remote.stream_out()
2093 2083 l = fp.readline()
2094 2084 try:
2095 2085 resp = int(l)
2096 2086 except ValueError:
2097 2087 raise error.ResponseError(
2098 2088 _('Unexpected response from remote server:'), l)
2099 2089 if resp == 1:
2100 2090 raise util.Abort(_('operation forbidden by server'))
2101 2091 elif resp == 2:
2102 2092 raise util.Abort(_('locking the remote repository failed'))
2103 2093 elif resp != 0:
2104 2094 raise util.Abort(_('the server sent an unknown error code'))
2105 2095 self.ui.status(_('streaming all changes\n'))
2106 2096 l = fp.readline()
2107 2097 try:
2108 2098 total_files, total_bytes = map(int, l.split(' ', 1))
2109 2099 except (ValueError, TypeError):
2110 2100 raise error.ResponseError(
2111 2101 _('Unexpected response from remote server:'), l)
2112 2102 self.ui.status(_('%d files to transfer, %s of data\n') %
2113 2103 (total_files, util.bytecount(total_bytes)))
2114 2104 start = time.time()
2115 2105 for i in xrange(total_files):
2116 2106 # XXX doesn't support '\n' or '\r' in filenames
2117 2107 l = fp.readline()
2118 2108 try:
2119 2109 name, size = l.split('\0', 1)
2120 2110 size = int(size)
2121 2111 except (ValueError, TypeError):
2122 2112 raise error.ResponseError(
2123 2113 _('Unexpected response from remote server:'), l)
2124 2114 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2125 2115 # for backwards compat, name was partially encoded
2126 2116 ofp = self.sopener(store.decodedir(name), 'w')
2127 2117 for chunk in util.filechunkiter(fp, limit=size):
2128 2118 ofp.write(chunk)
2129 2119 ofp.close()
2130 2120 elapsed = time.time() - start
2131 2121 if elapsed <= 0:
2132 2122 elapsed = 0.001
2133 2123 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2134 2124 (util.bytecount(total_bytes), elapsed,
2135 2125 util.bytecount(total_bytes / elapsed)))
2136 2126 self.invalidate()
2137 2127 return len(self.heads()) + 1
2138 2128
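# Editor's note: an illustrative sketch, not part of this changeset, of the
# byte stream that stream_in() above consumes. The file names, contents and
# sizes are invented; a real stream carries the (partially encoded) store file
# names followed by raw revlog data.
def _example_stream_payload():
    files = [("data/foo.i", "hello"), ("data/bar.i", "world!")]
    total = sum([len(d) for n, d in files])
    out = "0\n"                               # response code 0 means OK
    out += "%d %d\n" % (len(files), total)    # "<file count> <total bytes>"
    for name, data in files:
        out += "%s\0%d\n%s" % (name, len(data), data)
    return out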
2139 2129 def clone(self, remote, heads=[], stream=False):
2140 2130 '''clone remote repository.
2141 2131
2142 2132 keyword arguments:
2143 2133 heads: list of revs to clone (forces use of pull)
2144 2134 stream: use streaming clone if possible'''
2145 2135
2146 2136 # now, all clients that can request uncompressed clones can
2147 2137 # read repo formats supported by all servers that can serve
2148 2138 # them.
2149 2139
2150 2140 # if revlog format changes, client will have to check version
2151 2141 # and format flags on "stream" capability, and use
2152 2142 # uncompressed only if compatible.
2153 2143
2154 2144 if stream and not heads and remote.capable('stream'):
2155 2145 return self.stream_in(remote)
2156 2146 return self.pull(remote, heads)
2157 2147
2158 2148 # used to avoid circular references so destructors work
2159 2149 def aftertrans(files):
2160 2150 renamefiles = [tuple(t) for t in files]
2161 2151 def a():
2162 2152 for src, dest in renamefiles:
2163 2153 util.rename(src, dest)
2164 2154 return a
2165 2155
2166 2156 def instance(ui, path, create):
2167 2157 return localrepository(ui, util.drop_scheme('file', path), create)
2168 2158
2169 2159 def islocal(path):
2170 2160 return True