style: use consistent variable names (*mod) with imports which would shadow
Benoit Boissinot
r10651:5f091fc1 default
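The rename gives imported modules a ``*mod`` suffix so that ordinary local
names (for example a boolean ``merge`` option inside a command function)
cannot shadow the module they share a name with. A minimal, self-contained
sketch of the shadowing problem, using stand-in names rather than anything
from commands.py::

    import types

    merge = types.ModuleType('merge')          # stand-in for the real merge module
    merge.update = lambda repo, node: 'merged %s' % node

    def update_shadowed(repo, node, merge):
        # the boolean argument 'merge' shadows the module of the same name,
        # so merge.update(...) here would raise AttributeError
        return bool(merge)

    mergemod = merge                           # i.e. 'import merge as mergemod'

    def update_fixed(repo, node, merge):
        # with the *mod alias the module stays reachable regardless of local names
        if merge:
            return mergemod.update(repo, node)
        return None

    print(update_fixed(None, 'tip', True))     # -> merged tip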
@@ -1,3887 +1,3887 @@
 # commands.py - command processing for mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from node import hex, nullid, nullrev, short
 from lock import release
 from i18n import _, gettext
 import os, re, sys, difflib, time, tempfile
 import hg, util, revlog, bundlerepo, extensions, copies, error
 import patch, help, mdiff, url, encoding, templatekw
 import archival, changegroup, cmdutil, sshserver, hbisect
 from hgweb import server, hgweb_mod, hgwebdir_mod
-import merge as merge_
+import merge as mergemod
 import minirst

 # Commands start here, listed alphabetically

 def add(ui, repo, *pats, **opts):
     """add the specified files on the next commit

     Schedule files to be version controlled and added to the
     repository.

     The files will be added to the repository at the next commit. To
     undo an add before that, see hg forget.

     If no names are given, add all files to the repository.

     .. container:: verbose

        An example showing how new (unknown) files are added
        automatically by ``hg add``::

          $ ls
          foo.c
          $ hg status
          ? foo.c
          $ hg add
          adding foo.c
          $ hg status
          A foo.c
     """

     bad = []
     names = []
     m = cmdutil.match(repo, pats, opts)
     oldbad = m.bad
     m.bad = lambda x, y: bad.append(x) or oldbad(x, y)

     for f in repo.walk(m):
         exact = m.exact(f)
         if exact or f not in repo.dirstate:
             names.append(f)
             if ui.verbose or not exact:
                 ui.status(_('adding %s\n') % m.rel(f))
     if not opts.get('dry_run'):
         bad += [f for f in repo.add(names) if f in m.files()]
     return bad and 1 or 0

 def addremove(ui, repo, *pats, **opts):
     """add all new files, delete all missing files

     Add all new files and remove all missing files from the
     repository.

     New files are ignored if they match any of the patterns in
     .hgignore. As with add, these changes take effect at the next
     commit.

     Use the -s/--similarity option to detect renamed files. With a
     parameter greater than 0, this compares every removed file with
     every added file and records those similar enough as renames. This
     option takes a percentage between 0 (disabled) and 100 (files must
     be identical) as its parameter. Detecting renamed files this way
     can be expensive.
     """
     try:
         sim = float(opts.get('similarity') or 0)
     except ValueError:
         raise util.Abort(_('similarity must be a number'))
     if sim < 0 or sim > 100:
         raise util.Abort(_('similarity must be between 0 and 100'))
     return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)

 def annotate(ui, repo, *pats, **opts):
     """show changeset information by line for each file

     List changes in files, showing the revision id responsible for
     each line

     This command is useful for discovering when a change was made and
     by whom.

     Without the -a/--text option, annotate will avoid processing files
     it detects as binary. With -a, annotate will annotate the file
     anyway, although the results will probably be neither useful
     nor desirable.
     """
     if opts.get('follow'):
         # --follow is deprecated and now just an alias for -f/--file
         # to mimic the behavior of Mercurial before version 1.5
         opts['file'] = 1

     datefunc = ui.quiet and util.shortdate or util.datestr
     getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

     if not pats:
         raise util.Abort(_('at least one filename or pattern is required'))

     opmap = [('user', lambda x: ui.shortuser(x[0].user())),
              ('number', lambda x: str(x[0].rev())),
              ('changeset', lambda x: short(x[0].node())),
              ('date', getdate),
              ('file', lambda x: x[0].path()),
             ]

     if (not opts.get('user') and not opts.get('changeset')
         and not opts.get('date') and not opts.get('file')):
         opts['number'] = 1

     linenumber = opts.get('line_number') is not None
     if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
         raise util.Abort(_('at least one of -n/-c is required for -l'))

     funcmap = [func for op, func in opmap if opts.get(op)]
     if linenumber:
         lastfunc = funcmap[-1]
         funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

     ctx = repo[opts.get('rev')]
     m = cmdutil.match(repo, pats, opts)
     follow = not opts.get('no_follow')
     for abs in ctx.walk(m):
         fctx = ctx[abs]
         if not opts.get('text') and util.binary(fctx.data()):
             ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
             continue

         lines = fctx.annotate(follow=follow, linenumber=linenumber)
         pieces = []

         for f in funcmap:
             l = [f(n) for n, dummy in lines]
             if l:
                 ml = max(map(len, l))
                 pieces.append(["%*s" % (ml, x) for x in l])

         if pieces:
             for p, l in zip(zip(*pieces), lines):
                 ui.write("%s: %s" % (" ".join(p), l[1]))

 def archive(ui, repo, dest, **opts):
     '''create an unversioned archive of a repository revision

     By default, the revision used is the parent of the working
     directory; use -r/--rev to specify a different revision.

     The archive type is automatically detected based on file
     extension (or override using -t/--type).

     Valid types are:

     :``files``: a directory full of files (default)
     :``tar``: tar archive, uncompressed
     :``tbz2``: tar archive, compressed using bzip2
     :``tgz``: tar archive, compressed using gzip
     :``uzip``: zip archive, uncompressed
     :``zip``: zip archive, compressed using deflate

     The exact name of the destination archive or directory is given
     using a format string; see 'hg help export' for details.

     Each member added to an archive file has a directory prefix
     prepended. Use -p/--prefix to specify a format string for the
     prefix. The default is the basename of the archive, with suffixes
     removed.
     '''

     ctx = repo[opts.get('rev')]
     if not ctx:
         raise util.Abort(_('no working directory: please specify a revision'))
     node = ctx.node()
     dest = cmdutil.make_filename(repo, dest, node)
     if os.path.realpath(dest) == repo.root:
         raise util.Abort(_('repository root cannot be destination'))

     def guess_type():
         exttypes = {
             'tar': ['.tar'],
             'tbz2': ['.tbz2', '.tar.bz2'],
             'tgz': ['.tgz', '.tar.gz'],
             'zip': ['.zip'],
         }

         for type, extensions in exttypes.items():
             if any(dest.endswith(ext) for ext in extensions):
                 return type
         return None

     kind = opts.get('type') or guess_type() or 'files'
     prefix = opts.get('prefix')

     if dest == '-':
         if kind == 'files':
             raise util.Abort(_('cannot archive plain files to stdout'))
         dest = sys.stdout
         if not prefix:
             prefix = os.path.basename(repo.root) + '-%h'

     prefix = cmdutil.make_filename(repo, prefix, node)
     matchfn = cmdutil.match(repo, [], opts)
     archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                      matchfn, prefix)

 def backout(ui, repo, node=None, rev=None, **opts):
     '''reverse effect of earlier changeset

     Commit the backed out changes as a new changeset. The new
     changeset is a child of the backed out changeset.

     If you backout a changeset other than the tip, a new head is
     created. This head will be the new tip and you should merge this
     backout changeset with another head.

     The --merge option remembers the parent of the working directory
     before starting the backout, then merges the new head with that
     changeset afterwards. This saves you from doing the merge by hand.
     The result of this merge is not committed, as with a normal merge.

     See 'hg help dates' for a list of formats valid for -d/--date.
     '''
     if rev and node:
         raise util.Abort(_("please specify just one revision"))

     if not rev:
         rev = node

     if not rev:
         raise util.Abort(_("please specify a revision to backout"))

     date = opts.get('date')
     if date:
         opts['date'] = util.parsedate(date)

     cmdutil.bail_if_changed(repo)
     node = repo.lookup(rev)

     op1, op2 = repo.dirstate.parents()
     a = repo.changelog.ancestor(op1, node)
     if a != node:
         raise util.Abort(_('cannot backout change on a different branch'))

     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
         raise util.Abort(_('cannot backout a change with no parents'))
     if p2 != nullid:
         if not opts.get('parent'):
             raise util.Abort(_('cannot backout a merge changeset without '
                                '--parent'))
         p = repo.lookup(opts['parent'])
         if p not in (p1, p2):
             raise util.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
         parent = p
     else:
         if opts.get('parent'):
             raise util.Abort(_('cannot use --parent on non-merge changeset'))
         parent = p1

     # the backout should appear on the same branch
     branch = repo.dirstate.branch()
     hg.clean(repo, node, show_stats=False)
     repo.dirstate.setbranch(branch)
     revert_opts = opts.copy()
     revert_opts['date'] = None
     revert_opts['all'] = True
     revert_opts['rev'] = hex(parent)
     revert_opts['no_backup'] = None
     revert(ui, repo, **revert_opts)
     commit_opts = opts.copy()
     commit_opts['addremove'] = False
     if not commit_opts['message'] and not commit_opts['logfile']:
         # we don't translate commit messages
         commit_opts['message'] = "Backed out changeset %s" % short(node)
         commit_opts['force_editor'] = True
     commit(ui, repo, **commit_opts)
     def nice(node):
         return '%d:%s' % (repo.changelog.rev(node), short(node))
     ui.status(_('changeset %s backs out changeset %s\n') %
               (nice(repo.changelog.tip()), nice(node)))
     if op1 != node:
         hg.clean(repo, op1, show_stats=False)
     if opts.get('merge'):
         ui.status(_('merging with changeset %s\n')
                   % nice(repo.changelog.tip()))
         hg.merge(repo, hex(repo.changelog.tip()))
     else:
         ui.status(_('the backout changeset is a new head - '
                     'do not forget to merge\n'))
         ui.status(_('(use "backout --merge" '
                     'if you want to auto-merge)\n'))

 def bisect(ui, repo, rev=None, extra=None, command=None,
            reset=None, good=None, bad=None, skip=None, noupdate=None):
     """subdivision search of changesets

     This command helps to find changesets which introduce problems. To
     use, mark the earliest changeset you know exhibits the problem as
     bad, then mark the latest changeset which is free from the problem
     as good. Bisect will update your working directory to a revision
     for testing (unless the -U/--noupdate option is specified). Once
     you have performed tests, mark the working directory as good or
     bad, and bisect will either update to another candidate changeset
     or announce that it has found the bad revision.

     As a shortcut, you can also use the revision argument to mark a
     revision as good or bad without checking it out first.

     If you supply a command, it will be used for automatic bisection.
     Its exit status will be used to mark revisions as good or bad:
     status 0 means good, 125 means to skip the revision, 127
     (command not found) will abort the bisection, and any other
     non-zero exit status means the revision is bad.
     """
     def print_result(nodes, good):
         displayer = cmdutil.show_changeset(ui, repo, {})
         if len(nodes) == 1:
             # narrowed it down to a single revision
             if good:
                 ui.write(_("The first good revision is:\n"))
             else:
                 ui.write(_("The first bad revision is:\n"))
             displayer.show(repo[nodes[0]])
         else:
             # multiple possible revisions
             if good:
                 ui.write(_("Due to skipped revisions, the first "
                            "good revision could be any of:\n"))
             else:
                 ui.write(_("Due to skipped revisions, the first "
                            "bad revision could be any of:\n"))
             for n in nodes:
                 displayer.show(repo[n])
         displayer.close()

     def check_state(state, interactive=True):
         if not state['good'] or not state['bad']:
             if (good or bad or skip or reset) and interactive:
                 return
             if not state['good']:
                 raise util.Abort(_('cannot bisect (no known good revisions)'))
             else:
                 raise util.Abort(_('cannot bisect (no known bad revisions)'))
         return True

     # backward compatibility
     if rev in "good bad reset init".split():
         ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
         cmd, rev, extra = rev, extra, None
         if cmd == "good":
             good = True
         elif cmd == "bad":
             bad = True
         else:
             reset = True
     elif extra or good + bad + skip + reset + bool(command) > 1:
         raise util.Abort(_('incompatible arguments'))

     if reset:
         p = repo.join("bisect.state")
         if os.path.exists(p):
             os.unlink(p)
         return

     state = hbisect.load_state(repo)

     if command:
         changesets = 1
         try:
             while changesets:
                 # update state
                 status = util.system(command)
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
                     transition = "good"
                 # status < 0 means process was killed
                 elif status == 127:
                     raise util.Abort(_("failed to execute %s") % command)
                 elif status < 0:
                     raise util.Abort(_("%s killed") % command)
                 else:
                     transition = "bad"
                 ctx = repo[rev or '.']
                 state[transition].append(ctx.node())
                 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
                 check_state(state, interactive=False)
                 # bisect
                 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                 # update to next check
                 cmdutil.bail_if_changed(repo)
                 hg.clean(repo, nodes[0], show_stats=False)
         finally:
             hbisect.save_state(repo, state)
         return print_result(nodes, good)

     # update state
     node = repo.lookup(rev or '.')
     if good or bad or skip:
         if good:
             state['good'].append(node)
         elif bad:
             state['bad'].append(node)
         elif skip:
             state['skip'].append(node)
         hbisect.save_state(repo, state)

     if not check_state(state):
         return

     # actually bisect
     nodes, changesets, good = hbisect.bisect(repo.changelog, state)
     if changesets == 0:
         print_result(nodes, good)
     else:
         assert len(nodes) == 1 # only a single node can be tested next
         node = nodes[0]
         # compute the approximate number of remaining tests
         tests, size = 0, 2
         while size <= changesets:
             tests, size = tests + 1, size * 2
         rev = repo.changelog.rev(node)
         ui.write(_("Testing changeset %d:%s "
                    "(%d changesets remaining, ~%d tests)\n")
                  % (rev, short(node), changesets, tests))
         if not noupdate:
             cmdutil.bail_if_changed(repo)
             return hg.clean(repo, node)

 def branch(ui, repo, label=None, **opts):
     """set or show the current branch name

     With no argument, show the current branch name. With one argument,
     set the working directory branch name (the branch will not exist
     in the repository until the next commit). Standard practice
     recommends that primary development take place on the 'default'
     branch.

     Unless -f/--force is specified, branch will not let you set a
     branch name that already exists, even if it's inactive.

     Use -C/--clean to reset the working directory branch to that of
     the parent of the working directory, negating a previous branch
     change.

     Use the command 'hg update' to switch to an existing branch. Use
     'hg commit --close-branch' to mark this branch as closed.
     """

     if opts.get('clean'):
         label = repo[None].parents()[0].branch()
         repo.dirstate.setbranch(label)
         ui.status(_('reset working directory to branch %s\n') % label)
     elif label:
         utflabel = encoding.fromlocal(label)
         if not opts.get('force') and utflabel in repo.branchtags():
             if label not in [p.branch() for p in repo.parents()]:
                 raise util.Abort(_('a branch of the same name already exists'
                                    " (use 'hg update' to switch to it)"))
         repo.dirstate.setbranch(utflabel)
         ui.status(_('marked working directory as branch %s\n') % label)
     else:
         ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))

 def branches(ui, repo, active=False, closed=False):
     """list repository named branches

     List the repository's named branches, indicating which ones are
     inactive. If -c/--closed is specified, also list branches which have
     been marked closed (see hg commit --close-branch).

     If -a/--active is specified, only show active branches. A branch
     is considered active if it contains repository heads.

     Use the command 'hg update' to switch to an existing branch.
     """

     hexfunc = ui.debugflag and hex or short
     activebranches = [repo[n].branch() for n in repo.heads()]
     def testactive(tag, node):
         realhead = tag in activebranches
         open = node in repo.branchheads(tag, closed=False)
         return realhead and open
     branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
                        for tag, node in repo.branchtags().items()],
                       reverse=True)

     for isactive, node, tag in branches:
         if (not active) or isactive:
             encodedtag = encoding.tolocal(tag)
             if ui.quiet:
                 ui.write("%s\n" % encodedtag)
             else:
                 hn = repo.lookup(node)
                 if isactive:
                     notice = ''
                 elif hn not in repo.branchheads(tag, closed=False):
                     if not closed:
                         continue
                     notice = _(' (closed)')
                 else:
                     notice = _(' (inactive)')
                 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
                 data = encodedtag, rev, hexfunc(hn), notice
                 ui.write("%s %s:%s%s\n" % data)

 def bundle(ui, repo, fname, dest=None, **opts):
     """create a changegroup file

     Generate a compressed changegroup file collecting changesets not
     known to be in another repository.

     If you omit the destination repository, then hg assumes the
     destination will have all the nodes you specify with --base
     parameters. To create a bundle containing all changesets, use
     -a/--all (or --base null).

     You can change compression method with the -t/--type option.
     The available compression methods are: none, bzip2, and
     gzip (by default, bundles are compressed using bzip2).

     The bundle file can then be transferred using conventional means
     and applied to another repository with the unbundle or pull
     command. This is useful when direct push and pull are not
     available or when exporting an entire repository is undesirable.

     Applying bundles preserves all changeset contents including
     permissions, copy/rename information, and revision history.
     """
     revs = opts.get('rev') or None
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
     if opts.get('all'):
         base = ['null']
     else:
         base = opts.get('base')
     if base:
         if dest:
             raise util.Abort(_("--base is incompatible with specifying "
                                "a destination"))
         base = [repo.lookup(rev) for rev in base]
         # create the right base
         # XXX: nodesbetween / changegroup* should be "fixed" instead
         o = []
         has = set((nullid,))
         for n in base:
             has.update(repo.changelog.reachable(n))
         if revs:
             visit = list(revs)
             has.difference_update(revs)
         else:
             visit = repo.changelog.heads()
         seen = {}
         while visit:
             n = visit.pop(0)
             parents = [p for p in repo.changelog.parents(n) if p not in has]
             if len(parents) == 0:
                 if n not in has:
                     o.append(n)
             else:
                 for p in parents:
                     if p not in seen:
                         seen[p] = 1
                         visit.append(p)
     else:
         dest = ui.expandpath(dest or 'default-push', dest or 'default')
         dest, branches = hg.parseurl(dest, opts.get('branch'))
         other = hg.repository(cmdutil.remoteui(repo, opts), dest)
         revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
         o = repo.findoutgoing(other, force=opts.get('force'))

     if not o:
         ui.status(_("no changes found\n"))
         return

     if revs:
         cg = repo.changegroupsubset(o, revs, 'bundle')
     else:
         cg = repo.changegroup(o, 'bundle')

     bundletype = opts.get('type', 'bzip2').lower()
     btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))

     changegroup.writebundle(cg, fname, bundletype)

 def cat(ui, repo, file1, *pats, **opts):
     """output the current or given revision of files

     Print the specified files as they were at the given revision. If
     no revision is given, the parent of the working directory is used,
     or tip if no revision is checked out.

     Output may be to a file, in which case the name of the file is
     given using a format string. The formatting rules are the same as
     for the export command, with the following additions:

     :``%s``: basename of file being printed
     :``%d``: dirname of file being printed, or '.' if in repository root
     :``%p``: root-relative path name of file being printed
     """
     ctx = repo[opts.get('rev')]
     err = 1
     m = cmdutil.match(repo, (file1,) + pats, opts)
     for abs in ctx.walk(m):
         fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
         data = ctx[abs].data()
         if opts.get('decode'):
             data = repo.wwritedata(abs, data)
         fp.write(data)
         err = 0
     return err

 def clone(ui, source, dest=None, **opts):
     """make a copy of an existing repository

     Create a copy of an existing repository in a new directory.

     If no destination directory name is specified, it defaults to the
     basename of the source.

     The location of the source is added to the new repository's
     .hg/hgrc file, as the default to be used for future pulls.

     See 'hg help urls' for valid source format details.

     It is possible to specify an ``ssh://`` URL as the destination, but no
     .hg/hgrc and working directory will be created on the remote side.
     Please see 'hg help urls' for important details about ``ssh://`` URLs.

     A set of changesets (tags, or branch names) to pull may be specified
     by listing each changeset (tag, or branch name) with -r/--rev.
     If -r/--rev is used, the cloned repository will contain only a subset
     of the changesets of the source repository. Only the set of changesets
     defined by all -r/--rev options (including all their ancestors)
     will be pulled into the destination repository.
     No subsequent changesets (including subsequent tags) will be present
     in the destination.

     Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
     local source repositories.

     For efficiency, hardlinks are used for cloning whenever the source
     and destination are on the same filesystem (note this applies only
     to the repository data, not to the working directory). Some
     filesystems, such as AFS, implement hardlinking incorrectly, but
     do not report errors. In these cases, use the --pull option to
     avoid hardlinking.

     In some cases, you can clone repositories and the working directory
     using full hardlinks with ::

       $ cp -al REPO REPOCLONE

     This is the fastest way to clone, but it is not always safe. The
     operation is not atomic (making sure REPO is not modified during
     the operation is up to you) and you have to make sure your editor
     breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
     this is not compatible with certain extensions that place their
     metadata under the .hg directory, such as mq.

     Mercurial will update the working directory to the first applicable
     revision from this list:

     a) null if -U or the source repository has no changesets
     b) if -u . and the source repository is local, the first parent of
        the source repository's working directory
     c) the changeset specified with -u (if a branch name, this means the
        latest head of that branch)
     d) the changeset specified with -r
     e) the tipmost head specified with -b
     f) the tipmost head specified with the url#branch source syntax
     g) the tipmost head of the default branch
     h) tip
     """
     if opts.get('noupdate') and opts.get('updaterev'):
         raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

     hg.clone(cmdutil.remoteui(ui, opts), source, dest,
              pull=opts.get('pull'),
              stream=opts.get('uncompressed'),
              rev=opts.get('rev'),
              update=opts.get('updaterev') or not opts.get('noupdate'),
              branch=opts.get('branch'))

 def commit(ui, repo, *pats, **opts):
     """commit the specified files or all outstanding changes

     Commit changes to the given files into the repository. Unlike a
     centralized RCS, this operation is a local operation. See hg push
     for a way to actively distribute your changes.

     If a list of files is omitted, all changes reported by "hg status"
     will be committed.

     If you are committing the result of a merge, do not provide any
     filenames or -I/-X filters.

     If no commit message is specified, the configured editor is
     started to prompt you for a message.

     See 'hg help dates' for a list of formats valid for -d/--date.
     """
     extra = {}
     if opts.get('close_branch'):
         extra['close'] = 1
     e = cmdutil.commiteditor
     if opts.get('force_editor'):
         e = cmdutil.commitforceeditor

     def commitfunc(ui, repo, message, match, opts):
         return repo.commit(message, opts.get('user'), opts.get('date'), match,
                            editor=e, extra=extra)

     node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
     if not node:
         ui.status(_("nothing changed\n"))
         return
     cl = repo.changelog
     rev = cl.rev(node)
     parents = cl.parentrevs(rev)
     if rev - 1 in parents:
         # one of the parents was the old tip
         pass
     elif (parents == (nullrev, nullrev) or
           len(cl.heads(cl.node(parents[0]))) > 1 and
           (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
         ui.status(_('created new head\n'))

     if ui.debugflag:
         ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
     elif ui.verbose:
         ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))

750 def copy(ui, repo, *pats, **opts):
750 def copy(ui, repo, *pats, **opts):
751 """mark files as copied for the next commit
751 """mark files as copied for the next commit
752
752
753 Mark dest as having copies of source files. If dest is a
753 Mark dest as having copies of source files. If dest is a
754 directory, copies are put in that directory. If dest is a file,
754 directory, copies are put in that directory. If dest is a file,
755 the source must be a single file.
755 the source must be a single file.
756
756
757 By default, this command copies the contents of files as they
757 By default, this command copies the contents of files as they
758 exist in the working directory. If invoked with -A/--after, the
758 exist in the working directory. If invoked with -A/--after, the
759 operation is recorded, but no copying is performed.
759 operation is recorded, but no copying is performed.
760
760
761 This command takes effect with the next commit. To undo a copy
761 This command takes effect with the next commit. To undo a copy
762 before that, see hg revert.
762 before that, see hg revert.
763 """
763 """
764 wlock = repo.wlock(False)
764 wlock = repo.wlock(False)
765 try:
765 try:
766 return cmdutil.copy(ui, repo, pats, opts)
766 return cmdutil.copy(ui, repo, pats, opts)
767 finally:
767 finally:
768 wlock.release()
768 wlock.release()
769
769
770 def debugancestor(ui, repo, *args):
770 def debugancestor(ui, repo, *args):
771 """find the ancestor revision of two revisions in a given index"""
771 """find the ancestor revision of two revisions in a given index"""
772 if len(args) == 3:
772 if len(args) == 3:
773 index, rev1, rev2 = args
773 index, rev1, rev2 = args
774 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
774 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
775 lookup = r.lookup
775 lookup = r.lookup
776 elif len(args) == 2:
776 elif len(args) == 2:
777 if not repo:
777 if not repo:
778 raise util.Abort(_("There is no Mercurial repository here "
778 raise util.Abort(_("There is no Mercurial repository here "
779 "(.hg not found)"))
779 "(.hg not found)"))
780 rev1, rev2 = args
780 rev1, rev2 = args
781 r = repo.changelog
781 r = repo.changelog
782 lookup = repo.lookup
782 lookup = repo.lookup
783 else:
783 else:
784 raise util.Abort(_('either two or three arguments required'))
784 raise util.Abort(_('either two or three arguments required'))
785 a = r.ancestor(lookup(rev1), lookup(rev2))
785 a = r.ancestor(lookup(rev1), lookup(rev2))
786 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
786 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
787
787
788 def debugcommands(ui, cmd='', *args):
788 def debugcommands(ui, cmd='', *args):
789 for cmd, vals in sorted(table.iteritems()):
789 for cmd, vals in sorted(table.iteritems()):
790 cmd = cmd.split('|')[0].strip('^')
790 cmd = cmd.split('|')[0].strip('^')
791 opts = ', '.join([i[1] for i in vals[1]])
791 opts = ', '.join([i[1] for i in vals[1]])
792 ui.write('%s: %s\n' % (cmd, opts))
792 ui.write('%s: %s\n' % (cmd, opts))
793
793
794 def debugcomplete(ui, cmd='', **opts):
794 def debugcomplete(ui, cmd='', **opts):
795 """returns the completion list associated with the given command"""
795 """returns the completion list associated with the given command"""
796
796
797 if opts.get('options'):
797 if opts.get('options'):
798 options = []
798 options = []
799 otables = [globalopts]
799 otables = [globalopts]
800 if cmd:
800 if cmd:
801 aliases, entry = cmdutil.findcmd(cmd, table, False)
801 aliases, entry = cmdutil.findcmd(cmd, table, False)
802 otables.append(entry[1])
802 otables.append(entry[1])
803 for t in otables:
803 for t in otables:
804 for o in t:
804 for o in t:
805 if o[0]:
805 if o[0]:
806 options.append('-%s' % o[0])
806 options.append('-%s' % o[0])
807 options.append('--%s' % o[1])
807 options.append('--%s' % o[1])
808 ui.write("%s\n" % "\n".join(options))
808 ui.write("%s\n" % "\n".join(options))
809 return
809 return
810
810
811 cmdlist = cmdutil.findpossible(cmd, table)
811 cmdlist = cmdutil.findpossible(cmd, table)
812 if ui.verbose:
812 if ui.verbose:
813 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
813 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
814 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
814 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
815
815
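# For illustration only: how a shell-completion script might drive the helper
# above. The partial command "st" is just an example prefix.
#
#   $ hg debugcomplete st               # command names matching the prefix
#   status
#   $ hg debugcomplete --options status # that command's flags, one per line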
816 def debugfsinfo(ui, path = "."):
816 def debugfsinfo(ui, path = "."):
817 open('.debugfsinfo', 'w').write('')
817 open('.debugfsinfo', 'w').write('')
818 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
818 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
819 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
819 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
820 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
820 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
821 and 'yes' or 'no'))
821 and 'yes' or 'no'))
822 os.unlink('.debugfsinfo')
822 os.unlink('.debugfsinfo')
823
823
824 def debugrebuildstate(ui, repo, rev="tip"):
824 def debugrebuildstate(ui, repo, rev="tip"):
825 """rebuild the dirstate as it would look like for the given revision"""
825 """rebuild the dirstate as it would look like for the given revision"""
826 ctx = repo[rev]
826 ctx = repo[rev]
827 wlock = repo.wlock()
827 wlock = repo.wlock()
828 try:
828 try:
829 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
829 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
830 finally:
830 finally:
831 wlock.release()
831 wlock.release()
832
832
833 def debugcheckstate(ui, repo):
833 def debugcheckstate(ui, repo):
834 """validate the correctness of the current dirstate"""
834 """validate the correctness of the current dirstate"""
835 parent1, parent2 = repo.dirstate.parents()
835 parent1, parent2 = repo.dirstate.parents()
836 m1 = repo[parent1].manifest()
836 m1 = repo[parent1].manifest()
837 m2 = repo[parent2].manifest()
837 m2 = repo[parent2].manifest()
838 errors = 0
838 errors = 0
839 for f in repo.dirstate:
839 for f in repo.dirstate:
840 state = repo.dirstate[f]
840 state = repo.dirstate[f]
841 if state in "nr" and f not in m1:
841 if state in "nr" and f not in m1:
842 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
842 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
843 errors += 1
843 errors += 1
844 if state in "a" and f in m1:
844 if state in "a" and f in m1:
845 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
845 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
846 errors += 1
846 errors += 1
847 if state in "m" and f not in m1 and f not in m2:
847 if state in "m" and f not in m1 and f not in m2:
848 ui.warn(_("%s in state %s, but not in either manifest\n") %
848 ui.warn(_("%s in state %s, but not in either manifest\n") %
849 (f, state))
849 (f, state))
850 errors += 1
850 errors += 1
851 for f in m1:
851 for f in m1:
852 state = repo.dirstate[f]
852 state = repo.dirstate[f]
853 if state not in "nrm":
853 if state not in "nrm":
854 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
854 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
855 errors += 1
855 errors += 1
856 if errors:
856 if errors:
857 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
857 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
858 raise util.Abort(errstr)
858 raise util.Abort(errstr)
859
859
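# Reader's aid for the checks above (not from the patch itself): the dirstate
# letters are 'n' (normal), 'a' (added), 'r' (removed) and 'm' (merged); each
# entry is cross-checked against the manifests of the two dirstate parents.
# Running the command prints nothing and exits cleanly when the dirstate is
# consistent, and aborts with the message above otherwise:
#
#   $ hg debugcheckstate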
860 def showconfig(ui, repo, *values, **opts):
860 def showconfig(ui, repo, *values, **opts):
861 """show combined config settings from all hgrc files
861 """show combined config settings from all hgrc files
862
862
863 With no arguments, print names and values of all config items.
863 With no arguments, print names and values of all config items.
864
864
865 With one argument of the form section.name, print just the value
865 With one argument of the form section.name, print just the value
866 of that config item.
866 of that config item.
867
867
868 With multiple arguments, print names and values of all config
868 With multiple arguments, print names and values of all config
869 items with matching section names.
869 items with matching section names.
870
870
871 With --debug, the source (filename and line number) is printed
871 With --debug, the source (filename and line number) is printed
872 for each config item.
872 for each config item.
873 """
873 """
874
874
875 untrusted = bool(opts.get('untrusted'))
875 untrusted = bool(opts.get('untrusted'))
876 if values:
876 if values:
877 if len([v for v in values if '.' in v]) > 1:
877 if len([v for v in values if '.' in v]) > 1:
878 raise util.Abort(_('only one config item permitted'))
878 raise util.Abort(_('only one config item permitted'))
879 for section, name, value in ui.walkconfig(untrusted=untrusted):
879 for section, name, value in ui.walkconfig(untrusted=untrusted):
880 sectname = section + '.' + name
880 sectname = section + '.' + name
881 if values:
881 if values:
882 for v in values:
882 for v in values:
883 if v == section:
883 if v == section:
884 ui.debug('%s: ' %
884 ui.debug('%s: ' %
885 ui.configsource(section, name, untrusted))
885 ui.configsource(section, name, untrusted))
886 ui.write('%s=%s\n' % (sectname, value))
886 ui.write('%s=%s\n' % (sectname, value))
887 elif v == sectname:
887 elif v == sectname:
888 ui.debug('%s: ' %
888 ui.debug('%s: ' %
889 ui.configsource(section, name, untrusted))
889 ui.configsource(section, name, untrusted))
890 ui.write(value, '\n')
890 ui.write(value, '\n')
891 else:
891 else:
892 ui.debug('%s: ' %
892 ui.debug('%s: ' %
893 ui.configsource(section, name, untrusted))
893 ui.configsource(section, name, untrusted))
894 ui.write('%s=%s\n' % (sectname, value))
894 ui.write('%s=%s\n' % (sectname, value))
895
895
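# Hedged examples of the three query modes documented above; the keys shown are
# just common configuration names.
#
#   $ hg showconfig ui.username          # one dotted key -> value only
#   $ hg showconfig ui paths             # section names -> section.name=value lines
#   $ hg showconfig --debug ui.username  # prefix each item with its file:line source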
896 def debugsetparents(ui, repo, rev1, rev2=None):
896 def debugsetparents(ui, repo, rev1, rev2=None):
897 """manually set the parents of the current working directory
897 """manually set the parents of the current working directory
898
898
899 This is useful for writing repository conversion tools, but should
899 This is useful for writing repository conversion tools, but should
900 be used with care.
900 be used with care.
901 """
901 """
902
902
903 if not rev2:
903 if not rev2:
904 rev2 = hex(nullid)
904 rev2 = hex(nullid)
905
905
906 wlock = repo.wlock()
906 wlock = repo.wlock()
907 try:
907 try:
908 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
908 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
909 finally:
909 finally:
910 wlock.release()
910 wlock.release()
911
911
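# A cautious sketch (not part of the changeset): debugsetparents rewrites only
# the dirstate parents and leaves the working files untouched, which is why the
# docstring above reserves it for conversion-style tooling.
#
#   $ hg debugsetparents 0
#   $ hg parents --template '{rev}\n'    # now reports 0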
912 def debugstate(ui, repo, nodates=None):
912 def debugstate(ui, repo, nodates=None):
913 """show the contents of the current dirstate"""
913 """show the contents of the current dirstate"""
914 timestr = ""
914 timestr = ""
915 showdate = not nodates
915 showdate = not nodates
916 for file_, ent in sorted(repo.dirstate._map.iteritems()):
916 for file_, ent in sorted(repo.dirstate._map.iteritems()):
917 if showdate:
917 if showdate:
918 if ent[3] == -1:
918 if ent[3] == -1:
919 # Pad or slice to locale representation
919 # Pad or slice to locale representation
920 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
920 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
921 time.localtime(0)))
921 time.localtime(0)))
922 timestr = 'unset'
922 timestr = 'unset'
923 timestr = (timestr[:locale_len] +
923 timestr = (timestr[:locale_len] +
924 ' ' * (locale_len - len(timestr)))
924 ' ' * (locale_len - len(timestr)))
925 else:
925 else:
926 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
926 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
927 time.localtime(ent[3]))
927 time.localtime(ent[3]))
928 if ent[1] & 020000:
928 if ent[1] & 020000:
929 mode = 'lnk'
929 mode = 'lnk'
930 else:
930 else:
931 mode = '%3o' % (ent[1] & 0777)
931 mode = '%3o' % (ent[1] & 0777)
932 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
932 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
933 for f in repo.dirstate.copies():
933 for f in repo.dirstate.copies():
934 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
934 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
935
935
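# A hypothetical excerpt of the output format assembled above -- state letter,
# mode, size, mtime, filename, followed by any recorded copies:
#
#   $ hg debugstate
#   n 644         61 2010-02-01 10:32:05 foo.c
#   a   0         -1 unset               bar.c
#   copy: foo.c -> bar.c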
936 def debugsub(ui, repo, rev=None):
936 def debugsub(ui, repo, rev=None):
937 if rev == '':
937 if rev == '':
938 rev = None
938 rev = None
939 for k, v in sorted(repo[rev].substate.items()):
939 for k, v in sorted(repo[rev].substate.items()):
940 ui.write('path %s\n' % k)
940 ui.write('path %s\n' % k)
941 ui.write(' source %s\n' % v[0])
941 ui.write(' source %s\n' % v[0])
942 ui.write(' revision %s\n' % v[1])
942 ui.write(' revision %s\n' % v[1])
943
943
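# For illustration (path and URL are made up): with one subrepository entry the
# three write() calls above produce a block like:
#
#   $ hg debugsub
#   path vendor/lib
#    source   http://example.com/lib
#    revision <40-hex-digit changeset id pinned in .hgsubstate>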
944 def debugdata(ui, file_, rev):
944 def debugdata(ui, file_, rev):
945 """dump the contents of a data file revision"""
945 """dump the contents of a data file revision"""
946 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
946 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
947 try:
947 try:
948 ui.write(r.revision(r.lookup(rev)))
948 ui.write(r.revision(r.lookup(rev)))
949 except KeyError:
949 except KeyError:
950 raise util.Abort(_('invalid revision identifier %s') % rev)
950 raise util.Abort(_('invalid revision identifier %s') % rev)
951
951
952 def debugdate(ui, date, range=None, **opts):
952 def debugdate(ui, date, range=None, **opts):
953 """parse and display a date"""
953 """parse and display a date"""
954 if opts["extended"]:
954 if opts["extended"]:
955 d = util.parsedate(date, util.extendeddateformats)
955 d = util.parsedate(date, util.extendeddateformats)
956 else:
956 else:
957 d = util.parsedate(date)
957 d = util.parsedate(date)
958 ui.write("internal: %s %s\n" % d)
958 ui.write("internal: %s %s\n" % d)
959 ui.write("standard: %s\n" % util.datestr(d))
959 ui.write("standard: %s\n" % util.datestr(d))
960 if range:
960 if range:
961 m = util.matchdate(range)
961 m = util.matchdate(range)
962 ui.write("match: %s\n" % m(d[0]))
962 ui.write("match: %s\n" % m(d[0]))
963
963
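# A hedged example of the two lines written above, assuming the usual
# "YYYY-MM-DD HH:MM:SS +ZZZZ" input form:
#
#   $ hg debugdate '2006-02-01 13:00:30 +0000'
#   internal: 1138798830 0
#   standard: Wed Feb 01 13:00:30 2006 +0000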
964 def debugindex(ui, file_):
964 def debugindex(ui, file_):
965 """dump the contents of an index file"""
965 """dump the contents of an index file"""
966 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
966 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
967 ui.write(" rev offset length base linkrev"
967 ui.write(" rev offset length base linkrev"
968 " nodeid p1 p2\n")
968 " nodeid p1 p2\n")
969 for i in r:
969 for i in r:
970 node = r.node(i)
970 node = r.node(i)
971 try:
971 try:
972 pp = r.parents(node)
972 pp = r.parents(node)
973 except:
973 except:
974 pp = [nullid, nullid]
974 pp = [nullid, nullid]
975 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
975 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
976 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
976 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
977 short(node), short(pp[0]), short(pp[1])))
977 short(node), short(pp[0]), short(pp[1])))
978
978
979 def debugindexdot(ui, file_):
979 def debugindexdot(ui, file_):
980 """dump an index DAG as a graphviz dot file"""
980 """dump an index DAG as a graphviz dot file"""
981 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
981 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
982 ui.write("digraph G {\n")
982 ui.write("digraph G {\n")
983 for i in r:
983 for i in r:
984 node = r.node(i)
984 node = r.node(i)
985 pp = r.parents(node)
985 pp = r.parents(node)
986 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
986 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
987 if pp[1] != nullid:
987 if pp[1] != nullid:
988 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
988 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
989 ui.write("}\n")
989 ui.write("}\n")
990
990
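# Hedged invocations for the two index dumpers above; the store path is the
# conventional location of the changelog index.
#
#   $ hg debugindex .hg/store/00changelog.i      # rev, offset, length, base, ...
#   $ hg debugindexdot .hg/store/00changelog.i > cl.dot
#   $ dot -Tpng cl.dot -o cl.png                 # render the DAG with graphviz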
991 def debuginstall(ui):
991 def debuginstall(ui):
992 '''test Mercurial installation'''
992 '''test Mercurial installation'''
993
993
994 def writetemp(contents):
994 def writetemp(contents):
995 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
995 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
996 f = os.fdopen(fd, "wb")
996 f = os.fdopen(fd, "wb")
997 f.write(contents)
997 f.write(contents)
998 f.close()
998 f.close()
999 return name
999 return name
1000
1000
1001 problems = 0
1001 problems = 0
1002
1002
1003 # encoding
1003 # encoding
1004 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1004 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1005 try:
1005 try:
1006 encoding.fromlocal("test")
1006 encoding.fromlocal("test")
1007 except util.Abort, inst:
1007 except util.Abort, inst:
1008 ui.write(" %s\n" % inst)
1008 ui.write(" %s\n" % inst)
1009 ui.write(_(" (check that your locale is properly set)\n"))
1009 ui.write(_(" (check that your locale is properly set)\n"))
1010 problems += 1
1010 problems += 1
1011
1011
1012 # compiled modules
1012 # compiled modules
1013 ui.status(_("Checking extensions...\n"))
1013 ui.status(_("Checking extensions...\n"))
1014 try:
1014 try:
1015 import bdiff, mpatch, base85
1015 import bdiff, mpatch, base85
1016 except Exception, inst:
1016 except Exception, inst:
1017 ui.write(" %s\n" % inst)
1017 ui.write(" %s\n" % inst)
1018 ui.write(_(" One or more extensions could not be found"))
1018 ui.write(_(" One or more extensions could not be found"))
1019 ui.write(_(" (check that you compiled the extensions)\n"))
1019 ui.write(_(" (check that you compiled the extensions)\n"))
1020 problems += 1
1020 problems += 1
1021
1021
1022 # templates
1022 # templates
1023 ui.status(_("Checking templates...\n"))
1023 ui.status(_("Checking templates...\n"))
1024 try:
1024 try:
1025 import templater
1025 import templater
1026 templater.templater(templater.templatepath("map-cmdline.default"))
1026 templater.templater(templater.templatepath("map-cmdline.default"))
1027 except Exception, inst:
1027 except Exception, inst:
1028 ui.write(" %s\n" % inst)
1028 ui.write(" %s\n" % inst)
1029 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1029 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1030 problems += 1
1030 problems += 1
1031
1031
1032 # patch
1032 # patch
1033 ui.status(_("Checking patch...\n"))
1033 ui.status(_("Checking patch...\n"))
1034 patchproblems = 0
1034 patchproblems = 0
1035 a = "1\n2\n3\n4\n"
1035 a = "1\n2\n3\n4\n"
1036 b = "1\n2\n3\ninsert\n4\n"
1036 b = "1\n2\n3\ninsert\n4\n"
1037 fa = writetemp(a)
1037 fa = writetemp(a)
1038 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1038 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1039 os.path.basename(fa))
1039 os.path.basename(fa))
1040 fd = writetemp(d)
1040 fd = writetemp(d)
1041
1041
1042 files = {}
1042 files = {}
1043 try:
1043 try:
1044 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1044 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1045 except util.Abort, e:
1045 except util.Abort, e:
1046 ui.write(_(" patch call failed:\n"))
1046 ui.write(_(" patch call failed:\n"))
1047 ui.write(" " + str(e) + "\n")
1047 ui.write(" " + str(e) + "\n")
1048 patchproblems += 1
1048 patchproblems += 1
1049 else:
1049 else:
1050 if list(files) != [os.path.basename(fa)]:
1050 if list(files) != [os.path.basename(fa)]:
1051 ui.write(_(" unexpected patch output!\n"))
1051 ui.write(_(" unexpected patch output!\n"))
1052 patchproblems += 1
1052 patchproblems += 1
1053 a = open(fa).read()
1053 a = open(fa).read()
1054 if a != b:
1054 if a != b:
1055 ui.write(_(" patch test failed!\n"))
1055 ui.write(_(" patch test failed!\n"))
1056 patchproblems += 1
1056 patchproblems += 1
1057
1057
1058 if patchproblems:
1058 if patchproblems:
1059 if ui.config('ui', 'patch'):
1059 if ui.config('ui', 'patch'):
1060 ui.write(_(" (Current patch tool may be incompatible with patch,"
1060 ui.write(_(" (Current patch tool may be incompatible with patch,"
1061 " or misconfigured. Please check your .hgrc file)\n"))
1061 " or misconfigured. Please check your .hgrc file)\n"))
1062 else:
1062 else:
1063 ui.write(_(" Internal patcher failure, please report this error"
1063 ui.write(_(" Internal patcher failure, please report this error"
1064 " to http://mercurial.selenic.com/bts/\n"))
1064 " to http://mercurial.selenic.com/bts/\n"))
1065 problems += patchproblems
1065 problems += patchproblems
1066
1066
1067 os.unlink(fa)
1067 os.unlink(fa)
1068 os.unlink(fd)
1068 os.unlink(fd)
1069
1069
1070 # editor
1070 # editor
1071 ui.status(_("Checking commit editor...\n"))
1071 ui.status(_("Checking commit editor...\n"))
1072 editor = ui.geteditor()
1072 editor = ui.geteditor()
1073 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1073 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1074 if not cmdpath:
1074 if not cmdpath:
1075 if editor == 'vi':
1075 if editor == 'vi':
1076 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1076 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1077 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1077 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1078 else:
1078 else:
1079 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1079 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1080 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1080 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1081 problems += 1
1081 problems += 1
1082
1082
1083 # check username
1083 # check username
1084 ui.status(_("Checking username...\n"))
1084 ui.status(_("Checking username...\n"))
1085 try:
1085 try:
1086 user = ui.username()
1086 user = ui.username()
1087 except util.Abort, e:
1087 except util.Abort, e:
1088 ui.write(" %s\n" % e)
1088 ui.write(" %s\n" % e)
1089 ui.write(_(" (specify a username in your .hgrc file)\n"))
1089 ui.write(_(" (specify a username in your .hgrc file)\n"))
1090 problems += 1
1090 problems += 1
1091
1091
1092 if not problems:
1092 if not problems:
1093 ui.status(_("No problems detected\n"))
1093 ui.status(_("No problems detected\n"))
1094 else:
1094 else:
1095 ui.write(_("%s problems detected,"
1095 ui.write(_("%s problems detected,"
1096 " please check your install!\n") % problems)
1096 " please check your install!\n") % problems)
1097
1097
1098 return problems
1098 return problems
1099
1099
1100 def debugrename(ui, repo, file1, *pats, **opts):
1100 def debugrename(ui, repo, file1, *pats, **opts):
1101 """dump rename information"""
1101 """dump rename information"""
1102
1102
1103 ctx = repo[opts.get('rev')]
1103 ctx = repo[opts.get('rev')]
1104 m = cmdutil.match(repo, (file1,) + pats, opts)
1104 m = cmdutil.match(repo, (file1,) + pats, opts)
1105 for abs in ctx.walk(m):
1105 for abs in ctx.walk(m):
1106 fctx = ctx[abs]
1106 fctx = ctx[abs]
1107 o = fctx.filelog().renamed(fctx.filenode())
1107 o = fctx.filelog().renamed(fctx.filenode())
1108 rel = m.rel(abs)
1108 rel = m.rel(abs)
1109 if o:
1109 if o:
1110 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1110 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1111 else:
1111 else:
1112 ui.write(_("%s not renamed\n") % rel)
1112 ui.write(_("%s not renamed\n") % rel)
1113
1113
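# A small hedged example (file names invented): debugrename reports the copy
# source recorded in the filelog metadata.
#
#   $ hg rename foo.c bar.c && hg commit -m 'rename foo.c'
#   $ hg debugrename -r tip bar.c
#   bar.c renamed from foo.c:<40-hex-digit filelog node>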
1114 def debugwalk(ui, repo, *pats, **opts):
1114 def debugwalk(ui, repo, *pats, **opts):
1115 """show how files match on given patterns"""
1115 """show how files match on given patterns"""
1116 m = cmdutil.match(repo, pats, opts)
1116 m = cmdutil.match(repo, pats, opts)
1117 items = list(repo.walk(m))
1117 items = list(repo.walk(m))
1118 if not items:
1118 if not items:
1119 return
1119 return
1120 fmt = 'f %%-%ds %%-%ds %%s' % (
1120 fmt = 'f %%-%ds %%-%ds %%s' % (
1121 max([len(abs) for abs in items]),
1121 max([len(abs) for abs in items]),
1122 max([len(m.rel(abs)) for abs in items]))
1122 max([len(m.rel(abs)) for abs in items]))
1123 for abs in items:
1123 for abs in items:
1124 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1124 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1125 ui.write("%s\n" % line.rstrip())
1125 ui.write("%s\n" % line.rstrip())
1126
1126
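# An illustrative run of the matcher dump above ("f <abs> <rel> [exact]"); the
# pattern and paths are placeholders.
#
#   $ hg debugwalk 'glob:src/**.py'
#   f src/a.py      src/a.py
#   f src/pkg/b.py  src/pkg/b.py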
1127 def diff(ui, repo, *pats, **opts):
1127 def diff(ui, repo, *pats, **opts):
1128 """diff repository (or selected files)
1128 """diff repository (or selected files)
1129
1129
1130 Show differences between revisions for the specified files.
1130 Show differences between revisions for the specified files.
1131
1131
1132 Differences between files are shown using the unified diff format.
1132 Differences between files are shown using the unified diff format.
1133
1133
1134 NOTE: diff may generate unexpected results for merges, as it will
1134 NOTE: diff may generate unexpected results for merges, as it will
1135 default to comparing against the working directory's first parent
1135 default to comparing against the working directory's first parent
1136 changeset if no revisions are specified.
1136 changeset if no revisions are specified.
1137
1137
1138 When two revision arguments are given, then changes are shown
1138 When two revision arguments are given, then changes are shown
1139 between those revisions. If only one revision is specified then
1139 between those revisions. If only one revision is specified then
1140 that revision is compared to the working directory, and, when no
1140 that revision is compared to the working directory, and, when no
1141 revisions are specified, the working directory files are compared
1141 revisions are specified, the working directory files are compared
1142 to the working directory's parent.
1142 to the working directory's parent.
1143
1143
1144 Alternatively you can specify -c/--change with a revision to see
1144 Alternatively you can specify -c/--change with a revision to see
1145 the changes in that changeset relative to its first parent.
1145 the changes in that changeset relative to its first parent.
1146
1146
1147 Without the -a/--text option, diff will avoid generating diffs of
1147 Without the -a/--text option, diff will avoid generating diffs of
1148 files it detects as binary. With -a, diff will generate a diff
1148 files it detects as binary. With -a, diff will generate a diff
1149 anyway, probably with undesirable results.
1149 anyway, probably with undesirable results.
1150
1150
1151 Use the -g/--git option to generate diffs in the git extended diff
1151 Use the -g/--git option to generate diffs in the git extended diff
1152 format. For more information, read 'hg help diffs'.
1152 format. For more information, read 'hg help diffs'.
1153 """
1153 """
1154
1154
1155 revs = opts.get('rev')
1155 revs = opts.get('rev')
1156 change = opts.get('change')
1156 change = opts.get('change')
1157 stat = opts.get('stat')
1157 stat = opts.get('stat')
1158 reverse = opts.get('reverse')
1158 reverse = opts.get('reverse')
1159
1159
1160 if revs and change:
1160 if revs and change:
1161 msg = _('cannot specify --rev and --change at the same time')
1161 msg = _('cannot specify --rev and --change at the same time')
1162 raise util.Abort(msg)
1162 raise util.Abort(msg)
1163 elif change:
1163 elif change:
1164 node2 = repo.lookup(change)
1164 node2 = repo.lookup(change)
1165 node1 = repo[node2].parents()[0].node()
1165 node1 = repo[node2].parents()[0].node()
1166 else:
1166 else:
1167 node1, node2 = cmdutil.revpair(repo, revs)
1167 node1, node2 = cmdutil.revpair(repo, revs)
1168
1168
1169 if reverse:
1169 if reverse:
1170 node1, node2 = node2, node1
1170 node1, node2 = node2, node1
1171
1171
1172 if stat:
1172 if stat:
1173 opts['unified'] = '0'
1173 opts['unified'] = '0'
1174 diffopts = patch.diffopts(ui, opts)
1174 diffopts = patch.diffopts(ui, opts)
1175
1175
1176 m = cmdutil.match(repo, pats, opts)
1176 m = cmdutil.match(repo, pats, opts)
1177 it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
1177 it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
1178 if stat:
1178 if stat:
1179 width = ui.interactive() and util.termwidth() or 80
1179 width = ui.interactive() and util.termwidth() or 80
1180 ui.write(patch.diffstat(util.iterlines(it), width=width,
1180 ui.write(patch.diffstat(util.iterlines(it), width=width,
1181 git=diffopts.git))
1181 git=diffopts.git))
1182 else:
1182 else:
1183 for chunk in it:
1183 for chunk in it:
1184 ui.write(chunk)
1184 ui.write(chunk)
1185
1185
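# Hedged examples of the revision-selection rules spelled out in the docstring
# above; revision numbers are placeholders.
#
#   $ hg diff                      # working directory vs. its first parent
#   $ hg diff -r 10 -r 12 foo.c    # foo.c between two revisions
#   $ hg diff -c 12                # what changeset 12 changed
#   $ hg diff --stat -r 10:12      # per-file change summary instead of a patch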
1186 def export(ui, repo, *changesets, **opts):
1186 def export(ui, repo, *changesets, **opts):
1187 """dump the header and diffs for one or more changesets
1187 """dump the header and diffs for one or more changesets
1188
1188
1189 Print the changeset header and diffs for one or more revisions.
1189 Print the changeset header and diffs for one or more revisions.
1190
1190
1191 The information shown in the changeset header is: author, date,
1191 The information shown in the changeset header is: author, date,
1192 branch name (if non-default), changeset hash, parent(s) and commit
1192 branch name (if non-default), changeset hash, parent(s) and commit
1193 comment.
1193 comment.
1194
1194
1195 NOTE: export may generate unexpected diff output for merge
1195 NOTE: export may generate unexpected diff output for merge
1196 changesets, as it will compare the merge changeset against its
1196 changesets, as it will compare the merge changeset against its
1197 first parent only.
1197 first parent only.
1198
1198
1199 Output may be to a file, in which case the name of the file is
1199 Output may be to a file, in which case the name of the file is
1200 given using a format string. The formatting rules are as follows:
1200 given using a format string. The formatting rules are as follows:
1201
1201
1202 :``%%``: literal "%" character
1202 :``%%``: literal "%" character
1203 :``%H``: changeset hash (40 bytes of hexadecimal)
1203 :``%H``: changeset hash (40 bytes of hexadecimal)
1204 :``%N``: number of patches being generated
1204 :``%N``: number of patches being generated
1205 :``%R``: changeset revision number
1205 :``%R``: changeset revision number
1206 :``%b``: basename of the exporting repository
1206 :``%b``: basename of the exporting repository
1207 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1207 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1208 :``%n``: zero-padded sequence number, starting at 1
1208 :``%n``: zero-padded sequence number, starting at 1
1209 :``%r``: zero-padded changeset revision number
1209 :``%r``: zero-padded changeset revision number
1210
1210
1211 Without the -a/--text option, export will avoid generating diffs
1211 Without the -a/--text option, export will avoid generating diffs
1212 of files it detects as binary. With -a, export will generate a
1212 of files it detects as binary. With -a, export will generate a
1213 diff anyway, probably with undesirable results.
1213 diff anyway, probably with undesirable results.
1214
1214
1215 Use the -g/--git option to generate diffs in the git extended diff
1215 Use the -g/--git option to generate diffs in the git extended diff
1216 format. See 'hg help diffs' for more information.
1216 format. See 'hg help diffs' for more information.
1217
1217
1218 With the --switch-parent option, the diff will be against the
1218 With the --switch-parent option, the diff will be against the
1219 second parent. This can be useful for reviewing a merge.
1219 second parent. This can be useful for reviewing a merge.
1220 """
1220 """
1221 changesets += tuple(opts.get('rev', []))
1221 changesets += tuple(opts.get('rev', []))
1222 if not changesets:
1222 if not changesets:
1223 raise util.Abort(_("export requires at least one changeset"))
1223 raise util.Abort(_("export requires at least one changeset"))
1224 revs = cmdutil.revrange(repo, changesets)
1224 revs = cmdutil.revrange(repo, changesets)
1225 if len(revs) > 1:
1225 if len(revs) > 1:
1226 ui.note(_('exporting patches:\n'))
1226 ui.note(_('exporting patches:\n'))
1227 else:
1227 else:
1228 ui.note(_('exporting patch:\n'))
1228 ui.note(_('exporting patch:\n'))
1229 cmdutil.export(repo, revs, template=opts.get('output'),
1229 cmdutil.export(repo, revs, template=opts.get('output'),
1230 switch_parent=opts.get('switch_parent'),
1230 switch_parent=opts.get('switch_parent'),
1231 opts=patch.diffopts(ui, opts))
1231 opts=patch.diffopts(ui, opts))
1232
1232
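# A hedged illustration of the %-escapes documented above; the revisions are
# placeholders.
#
#   $ hg export -o "%R-%h.patch" 100 101   # writes 100-<shorthash>.patch, 101-<shorthash>.patch
#   $ hg export --git tip > tip.patch      # git extended diff on stdout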
1233 def forget(ui, repo, *pats, **opts):
1233 def forget(ui, repo, *pats, **opts):
1234 """forget the specified files on the next commit
1234 """forget the specified files on the next commit
1235
1235
1236 Mark the specified files so they will no longer be tracked
1236 Mark the specified files so they will no longer be tracked
1237 after the next commit.
1237 after the next commit.
1238
1238
1239 This only removes files from the current branch, not from the
1239 This only removes files from the current branch, not from the
1240 entire project history, and it does not delete them from the
1240 entire project history, and it does not delete them from the
1241 working directory.
1241 working directory.
1242
1242
1243 To undo a forget before the next commit, see hg add.
1243 To undo a forget before the next commit, see hg add.
1244 """
1244 """
1245
1245
1246 if not pats:
1246 if not pats:
1247 raise util.Abort(_('no files specified'))
1247 raise util.Abort(_('no files specified'))
1248
1248
1249 m = cmdutil.match(repo, pats, opts)
1249 m = cmdutil.match(repo, pats, opts)
1250 s = repo.status(match=m, clean=True)
1250 s = repo.status(match=m, clean=True)
1251 forget = sorted(s[0] + s[1] + s[3] + s[6])
1251 forget = sorted(s[0] + s[1] + s[3] + s[6])
1252
1252
1253 for f in m.files():
1253 for f in m.files():
1254 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1254 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1255 ui.warn(_('not removing %s: file is already untracked\n')
1255 ui.warn(_('not removing %s: file is already untracked\n')
1256 % m.rel(f))
1256 % m.rel(f))
1257
1257
1258 for f in forget:
1258 for f in forget:
1259 if ui.verbose or not m.exact(f):
1259 if ui.verbose or not m.exact(f):
1260 ui.status(_('removing %s\n') % m.rel(f))
1260 ui.status(_('removing %s\n') % m.rel(f))
1261
1261
1262 repo.remove(forget, unlink=False)
1262 repo.remove(forget, unlink=False)
1263
1263
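# A minimal hedged example (the file name is invented): the file stays on disk
# but is scheduled to stop being tracked at the next commit.
#
#   $ hg forget generated.h
#   $ hg status generated.h
#   R generated.h
#   $ hg add generated.h      # undoes the forget, as the docstring above notes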
1264 def grep(ui, repo, pattern, *pats, **opts):
1264 def grep(ui, repo, pattern, *pats, **opts):
1265 """search for a pattern in specified files and revisions
1265 """search for a pattern in specified files and revisions
1266
1266
1267 Search revisions of files for a regular expression.
1267 Search revisions of files for a regular expression.
1268
1268
1269 This command behaves differently than Unix grep. It only accepts
1269 This command behaves differently than Unix grep. It only accepts
1270 Python/Perl regexps. It searches repository history, not the
1270 Python/Perl regexps. It searches repository history, not the
1271 working directory. It always prints the revision number in which a
1271 working directory. It always prints the revision number in which a
1272 match appears.
1272 match appears.
1273
1273
1274 By default, grep only prints output for the first revision of a
1274 By default, grep only prints output for the first revision of a
1275 file in which it finds a match. To get it to print every revision
1275 file in which it finds a match. To get it to print every revision
1276 that contains a change in match status ("-" for a match that
1276 that contains a change in match status ("-" for a match that
1277 becomes a non-match, or "+" for a non-match that becomes a match),
1277 becomes a non-match, or "+" for a non-match that becomes a match),
1278 use the --all flag.
1278 use the --all flag.
1279 """
1279 """
1280 reflags = 0
1280 reflags = 0
1281 if opts.get('ignore_case'):
1281 if opts.get('ignore_case'):
1282 reflags |= re.I
1282 reflags |= re.I
1283 try:
1283 try:
1284 regexp = re.compile(pattern, reflags)
1284 regexp = re.compile(pattern, reflags)
1285 except Exception, inst:
1285 except Exception, inst:
1286 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1286 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1287 return None
1287 return None
1288 sep, eol = ':', '\n'
1288 sep, eol = ':', '\n'
1289 if opts.get('print0'):
1289 if opts.get('print0'):
1290 sep = eol = '\0'
1290 sep = eol = '\0'
1291
1291
1292 getfile = util.lrucachefunc(repo.file)
1292 getfile = util.lrucachefunc(repo.file)
1293
1293
1294 def matchlines(body):
1294 def matchlines(body):
1295 begin = 0
1295 begin = 0
1296 linenum = 0
1296 linenum = 0
1297 while True:
1297 while True:
1298 match = regexp.search(body, begin)
1298 match = regexp.search(body, begin)
1299 if not match:
1299 if not match:
1300 break
1300 break
1301 mstart, mend = match.span()
1301 mstart, mend = match.span()
1302 linenum += body.count('\n', begin, mstart) + 1
1302 linenum += body.count('\n', begin, mstart) + 1
1303 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1303 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1304 begin = body.find('\n', mend) + 1 or len(body)
1304 begin = body.find('\n', mend) + 1 or len(body)
1305 lend = begin - 1
1305 lend = begin - 1
1306 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1306 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1307
1307
1308 class linestate(object):
1308 class linestate(object):
1309 def __init__(self, line, linenum, colstart, colend):
1309 def __init__(self, line, linenum, colstart, colend):
1310 self.line = line
1310 self.line = line
1311 self.linenum = linenum
1311 self.linenum = linenum
1312 self.colstart = colstart
1312 self.colstart = colstart
1313 self.colend = colend
1313 self.colend = colend
1314
1314
1315 def __hash__(self):
1315 def __hash__(self):
1316 return hash((self.linenum, self.line))
1316 return hash((self.linenum, self.line))
1317
1317
1318 def __eq__(self, other):
1318 def __eq__(self, other):
1319 return self.line == other.line
1319 return self.line == other.line
1320
1320
1321 matches = {}
1321 matches = {}
1322 copymap = {}
1322 copymap = {}
1323 def grepbody(fn, rev, body):
1323 def grepbody(fn, rev, body):
1324 matches[rev].setdefault(fn, [])
1324 matches[rev].setdefault(fn, [])
1325 m = matches[rev][fn]
1325 m = matches[rev][fn]
1326 for lnum, cstart, cend, line in matchlines(body):
1326 for lnum, cstart, cend, line in matchlines(body):
1327 s = linestate(line, lnum, cstart, cend)
1327 s = linestate(line, lnum, cstart, cend)
1328 m.append(s)
1328 m.append(s)
1329
1329
1330 def difflinestates(a, b):
1330 def difflinestates(a, b):
1331 sm = difflib.SequenceMatcher(None, a, b)
1331 sm = difflib.SequenceMatcher(None, a, b)
1332 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1332 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1333 if tag == 'insert':
1333 if tag == 'insert':
1334 for i in xrange(blo, bhi):
1334 for i in xrange(blo, bhi):
1335 yield ('+', b[i])
1335 yield ('+', b[i])
1336 elif tag == 'delete':
1336 elif tag == 'delete':
1337 for i in xrange(alo, ahi):
1337 for i in xrange(alo, ahi):
1338 yield ('-', a[i])
1338 yield ('-', a[i])
1339 elif tag == 'replace':
1339 elif tag == 'replace':
1340 for i in xrange(alo, ahi):
1340 for i in xrange(alo, ahi):
1341 yield ('-', a[i])
1341 yield ('-', a[i])
1342 for i in xrange(blo, bhi):
1342 for i in xrange(blo, bhi):
1343 yield ('+', b[i])
1343 yield ('+', b[i])
1344
1344
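# Illustrative note (not from the patch): difflinestates() turns SequenceMatcher
# opcodes into the "+"/"-" stream that --all printing consumes. For instance,
# if a holds one linestate whose .line is "old" and b holds one whose .line is
# "new", the 'replace' opcode yields ('-', a[0]) and then ('+', b[0]).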
1345 def display(fn, ctx, pstates, states):
1345 def display(fn, ctx, pstates, states):
1346 rev = ctx.rev()
1346 rev = ctx.rev()
1347 datefunc = ui.quiet and util.shortdate or util.datestr
1347 datefunc = ui.quiet and util.shortdate or util.datestr
1348 found = False
1348 found = False
1349 filerevmatches = {}
1349 filerevmatches = {}
1350 if opts.get('all'):
1350 if opts.get('all'):
1351 iter = difflinestates(pstates, states)
1351 iter = difflinestates(pstates, states)
1352 else:
1352 else:
1353 iter = [('', l) for l in states]
1353 iter = [('', l) for l in states]
1354 for change, l in iter:
1354 for change, l in iter:
1355 cols = [fn, str(rev)]
1355 cols = [fn, str(rev)]
1356 if opts.get('line_number'):
1356 if opts.get('line_number'):
1357 cols.append(str(l.linenum))
1357 cols.append(str(l.linenum))
1358 if opts.get('all'):
1358 if opts.get('all'):
1359 cols.append(change)
1359 cols.append(change)
1360 if opts.get('user'):
1360 if opts.get('user'):
1361 cols.append(ui.shortuser(ctx.user()))
1361 cols.append(ui.shortuser(ctx.user()))
1362 if opts.get('date'):
1362 if opts.get('date'):
1363 cols.append(datefunc(ctx.date()))
1363 cols.append(datefunc(ctx.date()))
1364 if opts.get('files_with_matches'):
1364 if opts.get('files_with_matches'):
1365 c = (fn, rev)
1365 c = (fn, rev)
1366 if c in filerevmatches:
1366 if c in filerevmatches:
1367 continue
1367 continue
1368 filerevmatches[c] = 1
1368 filerevmatches[c] = 1
1369 else:
1369 else:
1370 cols.append(l.line)
1370 cols.append(l.line)
1371 ui.write(sep.join(cols), eol)
1371 ui.write(sep.join(cols), eol)
1372 found = True
1372 found = True
1373 return found
1373 return found
1374
1374
1375 skip = {}
1375 skip = {}
1376 revfiles = {}
1376 revfiles = {}
1377 matchfn = cmdutil.match(repo, pats, opts)
1377 matchfn = cmdutil.match(repo, pats, opts)
1378 found = False
1378 found = False
1379 follow = opts.get('follow')
1379 follow = opts.get('follow')
1380
1380
1381 def prep(ctx, fns):
1381 def prep(ctx, fns):
1382 rev = ctx.rev()
1382 rev = ctx.rev()
1383 pctx = ctx.parents()[0]
1383 pctx = ctx.parents()[0]
1384 parent = pctx.rev()
1384 parent = pctx.rev()
1385 matches.setdefault(rev, {})
1385 matches.setdefault(rev, {})
1386 matches.setdefault(parent, {})
1386 matches.setdefault(parent, {})
1387 files = revfiles.setdefault(rev, [])
1387 files = revfiles.setdefault(rev, [])
1388 for fn in fns:
1388 for fn in fns:
1389 flog = getfile(fn)
1389 flog = getfile(fn)
1390 try:
1390 try:
1391 fnode = ctx.filenode(fn)
1391 fnode = ctx.filenode(fn)
1392 except error.LookupError:
1392 except error.LookupError:
1393 continue
1393 continue
1394
1394
1395 copied = flog.renamed(fnode)
1395 copied = flog.renamed(fnode)
1396 copy = follow and copied and copied[0]
1396 copy = follow and copied and copied[0]
1397 if copy:
1397 if copy:
1398 copymap.setdefault(rev, {})[fn] = copy
1398 copymap.setdefault(rev, {})[fn] = copy
1399 if fn in skip:
1399 if fn in skip:
1400 if copy:
1400 if copy:
1401 skip[copy] = True
1401 skip[copy] = True
1402 continue
1402 continue
1403 files.append(fn)
1403 files.append(fn)
1404
1404
1405 if fn not in matches[rev]:
1405 if fn not in matches[rev]:
1406 grepbody(fn, rev, flog.read(fnode))
1406 grepbody(fn, rev, flog.read(fnode))
1407
1407
1408 pfn = copy or fn
1408 pfn = copy or fn
1409 if pfn not in matches[parent]:
1409 if pfn not in matches[parent]:
1410 try:
1410 try:
1411 fnode = pctx.filenode(pfn)
1411 fnode = pctx.filenode(pfn)
1412 grepbody(pfn, parent, flog.read(fnode))
1412 grepbody(pfn, parent, flog.read(fnode))
1413 except error.LookupError:
1413 except error.LookupError:
1414 pass
1414 pass
1415
1415
1416 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1416 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1417 rev = ctx.rev()
1417 rev = ctx.rev()
1418 parent = ctx.parents()[0].rev()
1418 parent = ctx.parents()[0].rev()
1419 for fn in sorted(revfiles.get(rev, [])):
1419 for fn in sorted(revfiles.get(rev, [])):
1420 states = matches[rev][fn]
1420 states = matches[rev][fn]
1421 copy = copymap.get(rev, {}).get(fn)
1421 copy = copymap.get(rev, {}).get(fn)
1422 if fn in skip:
1422 if fn in skip:
1423 if copy:
1423 if copy:
1424 skip[copy] = True
1424 skip[copy] = True
1425 continue
1425 continue
1426 pstates = matches.get(parent, {}).get(copy or fn, [])
1426 pstates = matches.get(parent, {}).get(copy or fn, [])
1427 if pstates or states:
1427 if pstates or states:
1428 r = display(fn, ctx, pstates, states)
1428 r = display(fn, ctx, pstates, states)
1429 found = found or r
1429 found = found or r
1430 if r and not opts.get('all'):
1430 if r and not opts.get('all'):
1431 skip[fn] = True
1431 skip[fn] = True
1432 if copy:
1432 if copy:
1433 skip[copy] = True
1433 skip[copy] = True
1434 del matches[rev]
1434 del matches[rev]
1435 del revfiles[rev]
1435 del revfiles[rev]
1436
1436
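# Hedged invocations matching the docstring above; the pattern and path are
# placeholders.
#
#   $ hg grep -n "frobnicate" src/          # first matching revision per file
#   $ hg grep --all -n "frobnicate" src/    # every +/- change in match status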
1437 def heads(ui, repo, *branchrevs, **opts):
1437 def heads(ui, repo, *branchrevs, **opts):
1438 """show current repository heads or show branch heads
1438 """show current repository heads or show branch heads
1439
1439
1440 With no arguments, show all repository branch heads.
1440 With no arguments, show all repository branch heads.
1441
1441
1442 Repository "heads" are changesets with no child changesets. They are
1442 Repository "heads" are changesets with no child changesets. They are
1443 where development generally takes place and are the usual targets
1443 where development generally takes place and are the usual targets
1444 for update and merge operations. Branch heads are changesets that have
1444 for update and merge operations. Branch heads are changesets that have
1445 no child changeset on the same branch.
1445 no child changeset on the same branch.
1446
1446
1447 If one or more REVs are given, only branch heads on the branches
1447 If one or more REVs are given, only branch heads on the branches
1448 associated with the specified changesets are shown.
1448 associated with the specified changesets are shown.
1449
1449
1450 If -c/--closed is specified, also show branch heads marked closed
1450 If -c/--closed is specified, also show branch heads marked closed
1451 (see hg commit --close-branch).
1451 (see hg commit --close-branch).
1452
1452
1453 If STARTREV is specified, only those heads that are descendants of
1453 If STARTREV is specified, only those heads that are descendants of
1454 STARTREV will be displayed.
1454 STARTREV will be displayed.
1455
1455
1456 If -t/--topo is specified, named branch mechanics will be ignored and only
1456 If -t/--topo is specified, named branch mechanics will be ignored and only
1457 changesets without children will be shown.
1457 changesets without children will be shown.
1458 """
1458 """
1459
1459
1460 if opts.get('rev'):
1460 if opts.get('rev'):
1461 start = repo.lookup(opts['rev'])
1461 start = repo.lookup(opts['rev'])
1462 else:
1462 else:
1463 start = None
1463 start = None
1464
1464
1465 if opts.get('topo'):
1465 if opts.get('topo'):
1466 heads = [repo[h] for h in repo.heads(start)]
1466 heads = [repo[h] for h in repo.heads(start)]
1467 else:
1467 else:
1468 heads = []
1468 heads = []
1469 for b, ls in repo.branchmap().iteritems():
1469 for b, ls in repo.branchmap().iteritems():
1470 if start is None:
1470 if start is None:
1471 heads += [repo[h] for h in ls]
1471 heads += [repo[h] for h in ls]
1472 continue
1472 continue
1473 startrev = repo.changelog.rev(start)
1473 startrev = repo.changelog.rev(start)
1474 descendants = set(repo.changelog.descendants(startrev))
1474 descendants = set(repo.changelog.descendants(startrev))
1475 descendants.add(startrev)
1475 descendants.add(startrev)
1476 rev = repo.changelog.rev
1476 rev = repo.changelog.rev
1477 heads += [repo[h] for h in ls if rev(h) in descendants]
1477 heads += [repo[h] for h in ls if rev(h) in descendants]
1478
1478
1479 if branchrevs:
1479 if branchrevs:
1480 decode, encode = encoding.fromlocal, encoding.tolocal
1480 decode, encode = encoding.fromlocal, encoding.tolocal
1481 branches = set(repo[decode(br)].branch() for br in branchrevs)
1481 branches = set(repo[decode(br)].branch() for br in branchrevs)
1482 heads = [h for h in heads if h.branch() in branches]
1482 heads = [h for h in heads if h.branch() in branches]
1483
1483
1484 if not opts.get('closed'):
1484 if not opts.get('closed'):
1485 heads = [h for h in heads if not h.extra().get('close')]
1485 heads = [h for h in heads if not h.extra().get('close')]
1486
1486
1487 if opts.get('active') and branchrevs:
1487 if opts.get('active') and branchrevs:
1488 dagheads = repo.heads(start)
1488 dagheads = repo.heads(start)
1489 heads = [h for h in heads if h.node() in dagheads]
1489 heads = [h for h in heads if h.node() in dagheads]
1490
1490
1491 if branchrevs:
1491 if branchrevs:
1492 haveheads = set(h.branch() for h in heads)
1492 haveheads = set(h.branch() for h in heads)
1493 if branches - haveheads:
1493 if branches - haveheads:
1494 headless = ', '.join(encode(b) for b in branches - haveheads)
1494 headless = ', '.join(encode(b) for b in branches - haveheads)
1495 msg = _('no open branch heads found on branches %s')
1495 msg = _('no open branch heads found on branches %s')
1496 if opts.get('rev'):
1496 if opts.get('rev'):
1497 msg += _(' (started at %s)') % opts['rev']
1497 msg += _(' (started at %s)') % opts['rev']
1498 ui.warn((msg + '\n') % headless)
1498 ui.warn((msg + '\n') % headless)
1499
1499
1500 if not heads:
1500 if not heads:
1501 return 1
1501 return 1
1502
1502
1503 heads = sorted(heads, key=lambda x: -x.rev())
1503 heads = sorted(heads, key=lambda x: -x.rev())
1504 displayer = cmdutil.show_changeset(ui, repo, opts)
1504 displayer = cmdutil.show_changeset(ui, repo, opts)
1505 for ctx in heads:
1505 for ctx in heads:
1506 displayer.show(ctx)
1506 displayer.show(ctx)
1507 displayer.close()
1507 displayer.close()
1508
1508
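# Hedged examples of the head-selection modes described above; "default" is
# simply the conventional branch name.
#
#   $ hg heads                    # open heads of every named branch
#   $ hg heads default            # heads of the "default" branch only
#   $ hg heads --topo             # changesets with no children, branches ignored
#   $ hg heads --closed default   # also include heads marked closed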
1509 def help_(ui, name=None, with_version=False, unknowncmd=False):
1509 def help_(ui, name=None, with_version=False, unknowncmd=False):
1510 """show help for a given topic or a help overview
1510 """show help for a given topic or a help overview
1511
1511
1512 With no arguments, print a list of commands with short help messages.
1512 With no arguments, print a list of commands with short help messages.
1513
1513
1514 Given a topic, extension, or command name, print help for that
1514 Given a topic, extension, or command name, print help for that
1515 topic."""
1515 topic."""
1516 option_lists = []
1516 option_lists = []
1517 textwidth = util.termwidth() - 2
1517 textwidth = util.termwidth() - 2
1518
1518
1519 def addglobalopts(aliases):
1519 def addglobalopts(aliases):
1520 if ui.verbose:
1520 if ui.verbose:
1521 option_lists.append((_("global options:"), globalopts))
1521 option_lists.append((_("global options:"), globalopts))
1522 if name == 'shortlist':
1522 if name == 'shortlist':
1523 option_lists.append((_('use "hg help" for the full list '
1523 option_lists.append((_('use "hg help" for the full list '
1524 'of commands'), ()))
1524 'of commands'), ()))
1525 else:
1525 else:
1526 if name == 'shortlist':
1526 if name == 'shortlist':
1527 msg = _('use "hg help" for the full list of commands '
1527 msg = _('use "hg help" for the full list of commands '
1528 'or "hg -v" for details')
1528 'or "hg -v" for details')
1529 elif aliases:
1529 elif aliases:
1530 msg = _('use "hg -v help%s" to show aliases and '
1530 msg = _('use "hg -v help%s" to show aliases and '
1531 'global options') % (name and " " + name or "")
1531 'global options') % (name and " " + name or "")
1532 else:
1532 else:
1533 msg = _('use "hg -v help %s" to show global options') % name
1533 msg = _('use "hg -v help %s" to show global options') % name
1534 option_lists.append((msg, ()))
1534 option_lists.append((msg, ()))
1535
1535
1536 def helpcmd(name):
1536 def helpcmd(name):
1537 if with_version:
1537 if with_version:
1538 version_(ui)
1538 version_(ui)
1539 ui.write('\n')
1539 ui.write('\n')
1540
1540
1541 try:
1541 try:
1542 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1542 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1543 except error.AmbiguousCommand, inst:
1543 except error.AmbiguousCommand, inst:
1544 # py3k fix: except vars can't be used outside the scope of the
1544 # py3k fix: except vars can't be used outside the scope of the
1545 # except block, nor can be used inside a lambda. python issue4617
1545 # except block, nor can be used inside a lambda. python issue4617
1546 prefix = inst.args[0]
1546 prefix = inst.args[0]
1547 select = lambda c: c.lstrip('^').startswith(prefix)
1547 select = lambda c: c.lstrip('^').startswith(prefix)
1548 helplist(_('list of commands:\n\n'), select)
1548 helplist(_('list of commands:\n\n'), select)
1549 return
1549 return
1550
1550
1551 # check if it's an invalid alias and display its error if it is
1551 # check if it's an invalid alias and display its error if it is
1552 if getattr(entry[0], 'badalias', False):
1552 if getattr(entry[0], 'badalias', False):
1553 if not unknowncmd:
1553 if not unknowncmd:
1554 entry[0](ui)
1554 entry[0](ui)
1555 return
1555 return
1556
1556
1557 # synopsis
1557 # synopsis
1558 if len(entry) > 2:
1558 if len(entry) > 2:
1559 if entry[2].startswith('hg'):
1559 if entry[2].startswith('hg'):
1560 ui.write("%s\n" % entry[2])
1560 ui.write("%s\n" % entry[2])
1561 else:
1561 else:
1562 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1562 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1563 else:
1563 else:
1564 ui.write('hg %s\n' % aliases[0])
1564 ui.write('hg %s\n' % aliases[0])
1565
1565
1566 # aliases
1566 # aliases
1567 if not ui.quiet and len(aliases) > 1:
1567 if not ui.quiet and len(aliases) > 1:
1568 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1568 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1569
1569
1570 # description
1570 # description
1571 doc = gettext(entry[0].__doc__)
1571 doc = gettext(entry[0].__doc__)
1572 if not doc:
1572 if not doc:
1573 doc = _("(no help text available)")
1573 doc = _("(no help text available)")
1574 if hasattr(entry[0], 'definition'): # aliased command
1574 if hasattr(entry[0], 'definition'): # aliased command
1575 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
1575 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
1576 if ui.quiet:
1576 if ui.quiet:
1577 doc = doc.splitlines()[0]
1577 doc = doc.splitlines()[0]
1578 keep = ui.verbose and ['verbose'] or []
1578 keep = ui.verbose and ['verbose'] or []
1579 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
1579 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
1580 ui.write("\n%s\n" % formatted)
1580 ui.write("\n%s\n" % formatted)
1581 if pruned:
1581 if pruned:
1582 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
1582 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
1583
1583
1584 if not ui.quiet:
1584 if not ui.quiet:
1585 # options
1585 # options
1586 if entry[1]:
1586 if entry[1]:
1587 option_lists.append((_("options:\n"), entry[1]))
1587 option_lists.append((_("options:\n"), entry[1]))
1588
1588
1589 addglobalopts(False)
1589 addglobalopts(False)
1590
1590
1591 def helplist(header, select=None):
1591 def helplist(header, select=None):
1592 h = {}
1592 h = {}
1593 cmds = {}
1593 cmds = {}
1594 for c, e in table.iteritems():
1594 for c, e in table.iteritems():
1595 f = c.split("|", 1)[0]
1595 f = c.split("|", 1)[0]
1596 if select and not select(f):
1596 if select and not select(f):
1597 continue
1597 continue
1598 if (not select and name != 'shortlist' and
1598 if (not select and name != 'shortlist' and
1599 e[0].__module__ != __name__):
1599 e[0].__module__ != __name__):
1600 continue
1600 continue
1601 if name == "shortlist" and not f.startswith("^"):
1601 if name == "shortlist" and not f.startswith("^"):
1602 continue
1602 continue
1603 f = f.lstrip("^")
1603 f = f.lstrip("^")
1604 if not ui.debugflag and f.startswith("debug"):
1604 if not ui.debugflag and f.startswith("debug"):
1605 continue
1605 continue
1606 doc = e[0].__doc__
1606 doc = e[0].__doc__
1607 if doc and 'DEPRECATED' in doc and not ui.verbose:
1607 if doc and 'DEPRECATED' in doc and not ui.verbose:
1608 continue
1608 continue
1609 doc = gettext(doc)
1609 doc = gettext(doc)
1610 if not doc:
1610 if not doc:
1611 doc = _("(no help text available)")
1611 doc = _("(no help text available)")
1612 h[f] = doc.splitlines()[0].rstrip()
1612 h[f] = doc.splitlines()[0].rstrip()
1613 cmds[f] = c.lstrip("^")
1613 cmds[f] = c.lstrip("^")
1614
1614
1615 if not h:
1615 if not h:
1616 ui.status(_('no commands defined\n'))
1616 ui.status(_('no commands defined\n'))
1617 return
1617 return
1618
1618
1619 ui.status(header)
1619 ui.status(header)
1620 fns = sorted(h)
1620 fns = sorted(h)
1621 m = max(map(len, fns))
1621 m = max(map(len, fns))
1622 for f in fns:
1622 for f in fns:
1623 if ui.verbose:
1623 if ui.verbose:
1624 commands = cmds[f].replace("|",", ")
1624 commands = cmds[f].replace("|",", ")
1625 ui.write(" %s:\n %s\n"%(commands, h[f]))
1625 ui.write(" %s:\n %s\n"%(commands, h[f]))
1626 else:
1626 else:
1627 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1627 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1628
1628
1629 if not ui.quiet:
1629 if not ui.quiet:
1630 addglobalopts(True)
1630 addglobalopts(True)
1631
1631
1632 def helptopic(name):
1632 def helptopic(name):
1633 for names, header, doc in help.helptable:
1633 for names, header, doc in help.helptable:
1634 if name in names:
1634 if name in names:
1635 break
1635 break
1636 else:
1636 else:
1637 raise error.UnknownCommand(name)
1637 raise error.UnknownCommand(name)
1638
1638
1639 # description
1639 # description
1640 if not doc:
1640 if not doc:
1641 doc = _("(no help text available)")
1641 doc = _("(no help text available)")
1642 if hasattr(doc, '__call__'):
1642 if hasattr(doc, '__call__'):
1643 doc = doc()
1643 doc = doc()
1644
1644
1645 ui.write("%s\n\n" % header)
1645 ui.write("%s\n\n" % header)
1646 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1646 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1647
1647
1648 def helpext(name):
1648 def helpext(name):
1649 try:
1649 try:
1650 mod = extensions.find(name)
1650 mod = extensions.find(name)
1651 doc = gettext(mod.__doc__) or _('no help text available')
1651 doc = gettext(mod.__doc__) or _('no help text available')
1652 except KeyError:
1652 except KeyError:
1653 mod = None
1653 mod = None
1654 doc = extensions.disabledext(name)
1654 doc = extensions.disabledext(name)
1655 if not doc:
1655 if not doc:
1656 raise error.UnknownCommand(name)
1656 raise error.UnknownCommand(name)
1657
1657
1658 if '\n' not in doc:
1658 if '\n' not in doc:
1659 head, tail = doc, ""
1659 head, tail = doc, ""
1660 else:
1660 else:
1661 head, tail = doc.split('\n', 1)
1661 head, tail = doc.split('\n', 1)
1662 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1662 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1663 if tail:
1663 if tail:
1664 ui.write(minirst.format(tail, textwidth))
1664 ui.write(minirst.format(tail, textwidth))
1665 ui.status('\n\n')
1665 ui.status('\n\n')
1666
1666
1667 if mod:
1667 if mod:
1668 try:
1668 try:
1669 ct = mod.cmdtable
1669 ct = mod.cmdtable
1670 except AttributeError:
1670 except AttributeError:
1671 ct = {}
1671 ct = {}
1672 modcmds = set([c.split('|', 1)[0] for c in ct])
1672 modcmds = set([c.split('|', 1)[0] for c in ct])
1673 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1673 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1674 else:
1674 else:
1675 ui.write(_('use "hg help extensions" for information on enabling '
1675 ui.write(_('use "hg help extensions" for information on enabling '
1676 'extensions\n'))
1676 'extensions\n'))
1677
1677
1678 def helpextcmd(name):
1678 def helpextcmd(name):
1679 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1679 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1680 doc = gettext(mod.__doc__).splitlines()[0]
1680 doc = gettext(mod.__doc__).splitlines()[0]
1681
1681
1682 msg = help.listexts(_("'%s' is provided by the following "
1682 msg = help.listexts(_("'%s' is provided by the following "
1683 "extension:") % cmd, {ext: doc}, len(ext),
1683 "extension:") % cmd, {ext: doc}, len(ext),
1684 indent=4)
1684 indent=4)
1685 ui.write(minirst.format(msg, textwidth))
1685 ui.write(minirst.format(msg, textwidth))
1686 ui.write('\n\n')
1686 ui.write('\n\n')
1687 ui.write(_('use "hg help extensions" for information on enabling '
1687 ui.write(_('use "hg help extensions" for information on enabling '
1688 'extensions\n'))
1688 'extensions\n'))
1689
1689
1690 if name and name != 'shortlist':
1690 if name and name != 'shortlist':
1691 i = None
1691 i = None
1692 if unknowncmd:
1692 if unknowncmd:
1693 queries = (helpextcmd,)
1693 queries = (helpextcmd,)
1694 else:
1694 else:
1695 queries = (helptopic, helpcmd, helpext, helpextcmd)
1695 queries = (helptopic, helpcmd, helpext, helpextcmd)
1696 for f in queries:
1696 for f in queries:
1697 try:
1697 try:
1698 f(name)
1698 f(name)
1699 i = None
1699 i = None
1700 break
1700 break
1701 except error.UnknownCommand, inst:
1701 except error.UnknownCommand, inst:
1702 i = inst
1702 i = inst
1703 if i:
1703 if i:
1704 raise i
1704 raise i
1705
1705
1706 else:
1706 else:
1707 # program name
1707 # program name
1708 if ui.verbose or with_version:
1708 if ui.verbose or with_version:
1709 version_(ui)
1709 version_(ui)
1710 else:
1710 else:
1711 ui.status(_("Mercurial Distributed SCM\n"))
1711 ui.status(_("Mercurial Distributed SCM\n"))
1712 ui.status('\n')
1712 ui.status('\n')
1713
1713
1714 # list of commands
1714 # list of commands
1715 if name == "shortlist":
1715 if name == "shortlist":
1716 header = _('basic commands:\n\n')
1716 header = _('basic commands:\n\n')
1717 else:
1717 else:
1718 header = _('list of commands:\n\n')
1718 header = _('list of commands:\n\n')
1719
1719
1720 helplist(header)
1720 helplist(header)
1721 if name != 'shortlist':
1721 if name != 'shortlist':
1722 exts, maxlength = extensions.enabled()
1722 exts, maxlength = extensions.enabled()
1723 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1723 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1724 if text:
1724 if text:
1725 ui.write("\n%s\n" % minirst.format(text, textwidth))
1725 ui.write("\n%s\n" % minirst.format(text, textwidth))
1726
1726
1727 # list all option lists
1727 # list all option lists
1728 opt_output = []
1728 opt_output = []
1729 for title, options in option_lists:
1729 for title, options in option_lists:
1730 opt_output.append(("\n%s" % title, None))
1730 opt_output.append(("\n%s" % title, None))
1731 for shortopt, longopt, default, desc in options:
1731 for shortopt, longopt, default, desc in options:
1732 if _("DEPRECATED") in desc and not ui.verbose:
1732 if _("DEPRECATED") in desc and not ui.verbose:
1733 continue
1733 continue
1734 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1734 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1735 longopt and " --%s" % longopt),
1735 longopt and " --%s" % longopt),
1736 "%s%s" % (desc,
1736 "%s%s" % (desc,
1737 default
1737 default
1738 and _(" (default: %s)") % default
1738 and _(" (default: %s)") % default
1739 or "")))
1739 or "")))
1740
1740
1741 if not name:
1741 if not name:
1742 ui.write(_("\nadditional help topics:\n\n"))
1742 ui.write(_("\nadditional help topics:\n\n"))
1743 topics = []
1743 topics = []
1744 for names, header, doc in help.helptable:
1744 for names, header, doc in help.helptable:
1745 topics.append((sorted(names, key=len, reverse=True)[0], header))
1745 topics.append((sorted(names, key=len, reverse=True)[0], header))
1746 topics_len = max([len(s[0]) for s in topics])
1746 topics_len = max([len(s[0]) for s in topics])
1747 for t, desc in topics:
1747 for t, desc in topics:
1748 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1748 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1749
1749
1750 if opt_output:
1750 if opt_output:
1751 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1751 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1752 for first, second in opt_output:
1752 for first, second in opt_output:
1753 if second:
1753 if second:
1754 second = util.wrap(second, opts_len + 3)
1754 second = util.wrap(second, opts_len + 3)
1755 ui.write(" %-*s %s\n" % (opts_len, first, second))
1755 ui.write(" %-*s %s\n" % (opts_len, first, second))
1756 else:
1756 else:
1757 ui.write("%s\n" % first)
1757 ui.write("%s\n" % first)
1758
1758
1759 def identify(ui, repo, source=None,
1759 def identify(ui, repo, source=None,
1760 rev=None, num=None, id=None, branch=None, tags=None):
1760 rev=None, num=None, id=None, branch=None, tags=None):
1761 """identify the working copy or specified revision
1761 """identify the working copy or specified revision
1762
1762
1763 With no revision, print a summary of the current state of the
1763 With no revision, print a summary of the current state of the
1764 repository.
1764 repository.
1765
1765
1766 Specifying a path to a repository root or Mercurial bundle will
1766 Specifying a path to a repository root or Mercurial bundle will
1767 cause lookup to operate on that repository/bundle.
1767 cause lookup to operate on that repository/bundle.
1768
1768
1769 This summary identifies the repository state using one or two
1769 This summary identifies the repository state using one or two
1770 parent hash identifiers, followed by a "+" if there are
1770 parent hash identifiers, followed by a "+" if there are
1771 uncommitted changes in the working directory, a list of tags for
1771 uncommitted changes in the working directory, a list of tags for
1772 this revision and a branch name for non-default branches.
1772 this revision and a branch name for non-default branches.
1773 """
1773 """
1774
1774
1775 if not repo and not source:
1775 if not repo and not source:
1776 raise util.Abort(_("There is no Mercurial repository here "
1776 raise util.Abort(_("There is no Mercurial repository here "
1777 "(.hg not found)"))
1777 "(.hg not found)"))
1778
1778
1779 hexfunc = ui.debugflag and hex or short
1779 hexfunc = ui.debugflag and hex or short
1780 default = not (num or id or branch or tags)
1780 default = not (num or id or branch or tags)
1781 output = []
1781 output = []
1782
1782
1783 revs = []
1783 revs = []
1784 if source:
1784 if source:
1785 source, branches = hg.parseurl(ui.expandpath(source))
1785 source, branches = hg.parseurl(ui.expandpath(source))
1786 repo = hg.repository(ui, source)
1786 repo = hg.repository(ui, source)
1787 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1787 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1788
1788
1789 if not repo.local():
1789 if not repo.local():
1790 if not rev and revs:
1790 if not rev and revs:
1791 rev = revs[0]
1791 rev = revs[0]
1792 if not rev:
1792 if not rev:
1793 rev = "tip"
1793 rev = "tip"
1794 if num or branch or tags:
1794 if num or branch or tags:
1795 raise util.Abort(
1795 raise util.Abort(
1796 "can't query remote revision number, branch, or tags")
1796 "can't query remote revision number, branch, or tags")
1797 output = [hexfunc(repo.lookup(rev))]
1797 output = [hexfunc(repo.lookup(rev))]
1798 elif not rev:
1798 elif not rev:
1799 ctx = repo[None]
1799 ctx = repo[None]
1800 parents = ctx.parents()
1800 parents = ctx.parents()
1801 changed = False
1801 changed = False
1802 if default or id or num:
1802 if default or id or num:
1803 changed = util.any(repo.status())
1803 changed = util.any(repo.status())
1804 if default or id:
1804 if default or id:
1805 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1805 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1806 (changed) and "+" or "")]
1806 (changed) and "+" or "")]
1807 if num:
1807 if num:
1808 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1808 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1809 (changed) and "+" or ""))
1809 (changed) and "+" or ""))
1810 else:
1810 else:
1811 ctx = repo[rev]
1811 ctx = repo[rev]
1812 if default or id:
1812 if default or id:
1813 output = [hexfunc(ctx.node())]
1813 output = [hexfunc(ctx.node())]
1814 if num:
1814 if num:
1815 output.append(str(ctx.rev()))
1815 output.append(str(ctx.rev()))
1816
1816
1817 if repo.local() and default and not ui.quiet:
1817 if repo.local() and default and not ui.quiet:
1818 b = encoding.tolocal(ctx.branch())
1818 b = encoding.tolocal(ctx.branch())
1819 if b != 'default':
1819 if b != 'default':
1820 output.append("(%s)" % b)
1820 output.append("(%s)" % b)
1821
1821
1822 # multiple tags for a single parent separated by '/'
1822 # multiple tags for a single parent separated by '/'
1823 t = "/".join(ctx.tags())
1823 t = "/".join(ctx.tags())
1824 if t:
1824 if t:
1825 output.append(t)
1825 output.append(t)
1826
1826
1827 if branch:
1827 if branch:
1828 output.append(encoding.tolocal(ctx.branch()))
1828 output.append(encoding.tolocal(ctx.branch()))
1829
1829
1830 if tags:
1830 if tags:
1831 output.extend(ctx.tags())
1831 output.extend(ctx.tags())
1832
1832
1833 ui.write("%s\n" % ' '.join(output))
1833 ui.write("%s\n" % ' '.join(output))
1834
1834
1835 def import_(ui, repo, patch1, *patches, **opts):
1835 def import_(ui, repo, patch1, *patches, **opts):
1836 """import an ordered set of patches
1836 """import an ordered set of patches
1837
1837
1838 Import a list of patches and commit them individually (unless
1838 Import a list of patches and commit them individually (unless
1839 --no-commit is specified).
1839 --no-commit is specified).
1840
1840
1841 If there are outstanding changes in the working directory, import
1841 If there are outstanding changes in the working directory, import
1842 will abort unless given the -f/--force flag.
1842 will abort unless given the -f/--force flag.
1843
1843
1844 You can import a patch straight from a mail message. Even patches
1844 You can import a patch straight from a mail message. Even patches
1845 as attachments work (to use the body part, it must have type
1845 as attachments work (to use the body part, it must have type
1846 text/plain or text/x-patch). The From and Subject headers of the email
1846 text/plain or text/x-patch). The From and Subject headers of the email
1847 message are used as the default committer and commit message. All
1847 message are used as the default committer and commit message. All
1848 text/plain body parts before the first diff are added to the commit
1848 text/plain body parts before the first diff are added to the commit
1849 message.
1849 message.
1850
1850
1851 If the imported patch was generated by hg export, user and
1851 If the imported patch was generated by hg export, user and
1852 description from patch override values from message headers and
1852 description from patch override values from message headers and
1853 body. Values given on command line with -m/--message and -u/--user
1853 body. Values given on command line with -m/--message and -u/--user
1854 override these.
1854 override these.
1855
1855
1856 If --exact is specified, import will set the working directory to
1856 If --exact is specified, import will set the working directory to
1857 the parent of each patch before applying it, and will abort if the
1857 the parent of each patch before applying it, and will abort if the
1858 resulting changeset has a different ID than the one recorded in
1858 resulting changeset has a different ID than the one recorded in
1859 the patch. This may happen due to character set problems or other
1859 the patch. This may happen due to character set problems or other
1860 deficiencies in the text patch format.
1860 deficiencies in the text patch format.
1861
1861
1862 With -s/--similarity, hg will attempt to discover renames and
1862 With -s/--similarity, hg will attempt to discover renames and
1863 copies in the patch in the same way as 'addremove'.
1863 copies in the patch in the same way as 'addremove'.
1864
1864
1865 To read a patch from standard input, use "-" as the patch name. If
1865 To read a patch from standard input, use "-" as the patch name. If
1866 a URL is specified, the patch will be downloaded from it.
1866 a URL is specified, the patch will be downloaded from it.
1867 See 'hg help dates' for a list of formats valid for -d/--date.
1867 See 'hg help dates' for a list of formats valid for -d/--date.
1868 """
1868 """
1869 patches = (patch1,) + patches
1869 patches = (patch1,) + patches
1870
1870
1871 date = opts.get('date')
1871 date = opts.get('date')
1872 if date:
1872 if date:
1873 opts['date'] = util.parsedate(date)
1873 opts['date'] = util.parsedate(date)
1874
1874
1875 try:
1875 try:
1876 sim = float(opts.get('similarity') or 0)
1876 sim = float(opts.get('similarity') or 0)
1877 except ValueError:
1877 except ValueError:
1878 raise util.Abort(_('similarity must be a number'))
1878 raise util.Abort(_('similarity must be a number'))
1879 if sim < 0 or sim > 100:
1879 if sim < 0 or sim > 100:
1880 raise util.Abort(_('similarity must be between 0 and 100'))
1880 raise util.Abort(_('similarity must be between 0 and 100'))
1881
1881
1882 if opts.get('exact') or not opts.get('force'):
1882 if opts.get('exact') or not opts.get('force'):
1883 cmdutil.bail_if_changed(repo)
1883 cmdutil.bail_if_changed(repo)
1884
1884
1885 d = opts["base"]
1885 d = opts["base"]
1886 strip = opts["strip"]
1886 strip = opts["strip"]
1887 wlock = lock = None
1887 wlock = lock = None
1888
1888
1889 def tryone(ui, hunk):
1889 def tryone(ui, hunk):
1890 tmpname, message, user, date, branch, nodeid, p1, p2 = \
1890 tmpname, message, user, date, branch, nodeid, p1, p2 = \
1891 patch.extract(ui, hunk)
1891 patch.extract(ui, hunk)
1892
1892
1893 if not tmpname:
1893 if not tmpname:
1894 return None
1894 return None
1895 commitid = _('to working directory')
1895 commitid = _('to working directory')
1896
1896
1897 try:
1897 try:
1898 cmdline_message = cmdutil.logmessage(opts)
1898 cmdline_message = cmdutil.logmessage(opts)
1899 if cmdline_message:
1899 if cmdline_message:
1900 # pickup the cmdline msg
1900 # pickup the cmdline msg
1901 message = cmdline_message
1901 message = cmdline_message
1902 elif message:
1902 elif message:
1903 # pickup the patch msg
1903 # pickup the patch msg
1904 message = message.strip()
1904 message = message.strip()
1905 else:
1905 else:
1906 # launch the editor
1906 # launch the editor
1907 message = None
1907 message = None
1908 ui.debug('message:\n%s\n' % message)
1908 ui.debug('message:\n%s\n' % message)
1909
1909
1910 wp = repo.parents()
1910 wp = repo.parents()
1911 if opts.get('exact'):
1911 if opts.get('exact'):
1912 if not nodeid or not p1:
1912 if not nodeid or not p1:
1913 raise util.Abort(_('not a Mercurial patch'))
1913 raise util.Abort(_('not a Mercurial patch'))
1914 p1 = repo.lookup(p1)
1914 p1 = repo.lookup(p1)
1915 p2 = repo.lookup(p2 or hex(nullid))
1915 p2 = repo.lookup(p2 or hex(nullid))
1916
1916
1917 if p1 != wp[0].node():
1917 if p1 != wp[0].node():
1918 hg.clean(repo, p1)
1918 hg.clean(repo, p1)
1919 repo.dirstate.setparents(p1, p2)
1919 repo.dirstate.setparents(p1, p2)
1920 elif p2:
1920 elif p2:
1921 try:
1921 try:
1922 p1 = repo.lookup(p1)
1922 p1 = repo.lookup(p1)
1923 p2 = repo.lookup(p2)
1923 p2 = repo.lookup(p2)
1924 if p1 == wp[0].node():
1924 if p1 == wp[0].node():
1925 repo.dirstate.setparents(p1, p2)
1925 repo.dirstate.setparents(p1, p2)
1926 except error.RepoError:
1926 except error.RepoError:
1927 pass
1927 pass
1928 if opts.get('exact') or opts.get('import_branch'):
1928 if opts.get('exact') or opts.get('import_branch'):
1929 repo.dirstate.setbranch(branch or 'default')
1929 repo.dirstate.setbranch(branch or 'default')
1930
1930
1931 files = {}
1931 files = {}
1932 try:
1932 try:
1933 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1933 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1934 files=files, eolmode=None)
1934 files=files, eolmode=None)
1935 finally:
1935 finally:
1936 files = patch.updatedir(ui, repo, files,
1936 files = patch.updatedir(ui, repo, files,
1937 similarity=sim / 100.0)
1937 similarity=sim / 100.0)
1938 if not opts.get('no_commit'):
1938 if not opts.get('no_commit'):
1939 if opts.get('exact'):
1939 if opts.get('exact'):
1940 m = None
1940 m = None
1941 else:
1941 else:
1942 m = cmdutil.matchfiles(repo, files or [])
1942 m = cmdutil.matchfiles(repo, files or [])
1943 n = repo.commit(message, opts.get('user') or user,
1943 n = repo.commit(message, opts.get('user') or user,
1944 opts.get('date') or date, match=m,
1944 opts.get('date') or date, match=m,
1945 editor=cmdutil.commiteditor)
1945 editor=cmdutil.commiteditor)
1946 if opts.get('exact'):
1946 if opts.get('exact'):
1947 if hex(n) != nodeid:
1947 if hex(n) != nodeid:
1948 repo.rollback()
1948 repo.rollback()
1949 raise util.Abort(_('patch is damaged'
1949 raise util.Abort(_('patch is damaged'
1950 ' or loses information'))
1950 ' or loses information'))
1951 # Force a dirstate write so that the next transaction
1951 # Force a dirstate write so that the next transaction
1952 # backs up an up-to-date file.
1952 # backs up an up-to-date file.
1953 repo.dirstate.write()
1953 repo.dirstate.write()
1954 if n:
1954 if n:
1955 commitid = short(n)
1955 commitid = short(n)
1956
1956
1957 return commitid
1957 return commitid
1958 finally:
1958 finally:
1959 os.unlink(tmpname)
1959 os.unlink(tmpname)
1960
1960
1961 try:
1961 try:
1962 wlock = repo.wlock()
1962 wlock = repo.wlock()
1963 lock = repo.lock()
1963 lock = repo.lock()
1964 lastcommit = None
1964 lastcommit = None
1965 for p in patches:
1965 for p in patches:
1966 pf = os.path.join(d, p)
1966 pf = os.path.join(d, p)
1967
1967
1968 if pf == '-':
1968 if pf == '-':
1969 ui.status(_("applying patch from stdin\n"))
1969 ui.status(_("applying patch from stdin\n"))
1970 pf = sys.stdin
1970 pf = sys.stdin
1971 else:
1971 else:
1972 ui.status(_("applying %s\n") % p)
1972 ui.status(_("applying %s\n") % p)
1973 pf = url.open(ui, pf)
1973 pf = url.open(ui, pf)
1974
1974
1975 haspatch = False
1975 haspatch = False
1976 for hunk in patch.split(pf):
1976 for hunk in patch.split(pf):
1977 commitid = tryone(ui, hunk)
1977 commitid = tryone(ui, hunk)
1978 if commitid:
1978 if commitid:
1979 haspatch = True
1979 haspatch = True
1980 if lastcommit:
1980 if lastcommit:
1981 ui.status(_('applied %s\n') % lastcommit)
1981 ui.status(_('applied %s\n') % lastcommit)
1982 lastcommit = commitid
1982 lastcommit = commitid
1983
1983
1984 if not haspatch:
1984 if not haspatch:
1985 raise util.Abort(_('no diffs found'))
1985 raise util.Abort(_('no diffs found'))
1986
1986
1987 finally:
1987 finally:
1988 release(lock, wlock)
1988 release(lock, wlock)
1989
1989
1990 def incoming(ui, repo, source="default", **opts):
1990 def incoming(ui, repo, source="default", **opts):
1991 """show new changesets found in source
1991 """show new changesets found in source
1992
1992
1993 Show new changesets found in the specified path/URL or the default
1993 Show new changesets found in the specified path/URL or the default
1994 pull location. These are the changesets that would have been pulled
1994 pull location. These are the changesets that would have been pulled
1995 if a pull was requested at the time you issued this command.
1995 if a pull was requested at the time you issued this command.
1996
1996
1997 For a remote repository, using --bundle avoids downloading the
1997 For a remote repository, using --bundle avoids downloading the
1998 changesets twice if the incoming is followed by a pull.
1998 changesets twice if the incoming is followed by a pull.
1999
1999
2000 See pull for valid source format details.
2000 See pull for valid source format details.
2001 """
2001 """
2002 limit = cmdutil.loglimit(opts)
2002 limit = cmdutil.loglimit(opts)
2003 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2003 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2004 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2004 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2005 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2005 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2006 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2006 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2007 if revs:
2007 if revs:
2008 revs = [other.lookup(rev) for rev in revs]
2008 revs = [other.lookup(rev) for rev in revs]
2009 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
2009 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
2010 force=opts["force"])
2010 force=opts["force"])
2011 if not incoming:
2011 if not incoming:
2012 try:
2012 try:
2013 os.unlink(opts["bundle"])
2013 os.unlink(opts["bundle"])
2014 except:
2014 except:
2015 pass
2015 pass
2016 ui.status(_("no changes found\n"))
2016 ui.status(_("no changes found\n"))
2017 return 1
2017 return 1
2018
2018
2019 cleanup = None
2019 cleanup = None
2020 try:
2020 try:
2021 fname = opts["bundle"]
2021 fname = opts["bundle"]
2022 if fname or not other.local():
2022 if fname or not other.local():
2023 # create a bundle (uncompressed if other repo is not local)
2023 # create a bundle (uncompressed if other repo is not local)
2024
2024
2025 if revs is None and other.capable('changegroupsubset'):
2025 if revs is None and other.capable('changegroupsubset'):
2026 revs = rheads
2026 revs = rheads
2027
2027
2028 if revs is None:
2028 if revs is None:
2029 cg = other.changegroup(incoming, "incoming")
2029 cg = other.changegroup(incoming, "incoming")
2030 else:
2030 else:
2031 cg = other.changegroupsubset(incoming, revs, 'incoming')
2031 cg = other.changegroupsubset(incoming, revs, 'incoming')
2032 bundletype = other.local() and "HG10BZ" or "HG10UN"
2032 bundletype = other.local() and "HG10BZ" or "HG10UN"
2033 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
2033 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
2034 # keep written bundle?
2034 # keep written bundle?
2035 if opts["bundle"]:
2035 if opts["bundle"]:
2036 cleanup = None
2036 cleanup = None
2037 if not other.local():
2037 if not other.local():
2038 # use the created uncompressed bundlerepo
2038 # use the created uncompressed bundlerepo
2039 other = bundlerepo.bundlerepository(ui, repo.root, fname)
2039 other = bundlerepo.bundlerepository(ui, repo.root, fname)
2040
2040
2041 o = other.changelog.nodesbetween(incoming, revs)[0]
2041 o = other.changelog.nodesbetween(incoming, revs)[0]
2042 if opts.get('newest_first'):
2042 if opts.get('newest_first'):
2043 o.reverse()
2043 o.reverse()
2044 displayer = cmdutil.show_changeset(ui, other, opts)
2044 displayer = cmdutil.show_changeset(ui, other, opts)
2045 count = 0
2045 count = 0
2046 for n in o:
2046 for n in o:
2047 if limit is not None and count >= limit:
2047 if limit is not None and count >= limit:
2048 break
2048 break
2049 parents = [p for p in other.changelog.parents(n) if p != nullid]
2049 parents = [p for p in other.changelog.parents(n) if p != nullid]
2050 if opts.get('no_merges') and len(parents) == 2:
2050 if opts.get('no_merges') and len(parents) == 2:
2051 continue
2051 continue
2052 count += 1
2052 count += 1
2053 displayer.show(other[n])
2053 displayer.show(other[n])
2054 displayer.close()
2054 displayer.close()
2055 finally:
2055 finally:
2056 if hasattr(other, 'close'):
2056 if hasattr(other, 'close'):
2057 other.close()
2057 other.close()
2058 if cleanup:
2058 if cleanup:
2059 os.unlink(cleanup)
2059 os.unlink(cleanup)
2060
2060
2061 def init(ui, dest=".", **opts):
2061 def init(ui, dest=".", **opts):
2062 """create a new repository in the given directory
2062 """create a new repository in the given directory
2063
2063
2064 Initialize a new repository in the given directory. If the given
2064 Initialize a new repository in the given directory. If the given
2065 directory does not exist, it will be created.
2065 directory does not exist, it will be created.
2066
2066
2067 If no directory is given, the current directory is used.
2067 If no directory is given, the current directory is used.
2068
2068
2069 It is possible to specify an ``ssh://`` URL as the destination.
2069 It is possible to specify an ``ssh://`` URL as the destination.
2070 See 'hg help urls' for more information.
2070 See 'hg help urls' for more information.
2071 """
2071 """
2072 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2072 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2073
2073
2074 def locate(ui, repo, *pats, **opts):
2074 def locate(ui, repo, *pats, **opts):
2075 """locate files matching specific patterns
2075 """locate files matching specific patterns
2076
2076
2077 Print files under Mercurial control in the working directory whose
2077 Print files under Mercurial control in the working directory whose
2078 names match the given patterns.
2078 names match the given patterns.
2079
2079
2080 By default, this command searches all directories in the working
2080 By default, this command searches all directories in the working
2081 directory. To search just the current directory and its
2081 directory. To search just the current directory and its
2082 subdirectories, use "--include .".
2082 subdirectories, use "--include .".
2083
2083
2084 If no patterns are given to match, this command prints the names
2084 If no patterns are given to match, this command prints the names
2085 of all files under Mercurial control in the working directory.
2085 of all files under Mercurial control in the working directory.
2086
2086
2087 If you want to feed the output of this command into the "xargs"
2087 If you want to feed the output of this command into the "xargs"
2088 command, use the -0 option to both this command and "xargs". This
2088 command, use the -0 option to both this command and "xargs". This
2089 will avoid the problem of "xargs" treating single filenames that
2089 will avoid the problem of "xargs" treating single filenames that
2090 contain whitespace as multiple filenames.
2090 contain whitespace as multiple filenames.
2091 """
2091 """
2092 end = opts.get('print0') and '\0' or '\n'
2092 end = opts.get('print0') and '\0' or '\n'
2093 rev = opts.get('rev') or None
2093 rev = opts.get('rev') or None
2094
2094
2095 ret = 1
2095 ret = 1
2096 m = cmdutil.match(repo, pats, opts, default='relglob')
2096 m = cmdutil.match(repo, pats, opts, default='relglob')
2097 m.bad = lambda x, y: False
2097 m.bad = lambda x, y: False
2098 for abs in repo[rev].walk(m):
2098 for abs in repo[rev].walk(m):
2099 if not rev and abs not in repo.dirstate:
2099 if not rev and abs not in repo.dirstate:
2100 continue
2100 continue
2101 if opts.get('fullpath'):
2101 if opts.get('fullpath'):
2102 ui.write(repo.wjoin(abs), end)
2102 ui.write(repo.wjoin(abs), end)
2103 else:
2103 else:
2104 ui.write(((pats and m.rel(abs)) or abs), end)
2104 ui.write(((pats and m.rel(abs)) or abs), end)
2105 ret = 0
2105 ret = 0
2106
2106
2107 return ret
2107 return ret
2108
2108
2109 def log(ui, repo, *pats, **opts):
2109 def log(ui, repo, *pats, **opts):
2110 """show revision history of entire repository or files
2110 """show revision history of entire repository or files
2111
2111
2112 Print the revision history of the specified files or the entire
2112 Print the revision history of the specified files or the entire
2113 project.
2113 project.
2114
2114
2115 File history is shown without following rename or copy history of
2115 File history is shown without following rename or copy history of
2116 files. Use -f/--follow with a filename to follow history across
2116 files. Use -f/--follow with a filename to follow history across
2117 renames and copies. --follow without a filename will only show
2117 renames and copies. --follow without a filename will only show
2118 ancestors or descendants of the starting revision. --follow-first
2118 ancestors or descendants of the starting revision. --follow-first
2119 only follows the first parent of merge revisions.
2119 only follows the first parent of merge revisions.
2120
2120
2121 If no revision range is specified, the default is tip:0 unless
2121 If no revision range is specified, the default is tip:0 unless
2122 --follow is set, in which case the working directory parent is
2122 --follow is set, in which case the working directory parent is
2123 used as the starting revision.
2123 used as the starting revision.
2124
2124
2125 See 'hg help dates' for a list of formats valid for -d/--date.
2125 See 'hg help dates' for a list of formats valid for -d/--date.
2126
2126
2127 By default this command prints revision number and changeset id,
2127 By default this command prints revision number and changeset id,
2128 tags, non-trivial parents, user, date and time, and a summary for
2128 tags, non-trivial parents, user, date and time, and a summary for
2129 each commit. When the -v/--verbose switch is used, the list of
2129 each commit. When the -v/--verbose switch is used, the list of
2130 changed files and full commit message are shown.
2130 changed files and full commit message are shown.
2131
2131
2132 NOTE: log -p/--patch may generate unexpected diff output for merge
2132 NOTE: log -p/--patch may generate unexpected diff output for merge
2133 changesets, as it will only compare the merge changeset against
2133 changesets, as it will only compare the merge changeset against
2134 its first parent. Also, only files different from BOTH parents
2134 its first parent. Also, only files different from BOTH parents
2135 will appear in files:.
2135 will appear in files:.
2136 """
2136 """
2137
2137
2138 matchfn = cmdutil.match(repo, pats, opts)
2138 matchfn = cmdutil.match(repo, pats, opts)
2139 limit = cmdutil.loglimit(opts)
2139 limit = cmdutil.loglimit(opts)
2140 count = 0
2140 count = 0
2141
2141
2142 endrev = None
2142 endrev = None
2143 if opts.get('copies') and opts.get('rev'):
2143 if opts.get('copies') and opts.get('rev'):
2144 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2144 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2145
2145
2146 df = False
2146 df = False
2147 if opts["date"]:
2147 if opts["date"]:
2148 df = util.matchdate(opts["date"])
2148 df = util.matchdate(opts["date"])
2149
2149
2150 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2150 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2151 def prep(ctx, fns):
2151 def prep(ctx, fns):
2152 rev = ctx.rev()
2152 rev = ctx.rev()
2153 parents = [p for p in repo.changelog.parentrevs(rev)
2153 parents = [p for p in repo.changelog.parentrevs(rev)
2154 if p != nullrev]
2154 if p != nullrev]
2155 if opts.get('no_merges') and len(parents) == 2:
2155 if opts.get('no_merges') and len(parents) == 2:
2156 return
2156 return
2157 if opts.get('only_merges') and len(parents) != 2:
2157 if opts.get('only_merges') and len(parents) != 2:
2158 return
2158 return
2159 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2159 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2160 return
2160 return
2161 if df and not df(ctx.date()[0]):
2161 if df and not df(ctx.date()[0]):
2162 return
2162 return
2163 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2163 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2164 return
2164 return
2165 if opts.get('keyword'):
2165 if opts.get('keyword'):
2166 for k in [kw.lower() for kw in opts['keyword']]:
2166 for k in [kw.lower() for kw in opts['keyword']]:
2167 if (k in ctx.user().lower() or
2167 if (k in ctx.user().lower() or
2168 k in ctx.description().lower() or
2168 k in ctx.description().lower() or
2169 k in " ".join(ctx.files()).lower()):
2169 k in " ".join(ctx.files()).lower()):
2170 break
2170 break
2171 else:
2171 else:
2172 return
2172 return
2173
2173
2174 copies = None
2174 copies = None
2175 if opts.get('copies') and rev:
2175 if opts.get('copies') and rev:
2176 copies = []
2176 copies = []
2177 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2177 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2178 for fn in ctx.files():
2178 for fn in ctx.files():
2179 rename = getrenamed(fn, rev)
2179 rename = getrenamed(fn, rev)
2180 if rename:
2180 if rename:
2181 copies.append((fn, rename[0]))
2181 copies.append((fn, rename[0]))
2182
2182
2183 displayer.show(ctx, copies=copies)
2183 displayer.show(ctx, copies=copies)
2184
2184
2185 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2185 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2186 if count == limit:
2186 if count == limit:
2187 break
2187 break
2188 if displayer.flush(ctx.rev()):
2188 if displayer.flush(ctx.rev()):
2189 count += 1
2189 count += 1
2190 displayer.close()
2190 displayer.close()
2191
2191
2192 def manifest(ui, repo, node=None, rev=None):
2192 def manifest(ui, repo, node=None, rev=None):
2193 """output the current or given revision of the project manifest
2193 """output the current or given revision of the project manifest
2194
2194
2195 Print a list of version controlled files for the given revision.
2195 Print a list of version controlled files for the given revision.
2196 If no revision is given, the first parent of the working directory
2196 If no revision is given, the first parent of the working directory
2197 is used, or the null revision if no revision is checked out.
2197 is used, or the null revision if no revision is checked out.
2198
2198
2199 With -v, print file permissions, symlink and executable bits.
2199 With -v, print file permissions, symlink and executable bits.
2200 With --debug, print file revision hashes.
2200 With --debug, print file revision hashes.
2201 """
2201 """
2202
2202
2203 if rev and node:
2203 if rev and node:
2204 raise util.Abort(_("please specify just one revision"))
2204 raise util.Abort(_("please specify just one revision"))
2205
2205
2206 if not node:
2206 if not node:
2207 node = rev
2207 node = rev
2208
2208
2209 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2209 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2210 ctx = repo[node]
2210 ctx = repo[node]
2211 for f in ctx:
2211 for f in ctx:
2212 if ui.debugflag:
2212 if ui.debugflag:
2213 ui.write("%40s " % hex(ctx.manifest()[f]))
2213 ui.write("%40s " % hex(ctx.manifest()[f]))
2214 if ui.verbose:
2214 if ui.verbose:
2215 ui.write(decor[ctx.flags(f)])
2215 ui.write(decor[ctx.flags(f)])
2216 ui.write("%s\n" % f)
2216 ui.write("%s\n" % f)
2217
2217
2218 def merge(ui, repo, node=None, **opts):
2218 def merge(ui, repo, node=None, **opts):
2219 """merge working directory with another revision
2219 """merge working directory with another revision
2220
2220
2221 The current working directory is updated with all changes made in
2221 The current working directory is updated with all changes made in
2222 the requested revision since the last common predecessor revision.
2222 the requested revision since the last common predecessor revision.
2223
2223
2224 Files that changed between either parent are marked as changed for
2224 Files that changed between either parent are marked as changed for
2225 the next commit and a commit must be performed before any further
2225 the next commit and a commit must be performed before any further
2226 updates to the repository are allowed. The next commit will have
2226 updates to the repository are allowed. The next commit will have
2227 two parents.
2227 two parents.
2228
2228
2229 If no revision is specified, the working directory's parent is a
2229 If no revision is specified, the working directory's parent is a
2230 head revision, and the current branch contains exactly one other
2230 head revision, and the current branch contains exactly one other
2231 head, the other head is merged with by default. Otherwise, an
2231 head, the other head is merged with by default. Otherwise, an
2232 explicit revision with which to merge must be provided.
2232 explicit revision with which to merge must be provided.
2233 """
2233 """
2234
2234
2235 if opts.get('rev') and node:
2235 if opts.get('rev') and node:
2236 raise util.Abort(_("please specify just one revision"))
2236 raise util.Abort(_("please specify just one revision"))
2237 if not node:
2237 if not node:
2238 node = opts.get('rev')
2238 node = opts.get('rev')
2239
2239
2240 if not node:
2240 if not node:
2241 branch = repo.changectx(None).branch()
2241 branch = repo.changectx(None).branch()
2242 bheads = repo.branchheads(branch)
2242 bheads = repo.branchheads(branch)
2243 if len(bheads) > 2:
2243 if len(bheads) > 2:
2244 ui.warn(_("abort: branch '%s' has %d heads - "
2244 ui.warn(_("abort: branch '%s' has %d heads - "
2245 "please merge with an explicit rev\n")
2245 "please merge with an explicit rev\n")
2246 % (branch, len(bheads)))
2246 % (branch, len(bheads)))
2247 ui.status(_("(run 'hg heads .' to see heads)\n"))
2247 ui.status(_("(run 'hg heads .' to see heads)\n"))
2248 return False
2248 return False
2249
2249
2250 parent = repo.dirstate.parents()[0]
2250 parent = repo.dirstate.parents()[0]
2251 if len(bheads) == 1:
2251 if len(bheads) == 1:
2252 if len(repo.heads()) > 1:
2252 if len(repo.heads()) > 1:
2253 ui.warn(_("abort: branch '%s' has one head - "
2253 ui.warn(_("abort: branch '%s' has one head - "
2254 "please merge with an explicit rev\n" % branch))
2254 "please merge with an explicit rev\n" % branch))
2255 ui.status(_("(run 'hg heads' to see all heads)\n"))
2255 ui.status(_("(run 'hg heads' to see all heads)\n"))
2256 return False
2256 return False
2257 msg = _('there is nothing to merge')
2257 msg = _('there is nothing to merge')
2258 if parent != repo.lookup(repo[None].branch()):
2258 if parent != repo.lookup(repo[None].branch()):
2259 msg = _('%s - use "hg update" instead') % msg
2259 msg = _('%s - use "hg update" instead') % msg
2260 raise util.Abort(msg)
2260 raise util.Abort(msg)
2261
2261
2262 if parent not in bheads:
2262 if parent not in bheads:
2263 raise util.Abort(_('working dir not at a head rev - '
2263 raise util.Abort(_('working dir not at a head rev - '
2264 'use "hg update" or merge with an explicit rev'))
2264 'use "hg update" or merge with an explicit rev'))
2265 node = parent == bheads[0] and bheads[-1] or bheads[0]
2265 node = parent == bheads[0] and bheads[-1] or bheads[0]
2266
2266
2267 if opts.get('preview'):
2267 if opts.get('preview'):
2268 # find nodes that are ancestors of p2 but not of p1
2268 # find nodes that are ancestors of p2 but not of p1
2269 p1 = repo.lookup('.')
2269 p1 = repo.lookup('.')
2270 p2 = repo.lookup(node)
2270 p2 = repo.lookup(node)
2271 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2271 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2272
2272
2273 displayer = cmdutil.show_changeset(ui, repo, opts)
2273 displayer = cmdutil.show_changeset(ui, repo, opts)
2274 for node in nodes:
2274 for node in nodes:
2275 displayer.show(repo[node])
2275 displayer.show(repo[node])
2276 displayer.close()
2276 displayer.close()
2277 return 0
2277 return 0
2278
2278
2279 return hg.merge(repo, node, force=opts.get('force'))
2279 return hg.merge(repo, node, force=opts.get('force'))
2280
2280
2281 def outgoing(ui, repo, dest=None, **opts):
2281 def outgoing(ui, repo, dest=None, **opts):
2282 """show changesets not found in the destination
2282 """show changesets not found in the destination
2283
2283
2284 Show changesets not found in the specified destination repository
2284 Show changesets not found in the specified destination repository
2285 or the default push location. These are the changesets that would
2285 or the default push location. These are the changesets that would
2286 be pushed if a push was requested.
2286 be pushed if a push was requested.
2287
2287
2288 See pull for details of valid destination formats.
2288 See pull for details of valid destination formats.
2289 """
2289 """
2290 limit = cmdutil.loglimit(opts)
2290 limit = cmdutil.loglimit(opts)
2291 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2291 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2292 dest, branches = hg.parseurl(dest, opts.get('branch'))
2292 dest, branches = hg.parseurl(dest, opts.get('branch'))
2293 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2293 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2294 if revs:
2294 if revs:
2295 revs = [repo.lookup(rev) for rev in revs]
2295 revs = [repo.lookup(rev) for rev in revs]
2296
2296
2297 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2297 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2298 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2298 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2299 o = repo.findoutgoing(other, force=opts.get('force'))
2299 o = repo.findoutgoing(other, force=opts.get('force'))
2300 if not o:
2300 if not o:
2301 ui.status(_("no changes found\n"))
2301 ui.status(_("no changes found\n"))
2302 return 1
2302 return 1
2303 o = repo.changelog.nodesbetween(o, revs)[0]
2303 o = repo.changelog.nodesbetween(o, revs)[0]
2304 if opts.get('newest_first'):
2304 if opts.get('newest_first'):
2305 o.reverse()
2305 o.reverse()
2306 displayer = cmdutil.show_changeset(ui, repo, opts)
2306 displayer = cmdutil.show_changeset(ui, repo, opts)
2307 count = 0
2307 count = 0
2308 for n in o:
2308 for n in o:
2309 if limit is not None and count >= limit:
2309 if limit is not None and count >= limit:
2310 break
2310 break
2311 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2311 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2312 if opts.get('no_merges') and len(parents) == 2:
2312 if opts.get('no_merges') and len(parents) == 2:
2313 continue
2313 continue
2314 count += 1
2314 count += 1
2315 displayer.show(repo[n])
2315 displayer.show(repo[n])
2316 displayer.close()
2316 displayer.close()
2317
2317
2318 def parents(ui, repo, file_=None, **opts):
2318 def parents(ui, repo, file_=None, **opts):
2319 """show the parents of the working directory or revision
2319 """show the parents of the working directory or revision
2320
2320
2321 Print the working directory's parent revisions. If a revision is
2321 Print the working directory's parent revisions. If a revision is
2322 given via -r/--rev, the parent of that revision will be printed.
2322 given via -r/--rev, the parent of that revision will be printed.
2323 If a file argument is given, the revision in which the file was
2323 If a file argument is given, the revision in which the file was
2324 last changed (before the working directory revision or the
2324 last changed (before the working directory revision or the
2325 argument to --rev if given) is printed.
2325 argument to --rev if given) is printed.
2326 """
2326 """
2327 rev = opts.get('rev')
2327 rev = opts.get('rev')
2328 if rev:
2328 if rev:
2329 ctx = repo[rev]
2329 ctx = repo[rev]
2330 else:
2330 else:
2331 ctx = repo[None]
2331 ctx = repo[None]
2332
2332
2333 if file_:
2333 if file_:
2334 m = cmdutil.match(repo, (file_,), opts)
2334 m = cmdutil.match(repo, (file_,), opts)
2335 if m.anypats() or len(m.files()) != 1:
2335 if m.anypats() or len(m.files()) != 1:
2336 raise util.Abort(_('can only specify an explicit filename'))
2336 raise util.Abort(_('can only specify an explicit filename'))
2337 file_ = m.files()[0]
2337 file_ = m.files()[0]
2338 filenodes = []
2338 filenodes = []
2339 for cp in ctx.parents():
2339 for cp in ctx.parents():
2340 if not cp:
2340 if not cp:
2341 continue
2341 continue
2342 try:
2342 try:
2343 filenodes.append(cp.filenode(file_))
2343 filenodes.append(cp.filenode(file_))
2344 except error.LookupError:
2344 except error.LookupError:
2345 pass
2345 pass
2346 if not filenodes:
2346 if not filenodes:
2347 raise util.Abort(_("'%s' not found in manifest!") % file_)
2347 raise util.Abort(_("'%s' not found in manifest!") % file_)
2348 fl = repo.file(file_)
2348 fl = repo.file(file_)
2349 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2349 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2350 else:
2350 else:
2351 p = [cp.node() for cp in ctx.parents()]
2351 p = [cp.node() for cp in ctx.parents()]
2352
2352
2353 displayer = cmdutil.show_changeset(ui, repo, opts)
2353 displayer = cmdutil.show_changeset(ui, repo, opts)
2354 for n in p:
2354 for n in p:
2355 if n != nullid:
2355 if n != nullid:
2356 displayer.show(repo[n])
2356 displayer.show(repo[n])
2357 displayer.close()
2357 displayer.close()
2358
2358
2359 def paths(ui, repo, search=None):
2359 def paths(ui, repo, search=None):
2360 """show aliases for remote repositories
2360 """show aliases for remote repositories
2361
2361
2362 Show definition of symbolic path name NAME. If no name is given,
2362 Show definition of symbolic path name NAME. If no name is given,
2363 show definition of all available names.
2363 show definition of all available names.
2364
2364
2365 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2365 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2366 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2366 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2367
2367
2368 See 'hg help urls' for more information.
2368 See 'hg help urls' for more information.
2369 """
2369 """
2370 if search:
2370 if search:
2371 for name, path in ui.configitems("paths"):
2371 for name, path in ui.configitems("paths"):
2372 if name == search:
2372 if name == search:
2373 ui.write("%s\n" % url.hidepassword(path))
2373 ui.write("%s\n" % url.hidepassword(path))
2374 return
2374 return
2375 ui.warn(_("not found!\n"))
2375 ui.warn(_("not found!\n"))
2376 return 1
2376 return 1
2377 else:
2377 else:
2378 for name, path in ui.configitems("paths"):
2378 for name, path in ui.configitems("paths"):
2379 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2379 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2380
2380
2381 def postincoming(ui, repo, modheads, optupdate, checkout):
2381 def postincoming(ui, repo, modheads, optupdate, checkout):
2382 if modheads == 0:
2382 if modheads == 0:
2383 return
2383 return
2384 if optupdate:
2384 if optupdate:
2385 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2385 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2386 return hg.update(repo, checkout)
2386 return hg.update(repo, checkout)
2387 else:
2387 else:
2388 ui.status(_("not updating, since new heads added\n"))
2388 ui.status(_("not updating, since new heads added\n"))
2389 if modheads > 1:
2389 if modheads > 1:
2390 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2390 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2391 else:
2391 else:
2392 ui.status(_("(run 'hg update' to get a working copy)\n"))
2392 ui.status(_("(run 'hg update' to get a working copy)\n"))
2393
2393
2394 def pull(ui, repo, source="default", **opts):
2394 def pull(ui, repo, source="default", **opts):
2395 """pull changes from the specified source
2395 """pull changes from the specified source
2396
2396
2397 Pull changes from a remote repository to a local one.
2397 Pull changes from a remote repository to a local one.
2398
2398
2399 This finds all changes from the repository at the specified path
2399 This finds all changes from the repository at the specified path
2400 or URL and adds them to a local repository (the current one unless
2400 or URL and adds them to a local repository (the current one unless
2401 -R is specified). By default, this does not update the copy of the
2401 -R is specified). By default, this does not update the copy of the
2402 project in the working directory.
2402 project in the working directory.
2403
2403
2404 Use hg incoming if you want to see what would have been added by a
2404 Use hg incoming if you want to see what would have been added by a
2405 pull at the time you issued this command. If you then decide to
2405 pull at the time you issued this command. If you then decide to
2406 add those changes to the repository, you should use pull -r X
2406 add those changes to the repository, you should use pull -r X
2407 where X is the last changeset listed by hg incoming.
2407 where X is the last changeset listed by hg incoming.
2408
2408
2409 If SOURCE is omitted, the 'default' path will be used.
2409 If SOURCE is omitted, the 'default' path will be used.
2410 See 'hg help urls' for more information.
2410 See 'hg help urls' for more information.
2411 """
2411 """
2412 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2412 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2413 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2413 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2414 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2414 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2415 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2415 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2416 if revs:
2416 if revs:
2417 try:
2417 try:
2418 revs = [other.lookup(rev) for rev in revs]
2418 revs = [other.lookup(rev) for rev in revs]
2419 except error.CapabilityError:
2419 except error.CapabilityError:
2420 err = _("Other repository doesn't support revision lookup, "
2420 err = _("Other repository doesn't support revision lookup, "
2421 "so a rev cannot be specified.")
2421 "so a rev cannot be specified.")
2422 raise util.Abort(err)
2422 raise util.Abort(err)
2423
2423
2424 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2424 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2425 if checkout:
2425 if checkout:
2426 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2426 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2427 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2427 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2428
2428
2429 def push(ui, repo, dest=None, **opts):
2429 def push(ui, repo, dest=None, **opts):
2430 """push changes to the specified destination
2430 """push changes to the specified destination
2431
2431
2432 Push changes from the local repository to the specified destination.
2432 Push changes from the local repository to the specified destination.
2433
2433
2434 This is the symmetrical operation for pull. It moves changes from
2434 This is the symmetrical operation for pull. It moves changes from
2435 the current repository to a different one. If the destination is
2435 the current repository to a different one. If the destination is
2436 local this is identical to a pull in that directory from the
2436 local this is identical to a pull in that directory from the
2437 current one.
2437 current one.
2438
2438
2439 By default, push will refuse to run if it detects the result would
2439 By default, push will refuse to run if it detects the result would
2440 increase the number of remote heads. This generally indicates the
2440 increase the number of remote heads. This generally indicates the
2441 user forgot to pull and merge before pushing.
2441 user forgot to pull and merge before pushing.
2442
2442
2443 If -r/--rev is used, the named revision and all its ancestors will
2443 If -r/--rev is used, the named revision and all its ancestors will
2444 be pushed to the remote repository.
2444 be pushed to the remote repository.
2445
2445
2446 Please see 'hg help urls' for important details about ``ssh://``
2446 Please see 'hg help urls' for important details about ``ssh://``
2447 URLs. If DESTINATION is omitted, a default path will be used.
2447 URLs. If DESTINATION is omitted, a default path will be used.
2448 """
2448 """
2449 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2449 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2450 dest, branches = hg.parseurl(dest, opts.get('branch'))
2450 dest, branches = hg.parseurl(dest, opts.get('branch'))
2451 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2451 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2452 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2452 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2453 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2453 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2454 if revs:
2454 if revs:
2455 revs = [repo.lookup(rev) for rev in revs]
2455 revs = [repo.lookup(rev) for rev in revs]
2456
2456
2457 # push subrepos depth-first for coherent ordering
2457 # push subrepos depth-first for coherent ordering
2458 c = repo['']
2458 c = repo['']
2459 subs = c.substate # only repos that are committed
2459 subs = c.substate # only repos that are committed
2460 for s in sorted(subs):
2460 for s in sorted(subs):
2461 c.sub(s).push(opts.get('force'))
2461 c.sub(s).push(opts.get('force'))
2462
2462
2463 r = repo.push(other, opts.get('force'), revs=revs)
2463 r = repo.push(other, opts.get('force'), revs=revs)
2464 return r == 0
2464 return r == 0
2465
2465
2466 def recover(ui, repo):
2466 def recover(ui, repo):
2467 """roll back an interrupted transaction
2467 """roll back an interrupted transaction
2468
2468
2469 Recover from an interrupted commit or pull.
2469 Recover from an interrupted commit or pull.
2470
2470
2471 This command tries to fix the repository status after an
2471 This command tries to fix the repository status after an
2472 interrupted operation. It should only be necessary when Mercurial
2472 interrupted operation. It should only be necessary when Mercurial
2473 suggests it.
2473 suggests it.
2474 """
2474 """
2475 if repo.recover():
2475 if repo.recover():
2476 return hg.verify(repo)
2476 return hg.verify(repo)
2477 return 1
2477 return 1
2478
2478
2479 def remove(ui, repo, *pats, **opts):
2479 def remove(ui, repo, *pats, **opts):
2480 """remove the specified files on the next commit
2480 """remove the specified files on the next commit
2481
2481
2482 Schedule the indicated files for removal from the repository.
2482 Schedule the indicated files for removal from the repository.
2483
2483
2484 This only removes files from the current branch, not from the
2484 This only removes files from the current branch, not from the
2485 entire project history. -A/--after can be used to remove only
2485 entire project history. -A/--after can be used to remove only
2486 files that have already been deleted, -f/--force can be used to
2486 files that have already been deleted, -f/--force can be used to
2487 force deletion, and -Af can be used to remove files from the next
2487 force deletion, and -Af can be used to remove files from the next
2488 revision without deleting them from the working directory.
2488 revision without deleting them from the working directory.
2489
2489
2490 The following table details the behavior of remove for different
2490 The following table details the behavior of remove for different
2491 file states (columns) and option combinations (rows). The file
2491 file states (columns) and option combinations (rows). The file
2492 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2492 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2493 reported by hg status). The actions are Warn, Remove (from branch)
2493 reported by hg status). The actions are Warn, Remove (from branch)
2494 and Delete (from disk)::
2494 and Delete (from disk)::
2495
2495
2496 A C M !
2496 A C M !
2497 none W RD W R
2497 none W RD W R
2498 -f R RD RD R
2498 -f R RD RD R
2499 -A W W W R
2499 -A W W W R
2500 -Af R R R R
2500 -Af R R R R
2501
2501
2502 This command schedules the files to be removed at the next commit.
2502 This command schedules the files to be removed at the next commit.
2503 To undo a remove before that, see hg revert.
2503 To undo a remove before that, see hg revert.
2504 """
2504 """
2505
2505
2506 after, force = opts.get('after'), opts.get('force')
2506 after, force = opts.get('after'), opts.get('force')
2507 if not pats and not after:
2507 if not pats and not after:
2508 raise util.Abort(_('no files specified'))
2508 raise util.Abort(_('no files specified'))
2509
2509
2510 m = cmdutil.match(repo, pats, opts)
2510 m = cmdutil.match(repo, pats, opts)
2511 s = repo.status(match=m, clean=True)
2511 s = repo.status(match=m, clean=True)
2512 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2512 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2513
2513
2514 for f in m.files():
2514 for f in m.files():
2515 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2515 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2516 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2516 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2517
2517
2518 def warn(files, reason):
2518 def warn(files, reason):
2519 for f in files:
2519 for f in files:
2520 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2520 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2521 % (m.rel(f), reason))
2521 % (m.rel(f), reason))
2522
2522
2523 if force:
2523 if force:
2524 remove, forget = modified + deleted + clean, added
2524 remove, forget = modified + deleted + clean, added
2525 elif after:
2525 elif after:
2526 remove, forget = deleted, []
2526 remove, forget = deleted, []
2527 warn(modified + added + clean, _('still exists'))
2527 warn(modified + added + clean, _('still exists'))
2528 else:
2528 else:
2529 remove, forget = deleted + clean, []
2529 remove, forget = deleted + clean, []
2530 warn(modified, _('is modified'))
2530 warn(modified, _('is modified'))
2531 warn(added, _('has been marked for add'))
2531 warn(added, _('has been marked for add'))
2532
2532
2533 for f in sorted(remove + forget):
2533 for f in sorted(remove + forget):
2534 if ui.verbose or not m.exact(f):
2534 if ui.verbose or not m.exact(f):
2535 ui.status(_('removing %s\n') % m.rel(f))
2535 ui.status(_('removing %s\n') % m.rel(f))
2536
2536
2537 repo.forget(forget)
2537 repo.forget(forget)
2538 repo.remove(remove, unlink=not after)
2538 repo.remove(remove, unlink=not after)
2539
2539
2540 def rename(ui, repo, *pats, **opts):
2540 def rename(ui, repo, *pats, **opts):
2541 """rename files; equivalent of copy + remove
2541 """rename files; equivalent of copy + remove
2542
2542
2543 Mark dest as copies of sources; mark sources for deletion. If dest
2543 Mark dest as copies of sources; mark sources for deletion. If dest
2544 is a directory, copies are put in that directory. If dest is a
2544 is a directory, copies are put in that directory. If dest is a
2545 file, there can only be one source.
2545 file, there can only be one source.
2546
2546
2547 By default, this command copies the contents of files as they
2547 By default, this command copies the contents of files as they
2548 exist in the working directory. If invoked with -A/--after, the
2548 exist in the working directory. If invoked with -A/--after, the
2549 operation is recorded, but no copying is performed.
2549 operation is recorded, but no copying is performed.
2550
2550
2551 This command takes effect at the next commit. To undo a rename
2551 This command takes effect at the next commit. To undo a rename
2552 before that, see hg revert.
2552 before that, see hg revert.
2553 """
2553 """
2554 wlock = repo.wlock(False)
2554 wlock = repo.wlock(False)
2555 try:
2555 try:
2556 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2556 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2557 finally:
2557 finally:
2558 wlock.release()
2558 wlock.release()
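# A small sketch of the non-blocking lock idiom used above, assuming that
# wlock(False) means "do not wait for a busy lock"; the helper name is
# illustrative only.
def _sketch_nonblocking_wlock(repo):
    from mercurial import error as errormod
    try:
        wlock = repo.wlock(False)
    except errormod.LockHeld:
        return False              # another process holds the working dir lock
    try:
        pass                      # ... mutate the working directory here ...
    finally:
        wlock.release()
    return True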
2559
2559
2560 def resolve(ui, repo, *pats, **opts):
2560 def resolve(ui, repo, *pats, **opts):
2561 """various operations to help finish a merge
2561 """various operations to help finish a merge
2562
2562
2563 This command includes several actions that are often useful while
2563 This command includes several actions that are often useful while
2564 performing a merge, after running ``merge`` but before running
2564 performing a merge, after running ``merge`` but before running
2565 ``commit``. (It is only meaningful if your working directory has
2565 ``commit``. (It is only meaningful if your working directory has
2566 two parents.) It is most relevant for merges with unresolved
2566 two parents.) It is most relevant for merges with unresolved
2567 conflicts, which are typically a result of non-interactive merging with
2567 conflicts, which are typically a result of non-interactive merging with
2568 ``internal:merge`` or a command-line merge tool like ``diff3``.
2568 ``internal:merge`` or a command-line merge tool like ``diff3``.
2569
2569
2570 The available actions are:
2570 The available actions are:
2571
2571
2572 1) list files that were merged with conflicts (U, for unresolved)
2572 1) list files that were merged with conflicts (U, for unresolved)
2573 and without conflicts (R, for resolved): ``hg resolve -l``
2573 and without conflicts (R, for resolved): ``hg resolve -l``
2574 (this is like ``status`` for merges)
2574 (this is like ``status`` for merges)
2575 2) record that you have resolved conflicts in certain files:
2575 2) record that you have resolved conflicts in certain files:
2576 ``hg resolve -m [file ...]`` (default: mark all unresolved files)
2576 ``hg resolve -m [file ...]`` (default: mark all unresolved files)
2577 3) forget that you have resolved conflicts in certain files:
2577 3) forget that you have resolved conflicts in certain files:
2578 ``hg resolve -u [file ...]`` (default: unmark all resolved files)
2578 ``hg resolve -u [file ...]`` (default: unmark all resolved files)
2579 4) discard your current attempt(s) at resolving conflicts and
2579 4) discard your current attempt(s) at resolving conflicts and
2580 restart the merge from scratch: ``hg resolve file...``
2580 restart the merge from scratch: ``hg resolve file...``
2581 (or ``-a`` for all unresolved files)
2581 (or ``-a`` for all unresolved files)
2582
2582
2583 Note that Mercurial will not let you commit files with unresolved merge
2583 Note that Mercurial will not let you commit files with unresolved merge
2584 conflicts. You must use ``hg resolve -m ...`` before you can commit
2584 conflicts. You must use ``hg resolve -m ...`` before you can commit
2585 after a conflicting merge.
2585 after a conflicting merge.
2586 """
2586 """
2587
2587
2588 all, mark, unmark, show, nostatus = \
2588 all, mark, unmark, show, nostatus = \
2589 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2589 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2590
2590
2591 if (show and (mark or unmark)) or (mark and unmark):
2591 if (show and (mark or unmark)) or (mark and unmark):
2592 raise util.Abort(_("too many options specified"))
2592 raise util.Abort(_("too many options specified"))
2593 if pats and all:
2593 if pats and all:
2594 raise util.Abort(_("can't specify --all and patterns"))
2594 raise util.Abort(_("can't specify --all and patterns"))
2595 if not (all or pats or show or mark or unmark):
2595 if not (all or pats or show or mark or unmark):
2596 raise util.Abort(_('no files or directories specified; '
2596 raise util.Abort(_('no files or directories specified; '
2597 'use --all to remerge all files'))
2597 'use --all to remerge all files'))
2598
2598
2599 ms = merge_.mergestate(repo)
2599 ms = mergemod.mergestate(repo)
2600 m = cmdutil.match(repo, pats, opts)
2600 m = cmdutil.match(repo, pats, opts)
2601
2601
2602 for f in ms:
2602 for f in ms:
2603 if m(f):
2603 if m(f):
2604 if show:
2604 if show:
2605 if nostatus:
2605 if nostatus:
2606 ui.write("%s\n" % f)
2606 ui.write("%s\n" % f)
2607 else:
2607 else:
2608 ui.write("%s %s\n" % (ms[f].upper(), f))
2608 ui.write("%s %s\n" % (ms[f].upper(), f))
2609 elif mark:
2609 elif mark:
2610 ms.mark(f, "r")
2610 ms.mark(f, "r")
2611 elif unmark:
2611 elif unmark:
2612 ms.mark(f, "u")
2612 ms.mark(f, "u")
2613 else:
2613 else:
2614 wctx = repo[None]
2614 wctx = repo[None]
2615 mctx = wctx.parents()[-1]
2615 mctx = wctx.parents()[-1]
2616
2616
2617 # backup pre-resolve (merge uses .orig for its own purposes)
2617 # backup pre-resolve (merge uses .orig for its own purposes)
2618 a = repo.wjoin(f)
2618 a = repo.wjoin(f)
2619 util.copyfile(a, a + ".resolve")
2619 util.copyfile(a, a + ".resolve")
2620
2620
2621 # resolve file
2621 # resolve file
2622 ms.resolve(f, wctx, mctx)
2622 ms.resolve(f, wctx, mctx)
2623
2623
2624 # replace filemerge's .orig file with our resolve file
2624 # replace filemerge's .orig file with our resolve file
2625 util.rename(a + ".resolve", a + ".orig")
2625 util.rename(a + ".resolve", a + ".orig")
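# A minimal sketch of action 1) above ("hg resolve -l"): listing unresolved
# files is just a walk over the same merge state this function uses.  The
# helper name is illustrative only.
def _sketch_list_unresolved(repo):
    ms = mergemod.mergestate(repo)
    # ms maps file names to 'u' (unresolved) or 'r' (resolved)
    return [f for f in ms if ms[f] == 'u']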
2626
2626
2627 def revert(ui, repo, *pats, **opts):
2627 def revert(ui, repo, *pats, **opts):
2628 """restore individual files or directories to an earlier state
2628 """restore individual files or directories to an earlier state
2629
2629
2630 (Use update -r to check out earlier revisions; revert does not
2630 (Use update -r to check out earlier revisions; revert does not
2631 change the working directory parents.)
2631 change the working directory parents.)
2632
2632
2633 With no revision specified, revert the named files or directories
2633 With no revision specified, revert the named files or directories
2634 to the contents they had in the parent of the working directory.
2634 to the contents they had in the parent of the working directory.
2635 This restores the contents of the affected files to an unmodified
2635 This restores the contents of the affected files to an unmodified
2636 state and unschedules adds, removes, copies, and renames. If the
2636 state and unschedules adds, removes, copies, and renames. If the
2637 working directory has two parents, you must explicitly specify a
2637 working directory has two parents, you must explicitly specify a
2638 revision.
2638 revision.
2639
2639
2640 Using the -r/--rev option, revert the given files or directories
2640 Using the -r/--rev option, revert the given files or directories
2641 to their contents as of a specific revision. This can be helpful
2641 to their contents as of a specific revision. This can be helpful
2642 to "roll back" some or all of an earlier change. See 'hg help
2642 to "roll back" some or all of an earlier change. See 'hg help
2643 dates' for a list of formats valid for -d/--date.
2643 dates' for a list of formats valid for -d/--date.
2644
2644
2645 Revert modifies the working directory. It does not commit any
2645 Revert modifies the working directory. It does not commit any
2646 changes, or change the parent of the working directory. If you
2646 changes, or change the parent of the working directory. If you
2647 revert to a revision other than the parent of the working
2647 revert to a revision other than the parent of the working
2648 directory, the reverted files will thus appear modified
2648 directory, the reverted files will thus appear modified
2649 afterwards.
2649 afterwards.
2650
2650
2651 If a file has been deleted, it is restored. If the executable mode
2651 If a file has been deleted, it is restored. If the executable mode
2652 of a file was changed, it is reset.
2652 of a file was changed, it is reset.
2653
2653
2654 If names are given, all files matching the names are reverted.
2654 If names are given, all files matching the names are reverted.
2655 If no arguments are given, no files are reverted.
2655 If no arguments are given, no files are reverted.
2656
2656
2657 Modified files are saved with a .orig suffix before reverting.
2657 Modified files are saved with a .orig suffix before reverting.
2658 To disable these backups, use --no-backup.
2658 To disable these backups, use --no-backup.
2659 """
2659 """
2660
2660
2661 if opts["date"]:
2661 if opts["date"]:
2662 if opts["rev"]:
2662 if opts["rev"]:
2663 raise util.Abort(_("you can't specify a revision and a date"))
2663 raise util.Abort(_("you can't specify a revision and a date"))
2664 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2664 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2665
2665
2666 if not pats and not opts.get('all'):
2666 if not pats and not opts.get('all'):
2667 raise util.Abort(_('no files or directories specified; '
2667 raise util.Abort(_('no files or directories specified; '
2668 'use --all to revert the whole repo'))
2668 'use --all to revert the whole repo'))
2669
2669
2670 parent, p2 = repo.dirstate.parents()
2670 parent, p2 = repo.dirstate.parents()
2671 if not opts.get('rev') and p2 != nullid:
2671 if not opts.get('rev') and p2 != nullid:
2672 raise util.Abort(_('uncommitted merge - please provide a '
2672 raise util.Abort(_('uncommitted merge - please provide a '
2673 'specific revision'))
2673 'specific revision'))
2674 ctx = repo[opts.get('rev')]
2674 ctx = repo[opts.get('rev')]
2675 node = ctx.node()
2675 node = ctx.node()
2676 mf = ctx.manifest()
2676 mf = ctx.manifest()
2677 if node == parent:
2677 if node == parent:
2678 pmf = mf
2678 pmf = mf
2679 else:
2679 else:
2680 pmf = None
2680 pmf = None
2681
2681
2682 # need all matching names in dirstate and manifest of target rev,
2682 # need all matching names in dirstate and manifest of target rev,
2683 # so have to walk both. do not print errors if files exist in one
2683 # so have to walk both. do not print errors if files exist in one
2684 # but not other.
2684 # but not other.
2685
2685
2686 names = {}
2686 names = {}
2687
2687
2688 wlock = repo.wlock()
2688 wlock = repo.wlock()
2689 try:
2689 try:
2690 # walk dirstate.
2690 # walk dirstate.
2691
2691
2692 m = cmdutil.match(repo, pats, opts)
2692 m = cmdutil.match(repo, pats, opts)
2693 m.bad = lambda x, y: False
2693 m.bad = lambda x, y: False
2694 for abs in repo.walk(m):
2694 for abs in repo.walk(m):
2695 names[abs] = m.rel(abs), m.exact(abs)
2695 names[abs] = m.rel(abs), m.exact(abs)
2696
2696
2697 # walk target manifest.
2697 # walk target manifest.
2698
2698
2699 def badfn(path, msg):
2699 def badfn(path, msg):
2700 if path in names:
2700 if path in names:
2701 return
2701 return
2702 path_ = path + '/'
2702 path_ = path + '/'
2703 for f in names:
2703 for f in names:
2704 if f.startswith(path_):
2704 if f.startswith(path_):
2705 return
2705 return
2706 ui.warn("%s: %s\n" % (m.rel(path), msg))
2706 ui.warn("%s: %s\n" % (m.rel(path), msg))
2707
2707
2708 m = cmdutil.match(repo, pats, opts)
2708 m = cmdutil.match(repo, pats, opts)
2709 m.bad = badfn
2709 m.bad = badfn
2710 for abs in repo[node].walk(m):
2710 for abs in repo[node].walk(m):
2711 if abs not in names:
2711 if abs not in names:
2712 names[abs] = m.rel(abs), m.exact(abs)
2712 names[abs] = m.rel(abs), m.exact(abs)
2713
2713
2714 m = cmdutil.matchfiles(repo, names)
2714 m = cmdutil.matchfiles(repo, names)
2715 changes = repo.status(match=m)[:4]
2715 changes = repo.status(match=m)[:4]
2716 modified, added, removed, deleted = map(set, changes)
2716 modified, added, removed, deleted = map(set, changes)
2717
2717
2718 # if f is a rename, also revert the source
2718 # if f is a rename, also revert the source
2719 cwd = repo.getcwd()
2719 cwd = repo.getcwd()
2720 for f in added:
2720 for f in added:
2721 src = repo.dirstate.copied(f)
2721 src = repo.dirstate.copied(f)
2722 if src and src not in names and repo.dirstate[src] == 'r':
2722 if src and src not in names and repo.dirstate[src] == 'r':
2723 removed.add(src)
2723 removed.add(src)
2724 names[src] = (repo.pathto(src, cwd), True)
2724 names[src] = (repo.pathto(src, cwd), True)
2725
2725
2726 def removeforget(abs):
2726 def removeforget(abs):
2727 if repo.dirstate[abs] == 'a':
2727 if repo.dirstate[abs] == 'a':
2728 return _('forgetting %s\n')
2728 return _('forgetting %s\n')
2729 return _('removing %s\n')
2729 return _('removing %s\n')
2730
2730
2731 revert = ([], _('reverting %s\n'))
2731 revert = ([], _('reverting %s\n'))
2732 add = ([], _('adding %s\n'))
2732 add = ([], _('adding %s\n'))
2733 remove = ([], removeforget)
2733 remove = ([], removeforget)
2734 undelete = ([], _('undeleting %s\n'))
2734 undelete = ([], _('undeleting %s\n'))
2735
2735
2736 disptable = (
2736 disptable = (
2737 # dispatch table:
2737 # dispatch table:
2738 # file state
2738 # file state
2739 # action if in target manifest
2739 # action if in target manifest
2740 # action if not in target manifest
2740 # action if not in target manifest
2741 # make backup if in target manifest
2741 # make backup if in target manifest
2742 # make backup if not in target manifest
2742 # make backup if not in target manifest
2743 (modified, revert, remove, True, True),
2743 (modified, revert, remove, True, True),
2744 (added, revert, remove, True, False),
2744 (added, revert, remove, True, False),
2745 (removed, undelete, None, False, False),
2745 (removed, undelete, None, False, False),
2746 (deleted, revert, remove, False, False),
2746 (deleted, revert, remove, False, False),
2747 )
2747 )
2748
2748
2749 for abs, (rel, exact) in sorted(names.items()):
2749 for abs, (rel, exact) in sorted(names.items()):
2750 mfentry = mf.get(abs)
2750 mfentry = mf.get(abs)
2751 target = repo.wjoin(abs)
2751 target = repo.wjoin(abs)
2752 def handle(xlist, dobackup):
2752 def handle(xlist, dobackup):
2753 xlist[0].append(abs)
2753 xlist[0].append(abs)
2754 if dobackup and not opts.get('no_backup') and util.lexists(target):
2754 if dobackup and not opts.get('no_backup') and util.lexists(target):
2755 bakname = "%s.orig" % rel
2755 bakname = "%s.orig" % rel
2756 ui.note(_('saving current version of %s as %s\n') %
2756 ui.note(_('saving current version of %s as %s\n') %
2757 (rel, bakname))
2757 (rel, bakname))
2758 if not opts.get('dry_run'):
2758 if not opts.get('dry_run'):
2759 util.copyfile(target, bakname)
2759 util.copyfile(target, bakname)
2760 if ui.verbose or not exact:
2760 if ui.verbose or not exact:
2761 msg = xlist[1]
2761 msg = xlist[1]
2762 if not isinstance(msg, basestring):
2762 if not isinstance(msg, basestring):
2763 msg = msg(abs)
2763 msg = msg(abs)
2764 ui.status(msg % rel)
2764 ui.status(msg % rel)
2765 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2765 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2766 if abs not in table:
2766 if abs not in table:
2767 continue
2767 continue
2768 # file has changed in dirstate
2768 # file has changed in dirstate
2769 if mfentry:
2769 if mfentry:
2770 handle(hitlist, backuphit)
2770 handle(hitlist, backuphit)
2771 elif misslist is not None:
2771 elif misslist is not None:
2772 handle(misslist, backupmiss)
2772 handle(misslist, backupmiss)
2773 break
2773 break
2774 else:
2774 else:
2775 if abs not in repo.dirstate:
2775 if abs not in repo.dirstate:
2776 if mfentry:
2776 if mfentry:
2777 handle(add, True)
2777 handle(add, True)
2778 elif exact:
2778 elif exact:
2779 ui.warn(_('file not managed: %s\n') % rel)
2779 ui.warn(_('file not managed: %s\n') % rel)
2780 continue
2780 continue
2781 # file has not changed in dirstate
2781 # file has not changed in dirstate
2782 if node == parent:
2782 if node == parent:
2783 if exact:
2783 if exact:
2784 ui.warn(_('no changes needed to %s\n') % rel)
2784 ui.warn(_('no changes needed to %s\n') % rel)
2785 continue
2785 continue
2786 if pmf is None:
2786 if pmf is None:
2787 # only need parent manifest in this unlikely case,
2787 # only need parent manifest in this unlikely case,
2788 # so do not read by default
2788 # so do not read by default
2789 pmf = repo[parent].manifest()
2789 pmf = repo[parent].manifest()
2790 if abs in pmf:
2790 if abs in pmf:
2791 if mfentry:
2791 if mfentry:
2792 # if version of file is same in parent and target
2792 # if version of file is same in parent and target
2793 # manifests, do nothing
2793 # manifests, do nothing
2794 if (pmf[abs] != mfentry or
2794 if (pmf[abs] != mfentry or
2795 pmf.flags(abs) != mf.flags(abs)):
2795 pmf.flags(abs) != mf.flags(abs)):
2796 handle(revert, False)
2796 handle(revert, False)
2797 else:
2797 else:
2798 handle(remove, False)
2798 handle(remove, False)
2799
2799
2800 if not opts.get('dry_run'):
2800 if not opts.get('dry_run'):
2801 def checkout(f):
2801 def checkout(f):
2802 fc = ctx[f]
2802 fc = ctx[f]
2803 repo.wwrite(f, fc.data(), fc.flags())
2803 repo.wwrite(f, fc.data(), fc.flags())
2804
2804
2805 audit_path = util.path_auditor(repo.root)
2805 audit_path = util.path_auditor(repo.root)
2806 for f in remove[0]:
2806 for f in remove[0]:
2807 if repo.dirstate[f] == 'a':
2807 if repo.dirstate[f] == 'a':
2808 repo.dirstate.forget(f)
2808 repo.dirstate.forget(f)
2809 continue
2809 continue
2810 audit_path(f)
2810 audit_path(f)
2811 try:
2811 try:
2812 util.unlink(repo.wjoin(f))
2812 util.unlink(repo.wjoin(f))
2813 except OSError:
2813 except OSError:
2814 pass
2814 pass
2815 repo.dirstate.remove(f)
2815 repo.dirstate.remove(f)
2816
2816
2817 normal = None
2817 normal = None
2818 if node == parent:
2818 if node == parent:
2819 # We're reverting to our parent. If possible, we'd like status
2819 # We're reverting to our parent. If possible, we'd like status
2820 # to report the file as clean. We have to use normallookup for
2820 # to report the file as clean. We have to use normallookup for
2821 # merges to avoid losing information about merged/dirty files.
2821 # merges to avoid losing information about merged/dirty files.
2822 if p2 != nullid:
2822 if p2 != nullid:
2823 normal = repo.dirstate.normallookup
2823 normal = repo.dirstate.normallookup
2824 else:
2824 else:
2825 normal = repo.dirstate.normal
2825 normal = repo.dirstate.normal
2826 for f in revert[0]:
2826 for f in revert[0]:
2827 checkout(f)
2827 checkout(f)
2828 if normal:
2828 if normal:
2829 normal(f)
2829 normal(f)
2830
2830
2831 for f in add[0]:
2831 for f in add[0]:
2832 checkout(f)
2832 checkout(f)
2833 repo.dirstate.add(f)
2833 repo.dirstate.add(f)
2834
2834
2835 normal = repo.dirstate.normallookup
2835 normal = repo.dirstate.normallookup
2836 if node == parent and p2 == nullid:
2836 if node == parent and p2 == nullid:
2837 normal = repo.dirstate.normal
2837 normal = repo.dirstate.normal
2838 for f in undelete[0]:
2838 for f in undelete[0]:
2839 checkout(f)
2839 checkout(f)
2840 normal(f)
2840 normal(f)
2841
2841
2842 finally:
2842 finally:
2843 wlock.release()
2843 wlock.release()
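# A reduced sketch of what reverting a single file amounts to, built from the
# same checkout() logic used above; it skips the wlock, backups and dirstate
# bookkeeping the real command performs.  The helper name is illustrative only.
def _sketch_revert_one_file(repo, f, rev='.'):
    ctx = repo[rev]                      # target changeset ('.' = wd parent)
    fc = ctx[f]                          # file context for tracked file f
    repo.wwrite(f, fc.data(), fc.flags())
    repo.dirstate.normallookup(f)        # let status re-examine the file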
2844
2844
2845 def rollback(ui, repo):
2845 def rollback(ui, repo):
2846 """roll back the last transaction
2846 """roll back the last transaction
2847
2847
2848 This command should be used with care. There is only one level of
2848 This command should be used with care. There is only one level of
2849 rollback, and there is no way to undo a rollback. It will also
2849 rollback, and there is no way to undo a rollback. It will also
2850 restore the dirstate at the time of the last transaction, losing
2850 restore the dirstate at the time of the last transaction, losing
2851 any dirstate changes since that time. This command does not alter
2851 any dirstate changes since that time. This command does not alter
2852 the working directory.
2852 the working directory.
2853
2853
2854 Transactions are used to encapsulate the effects of all commands
2854 Transactions are used to encapsulate the effects of all commands
2855 that create new changesets or propagate existing changesets into a
2855 that create new changesets or propagate existing changesets into a
2856 repository. For example, the following commands are transactional,
2856 repository. For example, the following commands are transactional,
2857 and their effects can be rolled back:
2857 and their effects can be rolled back:
2858
2858
2859 - commit
2859 - commit
2860 - import
2860 - import
2861 - pull
2861 - pull
2862 - push (with this repository as the destination)
2862 - push (with this repository as the destination)
2863 - unbundle
2863 - unbundle
2864
2864
2865 This command is not intended for use on public repositories. Once
2865 This command is not intended for use on public repositories. Once
2866 changes are visible for pull by other users, rolling a transaction
2866 changes are visible for pull by other users, rolling a transaction
2867 back locally is ineffective (someone else may already have pulled
2867 back locally is ineffective (someone else may already have pulled
2868 the changes). Furthermore, a race is possible with readers of the
2868 the changes). Furthermore, a race is possible with readers of the
2869 repository; for example an in-progress pull from the repository
2869 repository; for example an in-progress pull from the repository
2870 may fail if a rollback is performed.
2870 may fail if a rollback is performed.
2871 """
2871 """
2872 repo.rollback()
2872 repo.rollback()
2873
2873
2874 def root(ui, repo):
2874 def root(ui, repo):
2875 """print the root (top) of the current working directory
2875 """print the root (top) of the current working directory
2876
2876
2877 Print the root directory of the current repository.
2877 Print the root directory of the current repository.
2878 """
2878 """
2879 ui.write(repo.root + "\n")
2879 ui.write(repo.root + "\n")
2880
2880
2881 def serve(ui, repo, **opts):
2881 def serve(ui, repo, **opts):
2882 """export the repository via HTTP
2882 """export the repository via HTTP
2883
2883
2884 Start a local HTTP repository browser and pull server.
2884 Start a local HTTP repository browser and pull server.
2885
2885
2886 By default, the server logs accesses to stdout and errors to
2886 By default, the server logs accesses to stdout and errors to
2887 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2887 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2888 files.
2888 files.
2889
2889
2890 To have the server choose a free port number to listen on, specify
2890 To have the server choose a free port number to listen on, specify
2891 a port number of 0; in this case, the server will print the port
2891 a port number of 0; in this case, the server will print the port
2892 number it uses.
2892 number it uses.
2893 """
2893 """
2894
2894
2895 if opts["stdio"]:
2895 if opts["stdio"]:
2896 if repo is None:
2896 if repo is None:
2897 raise error.RepoError(_("There is no Mercurial repository here"
2897 raise error.RepoError(_("There is no Mercurial repository here"
2898 " (.hg not found)"))
2898 " (.hg not found)"))
2899 s = sshserver.sshserver(ui, repo)
2899 s = sshserver.sshserver(ui, repo)
2900 s.serve_forever()
2900 s.serve_forever()
2901
2901
2902 # this way we can check if something was given in the command-line
2902 # this way we can check if something was given in the command-line
2903 if opts.get('port'):
2903 if opts.get('port'):
2904 opts['port'] = int(opts.get('port'))
2904 opts['port'] = int(opts.get('port'))
2905
2905
2906 baseui = repo and repo.baseui or ui
2906 baseui = repo and repo.baseui or ui
2907 optlist = ("name templates style address port prefix ipv6"
2907 optlist = ("name templates style address port prefix ipv6"
2908 " accesslog errorlog certificate encoding")
2908 " accesslog errorlog certificate encoding")
2909 for o in optlist.split():
2909 for o in optlist.split():
2910 val = opts.get(o, '')
2910 val = opts.get(o, '')
2911 if val in (None, ''): # should check against default options instead
2911 if val in (None, ''): # should check against default options instead
2912 continue
2912 continue
2913 baseui.setconfig("web", o, val)
2913 baseui.setconfig("web", o, val)
2914 if repo and repo.ui != baseui:
2914 if repo and repo.ui != baseui:
2915 repo.ui.setconfig("web", o, val)
2915 repo.ui.setconfig("web", o, val)
2916
2916
2917 if opts.get('webdir_conf'):
2917 if opts.get('webdir_conf'):
2918 app = hgwebdir_mod.hgwebdir(opts['webdir_conf'], ui)
2918 app = hgwebdir_mod.hgwebdir(opts['webdir_conf'], ui)
2919 elif repo is not None:
2919 elif repo is not None:
2920 app = hgweb_mod.hgweb(hg.repository(repo.ui, repo.root))
2920 app = hgweb_mod.hgweb(hg.repository(repo.ui, repo.root))
2921 else:
2921 else:
2922 raise error.RepoError(_("There is no Mercurial repository"
2922 raise error.RepoError(_("There is no Mercurial repository"
2923 " here (.hg not found)"))
2923 " here (.hg not found)"))
2924
2924
2925 class service(object):
2925 class service(object):
2926 def init(self):
2926 def init(self):
2927 util.set_signal_handler()
2927 util.set_signal_handler()
2928 self.httpd = server.create_server(ui, app)
2928 self.httpd = server.create_server(ui, app)
2929
2929
2930 if opts['port'] and not ui.verbose:
2930 if opts['port'] and not ui.verbose:
2931 return
2931 return
2932
2932
2933 if self.httpd.prefix:
2933 if self.httpd.prefix:
2934 prefix = self.httpd.prefix.strip('/') + '/'
2934 prefix = self.httpd.prefix.strip('/') + '/'
2935 else:
2935 else:
2936 prefix = ''
2936 prefix = ''
2937
2937
2938 port = ':%d' % self.httpd.port
2938 port = ':%d' % self.httpd.port
2939 if port == ':80':
2939 if port == ':80':
2940 port = ''
2940 port = ''
2941
2941
2942 bindaddr = self.httpd.addr
2942 bindaddr = self.httpd.addr
2943 if bindaddr == '0.0.0.0':
2943 if bindaddr == '0.0.0.0':
2944 bindaddr = '*'
2944 bindaddr = '*'
2945 elif ':' in bindaddr: # IPv6
2945 elif ':' in bindaddr: # IPv6
2946 bindaddr = '[%s]' % bindaddr
2946 bindaddr = '[%s]' % bindaddr
2947
2947
2948 fqaddr = self.httpd.fqaddr
2948 fqaddr = self.httpd.fqaddr
2949 if ':' in fqaddr:
2949 if ':' in fqaddr:
2950 fqaddr = '[%s]' % fqaddr
2950 fqaddr = '[%s]' % fqaddr
2951 if opts['port']:
2951 if opts['port']:
2952 write = ui.status
2952 write = ui.status
2953 else:
2953 else:
2954 write = ui.write
2954 write = ui.write
2955 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2955 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2956 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2956 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2957
2957
2958 def run(self):
2958 def run(self):
2959 self.httpd.serve_forever()
2959 self.httpd.serve_forever()
2960
2960
2961 service = service()
2961 service = service()
2962
2962
2963 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2963 cmdutil.service(opts, initfn=service.init, runfn=service.run)
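# A minimal sketch showing that the hgweb application built above is a plain
# WSGI callable and could also be served by the standard library; the path,
# port and helper name are hypothetical.
def _sketch_wsgi_serve(path='/tmp/demo', port=8000):
    from wsgiref.simple_server import make_server
    app = hgweb_mod.hgweb(path)          # same application object as above
    httpd = make_server('', port, app)
    httpd.serve_forever()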
2964
2964
2965 def status(ui, repo, *pats, **opts):
2965 def status(ui, repo, *pats, **opts):
2966 """show changed files in the working directory
2966 """show changed files in the working directory
2967
2967
2968 Show status of files in the repository. If names are given, only
2968 Show status of files in the repository. If names are given, only
2969 files that match are shown. Files that are clean or ignored or
2969 files that match are shown. Files that are clean or ignored or
2970 the source of a copy/move operation are not listed unless
2970 the source of a copy/move operation are not listed unless
2971 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2971 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2972 Unless options described with "show only ..." are given, the
2972 Unless options described with "show only ..." are given, the
2973 options -mardu are used.
2973 options -mardu are used.
2974
2974
2975 Option -q/--quiet hides untracked (unknown and ignored) files
2975 Option -q/--quiet hides untracked (unknown and ignored) files
2976 unless explicitly requested with -u/--unknown or -i/--ignored.
2976 unless explicitly requested with -u/--unknown or -i/--ignored.
2977
2977
2978 NOTE: status may appear to disagree with diff if permissions have
2978 NOTE: status may appear to disagree with diff if permissions have
2979 changed or a merge has occurred. The standard diff format does not
2979 changed or a merge has occurred. The standard diff format does not
2980 report permission changes and diff only reports changes relative
2980 report permission changes and diff only reports changes relative
2981 to one merge parent.
2981 to one merge parent.
2982
2982
2983 If one revision is given, it is used as the base revision.
2983 If one revision is given, it is used as the base revision.
2984 If two revisions are given, the differences between them are
2984 If two revisions are given, the differences between them are
2985 shown. The --change option can also be used as a shortcut to list
2985 shown. The --change option can also be used as a shortcut to list
2986 the changed files of a revision from its first parent.
2986 the changed files of a revision from its first parent.
2987
2987
2988 The codes used to show the status of files are::
2988 The codes used to show the status of files are::
2989
2989
2990 M = modified
2990 M = modified
2991 A = added
2991 A = added
2992 R = removed
2992 R = removed
2993 C = clean
2993 C = clean
2994 ! = missing (deleted by non-hg command, but still tracked)
2994 ! = missing (deleted by non-hg command, but still tracked)
2995 ? = not tracked
2995 ? = not tracked
2996 I = ignored
2996 I = ignored
2997 = origin of the previous file listed as A (added)
2997 = origin of the previous file listed as A (added)
2998 """
2998 """
2999
2999
3000 revs = opts.get('rev')
3000 revs = opts.get('rev')
3001 change = opts.get('change')
3001 change = opts.get('change')
3002
3002
3003 if revs and change:
3003 if revs and change:
3004 msg = _('cannot specify --rev and --change at the same time')
3004 msg = _('cannot specify --rev and --change at the same time')
3005 raise util.Abort(msg)
3005 raise util.Abort(msg)
3006 elif change:
3006 elif change:
3007 node2 = repo.lookup(change)
3007 node2 = repo.lookup(change)
3008 node1 = repo[node2].parents()[0].node()
3008 node1 = repo[node2].parents()[0].node()
3009 else:
3009 else:
3010 node1, node2 = cmdutil.revpair(repo, revs)
3010 node1, node2 = cmdutil.revpair(repo, revs)
3011
3011
3012 cwd = (pats and repo.getcwd()) or ''
3012 cwd = (pats and repo.getcwd()) or ''
3013 end = opts.get('print0') and '\0' or '\n'
3013 end = opts.get('print0') and '\0' or '\n'
3014 copy = {}
3014 copy = {}
3015 states = 'modified added removed deleted unknown ignored clean'.split()
3015 states = 'modified added removed deleted unknown ignored clean'.split()
3016 show = [k for k in states if opts.get(k)]
3016 show = [k for k in states if opts.get(k)]
3017 if opts.get('all'):
3017 if opts.get('all'):
3018 show += ui.quiet and (states[:4] + ['clean']) or states
3018 show += ui.quiet and (states[:4] + ['clean']) or states
3019 if not show:
3019 if not show:
3020 show = ui.quiet and states[:4] or states[:5]
3020 show = ui.quiet and states[:4] or states[:5]
3021
3021
3022 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3022 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3023 'ignored' in show, 'clean' in show, 'unknown' in show)
3023 'ignored' in show, 'clean' in show, 'unknown' in show)
3024 changestates = zip(states, 'MAR!?IC', stat)
3024 changestates = zip(states, 'MAR!?IC', stat)
3025
3025
3026 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3026 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3027 ctxn = repo[nullid]
3027 ctxn = repo[nullid]
3028 ctx1 = repo[node1]
3028 ctx1 = repo[node1]
3029 ctx2 = repo[node2]
3029 ctx2 = repo[node2]
3030 added = stat[1]
3030 added = stat[1]
3031 if node2 is None:
3031 if node2 is None:
3032 added = stat[0] + stat[1] # merged?
3032 added = stat[0] + stat[1] # merged?
3033
3033
3034 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3034 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3035 if k in added:
3035 if k in added:
3036 copy[k] = v
3036 copy[k] = v
3037 elif v in added:
3037 elif v in added:
3038 copy[v] = k
3038 copy[v] = k
3039
3039
3040 for state, char, files in changestates:
3040 for state, char, files in changestates:
3041 if state in show:
3041 if state in show:
3042 format = "%s %%s%s" % (char, end)
3042 format = "%s %%s%s" % (char, end)
3043 if opts.get('no_status'):
3043 if opts.get('no_status'):
3044 format = "%%s%s" % end
3044 format = "%%s%s" % end
3045
3045
3046 for f in files:
3046 for f in files:
3047 ui.write(format % repo.pathto(f, cwd))
3047 ui.write(format % repo.pathto(f, cwd))
3048 if f in copy:
3048 if f in copy:
3049 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
3049 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
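# A minimal sketch of the status codes listed above: they come from pairing
# 'MAR!?IC' with the seven lists returned by repo.status().  The helper name
# is illustrative only.
def _sketch_status_report(repo):
    st = repo.status(ignored=True, clean=True, unknown=True)
    lines = []
    for char, files in zip('MAR!?IC', st):
        for f in files:
            lines.append('%s %s' % (char, f))
    return lines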
3050
3050
3051 def summary(ui, repo, **opts):
3051 def summary(ui, repo, **opts):
3052 """summarize working directory state
3052 """summarize working directory state
3053
3053
3054 This generates a brief summary of the working directory state,
3054 This generates a brief summary of the working directory state,
3055 including parents, branch, commit status, and available updates.
3055 including parents, branch, commit status, and available updates.
3056
3056
3057 With the --remote option, this will check the default paths for
3057 With the --remote option, this will check the default paths for
3058 incoming and outgoing changes. This can be time-consuming.
3058 incoming and outgoing changes. This can be time-consuming.
3059 """
3059 """
3060
3060
3061 ctx = repo[None]
3061 ctx = repo[None]
3062 parents = ctx.parents()
3062 parents = ctx.parents()
3063 pnode = parents[0].node()
3063 pnode = parents[0].node()
3064 tags = repo.tags()
3064 tags = repo.tags()
3065
3065
3066 for p in parents:
3066 for p in parents:
3067 t = ' '.join([t for t in tags if tags[t] == p.node()])
3067 t = ' '.join([t for t in tags if tags[t] == p.node()])
3068 if p.rev() == -1:
3068 if p.rev() == -1:
3069 if not len(repo):
3069 if not len(repo):
3070 t += _(' (empty repository)')
3070 t += _(' (empty repository)')
3071 else:
3071 else:
3072 t += _(' (no revision checked out)')
3072 t += _(' (no revision checked out)')
3073 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
3073 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
3074 if p.description():
3074 if p.description():
3075 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
3075 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
3076
3076
3077 branch = ctx.branch()
3077 branch = ctx.branch()
3078 bheads = repo.branchheads(branch)
3078 bheads = repo.branchheads(branch)
3079 m = _('branch: %s\n') % branch
3079 m = _('branch: %s\n') % branch
3080 if branch != 'default':
3080 if branch != 'default':
3081 ui.write(m)
3081 ui.write(m)
3082 else:
3082 else:
3083 ui.status(m)
3083 ui.status(m)
3084
3084
3085 st = list(repo.status(unknown=True))[:6]
3085 st = list(repo.status(unknown=True))[:6]
3086 ms = merge_.mergestate(repo)
3086 ms = mergemod.mergestate(repo)
3087 st.append([f for f in ms if ms[f] == 'u'])
3087 st.append([f for f in ms if ms[f] == 'u'])
3088 labels = [_('%d modified'), _('%d added'), _('%d removed'),
3088 labels = [_('%d modified'), _('%d added'), _('%d removed'),
3089 _('%d deleted'), _('%d unknown'), _('%d ignored'),
3089 _('%d deleted'), _('%d unknown'), _('%d ignored'),
3090 _('%d unresolved')]
3090 _('%d unresolved')]
3091 t = []
3091 t = []
3092 for s, l in zip(st, labels):
3092 for s, l in zip(st, labels):
3093 if s:
3093 if s:
3094 t.append(l % len(s))
3094 t.append(l % len(s))
3095
3095
3096 t = ', '.join(t)
3096 t = ', '.join(t)
3097 cleanworkdir = False
3097 cleanworkdir = False
3098
3098
3099 if len(parents) > 1:
3099 if len(parents) > 1:
3100 t += _(' (merge)')
3100 t += _(' (merge)')
3101 elif branch != parents[0].branch():
3101 elif branch != parents[0].branch():
3102 t += _(' (new branch)')
3102 t += _(' (new branch)')
3103 elif (not st[0] and not st[1] and not st[2]):
3103 elif (not st[0] and not st[1] and not st[2]):
3104 t += _(' (clean)')
3104 t += _(' (clean)')
3105 cleanworkdir = True
3105 cleanworkdir = True
3106 elif pnode not in bheads:
3106 elif pnode not in bheads:
3107 t += _(' (new branch head)')
3107 t += _(' (new branch head)')
3108
3108
3109 if cleanworkdir:
3109 if cleanworkdir:
3110 ui.status(_('commit: %s\n') % t.strip())
3110 ui.status(_('commit: %s\n') % t.strip())
3111 else:
3111 else:
3112 ui.write(_('commit: %s\n') % t.strip())
3112 ui.write(_('commit: %s\n') % t.strip())
3113
3113
3114 # all ancestors of branch heads - all ancestors of parent = new csets
3114 # all ancestors of branch heads - all ancestors of parent = new csets
3115 new = [0] * len(repo)
3115 new = [0] * len(repo)
3116 cl = repo.changelog
3116 cl = repo.changelog
3117 for a in [cl.rev(n) for n in bheads]:
3117 for a in [cl.rev(n) for n in bheads]:
3118 new[a] = 1
3118 new[a] = 1
3119 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3119 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3120 new[a] = 1
3120 new[a] = 1
3121 for a in [p.rev() for p in parents]:
3121 for a in [p.rev() for p in parents]:
3122 if a >= 0:
3122 if a >= 0:
3123 new[a] = 0
3123 new[a] = 0
3124 for a in cl.ancestors(*[p.rev() for p in parents]):
3124 for a in cl.ancestors(*[p.rev() for p in parents]):
3125 new[a] = 0
3125 new[a] = 0
3126 new = sum(new)
3126 new = sum(new)
3127
3127
3128 if new == 0:
3128 if new == 0:
3129 ui.status(_('update: (current)\n'))
3129 ui.status(_('update: (current)\n'))
3130 elif pnode not in bheads:
3130 elif pnode not in bheads:
3131 ui.write(_('update: %d new changesets (update)\n') % new)
3131 ui.write(_('update: %d new changesets (update)\n') % new)
3132 else:
3132 else:
3133 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3133 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3134 (new, len(bheads)))
3134 (new, len(bheads)))
3135
3135
3136 if opts.get('remote'):
3136 if opts.get('remote'):
3137 t = []
3137 t = []
3138 source, branches = hg.parseurl(ui.expandpath('default'))
3138 source, branches = hg.parseurl(ui.expandpath('default'))
3139 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3139 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3140 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3140 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3141 ui.debug('comparing with %s\n' % url.hidepassword(source))
3141 ui.debug('comparing with %s\n' % url.hidepassword(source))
3142 repo.ui.pushbuffer()
3142 repo.ui.pushbuffer()
3143 common, incoming, rheads = repo.findcommonincoming(other)
3143 common, incoming, rheads = repo.findcommonincoming(other)
3144 repo.ui.popbuffer()
3144 repo.ui.popbuffer()
3145 if incoming:
3145 if incoming:
3146 t.append(_('1 or more incoming'))
3146 t.append(_('1 or more incoming'))
3147
3147
3148 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3148 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3149 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3149 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3150 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3150 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3151 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3151 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3152 repo.ui.pushbuffer()
3152 repo.ui.pushbuffer()
3153 o = repo.findoutgoing(other)
3153 o = repo.findoutgoing(other)
3154 repo.ui.popbuffer()
3154 repo.ui.popbuffer()
3155 o = repo.changelog.nodesbetween(o, None)[0]
3155 o = repo.changelog.nodesbetween(o, None)[0]
3156 if o:
3156 if o:
3157 t.append(_('%d outgoing') % len(o))
3157 t.append(_('%d outgoing') % len(o))
3158
3158
3159 if t:
3159 if t:
3160 ui.write(_('remote: %s\n') % (', '.join(t)))
3160 ui.write(_('remote: %s\n') % (', '.join(t)))
3161 else:
3161 else:
3162 ui.status(_('remote: (synced)\n'))
3162 ui.status(_('remote: (synced)\n'))
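# A sketch of the "update:" arithmetic above -- changesets that are ancestors
# of the branch heads but not of the working directory parents -- expressed
# with sets instead of the flag array; the helper name is illustrative only.
def _sketch_new_changeset_count(repo):
    cl = repo.changelog
    bheads = [cl.rev(n) for n in repo.branchheads(repo[None].branch())]
    parents = [p.rev() for p in repo[None].parents() if p.rev() >= 0]
    headrevs = set(bheads) | set(cl.ancestors(*bheads))
    parentrevs = set(parents) | set(cl.ancestors(*parents))
    return len(headrevs - parentrevs)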
3163
3163
3164 def tag(ui, repo, name1, *names, **opts):
3164 def tag(ui, repo, name1, *names, **opts):
3165 """add one or more tags for the current or given revision
3165 """add one or more tags for the current or given revision
3166
3166
3167 Name a particular revision using <name>.
3167 Name a particular revision using <name>.
3168
3168
3169 Tags are used to name particular revisions of the repository and are
3169 Tags are used to name particular revisions of the repository and are
3170 useful for comparing different revisions, for returning to significant
3170 useful for comparing different revisions, for returning to significant
3171 earlier versions, or for marking branch points as releases.
3171 earlier versions, or for marking branch points as releases.
3172
3172
3173 If no revision is given, the parent of the working directory is
3173 If no revision is given, the parent of the working directory is
3174 used, or tip if no revision is checked out.
3174 used, or tip if no revision is checked out.
3175
3175
3176 To facilitate version control, distribution, and merging of tags,
3176 To facilitate version control, distribution, and merging of tags,
3177 they are stored as a file named ".hgtags" which is managed
3177 they are stored as a file named ".hgtags" which is managed
3178 similarly to other project files and can be hand-edited if
3178 similarly to other project files and can be hand-edited if
3179 necessary. The file '.hg/localtags' is used for local tags (not
3179 necessary. The file '.hg/localtags' is used for local tags (not
3180 shared among repositories).
3180 shared among repositories).
3181
3181
3182 See 'hg help dates' for a list of formats valid for -d/--date.
3182 See 'hg help dates' for a list of formats valid for -d/--date.
3183 """
3183 """
3184
3184
3185 rev_ = "."
3185 rev_ = "."
3186 names = (name1,) + names
3186 names = (name1,) + names
3187 if len(names) != len(set(names)):
3187 if len(names) != len(set(names)):
3188 raise util.Abort(_('tag names must be unique'))
3188 raise util.Abort(_('tag names must be unique'))
3189 for n in names:
3189 for n in names:
3190 if n in ['tip', '.', 'null']:
3190 if n in ['tip', '.', 'null']:
3191 raise util.Abort(_('the name \'%s\' is reserved') % n)
3191 raise util.Abort(_('the name \'%s\' is reserved') % n)
3192 if opts.get('rev') and opts.get('remove'):
3192 if opts.get('rev') and opts.get('remove'):
3193 raise util.Abort(_("--rev and --remove are incompatible"))
3193 raise util.Abort(_("--rev and --remove are incompatible"))
3194 if opts.get('rev'):
3194 if opts.get('rev'):
3195 rev_ = opts['rev']
3195 rev_ = opts['rev']
3196 message = opts.get('message')
3196 message = opts.get('message')
3197 if opts.get('remove'):
3197 if opts.get('remove'):
3198 expectedtype = opts.get('local') and 'local' or 'global'
3198 expectedtype = opts.get('local') and 'local' or 'global'
3199 for n in names:
3199 for n in names:
3200 if not repo.tagtype(n):
3200 if not repo.tagtype(n):
3201 raise util.Abort(_('tag \'%s\' does not exist') % n)
3201 raise util.Abort(_('tag \'%s\' does not exist') % n)
3202 if repo.tagtype(n) != expectedtype:
3202 if repo.tagtype(n) != expectedtype:
3203 if expectedtype == 'global':
3203 if expectedtype == 'global':
3204 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3204 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3205 else:
3205 else:
3206 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3206 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3207 rev_ = nullid
3207 rev_ = nullid
3208 if not message:
3208 if not message:
3209 # we don't translate commit messages
3209 # we don't translate commit messages
3210 message = 'Removed tag %s' % ', '.join(names)
3210 message = 'Removed tag %s' % ', '.join(names)
3211 elif not opts.get('force'):
3211 elif not opts.get('force'):
3212 for n in names:
3212 for n in names:
3213 if n in repo.tags():
3213 if n in repo.tags():
3214 raise util.Abort(_('tag \'%s\' already exists '
3214 raise util.Abort(_('tag \'%s\' already exists '
3215 '(use -f to force)') % n)
3215 '(use -f to force)') % n)
3216 if not rev_ and repo.dirstate.parents()[1] != nullid:
3216 if not rev_ and repo.dirstate.parents()[1] != nullid:
3217 raise util.Abort(_('uncommitted merge - please provide a '
3217 raise util.Abort(_('uncommitted merge - please provide a '
3218 'specific revision'))
3218 'specific revision'))
3219 r = repo[rev_].node()
3219 r = repo[rev_].node()
3220
3220
3221 if not message:
3221 if not message:
3222 # we don't translate commit messages
3222 # we don't translate commit messages
3223 message = ('Added tag %s for changeset %s' %
3223 message = ('Added tag %s for changeset %s' %
3224 (', '.join(names), short(r)))
3224 (', '.join(names), short(r)))
3225
3225
3226 date = opts.get('date')
3226 date = opts.get('date')
3227 if date:
3227 if date:
3228 date = util.parsedate(date)
3228 date = util.parsedate(date)
3229
3229
3230 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3230 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
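# A minimal sketch of the programmatic equivalent of "hg tag v1.0" once all
# the validation above has passed: a single repo.tag() call.  The tag name is
# hypothetical and the helper name is illustrative only.
def _sketch_add_tag(repo, name='v1.0'):
    r = repo['.'].node()                               # revision being tagged
    message = 'Added tag %s for changeset %s' % (name, short(r))
    repo.tag([name], r, message, False, None, None)    # global tag, default user/date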
3231
3231
3232 def tags(ui, repo):
3232 def tags(ui, repo):
3233 """list repository tags
3233 """list repository tags
3234
3234
3235 This lists both regular and local tags. When the -v/--verbose
3235 This lists both regular and local tags. When the -v/--verbose
3236 switch is used, a third column "local" is printed for local tags.
3236 switch is used, a third column "local" is printed for local tags.
3237 """
3237 """
3238
3238
3239 hexfunc = ui.debugflag and hex or short
3239 hexfunc = ui.debugflag and hex or short
3240 tagtype = ""
3240 tagtype = ""
3241
3241
3242 for t, n in reversed(repo.tagslist()):
3242 for t, n in reversed(repo.tagslist()):
3243 if ui.quiet:
3243 if ui.quiet:
3244 ui.write("%s\n" % t)
3244 ui.write("%s\n" % t)
3245 continue
3245 continue
3246
3246
3247 try:
3247 try:
3248 hn = hexfunc(n)
3248 hn = hexfunc(n)
3249 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3249 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3250 except error.LookupError:
3250 except error.LookupError:
3251 r = " ?:%s" % hn
3251 r = " ?:%s" % hn
3252 else:
3252 else:
3253 spaces = " " * (30 - encoding.colwidth(t))
3253 spaces = " " * (30 - encoding.colwidth(t))
3254 if ui.verbose:
3254 if ui.verbose:
3255 if repo.tagtype(t) == 'local':
3255 if repo.tagtype(t) == 'local':
3256 tagtype = " local"
3256 tagtype = " local"
3257 else:
3257 else:
3258 tagtype = ""
3258 tagtype = ""
3259 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3259 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3260
3260
3261 def tip(ui, repo, **opts):
3261 def tip(ui, repo, **opts):
3262 """show the tip revision
3262 """show the tip revision
3263
3263
3264 The tip revision (usually just called the tip) is the changeset
3264 The tip revision (usually just called the tip) is the changeset
3265 most recently added to the repository (and therefore the most
3265 most recently added to the repository (and therefore the most
3266 recently changed head).
3266 recently changed head).
3267
3267
3268 If you have just made a commit, that commit will be the tip. If
3268 If you have just made a commit, that commit will be the tip. If
3269 you have just pulled changes from another repository, the tip of
3269 you have just pulled changes from another repository, the tip of
3270 that repository becomes the current tip. The "tip" tag is special
3270 that repository becomes the current tip. The "tip" tag is special
3271 and cannot be renamed or assigned to a different changeset.
3271 and cannot be renamed or assigned to a different changeset.
3272 """
3272 """
3273 displayer = cmdutil.show_changeset(ui, repo, opts)
3273 displayer = cmdutil.show_changeset(ui, repo, opts)
3274 displayer.show(repo[len(repo) - 1])
3274 displayer.show(repo[len(repo) - 1])
3275 displayer.close()
3275 displayer.close()
3276
3276
3277 def unbundle(ui, repo, fname1, *fnames, **opts):
3277 def unbundle(ui, repo, fname1, *fnames, **opts):
3278 """apply one or more changegroup files
3278 """apply one or more changegroup files
3279
3279
3280 Apply one or more compressed changegroup files generated by the
3280 Apply one or more compressed changegroup files generated by the
3281 bundle command.
3281 bundle command.
3282 """
3282 """
3283 fnames = (fname1,) + fnames
3283 fnames = (fname1,) + fnames
3284
3284
3285 lock = repo.lock()
3285 lock = repo.lock()
3286 try:
3286 try:
3287 for fname in fnames:
3287 for fname in fnames:
3288 f = url.open(ui, fname)
3288 f = url.open(ui, fname)
3289 gen = changegroup.readbundle(f, fname)
3289 gen = changegroup.readbundle(f, fname)
3290 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3290 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3291 finally:
3291 finally:
3292 lock.release()
3292 lock.release()
3293
3293
3294 return postincoming(ui, repo, modheads, opts.get('update'), None)
3294 return postincoming(ui, repo, modheads, opts.get('update'), None)
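# A minimal sketch of applying one bundle file with the same primitives used
# above -- readbundle() plus addchangegroup() under the repository lock; the
# bundle path and helper name are hypothetical.
def _sketch_apply_bundle(repo, fname='/tmp/changes.hg'):
    f = open(fname, 'rb')
    gen = changegroup.readbundle(f, fname)
    lock = repo.lock()
    try:
        return repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        lock.release()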
3295
3295
3296 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3296 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3297 """update working directory
3297 """update working directory
3298
3298
3299 Update the repository's working directory to the specified
3299 Update the repository's working directory to the specified
3300 changeset.
3300 changeset.
3301
3301
3302 If no changeset is specified, attempt to update to the head of the
3302 If no changeset is specified, attempt to update to the head of the
3303 current branch. If this head is a descendant of the working
3303 current branch. If this head is a descendant of the working
3304 directory's parent, update to it, otherwise abort.
3304 directory's parent, update to it, otherwise abort.
3305
3305
3306 The following rules apply when the working directory contains
3306 The following rules apply when the working directory contains
3307 uncommitted changes:
3307 uncommitted changes:
3308
3308
3309 1. If neither -c/--check nor -C/--clean is specified, and if
3309 1. If neither -c/--check nor -C/--clean is specified, and if
3310 the requested changeset is an ancestor or descendant of
3310 the requested changeset is an ancestor or descendant of
3311 the working directory's parent, the uncommitted changes
3311 the working directory's parent, the uncommitted changes
3312 are merged into the requested changeset and the merged
3312 are merged into the requested changeset and the merged
3313 result is left uncommitted. If the requested changeset is
3313 result is left uncommitted. If the requested changeset is
3314 not an ancestor or descendant (that is, it is on another
3314 not an ancestor or descendant (that is, it is on another
3315 branch), the update is aborted and the uncommitted changes
3315 branch), the update is aborted and the uncommitted changes
3316 are preserved.
3316 are preserved.
3317
3317
3318 2. With the -c/--check option, the update is aborted and the
3318 2. With the -c/--check option, the update is aborted and the
3319 uncommitted changes are preserved.
3319 uncommitted changes are preserved.
3320
3320
3321 3. With the -C/--clean option, uncommitted changes are discarded and
3321 3. With the -C/--clean option, uncommitted changes are discarded and
3322 the working directory is updated to the requested changeset.
3322 the working directory is updated to the requested changeset.
3323
3323
3324 Use null as the changeset to remove the working directory (like 'hg
3324 Use null as the changeset to remove the working directory (like 'hg
3325 clone -U').
3325 clone -U').
3326
3326
3327 If you want to update just one file to an older changeset, use 'hg revert'.
3327 If you want to update just one file to an older changeset, use 'hg revert'.
3328
3328
3329 See 'hg help dates' for a list of formats valid for -d/--date.
3329 See 'hg help dates' for a list of formats valid for -d/--date.
3330 """
3330 """
3331 if rev and node:
3331 if rev and node:
3332 raise util.Abort(_("please specify just one revision"))
3332 raise util.Abort(_("please specify just one revision"))
3333
3333
3334 if not rev:
3334 if not rev:
3335 rev = node
3335 rev = node
3336
3336
3337 if check and clean:
3337 if check and clean:
3338 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3338 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3339
3339
3340 if check:
3340 if check:
3341 # we could use dirty() but we can ignore merge and branch trivia
3341 # we could use dirty() but we can ignore merge and branch trivia
3342 c = repo[None]
3342 c = repo[None]
3343 if c.modified() or c.added() or c.removed():
3343 if c.modified() or c.added() or c.removed():
3344 raise util.Abort(_("uncommitted local changes"))
3344 raise util.Abort(_("uncommitted local changes"))
3345
3345
3346 if date:
3346 if date:
3347 if rev:
3347 if rev:
3348 raise util.Abort(_("you can't specify a revision and a date"))
3348 raise util.Abort(_("you can't specify a revision and a date"))
3349 rev = cmdutil.finddate(ui, repo, date)
3349 rev = cmdutil.finddate(ui, repo, date)
3350
3350
3351 if clean or check:
3351 if clean or check:
3352 return hg.clean(repo, rev)
3352 return hg.clean(repo, rev)
3353 else:
3353 else:
3354 return hg.update(repo, rev)
3354 return hg.update(repo, rev)
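# A minimal sketch of how rules 1-3 above reduce to the two entry points this
# function ends with: hg.clean() discards local changes, hg.update() merges
# them into the target changeset.  The helper name is illustrative only.
def _sketch_update(repo, rev=None, discard=False):
    if discard:
        return hg.clean(repo, rev)    # rule 3: -C/--clean behaviour
    return hg.update(repo, rev)       # rule 1: keep and merge local changes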
3355
3355
3356 def verify(ui, repo):
3356 def verify(ui, repo):
3357 """verify the integrity of the repository
3357 """verify the integrity of the repository
3358
3358
3359 Verify the integrity of the current repository.
3359 Verify the integrity of the current repository.
3360
3360
3361 This will perform an extensive check of the repository's
3361 This will perform an extensive check of the repository's
3362 integrity, validating the hashes and checksums of each entry in
3362 integrity, validating the hashes and checksums of each entry in
3363 the changelog, manifest, and tracked files, as well as the
3363 the changelog, manifest, and tracked files, as well as the
3364 integrity of their crosslinks and indices.
3364 integrity of their crosslinks and indices.
3365 """
3365 """
3366 return hg.verify(repo)
3366 return hg.verify(repo)
3367
3367
3368 def version_(ui):
3368 def version_(ui):
3369 """output version and copyright information"""
3369 """output version and copyright information"""
3370 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3370 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3371 % util.version())
3371 % util.version())
3372 ui.status(_(
3372 ui.status(_(
3373 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3373 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3374 "This is free software; see the source for copying conditions. "
3374 "This is free software; see the source for copying conditions. "
3375 "There is NO\nwarranty; "
3375 "There is NO\nwarranty; "
3376 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3376 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3377 ))
3377 ))
3378
3378
3379 # Command options and aliases are listed here, alphabetically
3379 # Command options and aliases are listed here, alphabetically
3380
3380
3381 globalopts = [
3381 globalopts = [
3382 ('R', 'repository', '',
3382 ('R', 'repository', '',
3383 _('repository root directory or name of overlay bundle file')),
3383 _('repository root directory or name of overlay bundle file')),
3384 ('', 'cwd', '', _('change working directory')),
3384 ('', 'cwd', '', _('change working directory')),
3385 ('y', 'noninteractive', None,
3385 ('y', 'noninteractive', None,
3386 _('do not prompt, assume \'yes\' for any required answers')),
3386 _('do not prompt, assume \'yes\' for any required answers')),
3387 ('q', 'quiet', None, _('suppress output')),
3387 ('q', 'quiet', None, _('suppress output')),
3388 ('v', 'verbose', None, _('enable additional output')),
3388 ('v', 'verbose', None, _('enable additional output')),
3389 ('', 'config', [],
3389 ('', 'config', [],
3390 _('set/override config option (use \'section.name=value\')')),
3390 _('set/override config option (use \'section.name=value\')')),
3391 ('', 'debug', None, _('enable debugging output')),
3391 ('', 'debug', None, _('enable debugging output')),
3392 ('', 'debugger', None, _('start debugger')),
3392 ('', 'debugger', None, _('start debugger')),
3393 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3393 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3394 ('', 'encodingmode', encoding.encodingmode,
3394 ('', 'encodingmode', encoding.encodingmode,
3395 _('set the charset encoding mode')),
3395 _('set the charset encoding mode')),
3396 ('', 'traceback', None, _('always print a traceback on exception')),
3396 ('', 'traceback', None, _('always print a traceback on exception')),
3397 ('', 'time', None, _('time how long the command takes')),
3397 ('', 'time', None, _('time how long the command takes')),
3398 ('', 'profile', None, _('print command execution profile')),
3398 ('', 'profile', None, _('print command execution profile')),
3399 ('', 'version', None, _('output version information and exit')),
3399 ('', 'version', None, _('output version information and exit')),
3400 ('h', 'help', None, _('display help and exit')),
3400 ('h', 'help', None, _('display help and exit')),
3401 ]
3401 ]
3402
3402
3403 dryrunopts = [('n', 'dry-run', None,
3403 dryrunopts = [('n', 'dry-run', None,
3404 _('do not perform actions, just print output'))]
3404 _('do not perform actions, just print output'))]
3405
3405
3406 remoteopts = [
3406 remoteopts = [
3407 ('e', 'ssh', '', _('specify ssh command to use')),
3407 ('e', 'ssh', '', _('specify ssh command to use')),
3408 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3408 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3409 ]
3409 ]
3410
3410
3411 walkopts = [
3411 walkopts = [
3412 ('I', 'include', [], _('include names matching the given patterns')),
3412 ('I', 'include', [], _('include names matching the given patterns')),
3413 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3413 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3414 ]
3414 ]
3415
3415
3416 commitopts = [
3416 commitopts = [
3417 ('m', 'message', '', _('use <text> as commit message')),
3417 ('m', 'message', '', _('use <text> as commit message')),
3418 ('l', 'logfile', '', _('read commit message from <file>')),
3418 ('l', 'logfile', '', _('read commit message from <file>')),
3419 ]
3419 ]
3420
3420
3421 commitopts2 = [
3421 commitopts2 = [
3422 ('d', 'date', '', _('record datecode as commit date')),
3422 ('d', 'date', '', _('record datecode as commit date')),
3423 ('u', 'user', '', _('record the specified user as committer')),
3423 ('u', 'user', '', _('record the specified user as committer')),
3424 ]
3424 ]
3425
3425
3426 templateopts = [
3426 templateopts = [
3427 ('', 'style', '', _('display using template map file')),
3427 ('', 'style', '', _('display using template map file')),
3428 ('', 'template', '', _('display with template')),
3428 ('', 'template', '', _('display with template')),
3429 ]
3429 ]
3430
3430
3431 logopts = [
3431 logopts = [
3432 ('p', 'patch', None, _('show patch')),
3432 ('p', 'patch', None, _('show patch')),
3433 ('g', 'git', None, _('use git extended diff format')),
3433 ('g', 'git', None, _('use git extended diff format')),
3434 ('l', 'limit', '', _('limit number of changes displayed')),
3434 ('l', 'limit', '', _('limit number of changes displayed')),
3435 ('M', 'no-merges', None, _('do not show merges')),
3435 ('M', 'no-merges', None, _('do not show merges')),
3436 ] + templateopts
3436 ] + templateopts
3437
3437
3438 diffopts = [
3438 diffopts = [
3439 ('a', 'text', None, _('treat all files as text')),
3439 ('a', 'text', None, _('treat all files as text')),
3440 ('g', 'git', None, _('use git extended diff format')),
3440 ('g', 'git', None, _('use git extended diff format')),
3441 ('', 'nodates', None, _('omit dates from diff headers'))
3441 ('', 'nodates', None, _('omit dates from diff headers'))
3442 ]
3442 ]
3443
3443
3444 diffopts2 = [
3444 diffopts2 = [
3445 ('p', 'show-function', None, _('show which function each change is in')),
3445 ('p', 'show-function', None, _('show which function each change is in')),
3446 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3446 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3447 ('w', 'ignore-all-space', None,
3447 ('w', 'ignore-all-space', None,
3448 _('ignore white space when comparing lines')),
3448 _('ignore white space when comparing lines')),
3449 ('b', 'ignore-space-change', None,
3449 ('b', 'ignore-space-change', None,
3450 _('ignore changes in the amount of white space')),
3450 _('ignore changes in the amount of white space')),
3451 ('B', 'ignore-blank-lines', None,
3451 ('B', 'ignore-blank-lines', None,
3452 _('ignore changes whose lines are all blank')),
3452 _('ignore changes whose lines are all blank')),
3453 ('U', 'unified', '', _('number of lines of context to show')),
3453 ('U', 'unified', '', _('number of lines of context to show')),
3454 ('', 'stat', None, _('output diffstat-style summary of changes')),
3454 ('', 'stat', None, _('output diffstat-style summary of changes')),
3455 ]
3455 ]
3456
3456
3457 similarityopts = [
3457 similarityopts = [
3458 ('s', 'similarity', '',
3458 ('s', 'similarity', '',
3459 _('guess renamed files by similarity (0<=s<=100)'))
3459 _('guess renamed files by similarity (0<=s<=100)'))
3460 ]
3460 ]
3461
3461
3462 table = {
3462 table = {
3463 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3463 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3464 "addremove":
3464 "addremove":
3465 (addremove, similarityopts + walkopts + dryrunopts,
3465 (addremove, similarityopts + walkopts + dryrunopts,
3466 _('[OPTION]... [FILE]...')),
3466 _('[OPTION]... [FILE]...')),
3467 "^annotate|blame":
3467 "^annotate|blame":
3468 (annotate,
3468 (annotate,
3469 [('r', 'rev', '', _('annotate the specified revision')),
3469 [('r', 'rev', '', _('annotate the specified revision')),
3470 ('', 'follow', None,
3470 ('', 'follow', None,
3471 _('follow copies/renames and list the filename (DEPRECATED)')),
3471 _('follow copies/renames and list the filename (DEPRECATED)')),
3472 ('', 'no-follow', None, _("don't follow copies and renames")),
3472 ('', 'no-follow', None, _("don't follow copies and renames")),
3473 ('a', 'text', None, _('treat all files as text')),
3473 ('a', 'text', None, _('treat all files as text')),
3474 ('u', 'user', None, _('list the author (long with -v)')),
3474 ('u', 'user', None, _('list the author (long with -v)')),
3475 ('f', 'file', None, _('list the filename')),
3475 ('f', 'file', None, _('list the filename')),
3476 ('d', 'date', None, _('list the date (short with -q)')),
3476 ('d', 'date', None, _('list the date (short with -q)')),
3477 ('n', 'number', None, _('list the revision number (default)')),
3477 ('n', 'number', None, _('list the revision number (default)')),
3478 ('c', 'changeset', None, _('list the changeset')),
3478 ('c', 'changeset', None, _('list the changeset')),
3479 ('l', 'line-number', None,
3479 ('l', 'line-number', None,
3480 _('show line number at the first appearance'))
3480 _('show line number at the first appearance'))
3481 ] + walkopts,
3481 ] + walkopts,
3482 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3482 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3483 "archive":
3483 "archive":
3484 (archive,
3484 (archive,
3485 [('', 'no-decode', None, _('do not pass files through decoders')),
3485 [('', 'no-decode', None, _('do not pass files through decoders')),
3486 ('p', 'prefix', '', _('directory prefix for files in archive')),
3486 ('p', 'prefix', '', _('directory prefix for files in archive')),
3487 ('r', 'rev', '', _('revision to distribute')),
3487 ('r', 'rev', '', _('revision to distribute')),
3488 ('t', 'type', '', _('type of distribution to create')),
3488 ('t', 'type', '', _('type of distribution to create')),
3489 ] + walkopts,
3489 ] + walkopts,
3490 _('[OPTION]... DEST')),
3490 _('[OPTION]... DEST')),
3491 "backout":
3491 "backout":
3492 (backout,
3492 (backout,
3493 [('', 'merge', None,
3493 [('', 'merge', None,
3494 _('merge with old dirstate parent after backout')),
3494 _('merge with old dirstate parent after backout')),
3495 ('', 'parent', '', _('parent to choose when backing out merge')),
3495 ('', 'parent', '', _('parent to choose when backing out merge')),
3496 ('r', 'rev', '', _('revision to backout')),
3496 ('r', 'rev', '', _('revision to backout')),
3497 ] + walkopts + commitopts + commitopts2,
3497 ] + walkopts + commitopts + commitopts2,
3498 _('[OPTION]... [-r] REV')),
3498 _('[OPTION]... [-r] REV')),
3499 "bisect":
3499 "bisect":
3500 (bisect,
3500 (bisect,
3501 [('r', 'reset', False, _('reset bisect state')),
3501 [('r', 'reset', False, _('reset bisect state')),
3502 ('g', 'good', False, _('mark changeset good')),
3502 ('g', 'good', False, _('mark changeset good')),
3503 ('b', 'bad', False, _('mark changeset bad')),
3503 ('b', 'bad', False, _('mark changeset bad')),
3504 ('s', 'skip', False, _('skip testing changeset')),
3504 ('s', 'skip', False, _('skip testing changeset')),
3505 ('c', 'command', '', _('use command to check changeset state')),
3505 ('c', 'command', '', _('use command to check changeset state')),
3506 ('U', 'noupdate', False, _('do not update to target'))],
3506 ('U', 'noupdate', False, _('do not update to target'))],
3507 _("[-gbsr] [-U] [-c CMD] [REV]")),
3507 _("[-gbsr] [-U] [-c CMD] [REV]")),
3508 "branch":
3508 "branch":
3509 (branch,
3509 (branch,
3510 [('f', 'force', None,
3510 [('f', 'force', None,
3511 _('set branch name even if it shadows an existing branch')),
3511 _('set branch name even if it shadows an existing branch')),
3512 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3512 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3513 _('[-fC] [NAME]')),
3513 _('[-fC] [NAME]')),
3514 "branches":
3514 "branches":
3515 (branches,
3515 (branches,
3516 [('a', 'active', False,
3516 [('a', 'active', False,
3517 _('show only branches that have unmerged heads')),
3517 _('show only branches that have unmerged heads')),
3518 ('c', 'closed', False,
3518 ('c', 'closed', False,
3519 _('show normal and closed branches'))],
3519 _('show normal and closed branches'))],
3520 _('[-ac]')),
3520 _('[-ac]')),
3521 "bundle":
3521 "bundle":
3522 (bundle,
3522 (bundle,
3523 [('f', 'force', None,
3523 [('f', 'force', None,
3524 _('run even when the destination is unrelated')),
3524 _('run even when the destination is unrelated')),
3525 ('r', 'rev', [],
3525 ('r', 'rev', [],
3526 _('a changeset intended to be added to the destination')),
3526 _('a changeset intended to be added to the destination')),
3527 ('b', 'branch', [],
3527 ('b', 'branch', [],
3528 _('a specific branch you would like to bundle')),
3528 _('a specific branch you would like to bundle')),
3529 ('', 'base', [],
3529 ('', 'base', [],
3530 _('a base changeset assumed to be available at the destination')),
3530 _('a base changeset assumed to be available at the destination')),
3531 ('a', 'all', None, _('bundle all changesets in the repository')),
3531 ('a', 'all', None, _('bundle all changesets in the repository')),
3532 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3532 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3533 ] + remoteopts,
3533 ] + remoteopts,
3534 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3534 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3535 "cat":
3535 "cat":
3536 (cat,
3536 (cat,
3537 [('o', 'output', '', _('print output to file with formatted name')),
3537 [('o', 'output', '', _('print output to file with formatted name')),
3538 ('r', 'rev', '', _('print the given revision')),
3538 ('r', 'rev', '', _('print the given revision')),
3539 ('', 'decode', None, _('apply any matching decode filter')),
3539 ('', 'decode', None, _('apply any matching decode filter')),
3540 ] + walkopts,
3540 ] + walkopts,
3541 _('[OPTION]... FILE...')),
3541 _('[OPTION]... FILE...')),
3542 "^clone":
3542 "^clone":
3543 (clone,
3543 (clone,
3544 [('U', 'noupdate', None,
3544 [('U', 'noupdate', None,
3545 _('the clone will include an empty working copy (only a repository)')),
3545 _('the clone will include an empty working copy (only a repository)')),
3546 ('u', 'updaterev', '',
3546 ('u', 'updaterev', '',
3547 _('revision, tag or branch to check out')),
3547 _('revision, tag or branch to check out')),
3548 ('r', 'rev', [],
3548 ('r', 'rev', [],
3549 _('include the specified changeset')),
3549 _('include the specified changeset')),
3550 ('b', 'branch', [],
3550 ('b', 'branch', [],
3551 _('clone only the specified branch')),
3551 _('clone only the specified branch')),
3552 ('', 'pull', None, _('use pull protocol to copy metadata')),
3552 ('', 'pull', None, _('use pull protocol to copy metadata')),
3553 ('', 'uncompressed', None,
3553 ('', 'uncompressed', None,
3554 _('use uncompressed transfer (fast over LAN)')),
3554 _('use uncompressed transfer (fast over LAN)')),
3555 ] + remoteopts,
3555 ] + remoteopts,
3556 _('[OPTION]... SOURCE [DEST]')),
3556 _('[OPTION]... SOURCE [DEST]')),
3557 "^commit|ci":
3557 "^commit|ci":
3558 (commit,
3558 (commit,
3559 [('A', 'addremove', None,
3559 [('A', 'addremove', None,
3560 _('mark new/missing files as added/removed before committing')),
3560 _('mark new/missing files as added/removed before committing')),
3561 ('', 'close-branch', None,
3561 ('', 'close-branch', None,
3562 _('mark a branch as closed, hiding it from the branch list')),
3562 _('mark a branch as closed, hiding it from the branch list')),
3563 ] + walkopts + commitopts + commitopts2,
3563 ] + walkopts + commitopts + commitopts2,
3564 _('[OPTION]... [FILE]...')),
3564 _('[OPTION]... [FILE]...')),
3565 "copy|cp":
3565 "copy|cp":
3566 (copy,
3566 (copy,
3567 [('A', 'after', None, _('record a copy that has already occurred')),
3567 [('A', 'after', None, _('record a copy that has already occurred')),
3568 ('f', 'force', None,
3568 ('f', 'force', None,
3569 _('forcibly copy over an existing managed file')),
3569 _('forcibly copy over an existing managed file')),
3570 ] + walkopts + dryrunopts,
3570 ] + walkopts + dryrunopts,
3571 _('[OPTION]... [SOURCE]... DEST')),
3571 _('[OPTION]... [SOURCE]... DEST')),
3572 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3572 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3573 "debugcheckstate": (debugcheckstate, [], ''),
3573 "debugcheckstate": (debugcheckstate, [], ''),
3574 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3574 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3575 "debugcomplete":
3575 "debugcomplete":
3576 (debugcomplete,
3576 (debugcomplete,
3577 [('o', 'options', None, _('show the command options'))],
3577 [('o', 'options', None, _('show the command options'))],
3578 _('[-o] CMD')),
3578 _('[-o] CMD')),
3579 "debugdate":
3579 "debugdate":
3580 (debugdate,
3580 (debugdate,
3581 [('e', 'extended', None, _('try extended date formats'))],
3581 [('e', 'extended', None, _('try extended date formats'))],
3582 _('[-e] DATE [RANGE]')),
3582 _('[-e] DATE [RANGE]')),
3583 "debugdata": (debugdata, [], _('FILE REV')),
3583 "debugdata": (debugdata, [], _('FILE REV')),
3584 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3584 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3585 "debugindex": (debugindex, [], _('FILE')),
3585 "debugindex": (debugindex, [], _('FILE')),
3586 "debugindexdot": (debugindexdot, [], _('FILE')),
3586 "debugindexdot": (debugindexdot, [], _('FILE')),
3587 "debuginstall": (debuginstall, [], ''),
3587 "debuginstall": (debuginstall, [], ''),
3588 "debugrebuildstate":
3588 "debugrebuildstate":
3589 (debugrebuildstate,
3589 (debugrebuildstate,
3590 [('r', 'rev', '', _('revision to rebuild to'))],
3590 [('r', 'rev', '', _('revision to rebuild to'))],
3591 _('[-r REV] [REV]')),
3591 _('[-r REV] [REV]')),
3592 "debugrename":
3592 "debugrename":
3593 (debugrename,
3593 (debugrename,
3594 [('r', 'rev', '', _('revision to debug'))],
3594 [('r', 'rev', '', _('revision to debug'))],
3595 _('[-r REV] FILE')),
3595 _('[-r REV] FILE')),
3596 "debugsetparents":
3596 "debugsetparents":
3597 (debugsetparents, [], _('REV1 [REV2]')),
3597 (debugsetparents, [], _('REV1 [REV2]')),
3598 "debugstate":
3598 "debugstate":
3599 (debugstate,
3599 (debugstate,
3600 [('', 'nodates', None, _('do not display the saved mtime'))],
3600 [('', 'nodates', None, _('do not display the saved mtime'))],
3601 _('[OPTION]...')),
3601 _('[OPTION]...')),
3602 "debugsub":
3602 "debugsub":
3603 (debugsub,
3603 (debugsub,
3604 [('r', 'rev', '', _('revision to check'))],
3604 [('r', 'rev', '', _('revision to check'))],
3605 _('[-r REV] [REV]')),
3605 _('[-r REV] [REV]')),
3606 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3606 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3607 "^diff":
3607 "^diff":
3608 (diff,
3608 (diff,
3609 [('r', 'rev', [], _('revision')),
3609 [('r', 'rev', [], _('revision')),
3610 ('c', 'change', '', _('change made by revision'))
3610 ('c', 'change', '', _('change made by revision'))
3611 ] + diffopts + diffopts2 + walkopts,
3611 ] + diffopts + diffopts2 + walkopts,
3612 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
3612 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
3613 "^export":
3613 "^export":
3614 (export,
3614 (export,
3615 [('o', 'output', '', _('print output to file with formatted name')),
3615 [('o', 'output', '', _('print output to file with formatted name')),
3616 ('', 'switch-parent', None, _('diff against the second parent')),
3616 ('', 'switch-parent', None, _('diff against the second parent')),
3617 ('r', 'rev', [], _('revisions to export')),
3617 ('r', 'rev', [], _('revisions to export')),
3618 ] + diffopts,
3618 ] + diffopts,
3619 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3619 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3620 "^forget":
3620 "^forget":
3621 (forget,
3621 (forget,
3622 [] + walkopts,
3622 [] + walkopts,
3623 _('[OPTION]... FILE...')),
3623 _('[OPTION]... FILE...')),
3624 "grep":
3624 "grep":
3625 (grep,
3625 (grep,
3626 [('0', 'print0', None, _('end fields with NUL')),
3626 [('0', 'print0', None, _('end fields with NUL')),
3627 ('', 'all', None, _('print all revisions that match')),
3627 ('', 'all', None, _('print all revisions that match')),
3628 ('f', 'follow', None,
3628 ('f', 'follow', None,
3629 _('follow changeset history,'
3629 _('follow changeset history,'
3630 ' or file history across copies and renames')),
3630 ' or file history across copies and renames')),
3631 ('i', 'ignore-case', None, _('ignore case when matching')),
3631 ('i', 'ignore-case', None, _('ignore case when matching')),
3632 ('l', 'files-with-matches', None,
3632 ('l', 'files-with-matches', None,
3633 _('print only filenames and revisions that match')),
3633 _('print only filenames and revisions that match')),
3634 ('n', 'line-number', None, _('print matching line numbers')),
3634 ('n', 'line-number', None, _('print matching line numbers')),
3635 ('r', 'rev', [], _('search in given revision range')),
3635 ('r', 'rev', [], _('search in given revision range')),
3636 ('u', 'user', None, _('list the author (long with -v)')),
3636 ('u', 'user', None, _('list the author (long with -v)')),
3637 ('d', 'date', None, _('list the date (short with -q)')),
3637 ('d', 'date', None, _('list the date (short with -q)')),
3638 ] + walkopts,
3638 ] + walkopts,
3639 _('[OPTION]... PATTERN [FILE]...')),
3639 _('[OPTION]... PATTERN [FILE]...')),
3640 "heads":
3640 "heads":
3641 (heads,
3641 (heads,
3642 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3642 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3643 ('t', 'topo', False, _('show topological heads only')),
3643 ('t', 'topo', False, _('show topological heads only')),
3644 ('a', 'active', False,
3644 ('a', 'active', False,
3645 _('show active branchheads only [DEPRECATED]')),
3645 _('show active branchheads only [DEPRECATED]')),
3646 ('c', 'closed', False,
3646 ('c', 'closed', False,
3647 _('show normal and closed branch heads')),
3647 _('show normal and closed branch heads')),
3648 ] + templateopts,
3648 ] + templateopts,
3649 _('[-ac] [-r STARTREV] [REV]...')),
3649 _('[-ac] [-r STARTREV] [REV]...')),
3650 "help": (help_, [], _('[TOPIC]')),
3650 "help": (help_, [], _('[TOPIC]')),
3651 "identify|id":
3651 "identify|id":
3652 (identify,
3652 (identify,
3653 [('r', 'rev', '', _('identify the specified revision')),
3653 [('r', 'rev', '', _('identify the specified revision')),
3654 ('n', 'num', None, _('show local revision number')),
3654 ('n', 'num', None, _('show local revision number')),
3655 ('i', 'id', None, _('show global revision id')),
3655 ('i', 'id', None, _('show global revision id')),
3656 ('b', 'branch', None, _('show branch')),
3656 ('b', 'branch', None, _('show branch')),
3657 ('t', 'tags', None, _('show tags'))],
3657 ('t', 'tags', None, _('show tags'))],
3658 _('[-nibt] [-r REV] [SOURCE]')),
3658 _('[-nibt] [-r REV] [SOURCE]')),
3659 "import|patch":
3659 "import|patch":
3660 (import_,
3660 (import_,
3661 [('p', 'strip', 1,
3661 [('p', 'strip', 1,
3662 _('directory strip option for patch. This has the same '
3662 _('directory strip option for patch. This has the same '
3663 'meaning as the corresponding patch option')),
3663 'meaning as the corresponding patch option')),
3664 ('b', 'base', '', _('base path')),
3664 ('b', 'base', '', _('base path')),
3665 ('f', 'force', None,
3665 ('f', 'force', None,
3666 _('skip check for outstanding uncommitted changes')),
3666 _('skip check for outstanding uncommitted changes')),
3667 ('', 'no-commit', None,
3667 ('', 'no-commit', None,
3668 _("don't commit, just update the working directory")),
3668 _("don't commit, just update the working directory")),
3669 ('', 'exact', None,
3669 ('', 'exact', None,
3670 _('apply patch to the nodes from which it was generated')),
3670 _('apply patch to the nodes from which it was generated')),
3671 ('', 'import-branch', None,
3671 ('', 'import-branch', None,
3672 _('use any branch information in patch (implied by --exact)'))] +
3672 _('use any branch information in patch (implied by --exact)'))] +
3673 commitopts + commitopts2 + similarityopts,
3673 commitopts + commitopts2 + similarityopts,
3674 _('[OPTION]... PATCH...')),
3674 _('[OPTION]... PATCH...')),
3675 "incoming|in":
3675 "incoming|in":
3676 (incoming,
3676 (incoming,
3677 [('f', 'force', None,
3677 [('f', 'force', None,
3678 _('run even if remote repository is unrelated')),
3678 _('run even if remote repository is unrelated')),
3679 ('n', 'newest-first', None, _('show newest record first')),
3679 ('n', 'newest-first', None, _('show newest record first')),
3680 ('', 'bundle', '', _('file to store the bundles into')),
3680 ('', 'bundle', '', _('file to store the bundles into')),
3681 ('r', 'rev', [],
3681 ('r', 'rev', [],
3682 _('a remote changeset intended to be added')),
3682 _('a remote changeset intended to be added')),
3683 ('b', 'branch', [],
3683 ('b', 'branch', [],
3684 _('a specific branch you would like to pull')),
3684 _('a specific branch you would like to pull')),
3685 ] + logopts + remoteopts,
3685 ] + logopts + remoteopts,
3686 _('[-p] [-n] [-M] [-f] [-r REV]...'
3686 _('[-p] [-n] [-M] [-f] [-r REV]...'
3687 ' [--bundle FILENAME] [SOURCE]')),
3687 ' [--bundle FILENAME] [SOURCE]')),
3688 "^init":
3688 "^init":
3689 (init,
3689 (init,
3690 remoteopts,
3690 remoteopts,
3691 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3691 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3692 "locate":
3692 "locate":
3693 (locate,
3693 (locate,
3694 [('r', 'rev', '', _('search the repository as it is in REV')),
3694 [('r', 'rev', '', _('search the repository as it is in REV')),
3695 ('0', 'print0', None,
3695 ('0', 'print0', None,
3696 _('end filenames with NUL, for use with xargs')),
3696 _('end filenames with NUL, for use with xargs')),
3697 ('f', 'fullpath', None,
3697 ('f', 'fullpath', None,
3698 _('print complete paths from the filesystem root')),
3698 _('print complete paths from the filesystem root')),
3699 ] + walkopts,
3699 ] + walkopts,
3700 _('[OPTION]... [PATTERN]...')),
3700 _('[OPTION]... [PATTERN]...')),
3701 "^log|history":
3701 "^log|history":
3702 (log,
3702 (log,
3703 [('f', 'follow', None,
3703 [('f', 'follow', None,
3704 _('follow changeset history,'
3704 _('follow changeset history,'
3705 ' or file history across copies and renames')),
3705 ' or file history across copies and renames')),
3706 ('', 'follow-first', None,
3706 ('', 'follow-first', None,
3707 _('only follow the first parent of merge changesets')),
3707 _('only follow the first parent of merge changesets')),
3708 ('d', 'date', '', _('show revisions matching date spec')),
3708 ('d', 'date', '', _('show revisions matching date spec')),
3709 ('C', 'copies', None, _('show copied files')),
3709 ('C', 'copies', None, _('show copied files')),
3710 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3710 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3711 ('r', 'rev', [], _('show the specified revision or range')),
3711 ('r', 'rev', [], _('show the specified revision or range')),
3712 ('', 'removed', None, _('include revisions where files were removed')),
3712 ('', 'removed', None, _('include revisions where files were removed')),
3713 ('m', 'only-merges', None, _('show only merges')),
3713 ('m', 'only-merges', None, _('show only merges')),
3714 ('u', 'user', [], _('revisions committed by user')),
3714 ('u', 'user', [], _('revisions committed by user')),
3715 ('b', 'only-branch', [],
3715 ('b', 'only-branch', [],
3716 _('show only changesets within the given named branch')),
3716 _('show only changesets within the given named branch')),
3717 ('P', 'prune', [],
3717 ('P', 'prune', [],
3718 _('do not display revision or any of its ancestors')),
3718 _('do not display revision or any of its ancestors')),
3719 ] + logopts + walkopts,
3719 ] + logopts + walkopts,
3720 _('[OPTION]... [FILE]')),
3720 _('[OPTION]... [FILE]')),
3721 "manifest":
3721 "manifest":
3722 (manifest,
3722 (manifest,
3723 [('r', 'rev', '', _('revision to display'))],
3723 [('r', 'rev', '', _('revision to display'))],
3724 _('[-r REV]')),
3724 _('[-r REV]')),
3725 "^merge":
3725 "^merge":
3726 (merge,
3726 (merge,
3727 [('f', 'force', None, _('force a merge with outstanding changes')),
3727 [('f', 'force', None, _('force a merge with outstanding changes')),
3728 ('r', 'rev', '', _('revision to merge')),
3728 ('r', 'rev', '', _('revision to merge')),
3729 ('P', 'preview', None,
3729 ('P', 'preview', None,
3730 _('review revisions to merge (no merge is performed)'))],
3730 _('review revisions to merge (no merge is performed)'))],
3731 _('[-P] [-f] [[-r] REV]')),
3731 _('[-P] [-f] [[-r] REV]')),
3732 "outgoing|out":
3732 "outgoing|out":
3733 (outgoing,
3733 (outgoing,
3734 [('f', 'force', None,
3734 [('f', 'force', None,
3735 _('run even when the destination is unrelated')),
3735 _('run even when the destination is unrelated')),
3736 ('r', 'rev', [],
3736 ('r', 'rev', [],
3737 _('a changeset intended to be included in the destination')),
3737 _('a changeset intended to be included in the destination')),
3738 ('n', 'newest-first', None, _('show newest record first')),
3738 ('n', 'newest-first', None, _('show newest record first')),
3739 ('b', 'branch', [],
3739 ('b', 'branch', [],
3740 _('a specific branch you would like to push')),
3740 _('a specific branch you would like to push')),
3741 ] + logopts + remoteopts,
3741 ] + logopts + remoteopts,
3742 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3742 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3743 "parents":
3743 "parents":
3744 (parents,
3744 (parents,
3745 [('r', 'rev', '', _('show parents of the specified revision')),
3745 [('r', 'rev', '', _('show parents of the specified revision')),
3746 ] + templateopts,
3746 ] + templateopts,
3747 _('[-r REV] [FILE]')),
3747 _('[-r REV] [FILE]')),
3748 "paths": (paths, [], _('[NAME]')),
3748 "paths": (paths, [], _('[NAME]')),
3749 "^pull":
3749 "^pull":
3750 (pull,
3750 (pull,
3751 [('u', 'update', None,
3751 [('u', 'update', None,
3752 _('update to new branch head if changesets were pulled')),
3752 _('update to new branch head if changesets were pulled')),
3753 ('f', 'force', None,
3753 ('f', 'force', None,
3754 _('run even when remote repository is unrelated')),
3754 _('run even when remote repository is unrelated')),
3755 ('r', 'rev', [],
3755 ('r', 'rev', [],
3756 _('a remote changeset intended to be added')),
3756 _('a remote changeset intended to be added')),
3757 ('b', 'branch', [],
3757 ('b', 'branch', [],
3758 _('a specific branch you would like to pull')),
3758 _('a specific branch you would like to pull')),
3759 ] + remoteopts,
3759 ] + remoteopts,
3760 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3760 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3761 "^push":
3761 "^push":
3762 (push,
3762 (push,
3763 [('f', 'force', None, _('force push')),
3763 [('f', 'force', None, _('force push')),
3764 ('r', 'rev', [],
3764 ('r', 'rev', [],
3765 _('a changeset intended to be included in the destination')),
3765 _('a changeset intended to be included in the destination')),
3766 ('b', 'branch', [],
3766 ('b', 'branch', [],
3767 _('a specific branch you would like to push')),
3767 _('a specific branch you would like to push')),
3768 ] + remoteopts,
3768 ] + remoteopts,
3769 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3769 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3770 "recover": (recover, []),
3770 "recover": (recover, []),
3771 "^remove|rm":
3771 "^remove|rm":
3772 (remove,
3772 (remove,
3773 [('A', 'after', None, _('record delete for missing files')),
3773 [('A', 'after', None, _('record delete for missing files')),
3774 ('f', 'force', None,
3774 ('f', 'force', None,
3775 _('remove (and delete) file even if added or modified')),
3775 _('remove (and delete) file even if added or modified')),
3776 ] + walkopts,
3776 ] + walkopts,
3777 _('[OPTION]... FILE...')),
3777 _('[OPTION]... FILE...')),
3778 "rename|mv":
3778 "rename|mv":
3779 (rename,
3779 (rename,
3780 [('A', 'after', None, _('record a rename that has already occurred')),
3780 [('A', 'after', None, _('record a rename that has already occurred')),
3781 ('f', 'force', None,
3781 ('f', 'force', None,
3782 _('forcibly copy over an existing managed file')),
3782 _('forcibly copy over an existing managed file')),
3783 ] + walkopts + dryrunopts,
3783 ] + walkopts + dryrunopts,
3784 _('[OPTION]... SOURCE... DEST')),
3784 _('[OPTION]... SOURCE... DEST')),
3785 "resolve":
3785 "resolve":
3786 (resolve,
3786 (resolve,
3787 [('a', 'all', None, _('select all unresolved files')),
3787 [('a', 'all', None, _('select all unresolved files')),
3788 ('l', 'list', None, _('list state of files needing merge')),
3788 ('l', 'list', None, _('list state of files needing merge')),
3789 ('m', 'mark', None, _('mark files as resolved')),
3789 ('m', 'mark', None, _('mark files as resolved')),
3790 ('u', 'unmark', None, _('unmark files as resolved')),
3790 ('u', 'unmark', None, _('unmark files as resolved')),
3791 ('n', 'no-status', None, _('hide status prefix'))]
3791 ('n', 'no-status', None, _('hide status prefix'))]
3792 + walkopts,
3792 + walkopts,
3793 _('[OPTION]... [FILE]...')),
3793 _('[OPTION]... [FILE]...')),
3794 "revert":
3794 "revert":
3795 (revert,
3795 (revert,
3796 [('a', 'all', None, _('revert all changes when no arguments given')),
3796 [('a', 'all', None, _('revert all changes when no arguments given')),
3797 ('d', 'date', '', _('tipmost revision matching date')),
3797 ('d', 'date', '', _('tipmost revision matching date')),
3798 ('r', 'rev', '', _('revert to the specified revision')),
3798 ('r', 'rev', '', _('revert to the specified revision')),
3799 ('', 'no-backup', None, _('do not save backup copies of files')),
3799 ('', 'no-backup', None, _('do not save backup copies of files')),
3800 ] + walkopts + dryrunopts,
3800 ] + walkopts + dryrunopts,
3801 _('[OPTION]... [-r REV] [NAME]...')),
3801 _('[OPTION]... [-r REV] [NAME]...')),
3802 "rollback": (rollback, []),
3802 "rollback": (rollback, []),
3803 "root": (root, []),
3803 "root": (root, []),
3804 "^serve":
3804 "^serve":
3805 (serve,
3805 (serve,
3806 [('A', 'accesslog', '', _('name of access log file to write to')),
3806 [('A', 'accesslog', '', _('name of access log file to write to')),
3807 ('d', 'daemon', None, _('run server in background')),
3807 ('d', 'daemon', None, _('run server in background')),
3808 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3808 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3809 ('E', 'errorlog', '', _('name of error log file to write to')),
3809 ('E', 'errorlog', '', _('name of error log file to write to')),
3810 # use string type, then we can check if something was passed
3810 # use string type, then we can check if something was passed
3811 ('p', 'port', '', _('port to listen on (default: 8000)')),
3811 ('p', 'port', '', _('port to listen on (default: 8000)')),
3812 ('a', 'address', '',
3812 ('a', 'address', '',
3813 _('address to listen on (default: all interfaces)')),
3813 _('address to listen on (default: all interfaces)')),
3814 ('', 'prefix', '',
3814 ('', 'prefix', '',
3815 _('prefix path to serve from (default: server root)')),
3815 _('prefix path to serve from (default: server root)')),
3816 ('n', 'name', '',
3816 ('n', 'name', '',
3817 _('name to show in web pages (default: working directory)')),
3817 _('name to show in web pages (default: working directory)')),
3818 ('', 'webdir-conf', '', _('name of the webdir config file'
3818 ('', 'webdir-conf', '', _('name of the webdir config file'
3819 ' (serve more than one repository)')),
3819 ' (serve more than one repository)')),
3820 ('', 'pid-file', '', _('name of file to write process ID to')),
3820 ('', 'pid-file', '', _('name of file to write process ID to')),
3821 ('', 'stdio', None, _('for remote clients')),
3821 ('', 'stdio', None, _('for remote clients')),
3822 ('t', 'templates', '', _('web templates to use')),
3822 ('t', 'templates', '', _('web templates to use')),
3823 ('', 'style', '', _('template style to use')),
3823 ('', 'style', '', _('template style to use')),
3824 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3824 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3825 ('', 'certificate', '', _('SSL certificate file'))],
3825 ('', 'certificate', '', _('SSL certificate file'))],
3826 _('[OPTION]...')),
3826 _('[OPTION]...')),
3827 "showconfig|debugconfig":
3827 "showconfig|debugconfig":
3828 (showconfig,
3828 (showconfig,
3829 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3829 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3830 _('[-u] [NAME]...')),
3830 _('[-u] [NAME]...')),
3831 "^summary|sum":
3831 "^summary|sum":
3832 (summary,
3832 (summary,
3833 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3833 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3834 "^status|st":
3834 "^status|st":
3835 (status,
3835 (status,
3836 [('A', 'all', None, _('show status of all files')),
3836 [('A', 'all', None, _('show status of all files')),
3837 ('m', 'modified', None, _('show only modified files')),
3837 ('m', 'modified', None, _('show only modified files')),
3838 ('a', 'added', None, _('show only added files')),
3838 ('a', 'added', None, _('show only added files')),
3839 ('r', 'removed', None, _('show only removed files')),
3839 ('r', 'removed', None, _('show only removed files')),
3840 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3840 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3841 ('c', 'clean', None, _('show only files without changes')),
3841 ('c', 'clean', None, _('show only files without changes')),
3842 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3842 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3843 ('i', 'ignored', None, _('show only ignored files')),
3843 ('i', 'ignored', None, _('show only ignored files')),
3844 ('n', 'no-status', None, _('hide status prefix')),
3844 ('n', 'no-status', None, _('hide status prefix')),
3845 ('C', 'copies', None, _('show source of copied files')),
3845 ('C', 'copies', None, _('show source of copied files')),
3846 ('0', 'print0', None,
3846 ('0', 'print0', None,
3847 _('end filenames with NUL, for use with xargs')),
3847 _('end filenames with NUL, for use with xargs')),
3848 ('', 'rev', [], _('show difference from revision')),
3848 ('', 'rev', [], _('show difference from revision')),
3849 ('', 'change', '', _('list the changed files of a revision')),
3849 ('', 'change', '', _('list the changed files of a revision')),
3850 ] + walkopts,
3850 ] + walkopts,
3851 _('[OPTION]... [FILE]...')),
3851 _('[OPTION]... [FILE]...')),
3852 "tag":
3852 "tag":
3853 (tag,
3853 (tag,
3854 [('f', 'force', None, _('replace existing tag')),
3854 [('f', 'force', None, _('replace existing tag')),
3855 ('l', 'local', None, _('make the tag local')),
3855 ('l', 'local', None, _('make the tag local')),
3856 ('r', 'rev', '', _('revision to tag')),
3856 ('r', 'rev', '', _('revision to tag')),
3857 ('', 'remove', None, _('remove a tag')),
3857 ('', 'remove', None, _('remove a tag')),
3858 # -l/--local is already there, commitopts cannot be used
3858 # -l/--local is already there, commitopts cannot be used
3859 ('m', 'message', '', _('use <text> as commit message')),
3859 ('m', 'message', '', _('use <text> as commit message')),
3860 ] + commitopts2,
3860 ] + commitopts2,
3861 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3861 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3862 "tags": (tags, [], ''),
3862 "tags": (tags, [], ''),
3863 "tip":
3863 "tip":
3864 (tip,
3864 (tip,
3865 [('p', 'patch', None, _('show patch')),
3865 [('p', 'patch', None, _('show patch')),
3866 ('g', 'git', None, _('use git extended diff format')),
3866 ('g', 'git', None, _('use git extended diff format')),
3867 ] + templateopts,
3867 ] + templateopts,
3868 _('[-p] [-g]')),
3868 _('[-p] [-g]')),
3869 "unbundle":
3869 "unbundle":
3870 (unbundle,
3870 (unbundle,
3871 [('u', 'update', None,
3871 [('u', 'update', None,
3872 _('update to new branch head if changesets were unbundled'))],
3872 _('update to new branch head if changesets were unbundled'))],
3873 _('[-u] FILE...')),
3873 _('[-u] FILE...')),
3874 "^update|up|checkout|co":
3874 "^update|up|checkout|co":
3875 (update,
3875 (update,
3876 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
3876 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
3877 ('c', 'check', None, _('check for uncommitted changes')),
3877 ('c', 'check', None, _('check for uncommitted changes')),
3878 ('d', 'date', '', _('tipmost revision matching date')),
3878 ('d', 'date', '', _('tipmost revision matching date')),
3879 ('r', 'rev', '', _('revision'))],
3879 ('r', 'rev', '', _('revision'))],
3880 _('[-c] [-C] [-d DATE] [[-r] REV]')),
3880 _('[-c] [-C] [-d DATE] [[-r] REV]')),
3881 "verify": (verify, []),
3881 "verify": (verify, []),
3882 "version": (version_, []),
3882 "version": (version_, []),
3883 }
3883 }
3884
3884
3885 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3885 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3886 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3886 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3887 optionalrepo = ("identify paths serve showconfig debugancestor")
3887 optionalrepo = ("identify paths serve showconfig debugancestor")
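For reference, a hedged sketch of the (function, options, synopsis) entry layout used throughout the table above, in the form an extension's cmdtable would take; the "hello" command and its --shout option are invented for illustration only.

from mercurial.i18n import _

def hello(ui, repo, **opts):
    """print a short greeting for the repository root"""
    greeting = "hello from %s\n" % repo.root
    if opts.get('shout'):
        greeting = greeting.upper()
    ui.write(greeting)

cmdtable = {
    "hello":
        (hello,
         [('s', 'shout', None, _('print the greeting in upper case'))],
         _('hg hello [-s]')),
}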
@@ -1,517 +1,517 @@
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time
10 import util, commands, hg, fancyopts, extensions, hook, error
10 import util, commands, hg, fancyopts, extensions, hook, error
11 import cmdutil, encoding
11 import cmdutil, encoding
12 import ui as _ui
12 import ui as uimod
13
13
14 def run():
14 def run():
15 "run the command in sys.argv"
15 "run the command in sys.argv"
16 sys.exit(dispatch(sys.argv[1:]))
16 sys.exit(dispatch(sys.argv[1:]))
17
17
18 def dispatch(args):
18 def dispatch(args):
19 "run the command specified in args"
19 "run the command specified in args"
20 try:
20 try:
21 u = _ui.ui()
21 u = uimod.ui()
22 if '--traceback' in args:
22 if '--traceback' in args:
23 u.setconfig('ui', 'traceback', 'on')
23 u.setconfig('ui', 'traceback', 'on')
24 except util.Abort, inst:
24 except util.Abort, inst:
25 sys.stderr.write(_("abort: %s\n") % inst)
25 sys.stderr.write(_("abort: %s\n") % inst)
26 return -1
26 return -1
27 except error.ConfigError, inst:
27 except error.ConfigError, inst:
28 sys.stderr.write(_("hg: %s\n") % inst)
28 sys.stderr.write(_("hg: %s\n") % inst)
29 return -1
29 return -1
30 return _runcatch(u, args)
30 return _runcatch(u, args)
31
31
32 def _runcatch(ui, args):
32 def _runcatch(ui, args):
33 def catchterm(*args):
33 def catchterm(*args):
34 raise error.SignalInterrupt
34 raise error.SignalInterrupt
35
35
36 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
37 num = getattr(signal, name, None)
37 num = getattr(signal, name, None)
38 if num:
38 if num:
39 signal.signal(num, catchterm)
39 signal.signal(num, catchterm)
40
40
41 try:
41 try:
42 try:
42 try:
43 # enter the debugger before command execution
43 # enter the debugger before command execution
44 if '--debugger' in args:
44 if '--debugger' in args:
45 pdb.set_trace()
45 pdb.set_trace()
46 try:
46 try:
47 return _dispatch(ui, args)
47 return _dispatch(ui, args)
48 finally:
48 finally:
49 ui.flush()
49 ui.flush()
50 except:
50 except:
51 # enter the debugger when we hit an exception
51 # enter the debugger when we hit an exception
52 if '--debugger' in args:
52 if '--debugger' in args:
53 pdb.post_mortem(sys.exc_info()[2])
53 pdb.post_mortem(sys.exc_info()[2])
54 ui.traceback()
54 ui.traceback()
55 raise
55 raise
56
56
57 # Global exception handling, alphabetically
57 # Global exception handling, alphabetically
58 # Mercurial-specific first, followed by built-in and library exceptions
58 # Mercurial-specific first, followed by built-in and library exceptions
59 except error.AmbiguousCommand, inst:
59 except error.AmbiguousCommand, inst:
60 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
60 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
61 (inst.args[0], " ".join(inst.args[1])))
61 (inst.args[0], " ".join(inst.args[1])))
62 except error.ConfigError, inst:
62 except error.ConfigError, inst:
63 ui.warn(_("hg: %s\n") % inst.args[0])
63 ui.warn(_("hg: %s\n") % inst.args[0])
64 except error.LockHeld, inst:
64 except error.LockHeld, inst:
65 if inst.errno == errno.ETIMEDOUT:
65 if inst.errno == errno.ETIMEDOUT:
66 reason = _('timed out waiting for lock held by %s') % inst.locker
66 reason = _('timed out waiting for lock held by %s') % inst.locker
67 else:
67 else:
68 reason = _('lock held by %s') % inst.locker
68 reason = _('lock held by %s') % inst.locker
69 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
69 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
70 except error.LockUnavailable, inst:
70 except error.LockUnavailable, inst:
71 ui.warn(_("abort: could not lock %s: %s\n") %
71 ui.warn(_("abort: could not lock %s: %s\n") %
72 (inst.desc or inst.filename, inst.strerror))
72 (inst.desc or inst.filename, inst.strerror))
73 except error.ParseError, inst:
73 except error.ParseError, inst:
74 if inst.args[0]:
74 if inst.args[0]:
75 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
75 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
76 commands.help_(ui, inst.args[0])
76 commands.help_(ui, inst.args[0])
77 else:
77 else:
78 ui.warn(_("hg: %s\n") % inst.args[1])
78 ui.warn(_("hg: %s\n") % inst.args[1])
79 commands.help_(ui, 'shortlist')
79 commands.help_(ui, 'shortlist')
80 except error.RepoError, inst:
80 except error.RepoError, inst:
81 ui.warn(_("abort: %s!\n") % inst)
81 ui.warn(_("abort: %s!\n") % inst)
82 except error.ResponseError, inst:
82 except error.ResponseError, inst:
83 ui.warn(_("abort: %s") % inst.args[0])
83 ui.warn(_("abort: %s") % inst.args[0])
84 if not isinstance(inst.args[1], basestring):
84 if not isinstance(inst.args[1], basestring):
85 ui.warn(" %r\n" % (inst.args[1],))
85 ui.warn(" %r\n" % (inst.args[1],))
86 elif not inst.args[1]:
86 elif not inst.args[1]:
87 ui.warn(_(" empty string\n"))
87 ui.warn(_(" empty string\n"))
88 else:
88 else:
89 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
89 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
90 except error.RevlogError, inst:
90 except error.RevlogError, inst:
91 ui.warn(_("abort: %s!\n") % inst)
91 ui.warn(_("abort: %s!\n") % inst)
92 except error.SignalInterrupt:
92 except error.SignalInterrupt:
93 ui.warn(_("killed!\n"))
93 ui.warn(_("killed!\n"))
94 except error.UnknownCommand, inst:
94 except error.UnknownCommand, inst:
95 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
95 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
96 try:
96 try:
97 # check if the command is in a disabled extension
97 # check if the command is in a disabled extension
98 # (but don't check for extensions themselves)
98 # (but don't check for extensions themselves)
99 commands.help_(ui, inst.args[0], unknowncmd=True)
99 commands.help_(ui, inst.args[0], unknowncmd=True)
100 except error.UnknownCommand:
100 except error.UnknownCommand:
101 commands.help_(ui, 'shortlist')
101 commands.help_(ui, 'shortlist')
102 except util.Abort, inst:
102 except util.Abort, inst:
103 ui.warn(_("abort: %s\n") % inst)
103 ui.warn(_("abort: %s\n") % inst)
104 except ImportError, inst:
104 except ImportError, inst:
105 m = str(inst).split()[-1]
105 m = str(inst).split()[-1]
106 ui.warn(_("abort: could not import module %s!\n") % m)
106 ui.warn(_("abort: could not import module %s!\n") % m)
107 if m in "mpatch bdiff".split():
107 if m in "mpatch bdiff".split():
108 ui.warn(_("(did you forget to compile extensions?)\n"))
108 ui.warn(_("(did you forget to compile extensions?)\n"))
109 elif m in "zlib".split():
109 elif m in "zlib".split():
110 ui.warn(_("(is your Python install correct?)\n"))
110 ui.warn(_("(is your Python install correct?)\n"))
111 except IOError, inst:
111 except IOError, inst:
112 if hasattr(inst, "code"):
112 if hasattr(inst, "code"):
113 ui.warn(_("abort: %s\n") % inst)
113 ui.warn(_("abort: %s\n") % inst)
114 elif hasattr(inst, "reason"):
114 elif hasattr(inst, "reason"):
115 try: # usually it is in the form (errno, strerror)
115 try: # usually it is in the form (errno, strerror)
116 reason = inst.reason.args[1]
116 reason = inst.reason.args[1]
117 except: # it might be anything, for example a string
117 except: # it might be anything, for example a string
118 reason = inst.reason
118 reason = inst.reason
119 ui.warn(_("abort: error: %s\n") % reason)
119 ui.warn(_("abort: error: %s\n") % reason)
120 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
120 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
121 if ui.debugflag:
121 if ui.debugflag:
122 ui.warn(_("broken pipe\n"))
122 ui.warn(_("broken pipe\n"))
123 elif getattr(inst, "strerror", None):
123 elif getattr(inst, "strerror", None):
124 if getattr(inst, "filename", None):
124 if getattr(inst, "filename", None):
125 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
125 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
126 else:
126 else:
127 ui.warn(_("abort: %s\n") % inst.strerror)
127 ui.warn(_("abort: %s\n") % inst.strerror)
128 else:
128 else:
129 raise
129 raise
130 except OSError, inst:
130 except OSError, inst:
131 if getattr(inst, "filename", None):
131 if getattr(inst, "filename", None):
132 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
132 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
133 else:
133 else:
134 ui.warn(_("abort: %s\n") % inst.strerror)
134 ui.warn(_("abort: %s\n") % inst.strerror)
135 except KeyboardInterrupt:
135 except KeyboardInterrupt:
136 try:
136 try:
137 ui.warn(_("interrupted!\n"))
137 ui.warn(_("interrupted!\n"))
138 except IOError, inst:
138 except IOError, inst:
139 if inst.errno == errno.EPIPE:
139 if inst.errno == errno.EPIPE:
140 if ui.debugflag:
140 if ui.debugflag:
141 ui.warn(_("\nbroken pipe\n"))
141 ui.warn(_("\nbroken pipe\n"))
142 else:
142 else:
143 raise
143 raise
144 except MemoryError:
144 except MemoryError:
145 ui.warn(_("abort: out of memory\n"))
145 ui.warn(_("abort: out of memory\n"))
146 except SystemExit, inst:
146 except SystemExit, inst:
147 # Commands shouldn't sys.exit directly, but give a return code.
147 # Commands shouldn't sys.exit directly, but give a return code.
148 # Just in case, catch this and pass the exit code to the caller.
148 # Just in case, catch this and pass the exit code to the caller.
149 return inst.code
149 return inst.code
150 except socket.error, inst:
150 except socket.error, inst:
151 ui.warn(_("abort: %s\n") % inst.args[-1])
151 ui.warn(_("abort: %s\n") % inst.args[-1])
152 except:
152 except:
153 ui.warn(_("** unknown exception encountered, details follow\n"))
153 ui.warn(_("** unknown exception encountered, details follow\n"))
154 ui.warn(_("** report bug details to "
154 ui.warn(_("** report bug details to "
155 "http://mercurial.selenic.com/bts/\n"))
155 "http://mercurial.selenic.com/bts/\n"))
156 ui.warn(_("** or mercurial@selenic.com\n"))
156 ui.warn(_("** or mercurial@selenic.com\n"))
157 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
157 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
158 % util.version())
158 % util.version())
159 ui.warn(_("** Extensions loaded: %s\n")
159 ui.warn(_("** Extensions loaded: %s\n")
160 % ", ".join([x[0] for x in extensions.extensions()]))
160 % ", ".join([x[0] for x in extensions.extensions()]))
161 raise
161 raise
162
162
163 return -1
163 return -1
164
164
165 def aliasargs(fn):
165 def aliasargs(fn):
166 if hasattr(fn, 'args'):
166 if hasattr(fn, 'args'):
167 return fn.args
167 return fn.args
168 return []
168 return []
169
169
170 class cmdalias(object):
170 class cmdalias(object):
171 def __init__(self, name, definition, cmdtable):
171 def __init__(self, name, definition, cmdtable):
172 self.name = name
172 self.name = name
173 self.definition = definition
173 self.definition = definition
174 self.args = []
174 self.args = []
175 self.opts = []
175 self.opts = []
176 self.help = ''
176 self.help = ''
177 self.norepo = True
177 self.norepo = True
178 self.badalias = False
178 self.badalias = False
179
179
180 try:
180 try:
181 cmdutil.findcmd(self.name, cmdtable, True)
181 cmdutil.findcmd(self.name, cmdtable, True)
182 self.shadows = True
182 self.shadows = True
183 except error.UnknownCommand:
183 except error.UnknownCommand:
184 self.shadows = False
184 self.shadows = False
185
185
186 if not self.definition:
186 if not self.definition:
187 def fn(ui, *args):
187 def fn(ui, *args):
188 ui.warn(_("no definition for alias '%s'\n") % self.name)
188 ui.warn(_("no definition for alias '%s'\n") % self.name)
189 return 1
189 return 1
190 self.fn = fn
190 self.fn = fn
191 self.badalias = True
191 self.badalias = True
192
192
193 return
193 return
194
194
195 args = shlex.split(self.definition)
195 args = shlex.split(self.definition)
196 cmd = args.pop(0)
196 cmd = args.pop(0)
197
197
198 try:
198 try:
199 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
199 tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
200 if len(tableentry) > 2:
200 if len(tableentry) > 2:
201 self.fn, self.opts, self.help = tableentry
201 self.fn, self.opts, self.help = tableentry
202 else:
202 else:
203 self.fn, self.opts = tableentry
203 self.fn, self.opts = tableentry
204
204
205 self.args = aliasargs(self.fn) + args
205 self.args = aliasargs(self.fn) + args
206 if cmd not in commands.norepo.split(' '):
206 if cmd not in commands.norepo.split(' '):
207 self.norepo = False
207 self.norepo = False
208 if self.help.startswith("hg " + cmd):
208 if self.help.startswith("hg " + cmd):
209 # drop prefix in old-style help lines so hg shows the alias
209 # drop prefix in old-style help lines so hg shows the alias
210 self.help = self.help[4 + len(cmd):]
210 self.help = self.help[4 + len(cmd):]
211 self.__doc__ = self.fn.__doc__
211 self.__doc__ = self.fn.__doc__
212
212
213 except error.UnknownCommand:
213 except error.UnknownCommand:
214 def fn(ui, *args):
214 def fn(ui, *args):
215 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
215 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
216 % (self.name, cmd))
216 % (self.name, cmd))
217 try:
217 try:
218 # check if the command is in a disabled extension
218 # check if the command is in a disabled extension
219 commands.help_(ui, cmd, unknowncmd=True)
219 commands.help_(ui, cmd, unknowncmd=True)
220 except error.UnknownCommand:
220 except error.UnknownCommand:
221 pass
221 pass
222 return 1
222 return 1
223 self.fn = fn
223 self.fn = fn
224 self.badalias = True
224 self.badalias = True
225 except error.AmbiguousCommand:
225 except error.AmbiguousCommand:
226 def fn(ui, *args):
226 def fn(ui, *args):
227 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
227 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
228 % (self.name, cmd))
228 % (self.name, cmd))
229 return 1
229 return 1
230 self.fn = fn
230 self.fn = fn
231 self.badalias = True
231 self.badalias = True
232
232
233 def __call__(self, ui, *args, **opts):
233 def __call__(self, ui, *args, **opts):
234 if self.shadows:
234 if self.shadows:
235 ui.debug("alias '%s' shadows command\n" % self.name)
235 ui.debug("alias '%s' shadows command\n" % self.name)
236
236
237 return self.fn(ui, *args, **opts)
237 return self.fn(ui, *args, **opts)
238
238
239 def addaliases(ui, cmdtable):
239 def addaliases(ui, cmdtable):
240 # aliases are processed after extensions have been loaded, so they
240 # aliases are processed after extensions have been loaded, so they
241 # may use extension commands. Aliases can also use other alias definitions,
241 # may use extension commands. Aliases can also use other alias definitions,
242 # but only if they have been defined prior to the current definition.
242 # but only if they have been defined prior to the current definition.
243 for alias, definition in ui.configitems('alias'):
243 for alias, definition in ui.configitems('alias'):
244 aliasdef = cmdalias(alias, definition, cmdtable)
244 aliasdef = cmdalias(alias, definition, cmdtable)
245 cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help)
245 cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help)
246 if aliasdef.norepo:
246 if aliasdef.norepo:
247 commands.norepo += ' %s' % alias
247 commands.norepo += ' %s' % alias
248
248
249 def _parse(ui, args):
249 def _parse(ui, args):
250 options = {}
250 options = {}
251 cmdoptions = {}
251 cmdoptions = {}
252
252
253 try:
253 try:
254 args = fancyopts.fancyopts(args, commands.globalopts, options)
254 args = fancyopts.fancyopts(args, commands.globalopts, options)
255 except fancyopts.getopt.GetoptError, inst:
255 except fancyopts.getopt.GetoptError, inst:
256 raise error.ParseError(None, inst)
256 raise error.ParseError(None, inst)
257
257
258 if args:
258 if args:
259 cmd, args = args[0], args[1:]
259 cmd, args = args[0], args[1:]
260 aliases, entry = cmdutil.findcmd(cmd, commands.table,
260 aliases, entry = cmdutil.findcmd(cmd, commands.table,
261 ui.config("ui", "strict"))
261 ui.config("ui", "strict"))
262 cmd = aliases[0]
262 cmd = aliases[0]
263 args = aliasargs(entry[0]) + args
263 args = aliasargs(entry[0]) + args
264 defaults = ui.config("defaults", cmd)
264 defaults = ui.config("defaults", cmd)
265 if defaults:
265 if defaults:
266 args = map(util.expandpath, shlex.split(defaults)) + args
266 args = map(util.expandpath, shlex.split(defaults)) + args
267 c = list(entry[1])
267 c = list(entry[1])
268 else:
268 else:
269 cmd = None
269 cmd = None
270 c = []
270 c = []
271
271
272 # combine global options into local
272 # combine global options into local
273 for o in commands.globalopts:
273 for o in commands.globalopts:
274 c.append((o[0], o[1], options[o[1]], o[3]))
274 c.append((o[0], o[1], options[o[1]], o[3]))
275
275
276 try:
276 try:
277 args = fancyopts.fancyopts(args, c, cmdoptions, True)
277 args = fancyopts.fancyopts(args, c, cmdoptions, True)
278 except fancyopts.getopt.GetoptError, inst:
278 except fancyopts.getopt.GetoptError, inst:
279 raise error.ParseError(cmd, inst)
279 raise error.ParseError(cmd, inst)
280
280
281 # separate global options back out
281 # separate global options back out
282 for o in commands.globalopts:
282 for o in commands.globalopts:
283 n = o[1]
283 n = o[1]
284 options[n] = cmdoptions[n]
284 options[n] = cmdoptions[n]
285 del cmdoptions[n]
285 del cmdoptions[n]
286
286
287 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
287 return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
288
288
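# _parse above prepends per-command defaults from the [defaults] section
# before parsing the command options; each value is shlex-split and run
# through util.expandpath.  A small illustration with a hypothetical hgrc:
#
#   [defaults]
#   log = -v --limit 5
#
# so "hg log foo" is parsed as if "-v --limit 5" had been typed first:
import shlex

args = ['foo']
defaults = '-v --limit 5'
args = shlex.split(defaults) + args
# args == ['-v', '--limit', '5', 'foo']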
289 def _parseconfig(ui, config):
289 def _parseconfig(ui, config):
290 """parse the --config options from the command line"""
290 """parse the --config options from the command line"""
291 for cfg in config:
291 for cfg in config:
292 try:
292 try:
293 name, value = cfg.split('=', 1)
293 name, value = cfg.split('=', 1)
294 section, name = name.split('.', 1)
294 section, name = name.split('.', 1)
295 if not section or not name:
295 if not section or not name:
296 raise IndexError
296 raise IndexError
297 ui.setconfig(section, name, value)
297 ui.setconfig(section, name, value)
298 except (IndexError, ValueError):
298 except (IndexError, ValueError):
299 raise util.Abort(_('malformed --config option: %r '
299 raise util.Abort(_('malformed --config option: %r '
300 '(use --config section.name=value)') % cfg)
300 '(use --config section.name=value)') % cfg)
301
301
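# A standalone sketch of the split-and-validate logic used by _parseconfig,
# with ui.setconfig replaced by a plain dict so the snippet runs on its own:
def parseconfigitems(overrides):
    result = {}
    for cfg in overrides:
        try:
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            result[(section, name)] = value
        except (IndexError, ValueError):
            raise ValueError('malformed --config option: %r '
                             '(use --config section.name=value)' % cfg)
    return result

# parseconfigitems(['ui.username=alice']) -> {('ui', 'username'): 'alice'}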
302 def _earlygetopt(aliases, args):
302 def _earlygetopt(aliases, args):
303 """Return list of values for an option (or aliases).
303 """Return list of values for an option (or aliases).
304
304
305 The values are listed in the order they appear in args.
305 The values are listed in the order they appear in args.
306 The options and values are removed from args.
306 The options and values are removed from args.
307 """
307 """
308 try:
308 try:
309 argcount = args.index("--")
309 argcount = args.index("--")
310 except ValueError:
310 except ValueError:
311 argcount = len(args)
311 argcount = len(args)
312 shortopts = [opt for opt in aliases if len(opt) == 2]
312 shortopts = [opt for opt in aliases if len(opt) == 2]
313 values = []
313 values = []
314 pos = 0
314 pos = 0
315 while pos < argcount:
315 while pos < argcount:
316 if args[pos] in aliases:
316 if args[pos] in aliases:
317 if pos + 1 >= argcount:
317 if pos + 1 >= argcount:
318 # ignore and let getopt report an error if there is no value
318 # ignore and let getopt report an error if there is no value
319 break
319 break
320 del args[pos]
320 del args[pos]
321 values.append(args.pop(pos))
321 values.append(args.pop(pos))
322 argcount -= 2
322 argcount -= 2
323 elif args[pos][:2] in shortopts:
323 elif args[pos][:2] in shortopts:
324 # short option can have no following space, e.g. hg log -Rfoo
324 # short option can have no following space, e.g. hg log -Rfoo
325 values.append(args.pop(pos)[2:])
325 values.append(args.pop(pos)[2:])
326 argcount -= 1
326 argcount -= 1
327 else:
327 else:
328 pos += 1
328 pos += 1
329 return values
329 return values
330
330
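# Tracing _earlygetopt above on a sample command line: values are returned in
# the order they appear and removed from args, the "-Rfoo" form with no
# separating space is recognised, and anything after "--" is left untouched.
args = ['--repository', 'alpha', 'log', '-Rbeta', '--', '-Rgamma']
# _earlygetopt(['-R', '--repository', '--repo'], args) -> ['alpha', 'beta']
# and args is left as ['log', '--', '-Rgamma']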
331 def runcommand(lui, repo, cmd, fullargs, ui, options, d):
331 def runcommand(lui, repo, cmd, fullargs, ui, options, d):
332 # run pre-hook, and abort if it fails
332 # run pre-hook, and abort if it fails
333 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
333 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
334 if ret:
334 if ret:
335 return ret
335 return ret
336 ret = _runcommand(ui, options, cmd, d)
336 ret = _runcommand(ui, options, cmd, d)
337 # run post-hook, passing command result
337 # run post-hook, passing command result
338 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
338 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
339 result = ret)
339 result = ret)
340 return ret
340 return ret
341
341
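# runcommand wraps every command between "pre-<command>" and "post-<command>"
# hooks; a true return value from the pre-hook stops the command from running.
# A hedged hgrc illustration (the hook names and module are hypothetical):
#
#   [hooks]
#   pre-commit = python:myhooks.check
#   post-commit = echo committed
#
# and a matching in-process hook using the usual Mercurial hook signature,
# where extras such as args and result arrive as keyword arguments:
def check(ui, repo, hooktype, **kwargs):
    ui.status('running %s hook\n' % hooktype)
    return 0   # any true value would make the pre-hook fail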
342 _loaded = set()
342 _loaded = set()
343 def _dispatch(ui, args):
343 def _dispatch(ui, args):
344 # read --config before doing anything else
344 # read --config before doing anything else
345 # (e.g. to change trust settings for reading .hg/hgrc)
345 # (e.g. to change trust settings for reading .hg/hgrc)
346 _parseconfig(ui, _earlygetopt(['--config'], args))
346 _parseconfig(ui, _earlygetopt(['--config'], args))
347
347
348 # check for cwd
348 # check for cwd
349 cwd = _earlygetopt(['--cwd'], args)
349 cwd = _earlygetopt(['--cwd'], args)
350 if cwd:
350 if cwd:
351 os.chdir(cwd[-1])
351 os.chdir(cwd[-1])
352
352
353 # read the local repository .hgrc into a local ui object
353 # read the local repository .hgrc into a local ui object
354 path = cmdutil.findrepo(os.getcwd()) or ""
354 path = cmdutil.findrepo(os.getcwd()) or ""
355 if not path:
355 if not path:
356 lui = ui
356 lui = ui
357 else:
357 else:
358 try:
358 try:
359 lui = ui.copy()
359 lui = ui.copy()
360 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
360 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
361 except IOError:
361 except IOError:
362 pass
362 pass
363
363
364 # now we can expand paths, even ones in .hg/hgrc
364 # now we can expand paths, even ones in .hg/hgrc
365 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
365 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
366 if rpath:
366 if rpath:
367 path = lui.expandpath(rpath[-1])
367 path = lui.expandpath(rpath[-1])
368 lui = ui.copy()
368 lui = ui.copy()
369 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
369 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
370
370
371 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
371 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
372 # reposetup. Programs like TortoiseHg will call _dispatch several
372 # reposetup. Programs like TortoiseHg will call _dispatch several
373 # times so we keep track of configured extensions in _loaded.
373 # times so we keep track of configured extensions in _loaded.
374 extensions.loadall(lui)
374 extensions.loadall(lui)
375 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
375 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
376
376
377 # (uisetup and extsetup are handled in extensions.loadall)
377 # (uisetup and extsetup are handled in extensions.loadall)
378
378
379 for name, module in exts:
379 for name, module in exts:
380 cmdtable = getattr(module, 'cmdtable', {})
380 cmdtable = getattr(module, 'cmdtable', {})
381 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
381 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
382 if overrides:
382 if overrides:
383 ui.warn(_("extension '%s' overrides commands: %s\n")
383 ui.warn(_("extension '%s' overrides commands: %s\n")
384 % (name, " ".join(overrides)))
384 % (name, " ".join(overrides)))
385 commands.table.update(cmdtable)
385 commands.table.update(cmdtable)
386 _loaded.add(name)
386 _loaded.add(name)
387
387
388 # (reposetup is handled in hg.repository)
388 # (reposetup is handled in hg.repository)
389
389
390 addaliases(lui, commands.table)
390 addaliases(lui, commands.table)
391
391
392 # check for fallback encoding
392 # check for fallback encoding
393 fallback = lui.config('ui', 'fallbackencoding')
393 fallback = lui.config('ui', 'fallbackencoding')
394 if fallback:
394 if fallback:
395 encoding.fallbackencoding = fallback
395 encoding.fallbackencoding = fallback
396
396
397 fullargs = args
397 fullargs = args
398 cmd, func, args, options, cmdoptions = _parse(lui, args)
398 cmd, func, args, options, cmdoptions = _parse(lui, args)
399
399
400 if options["config"]:
400 if options["config"]:
401 raise util.Abort(_("Option --config may not be abbreviated!"))
401 raise util.Abort(_("Option --config may not be abbreviated!"))
402 if options["cwd"]:
402 if options["cwd"]:
403 raise util.Abort(_("Option --cwd may not be abbreviated!"))
403 raise util.Abort(_("Option --cwd may not be abbreviated!"))
404 if options["repository"]:
404 if options["repository"]:
405 raise util.Abort(_(
405 raise util.Abort(_(
406 "Option -R has to be separated from other options (e.g. not -qR) "
406 "Option -R has to be separated from other options (e.g. not -qR) "
407 "and --repository may only be abbreviated as --repo!"))
407 "and --repository may only be abbreviated as --repo!"))
408
408
409 if options["encoding"]:
409 if options["encoding"]:
410 encoding.encoding = options["encoding"]
410 encoding.encoding = options["encoding"]
411 if options["encodingmode"]:
411 if options["encodingmode"]:
412 encoding.encodingmode = options["encodingmode"]
412 encoding.encodingmode = options["encodingmode"]
413 if options["time"]:
413 if options["time"]:
414 def get_times():
414 def get_times():
415 t = os.times()
415 t = os.times()
416 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
416 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
417 t = (t[0], t[1], t[2], t[3], time.clock())
417 t = (t[0], t[1], t[2], t[3], time.clock())
418 return t
418 return t
419 s = get_times()
419 s = get_times()
420 def print_time():
420 def print_time():
421 t = get_times()
421 t = get_times()
422 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
422 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
423 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
423 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
424 atexit.register(print_time)
424 atexit.register(print_time)
425
425
426 if options['verbose'] or options['debug'] or options['quiet']:
426 if options['verbose'] or options['debug'] or options['quiet']:
427 ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
427 ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
428 ui.setconfig('ui', 'debug', str(bool(options['debug'])))
428 ui.setconfig('ui', 'debug', str(bool(options['debug'])))
429 ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
429 ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
430 if options['traceback']:
430 if options['traceback']:
431 ui.setconfig('ui', 'traceback', 'on')
431 ui.setconfig('ui', 'traceback', 'on')
432 if options['noninteractive']:
432 if options['noninteractive']:
433 ui.setconfig('ui', 'interactive', 'off')
433 ui.setconfig('ui', 'interactive', 'off')
434
434
435 if options['help']:
435 if options['help']:
436 return commands.help_(ui, cmd, options['version'])
436 return commands.help_(ui, cmd, options['version'])
437 elif options['version']:
437 elif options['version']:
438 return commands.version_(ui)
438 return commands.version_(ui)
439 elif not cmd:
439 elif not cmd:
440 return commands.help_(ui, 'shortlist')
440 return commands.help_(ui, 'shortlist')
441
441
442 repo = None
442 repo = None
443 if cmd not in commands.norepo.split():
443 if cmd not in commands.norepo.split():
444 try:
444 try:
445 repo = hg.repository(ui, path=path)
445 repo = hg.repository(ui, path=path)
446 ui = repo.ui
446 ui = repo.ui
447 if not repo.local():
447 if not repo.local():
448 raise util.Abort(_("repository '%s' is not local") % path)
448 raise util.Abort(_("repository '%s' is not local") % path)
449 ui.setconfig("bundle", "mainreporoot", repo.root)
449 ui.setconfig("bundle", "mainreporoot", repo.root)
450 except error.RepoError:
450 except error.RepoError:
451 if cmd not in commands.optionalrepo.split():
451 if cmd not in commands.optionalrepo.split():
452 if args and not path: # try to infer -R from command args
452 if args and not path: # try to infer -R from command args
453 repos = map(cmdutil.findrepo, args)
453 repos = map(cmdutil.findrepo, args)
454 guess = repos[0]
454 guess = repos[0]
455 if guess and repos.count(guess) == len(repos):
455 if guess and repos.count(guess) == len(repos):
456 return _dispatch(ui, ['--repository', guess] + fullargs)
456 return _dispatch(ui, ['--repository', guess] + fullargs)
457 if not path:
457 if not path:
458 raise error.RepoError(_("There is no Mercurial repository"
458 raise error.RepoError(_("There is no Mercurial repository"
459 " here (.hg not found)"))
459 " here (.hg not found)"))
460 raise
460 raise
461 args.insert(0, repo)
461 args.insert(0, repo)
462 elif rpath:
462 elif rpath:
463 ui.warn("warning: --repository ignored\n")
463 ui.warn("warning: --repository ignored\n")
464
464
465 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
465 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
466 return runcommand(lui, repo, cmd, fullargs, ui, options, d)
466 return runcommand(lui, repo, cmd, fullargs, ui, options, d)
467
467
468 def _runcommand(ui, options, cmd, cmdfunc):
468 def _runcommand(ui, options, cmd, cmdfunc):
469 def checkargs():
469 def checkargs():
470 try:
470 try:
471 return cmdfunc()
471 return cmdfunc()
472 except error.SignatureError:
472 except error.SignatureError:
473 raise error.ParseError(cmd, _("invalid arguments"))
473 raise error.ParseError(cmd, _("invalid arguments"))
474
474
475 if options['profile']:
475 if options['profile']:
476 format = ui.config('profiling', 'format', default='text')
476 format = ui.config('profiling', 'format', default='text')
477
477
478 if not format in ['text', 'kcachegrind']:
478 if not format in ['text', 'kcachegrind']:
479 ui.warn(_("unrecognized profiling format '%s'"
479 ui.warn(_("unrecognized profiling format '%s'"
480 " - Ignored\n") % format)
480 " - Ignored\n") % format)
481 format = 'text'
481 format = 'text'
482
482
483 output = ui.config('profiling', 'output')
483 output = ui.config('profiling', 'output')
484
484
485 if output:
485 if output:
486 path = ui.expandpath(output)
486 path = ui.expandpath(output)
487 ostream = open(path, 'wb')
487 ostream = open(path, 'wb')
488 else:
488 else:
489 ostream = sys.stderr
489 ostream = sys.stderr
490
490
491 try:
491 try:
492 from mercurial import lsprof
492 from mercurial import lsprof
493 except ImportError:
493 except ImportError:
494 raise util.Abort(_(
494 raise util.Abort(_(
495 'lsprof not available - install from '
495 'lsprof not available - install from '
496 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
496 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
497 p = lsprof.Profiler()
497 p = lsprof.Profiler()
498 p.enable(subcalls=True)
498 p.enable(subcalls=True)
499 try:
499 try:
500 return checkargs()
500 return checkargs()
501 finally:
501 finally:
502 p.disable()
502 p.disable()
503
503
504 if format == 'kcachegrind':
504 if format == 'kcachegrind':
505 import lsprofcalltree
505 import lsprofcalltree
506 calltree = lsprofcalltree.KCacheGrind(p)
506 calltree = lsprofcalltree.KCacheGrind(p)
507 calltree.output(ostream)
507 calltree.output(ostream)
508 else:
508 else:
509 # format == 'text'
509 # format == 'text'
510 stats = lsprof.Stats(p.getstats())
510 stats = lsprof.Stats(p.getstats())
511 stats.sort()
511 stats.sort()
512 stats.pprint(top=10, file=ostream, climit=5)
512 stats.pprint(top=10, file=ostream, climit=5)
513
513
514 if output:
514 if output:
515 ostream.close()
515 ostream.close()
516 else:
516 else:
517 return checkargs()
517 return checkargs()
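# The --profile branch of _runcommand reads its settings from the [profiling]
# section: profiling.format selects 'text' (the default) or 'kcachegrind',
# and profiling.output redirects the report from stderr to a file.  A
# hypothetical hgrc example:
#
#   [profiling]
#   format = kcachegrind
#   output = ~/hg-profile.out
#
# With the default text format, the top ten lsprof entries are printed instead.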
@@ -1,394 +1,394 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from i18n import _
9 from i18n import _
10 from lock import release
10 from lock import release
11 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
11 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import lock, util, extensions, error, encoding, node
12 import lock, util, extensions, error, encoding, node
13 import merge as _merge
13 import merge as mergemod
14 import verify as _verify
14 import verify as verifymod
15 import errno, os, shutil
15 import errno, os, shutil
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 def addbranchrevs(lrepo, repo, branches, revs):
21 def addbranchrevs(lrepo, repo, branches, revs):
22 if not branches:
22 if not branches:
23 return revs or None, revs and revs[0] or None
23 return revs or None, revs and revs[0] or None
24 revs = revs and list(revs) or []
24 revs = revs and list(revs) or []
25 if not repo.capable('branchmap'):
25 if not repo.capable('branchmap'):
26 revs.extend(branches)
26 revs.extend(branches)
27 return revs, revs[0]
27 return revs, revs[0]
28 branchmap = repo.branchmap()
28 branchmap = repo.branchmap()
29 for branch in branches:
29 for branch in branches:
30 if branch == '.':
30 if branch == '.':
31 if not lrepo or not lrepo.local():
31 if not lrepo or not lrepo.local():
32 raise util.Abort(_("dirstate branch not accessible"))
32 raise util.Abort(_("dirstate branch not accessible"))
33 revs.append(lrepo.dirstate.branch())
33 revs.append(lrepo.dirstate.branch())
34 else:
34 else:
35 butf8 = encoding.fromlocal(branch)
35 butf8 = encoding.fromlocal(branch)
36 if butf8 in branchmap:
36 if butf8 in branchmap:
37 revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
37 revs.extend(node.hex(r) for r in reversed(branchmap[butf8]))
38 else:
38 else:
39 revs.append(branch)
39 revs.append(branch)
40 return revs, revs[0]
40 return revs, revs[0]
41
41
42 def parseurl(url, branches=None):
42 def parseurl(url, branches=None):
43 '''parse url#branch, returning url, branches+[branch]'''
43 '''parse url#branch, returning url, branches+[branch]'''
44
44
45 if '#' not in url:
45 if '#' not in url:
46 return url, branches or []
46 return url, branches or []
47 url, branch = url.split('#', 1)
47 url, branch = url.split('#', 1)
48 return url, (branches or []) + [branch]
48 return url, (branches or []) + [branch]
49
49
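# parseurl splits an optional "#branch" fragment off a source URL and appends
# it to the branch list; without a fragment the given branches pass through:
#
#   parseurl('http://example.com/repo#stable')
#       -> ('http://example.com/repo', ['stable'])
#   parseurl('http://example.com/repo', ['default'])
#       -> ('http://example.com/repo', ['default'])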
50 schemes = {
50 schemes = {
51 'bundle': bundlerepo,
51 'bundle': bundlerepo,
52 'file': _local,
52 'file': _local,
53 'http': httprepo,
53 'http': httprepo,
54 'https': httprepo,
54 'https': httprepo,
55 'ssh': sshrepo,
55 'ssh': sshrepo,
56 'static-http': statichttprepo,
56 'static-http': statichttprepo,
57 }
57 }
58
58
59 def _lookup(path):
59 def _lookup(path):
60 scheme = 'file'
60 scheme = 'file'
61 if path:
61 if path:
62 c = path.find(':')
62 c = path.find(':')
63 if c > 0:
63 if c > 0:
64 scheme = path[:c]
64 scheme = path[:c]
65 thing = schemes.get(scheme) or schemes['file']
65 thing = schemes.get(scheme) or schemes['file']
66 try:
66 try:
67 return thing(path)
67 return thing(path)
68 except TypeError:
68 except TypeError:
69 return thing
69 return thing
70
70
71 def islocal(repo):
71 def islocal(repo):
72 '''return true if repo or path is local'''
72 '''return true if repo or path is local'''
73 if isinstance(repo, str):
73 if isinstance(repo, str):
74 try:
74 try:
75 return _lookup(repo).islocal(repo)
75 return _lookup(repo).islocal(repo)
76 except AttributeError:
76 except AttributeError:
77 return False
77 return False
78 return repo.local()
78 return repo.local()
79
79
80 def repository(ui, path='', create=False):
80 def repository(ui, path='', create=False):
81 """return a repository object for the specified path"""
81 """return a repository object for the specified path"""
82 repo = _lookup(path).instance(ui, path, create)
82 repo = _lookup(path).instance(ui, path, create)
83 ui = getattr(repo, "ui", ui)
83 ui = getattr(repo, "ui", ui)
84 for name, module in extensions.extensions():
84 for name, module in extensions.extensions():
85 hook = getattr(module, 'reposetup', None)
85 hook = getattr(module, 'reposetup', None)
86 if hook:
86 if hook:
87 hook(ui, repo)
87 hook(ui, repo)
88 return repo
88 return repo
89
89
90 def defaultdest(source):
90 def defaultdest(source):
91 '''return default destination of clone if none is given'''
91 '''return default destination of clone if none is given'''
92 return os.path.basename(os.path.normpath(source))
92 return os.path.basename(os.path.normpath(source))
93
93
94 def localpath(path):
94 def localpath(path):
95 if path.startswith('file://localhost/'):
95 if path.startswith('file://localhost/'):
96 return path[16:]
96 return path[16:]
97 if path.startswith('file://'):
97 if path.startswith('file://'):
98 return path[7:]
98 return path[7:]
99 if path.startswith('file:'):
99 if path.startswith('file:'):
100 return path[5:]
100 return path[5:]
101 return path
101 return path
102
102
103 def share(ui, source, dest=None, update=True):
103 def share(ui, source, dest=None, update=True):
104 '''create a shared repository'''
104 '''create a shared repository'''
105
105
106 if not islocal(source):
106 if not islocal(source):
107 raise util.Abort(_('can only share local repositories'))
107 raise util.Abort(_('can only share local repositories'))
108
108
109 if not dest:
109 if not dest:
110 dest = defaultdest(source)
110 dest = defaultdest(source)
111 else:
111 else:
112 dest = ui.expandpath(dest)
112 dest = ui.expandpath(dest)
113
113
114 if isinstance(source, str):
114 if isinstance(source, str):
115 origsource = ui.expandpath(source)
115 origsource = ui.expandpath(source)
116 source, branches = parseurl(origsource)
116 source, branches = parseurl(origsource)
117 srcrepo = repository(ui, source)
117 srcrepo = repository(ui, source)
118 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
118 rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
119 else:
119 else:
120 srcrepo = source
120 srcrepo = source
121 origsource = source = srcrepo.url()
121 origsource = source = srcrepo.url()
122 checkout = None
122 checkout = None
123
123
124 sharedpath = srcrepo.sharedpath # if our source is already sharing
124 sharedpath = srcrepo.sharedpath # if our source is already sharing
125
125
126 root = os.path.realpath(dest)
126 root = os.path.realpath(dest)
127 roothg = os.path.join(root, '.hg')
127 roothg = os.path.join(root, '.hg')
128
128
129 if os.path.exists(roothg):
129 if os.path.exists(roothg):
130 raise util.Abort(_('destination already exists'))
130 raise util.Abort(_('destination already exists'))
131
131
132 if not os.path.isdir(root):
132 if not os.path.isdir(root):
133 os.mkdir(root)
133 os.mkdir(root)
134 os.mkdir(roothg)
134 os.mkdir(roothg)
135
135
136 requirements = ''
136 requirements = ''
137 try:
137 try:
138 requirements = srcrepo.opener('requires').read()
138 requirements = srcrepo.opener('requires').read()
139 except IOError, inst:
139 except IOError, inst:
140 if inst.errno != errno.ENOENT:
140 if inst.errno != errno.ENOENT:
141 raise
141 raise
142
142
143 requirements += 'shared\n'
143 requirements += 'shared\n'
144 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
144 file(os.path.join(roothg, 'requires'), 'w').write(requirements)
145 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
145 file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath)
146
146
147 default = srcrepo.ui.config('paths', 'default')
147 default = srcrepo.ui.config('paths', 'default')
148 if default:
148 if default:
149 f = file(os.path.join(roothg, 'hgrc'), 'w')
149 f = file(os.path.join(roothg, 'hgrc'), 'w')
150 f.write('[paths]\ndefault = %s\n' % default)
150 f.write('[paths]\ndefault = %s\n' % default)
151 f.close()
151 f.close()
152
152
153 r = repository(ui, root)
153 r = repository(ui, root)
154
154
155 if update:
155 if update:
156 r.ui.status(_("updating working directory\n"))
156 r.ui.status(_("updating working directory\n"))
157 if update is not True:
157 if update is not True:
158 checkout = update
158 checkout = update
159 for test in (checkout, 'default', 'tip'):
159 for test in (checkout, 'default', 'tip'):
160 if test is None:
160 if test is None:
161 continue
161 continue
162 try:
162 try:
163 uprev = r.lookup(test)
163 uprev = r.lookup(test)
164 break
164 break
165 except error.RepoLookupError:
165 except error.RepoLookupError:
166 continue
166 continue
167 _update(r, uprev)
167 _update(r, uprev)
168
168
169 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
169 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
170 stream=False, branch=None):
170 stream=False, branch=None):
171 """Make a copy of an existing repository.
171 """Make a copy of an existing repository.
172
172
173 Create a copy of an existing repository in a new directory. The
173 Create a copy of an existing repository in a new directory. The
174 source and destination are URLs, as passed to the repository
174 source and destination are URLs, as passed to the repository
175 function. Returns a pair of repository objects, the source and
175 function. Returns a pair of repository objects, the source and
176 newly created destination.
176 newly created destination.
177
177
178 The location of the source is added to the new repository's
178 The location of the source is added to the new repository's
179 .hg/hgrc file, as the default to be used for future pulls and
179 .hg/hgrc file, as the default to be used for future pulls and
180 pushes.
180 pushes.
181
181
182 If an exception is raised, the partly cloned/updated destination
182 If an exception is raised, the partly cloned/updated destination
183 repository will be deleted.
183 repository will be deleted.
184
184
185 Arguments:
185 Arguments:
186
186
187 source: repository object or URL
187 source: repository object or URL
188
188
189 dest: URL of destination repository to create (defaults to base
189 dest: URL of destination repository to create (defaults to base
190 name of source repository)
190 name of source repository)
191
191
192 pull: always pull from source repository, even in local case
192 pull: always pull from source repository, even in local case
193
193
194 stream: stream raw data uncompressed from repository (fast over
194 stream: stream raw data uncompressed from repository (fast over
195 LAN, slow over WAN)
195 LAN, slow over WAN)
196
196
197 rev: revision to clone up to (implies pull=True)
197 rev: revision to clone up to (implies pull=True)
198
198
199 update: update working directory after clone completes, if
199 update: update working directory after clone completes, if
200 destination is local repository (True means update to default rev,
200 destination is local repository (True means update to default rev,
201 anything else is treated as a revision)
201 anything else is treated as a revision)
202
202
203 branch: branches to clone
203 branch: branches to clone
204 """
204 """
205
205
206 if isinstance(source, str):
206 if isinstance(source, str):
207 origsource = ui.expandpath(source)
207 origsource = ui.expandpath(source)
208 source, branch = parseurl(origsource, branch)
208 source, branch = parseurl(origsource, branch)
209 src_repo = repository(ui, source)
209 src_repo = repository(ui, source)
210 else:
210 else:
211 src_repo = source
211 src_repo = source
212 origsource = source = src_repo.url()
212 origsource = source = src_repo.url()
213 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
213 rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev)
214
214
215 if dest is None:
215 if dest is None:
216 dest = defaultdest(source)
216 dest = defaultdest(source)
217 ui.status(_("destination directory: %s\n") % dest)
217 ui.status(_("destination directory: %s\n") % dest)
218 else:
218 else:
219 dest = ui.expandpath(dest)
219 dest = ui.expandpath(dest)
220
220
221 dest = localpath(dest)
221 dest = localpath(dest)
222 source = localpath(source)
222 source = localpath(source)
223
223
224 if os.path.exists(dest):
224 if os.path.exists(dest):
225 if not os.path.isdir(dest):
225 if not os.path.isdir(dest):
226 raise util.Abort(_("destination '%s' already exists") % dest)
226 raise util.Abort(_("destination '%s' already exists") % dest)
227 elif os.listdir(dest):
227 elif os.listdir(dest):
228 raise util.Abort(_("destination '%s' is not empty") % dest)
228 raise util.Abort(_("destination '%s' is not empty") % dest)
229
229
230 class DirCleanup(object):
230 class DirCleanup(object):
231 def __init__(self, dir_):
231 def __init__(self, dir_):
232 self.rmtree = shutil.rmtree
232 self.rmtree = shutil.rmtree
233 self.dir_ = dir_
233 self.dir_ = dir_
234 def close(self):
234 def close(self):
235 self.dir_ = None
235 self.dir_ = None
236 def cleanup(self):
236 def cleanup(self):
237 if self.dir_:
237 if self.dir_:
238 self.rmtree(self.dir_, True)
238 self.rmtree(self.dir_, True)
239
239
240 src_lock = dest_lock = dir_cleanup = None
240 src_lock = dest_lock = dir_cleanup = None
241 try:
241 try:
242 if islocal(dest):
242 if islocal(dest):
243 dir_cleanup = DirCleanup(dest)
243 dir_cleanup = DirCleanup(dest)
244
244
245 abspath = origsource
245 abspath = origsource
246 copy = False
246 copy = False
247 if src_repo.cancopy() and islocal(dest):
247 if src_repo.cancopy() and islocal(dest):
248 abspath = os.path.abspath(util.drop_scheme('file', origsource))
248 abspath = os.path.abspath(util.drop_scheme('file', origsource))
249 copy = not pull and not rev
249 copy = not pull and not rev
250
250
251 if copy:
251 if copy:
252 try:
252 try:
253 # we use a lock here because if we race with commit, we
253 # we use a lock here because if we race with commit, we
254 # can end up with extra data in the cloned revlogs that's
254 # can end up with extra data in the cloned revlogs that's
255 # not pointed to by changesets, thus causing verify to
255 # not pointed to by changesets, thus causing verify to
256 # fail
256 # fail
257 src_lock = src_repo.lock(wait=False)
257 src_lock = src_repo.lock(wait=False)
258 except error.LockError:
258 except error.LockError:
259 copy = False
259 copy = False
260
260
261 if copy:
261 if copy:
262 src_repo.hook('preoutgoing', throw=True, source='clone')
262 src_repo.hook('preoutgoing', throw=True, source='clone')
263 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
263 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
264 if not os.path.exists(dest):
264 if not os.path.exists(dest):
265 os.mkdir(dest)
265 os.mkdir(dest)
266 else:
266 else:
267 # only clean up directories we create ourselves
267 # only clean up directories we create ourselves
268 dir_cleanup.dir_ = hgdir
268 dir_cleanup.dir_ = hgdir
269 try:
269 try:
270 dest_path = hgdir
270 dest_path = hgdir
271 os.mkdir(dest_path)
271 os.mkdir(dest_path)
272 except OSError, inst:
272 except OSError, inst:
273 if inst.errno == errno.EEXIST:
273 if inst.errno == errno.EEXIST:
274 dir_cleanup.close()
274 dir_cleanup.close()
275 raise util.Abort(_("destination '%s' already exists")
275 raise util.Abort(_("destination '%s' already exists")
276 % dest)
276 % dest)
277 raise
277 raise
278
278
279 for f in src_repo.store.copylist():
279 for f in src_repo.store.copylist():
280 src = os.path.join(src_repo.sharedpath, f)
280 src = os.path.join(src_repo.sharedpath, f)
281 dst = os.path.join(dest_path, f)
281 dst = os.path.join(dest_path, f)
282 dstbase = os.path.dirname(dst)
282 dstbase = os.path.dirname(dst)
283 if dstbase and not os.path.exists(dstbase):
283 if dstbase and not os.path.exists(dstbase):
284 os.mkdir(dstbase)
284 os.mkdir(dstbase)
285 if os.path.exists(src):
285 if os.path.exists(src):
286 if dst.endswith('data'):
286 if dst.endswith('data'):
287 # lock to avoid premature writing to the target
287 # lock to avoid premature writing to the target
288 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
288 dest_lock = lock.lock(os.path.join(dstbase, "lock"))
289 util.copyfiles(src, dst)
289 util.copyfiles(src, dst)
290
290
291 # we need to re-init the repo after manually copying the data
291 # we need to re-init the repo after manually copying the data
292 # into it
292 # into it
293 dest_repo = repository(ui, dest)
293 dest_repo = repository(ui, dest)
294 src_repo.hook('outgoing', source='clone', node='0'*40)
294 src_repo.hook('outgoing', source='clone', node='0'*40)
295 else:
295 else:
296 try:
296 try:
297 dest_repo = repository(ui, dest, create=True)
297 dest_repo = repository(ui, dest, create=True)
298 except OSError, inst:
298 except OSError, inst:
299 if inst.errno == errno.EEXIST:
299 if inst.errno == errno.EEXIST:
300 dir_cleanup.close()
300 dir_cleanup.close()
301 raise util.Abort(_("destination '%s' already exists")
301 raise util.Abort(_("destination '%s' already exists")
302 % dest)
302 % dest)
303 raise
303 raise
304
304
305 revs = None
305 revs = None
306 if rev:
306 if rev:
307 if 'lookup' not in src_repo.capabilities:
307 if 'lookup' not in src_repo.capabilities:
308 raise util.Abort(_("src repository does not support "
308 raise util.Abort(_("src repository does not support "
309 "revision lookup and so doesn't "
309 "revision lookup and so doesn't "
310 "support clone by revision"))
310 "support clone by revision"))
311 revs = [src_repo.lookup(r) for r in rev]
311 revs = [src_repo.lookup(r) for r in rev]
312 checkout = revs[0]
312 checkout = revs[0]
313 if dest_repo.local():
313 if dest_repo.local():
314 dest_repo.clone(src_repo, heads=revs, stream=stream)
314 dest_repo.clone(src_repo, heads=revs, stream=stream)
315 elif src_repo.local():
315 elif src_repo.local():
316 src_repo.push(dest_repo, revs=revs)
316 src_repo.push(dest_repo, revs=revs)
317 else:
317 else:
318 raise util.Abort(_("clone from remote to remote not supported"))
318 raise util.Abort(_("clone from remote to remote not supported"))
319
319
320 if dir_cleanup:
320 if dir_cleanup:
321 dir_cleanup.close()
321 dir_cleanup.close()
322
322
323 if dest_repo.local():
323 if dest_repo.local():
324 fp = dest_repo.opener("hgrc", "w", text=True)
324 fp = dest_repo.opener("hgrc", "w", text=True)
325 fp.write("[paths]\n")
325 fp.write("[paths]\n")
326 fp.write("default = %s\n" % abspath)
326 fp.write("default = %s\n" % abspath)
327 fp.close()
327 fp.close()
328
328
329 dest_repo.ui.setconfig('paths', 'default', abspath)
329 dest_repo.ui.setconfig('paths', 'default', abspath)
330
330
331 if update:
331 if update:
332 if update is not True:
332 if update is not True:
333 checkout = update
333 checkout = update
334 if src_repo.local():
334 if src_repo.local():
335 checkout = src_repo.lookup(update)
335 checkout = src_repo.lookup(update)
336 for test in (checkout, 'default', 'tip'):
336 for test in (checkout, 'default', 'tip'):
337 if test is None:
337 if test is None:
338 continue
338 continue
339 try:
339 try:
340 uprev = dest_repo.lookup(test)
340 uprev = dest_repo.lookup(test)
341 break
341 break
342 except error.RepoLookupError:
342 except error.RepoLookupError:
343 continue
343 continue
344 bn = dest_repo[uprev].branch()
344 bn = dest_repo[uprev].branch()
345 dest_repo.ui.status(_("updating to branch %s\n")
345 dest_repo.ui.status(_("updating to branch %s\n")
346 % encoding.tolocal(bn))
346 % encoding.tolocal(bn))
347 _update(dest_repo, uprev)
347 _update(dest_repo, uprev)
348
348
349 return src_repo, dest_repo
349 return src_repo, dest_repo
350 finally:
350 finally:
351 release(src_lock, dest_lock)
351 release(src_lock, dest_lock)
352 if dir_cleanup is not None:
352 if dir_cleanup is not None:
353 dir_cleanup.cleanup()
353 dir_cleanup.cleanup()
354
354
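# A minimal usage sketch for clone() as documented above; the source URL and
# destination are hypothetical and a plain ui object is assumed:
#
#   from mercurial import ui as uimod, hg
#   src, dest = hg.clone(uimod.ui(), 'http://example.com/repo', 'repo-copy',
#                        pull=False, rev=None, update=True)
#
# clone() returns the (source, destination) repository pair and, for a local
# destination, records the source in the new repository's .hg/hgrc as the
# default path.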
355 def _showstats(repo, stats):
355 def _showstats(repo, stats):
356 repo.ui.status(_("%d files updated, %d files merged, "
356 repo.ui.status(_("%d files updated, %d files merged, "
357 "%d files removed, %d files unresolved\n") % stats)
357 "%d files removed, %d files unresolved\n") % stats)
358
358
359 def update(repo, node):
359 def update(repo, node):
360 """update the working directory to node, merging linear changes"""
360 """update the working directory to node, merging linear changes"""
361 stats = _merge.update(repo, node, False, False, None)
361 stats = mergemod.update(repo, node, False, False, None)
362 _showstats(repo, stats)
362 _showstats(repo, stats)
363 if stats[3]:
363 if stats[3]:
364 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
364 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
365 return stats[3] > 0
365 return stats[3] > 0
366
366
367 # naming conflict in clone()
367 # naming conflict in clone()
368 _update = update
368 _update = update
369
369
370 def clean(repo, node, show_stats=True):
370 def clean(repo, node, show_stats=True):
371 """forcibly switch the working directory to node, clobbering changes"""
371 """forcibly switch the working directory to node, clobbering changes"""
372 stats = _merge.update(repo, node, False, True, None)
372 stats = mergemod.update(repo, node, False, True, None)
373 if show_stats:
373 if show_stats:
374 _showstats(repo, stats)
374 _showstats(repo, stats)
375 return stats[3] > 0
375 return stats[3] > 0
376
376
377 def merge(repo, node, force=None, remind=True):
377 def merge(repo, node, force=None, remind=True):
378 """branch merge with node, resolving changes"""
378 """branch merge with node, resolving changes"""
379 stats = _merge.update(repo, node, True, force, False)
379 stats = mergemod.update(repo, node, True, force, False)
380 _showstats(repo, stats)
380 _showstats(repo, stats)
381 if stats[3]:
381 if stats[3]:
382 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
382 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
383 "or 'hg update -C' to abandon\n"))
383 "or 'hg update -C' to abandon\n"))
384 elif remind:
384 elif remind:
385 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
385 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
386 return stats[3] > 0
386 return stats[3] > 0
387
387
388 def revert(repo, node, choose):
388 def revert(repo, node, choose):
389 """revert changes to revision in node without updating dirstate"""
389 """revert changes to revision in node without updating dirstate"""
390 return _merge.update(repo, node, False, True, choose)[3] > 0
390 return mergemod.update(repo, node, False, True, choose)[3] > 0
391
391
392 def verify(repo):
392 def verify(repo):
393 """verify the consistency of a repository"""
393 """verify the consistency of a repository"""
394 return _verify.verify(repo)
394 return verifymod.verify(repo)
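# The working-directory helpers above (update, clean, merge, revert) all
# delegate to mergemod.update() with different flags and signal success
# through the unresolved-file count, stats[3]: a True return means some files
# still need 'hg resolve'.  A hedged illustration:
#
#   if merge(repo, node):
#       # unresolved files remain; update()/merge() have already printed
#       # the 'hg resolve' reminder
#       pass
#
# clean() forces the update, merge() performs a branch merge, and revert()
# restricts the operation to the files selected by choose.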
@@ -1,2223 +1,2223 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo
10 import repo, changegroup, subrepo
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as match_
14 import match as matchmod
15 import merge as merge_
15 import merge as mergemod
16 import tags as tags_
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, stat, errno, os, time, inspect
18 import weakref, stat, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20
20
21 class localrepository(repo.repository):
21 class localrepository(repo.repository):
22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
23 supported = set('revlogv1 store fncache shared'.split())
23 supported = set('revlogv1 store fncache shared'.split())
24
24
25 def __init__(self, baseui, path=None, create=0):
25 def __init__(self, baseui, path=None, create=0):
26 repo.repository.__init__(self)
26 repo.repository.__init__(self)
27 self.root = os.path.realpath(path)
27 self.root = os.path.realpath(path)
28 self.path = os.path.join(self.root, ".hg")
28 self.path = os.path.join(self.root, ".hg")
29 self.origroot = path
29 self.origroot = path
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.baseui = baseui
32 self.baseui = baseui
33 self.ui = baseui.copy()
33 self.ui = baseui.copy()
34
34
35 try:
35 try:
36 self.ui.readconfig(self.join("hgrc"), self.root)
36 self.ui.readconfig(self.join("hgrc"), self.root)
37 extensions.loadall(self.ui)
37 extensions.loadall(self.ui)
38 except IOError:
38 except IOError:
39 pass
39 pass
40
40
41 if not os.path.isdir(self.path):
41 if not os.path.isdir(self.path):
42 if create:
42 if create:
43 if not os.path.exists(path):
43 if not os.path.exists(path):
44 os.mkdir(path)
44 os.mkdir(path)
45 os.mkdir(self.path)
45 os.mkdir(self.path)
46 requirements = ["revlogv1"]
46 requirements = ["revlogv1"]
47 if self.ui.configbool('format', 'usestore', True):
47 if self.ui.configbool('format', 'usestore', True):
48 os.mkdir(os.path.join(self.path, "store"))
48 os.mkdir(os.path.join(self.path, "store"))
49 requirements.append("store")
49 requirements.append("store")
50 if self.ui.configbool('format', 'usefncache', True):
50 if self.ui.configbool('format', 'usefncache', True):
51 requirements.append("fncache")
51 requirements.append("fncache")
52 # create an invalid changelog
52 # create an invalid changelog
53 self.opener("00changelog.i", "a").write(
53 self.opener("00changelog.i", "a").write(
54 '\0\0\0\2' # represents revlogv2
54 '\0\0\0\2' # represents revlogv2
55 ' dummy changelog to prevent using the old repo layout'
55 ' dummy changelog to prevent using the old repo layout'
56 )
56 )
57 reqfile = self.opener("requires", "w")
57 reqfile = self.opener("requires", "w")
58 for r in requirements:
58 for r in requirements:
59 reqfile.write("%s\n" % r)
59 reqfile.write("%s\n" % r)
60 reqfile.close()
60 reqfile.close()
61 else:
61 else:
62 raise error.RepoError(_("repository %s not found") % path)
62 raise error.RepoError(_("repository %s not found") % path)
63 elif create:
63 elif create:
64 raise error.RepoError(_("repository %s already exists") % path)
64 raise error.RepoError(_("repository %s already exists") % path)
65 else:
65 else:
66 # find requirements
66 # find requirements
67 requirements = set()
67 requirements = set()
68 try:
68 try:
69 requirements = set(self.opener("requires").read().splitlines())
69 requirements = set(self.opener("requires").read().splitlines())
70 except IOError, inst:
70 except IOError, inst:
71 if inst.errno != errno.ENOENT:
71 if inst.errno != errno.ENOENT:
72 raise
72 raise
73 for r in requirements - self.supported:
73 for r in requirements - self.supported:
74 raise error.RepoError(_("requirement '%s' not supported") % r)
74 raise error.RepoError(_("requirement '%s' not supported") % r)
75
75
76 self.sharedpath = self.path
76 self.sharedpath = self.path
77 try:
77 try:
78 s = os.path.realpath(self.opener("sharedpath").read())
78 s = os.path.realpath(self.opener("sharedpath").read())
79 if not os.path.exists(s):
79 if not os.path.exists(s):
80 raise error.RepoError(
80 raise error.RepoError(
81 _('.hg/sharedpath points to nonexistent directory %s') % s)
81 _('.hg/sharedpath points to nonexistent directory %s') % s)
82 self.sharedpath = s
82 self.sharedpath = s
83 except IOError, inst:
83 except IOError, inst:
84 if inst.errno != errno.ENOENT:
84 if inst.errno != errno.ENOENT:
85 raise
85 raise
86
86
87 self.store = store.store(requirements, self.sharedpath, util.opener)
87 self.store = store.store(requirements, self.sharedpath, util.opener)
88 self.spath = self.store.path
88 self.spath = self.store.path
89 self.sopener = self.store.opener
89 self.sopener = self.store.opener
90 self.sjoin = self.store.join
90 self.sjoin = self.store.join
91 self.opener.createmode = self.store.createmode
91 self.opener.createmode = self.store.createmode
92 self.sopener.options = {}
92 self.sopener.options = {}
93
93
94 # These two define the set of tags for this repository. _tags
94 # These two define the set of tags for this repository. _tags
95 # maps tag name to node; _tagtypes maps tag name to 'global' or
95 # maps tag name to node; _tagtypes maps tag name to 'global' or
96 # 'local'. (Global tags are defined by .hgtags across all
96 # 'local'. (Global tags are defined by .hgtags across all
97 # heads, and local tags are defined in .hg/localtags.) They
97 # heads, and local tags are defined in .hg/localtags.) They
98 # constitute the in-memory cache of tags.
98 # constitute the in-memory cache of tags.
99 self._tags = None
99 self._tags = None
100 self._tagtypes = None
100 self._tagtypes = None
101
101
102 self._branchcache = None # in UTF-8
102 self._branchcache = None # in UTF-8
103 self._branchcachetip = None
103 self._branchcachetip = None
104 self.nodetagscache = None
104 self.nodetagscache = None
105 self.filterpats = {}
105 self.filterpats = {}
106 self._datafilters = {}
106 self._datafilters = {}
107 self._transref = self._lockref = self._wlockref = None
107 self._transref = self._lockref = self._wlockref = None
108
108
109 @propertycache
109 @propertycache
110 def changelog(self):
110 def changelog(self):
111 c = changelog.changelog(self.sopener)
111 c = changelog.changelog(self.sopener)
112 if 'HG_PENDING' in os.environ:
112 if 'HG_PENDING' in os.environ:
113 p = os.environ['HG_PENDING']
113 p = os.environ['HG_PENDING']
114 if p.startswith(self.root):
114 if p.startswith(self.root):
115 c.readpending('00changelog.i.a')
115 c.readpending('00changelog.i.a')
116 self.sopener.options['defversion'] = c.version
116 self.sopener.options['defversion'] = c.version
117 return c
117 return c
118
118
119 @propertycache
119 @propertycache
120 def manifest(self):
120 def manifest(self):
121 return manifest.manifest(self.sopener)
121 return manifest.manifest(self.sopener)
122
122
123 @propertycache
123 @propertycache
124 def dirstate(self):
124 def dirstate(self):
125 return dirstate.dirstate(self.opener, self.ui, self.root)
125 return dirstate.dirstate(self.opener, self.ui, self.root)
126
126
127 def __getitem__(self, changeid):
127 def __getitem__(self, changeid):
128 if changeid is None:
128 if changeid is None:
129 return context.workingctx(self)
129 return context.workingctx(self)
130 return context.changectx(self, changeid)
130 return context.changectx(self, changeid)
131
131
132 def __contains__(self, changeid):
132 def __contains__(self, changeid):
133 try:
133 try:
134 return bool(self.lookup(changeid))
134 return bool(self.lookup(changeid))
135 except error.RepoLookupError:
135 except error.RepoLookupError:
136 return False
136 return False
137
137
138 def __nonzero__(self):
138 def __nonzero__(self):
139 return True
139 return True
140
140
141 def __len__(self):
141 def __len__(self):
142 return len(self.changelog)
142 return len(self.changelog)
143
143
144 def __iter__(self):
144 def __iter__(self):
145 for i in xrange(len(self)):
145 for i in xrange(len(self)):
146 yield i
146 yield i
147
147
148 def url(self):
148 def url(self):
149 return 'file:' + self.root
149 return 'file:' + self.root
150
150
151 def hook(self, name, throw=False, **args):
151 def hook(self, name, throw=False, **args):
152 return hook.hook(self.ui, self, name, throw, **args)
152 return hook.hook(self.ui, self, name, throw, **args)
153
153
154 tag_disallowed = ':\r\n'
154 tag_disallowed = ':\r\n'
155
155
156 def _tag(self, names, node, message, local, user, date, extra={}):
156 def _tag(self, names, node, message, local, user, date, extra={}):
157 if isinstance(names, str):
157 if isinstance(names, str):
158 allchars = names
158 allchars = names
159 names = (names,)
159 names = (names,)
160 else:
160 else:
161 allchars = ''.join(names)
161 allchars = ''.join(names)
162 for c in self.tag_disallowed:
162 for c in self.tag_disallowed:
163 if c in allchars:
163 if c in allchars:
164 raise util.Abort(_('%r cannot be used in a tag name') % c)
164 raise util.Abort(_('%r cannot be used in a tag name') % c)
165
165
166 for name in names:
166 for name in names:
167 self.hook('pretag', throw=True, node=hex(node), tag=name,
167 self.hook('pretag', throw=True, node=hex(node), tag=name,
168 local=local)
168 local=local)
169
169
170 def writetags(fp, names, munge, prevtags):
170 def writetags(fp, names, munge, prevtags):
171 fp.seek(0, 2)
171 fp.seek(0, 2)
172 if prevtags and prevtags[-1] != '\n':
172 if prevtags and prevtags[-1] != '\n':
173 fp.write('\n')
173 fp.write('\n')
174 for name in names:
174 for name in names:
175 m = munge and munge(name) or name
175 m = munge and munge(name) or name
176 if self._tagtypes and name in self._tagtypes:
176 if self._tagtypes and name in self._tagtypes:
177 old = self._tags.get(name, nullid)
177 old = self._tags.get(name, nullid)
178 fp.write('%s %s\n' % (hex(old), m))
178 fp.write('%s %s\n' % (hex(old), m))
179 fp.write('%s %s\n' % (hex(node), m))
179 fp.write('%s %s\n' % (hex(node), m))
180 fp.close()
180 fp.close()
181
181
182 prevtags = ''
182 prevtags = ''
183 if local:
183 if local:
184 try:
184 try:
185 fp = self.opener('localtags', 'r+')
185 fp = self.opener('localtags', 'r+')
186 except IOError:
186 except IOError:
187 fp = self.opener('localtags', 'a')
187 fp = self.opener('localtags', 'a')
188 else:
188 else:
189 prevtags = fp.read()
189 prevtags = fp.read()
190
190
191 # local tags are stored in the current charset
191 # local tags are stored in the current charset
192 writetags(fp, names, None, prevtags)
192 writetags(fp, names, None, prevtags)
193 for name in names:
193 for name in names:
194 self.hook('tag', node=hex(node), tag=name, local=local)
194 self.hook('tag', node=hex(node), tag=name, local=local)
195 return
195 return
196
196
197 try:
197 try:
198 fp = self.wfile('.hgtags', 'rb+')
198 fp = self.wfile('.hgtags', 'rb+')
199 except IOError:
199 except IOError:
200 fp = self.wfile('.hgtags', 'ab')
200 fp = self.wfile('.hgtags', 'ab')
201 else:
201 else:
202 prevtags = fp.read()
202 prevtags = fp.read()
203
203
204 # committed tags are stored in UTF-8
204 # committed tags are stored in UTF-8
205 writetags(fp, names, encoding.fromlocal, prevtags)
205 writetags(fp, names, encoding.fromlocal, prevtags)
206
206
207 if '.hgtags' not in self.dirstate:
207 if '.hgtags' not in self.dirstate:
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 m = match_.exact(self.root, '', ['.hgtags'])
210 m = matchmod.exact(self.root, '', ['.hgtags'])
211 tagnode = self.commit(message, user, date, extra=extra, match=m)
211 tagnode = self.commit(message, user, date, extra=extra, match=m)
212
212
213 for name in names:
213 for name in names:
214 self.hook('tag', node=hex(node), tag=name, local=local)
214 self.hook('tag', node=hex(node), tag=name, local=local)
215
215
216 return tagnode
216 return tagnode
217
217
218 def tag(self, names, node, message, local, user, date):
218 def tag(self, names, node, message, local, user, date):
219 '''tag a revision with one or more symbolic names.
219 '''tag a revision with one or more symbolic names.
220
220
221 names is a list of strings or, when adding a single tag, names may be a
221 names is a list of strings or, when adding a single tag, names may be a
222 string.
222 string.
223
223
224 if local is True, the tags are stored in a per-repository file.
224 if local is True, the tags are stored in a per-repository file.
225 otherwise, they are stored in the .hgtags file, and a new
225 otherwise, they are stored in the .hgtags file, and a new
226 changeset is committed with the change.
226 changeset is committed with the change.
227
227
228 keyword arguments:
228 keyword arguments:
229
229
230 local: whether to store tags in non-version-controlled file
230 local: whether to store tags in non-version-controlled file
231 (default False)
231 (default False)
232
232
233 message: commit message to use if committing
233 message: commit message to use if committing
234
234
235 user: name of user to use if committing
235 user: name of user to use if committing
236
236
237 date: date tuple to use if committing'''
237 date: date tuple to use if committing'''
238
238
239 for x in self.status()[:5]:
239 for x in self.status()[:5]:
240 if '.hgtags' in x:
240 if '.hgtags' in x:
241 raise util.Abort(_('working copy of .hgtags is changed '
241 raise util.Abort(_('working copy of .hgtags is changed '
242 '(please commit .hgtags manually)'))
242 '(please commit .hgtags manually)'))
243
243
244 self.tags() # instantiate the cache
244 self.tags() # instantiate the cache
245 self._tag(names, node, message, local, user, date)
245 self._tag(names, node, message, local, user, date)
246
246
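# A hedged usage sketch for tag() as documented above; the tag name, message
# and user are hypothetical, and date=None lets the commit use the current
# date:
#
#   node = repo['tip'].node()
#   repo.tag('v1.0', node, 'Added tag v1.0', False,
#            'Example User <user@example.com>', None)
#
# With local=True the name is written to .hg/localtags instead and no
# changeset is committed.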
247 def tags(self):
247 def tags(self):
248 '''return a mapping of tag to node'''
248 '''return a mapping of tag to node'''
249 if self._tags is None:
249 if self._tags is None:
250 (self._tags, self._tagtypes) = self._findtags()
250 (self._tags, self._tagtypes) = self._findtags()
251
251
252 return self._tags
252 return self._tags
253
253
254 def _findtags(self):
254 def _findtags(self):
255 '''Do the hard work of finding tags. Return a pair of dicts
255 '''Do the hard work of finding tags. Return a pair of dicts
256 (tags, tagtypes) where tags maps tag name to node, and tagtypes
256 (tags, tagtypes) where tags maps tag name to node, and tagtypes
257 maps tag name to a string like \'global\' or \'local\'.
257 maps tag name to a string like \'global\' or \'local\'.
258 Subclasses or extensions are free to add their own tags, but
258 Subclasses or extensions are free to add their own tags, but
259 should be aware that the returned dicts will be retained for the
259 should be aware that the returned dicts will be retained for the
260 duration of the localrepo object.'''
260 duration of the localrepo object.'''
261
261
262 # XXX what tagtype should subclasses/extensions use? Currently
262 # XXX what tagtype should subclasses/extensions use? Currently
263 # mq and bookmarks add tags, but do not set the tagtype at all.
263 # mq and bookmarks add tags, but do not set the tagtype at all.
264 # Should each extension invent its own tag type? Should there
264 # Should each extension invent its own tag type? Should there
265 # be one tagtype for all such "virtual" tags? Or is the status
265 # be one tagtype for all such "virtual" tags? Or is the status
266 # quo fine?
266 # quo fine?
267
267
268 alltags = {} # map tag name to (node, hist)
268 alltags = {} # map tag name to (node, hist)
269 tagtypes = {}
269 tagtypes = {}
270
270
271 tags_.findglobaltags(self.ui, self, alltags, tagtypes)
271 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
272 tags_.readlocaltags(self.ui, self, alltags, tagtypes)
272 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
273
273
274 # Build the return dicts. Have to re-encode tag names because
274 # Build the return dicts. Have to re-encode tag names because
275 # the tags module always uses UTF-8 (in order not to lose info
275 # the tags module always uses UTF-8 (in order not to lose info
276 # writing to the cache), but the rest of Mercurial wants them in
276 # writing to the cache), but the rest of Mercurial wants them in
277 # local encoding.
277 # local encoding.
278 tags = {}
278 tags = {}
279 for (name, (node, hist)) in alltags.iteritems():
279 for (name, (node, hist)) in alltags.iteritems():
280 if node != nullid:
280 if node != nullid:
281 tags[encoding.tolocal(name)] = node
281 tags[encoding.tolocal(name)] = node
282 tags['tip'] = self.changelog.tip()
282 tags['tip'] = self.changelog.tip()
283 tagtypes = dict([(encoding.tolocal(name), value)
283 tagtypes = dict([(encoding.tolocal(name), value)
284 for (name, value) in tagtypes.iteritems()])
284 for (name, value) in tagtypes.iteritems()])
285 return (tags, tagtypes)
285 return (tags, tagtypes)
286
286
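The docstring above invites subclasses and extensions to add their own entries from _findtags(); a minimal sketch of what such an override could look like, assuming a hypothetical localrepository subclass and an invented 'virtual' tag type (neither is part of this changeset)::

    class annotatedrepo(localrepository):
        def _findtags(self):
            # start from the stock global + local tag discovery
            tags, tagtypes = localrepository._findtags(self)
            # add one synthetic tag; keys here are expected in UTF-8,
            # callers re-encode them to the local encoding
            tags['auto/tip'] = self.changelog.tip()
            tagtypes['auto/tip'] = 'virtual'
            return tags, tagtypes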
287 def tagtype(self, tagname):
287 def tagtype(self, tagname):
288 '''
288 '''
289 return the type of the given tag. result can be:
289 return the type of the given tag. result can be:
290
290
291 'local' : a local tag
291 'local' : a local tag
292 'global' : a global tag
292 'global' : a global tag
293 None : tag does not exist
293 None : tag does not exist
294 '''
294 '''
295
295
296 self.tags()
296 self.tags()
297
297
298 return self._tagtypes.get(tagname)
298 return self._tagtypes.get(tagname)
299
299
300 def tagslist(self):
300 def tagslist(self):
301 '''return a list of tags ordered by revision'''
301 '''return a list of tags ordered by revision'''
302 l = []
302 l = []
303 for t, n in self.tags().iteritems():
303 for t, n in self.tags().iteritems():
304 try:
304 try:
305 r = self.changelog.rev(n)
305 r = self.changelog.rev(n)
306 except:
306 except:
307 r = -2 # sort to the beginning of the list if unknown
307 r = -2 # sort to the beginning of the list if unknown
308 l.append((r, t, n))
308 l.append((r, t, n))
309 return [(t, n) for r, t, n in sorted(l)]
309 return [(t, n) for r, t, n in sorted(l)]
310
310
311 def nodetags(self, node):
311 def nodetags(self, node):
312 '''return the tags associated with a node'''
312 '''return the tags associated with a node'''
313 if not self.nodetagscache:
313 if not self.nodetagscache:
314 self.nodetagscache = {}
314 self.nodetagscache = {}
315 for t, n in self.tags().iteritems():
315 for t, n in self.tags().iteritems():
316 self.nodetagscache.setdefault(n, []).append(t)
316 self.nodetagscache.setdefault(n, []).append(t)
317 return self.nodetagscache.get(node, [])
317 return self.nodetagscache.get(node, [])
318
318
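tags(), tagtype(), tagslist() and nodetags() defined above make up the read-only tag API; a small usage sketch, where opening the repository through hg.repository() is an assumption about the caller side rather than anything this file defines, and the tag name '1.0' is only illustrative::

    from mercurial import ui, hg
    from mercurial.node import hex

    repo = hg.repository(ui.ui(), '.')
    node = repo.tags().get('1.0')         # tag name -> changelog node
    if node is not None:
        kind = repo.tagtype('1.0')        # 'global', 'local' or None
        names = repo.nodetags(node)       # every tag attached to that node
    for name, n in repo.tagslist():       # ordered by revision
        repo.ui.write('%s %s\n' % (hex(n), name))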
319 def _branchtags(self, partial, lrev):
319 def _branchtags(self, partial, lrev):
320 # TODO: rename this function?
320 # TODO: rename this function?
321 tiprev = len(self) - 1
321 tiprev = len(self) - 1
322 if lrev != tiprev:
322 if lrev != tiprev:
323 self._updatebranchcache(partial, lrev + 1, tiprev + 1)
323 self._updatebranchcache(partial, lrev + 1, tiprev + 1)
324 self._writebranchcache(partial, self.changelog.tip(), tiprev)
324 self._writebranchcache(partial, self.changelog.tip(), tiprev)
325
325
326 return partial
326 return partial
327
327
328 def branchmap(self):
328 def branchmap(self):
329 '''returns a dictionary {branch: [branchheads]}'''
329 '''returns a dictionary {branch: [branchheads]}'''
330 tip = self.changelog.tip()
330 tip = self.changelog.tip()
331 if self._branchcache is not None and self._branchcachetip == tip:
331 if self._branchcache is not None and self._branchcachetip == tip:
332 return self._branchcache
332 return self._branchcache
333
333
334 oldtip = self._branchcachetip
334 oldtip = self._branchcachetip
335 self._branchcachetip = tip
335 self._branchcachetip = tip
336 if oldtip is None or oldtip not in self.changelog.nodemap:
336 if oldtip is None or oldtip not in self.changelog.nodemap:
337 partial, last, lrev = self._readbranchcache()
337 partial, last, lrev = self._readbranchcache()
338 else:
338 else:
339 lrev = self.changelog.rev(oldtip)
339 lrev = self.changelog.rev(oldtip)
340 partial = self._branchcache
340 partial = self._branchcache
341
341
342 self._branchtags(partial, lrev)
342 self._branchtags(partial, lrev)
343 # this private cache holds all heads (not just tips)
343 # this private cache holds all heads (not just tips)
344 self._branchcache = partial
344 self._branchcache = partial
345
345
346 return self._branchcache
346 return self._branchcache
347
347
348 def branchtags(self):
348 def branchtags(self):
349 '''return a dict where branch names map to the tipmost head of
349 '''return a dict where branch names map to the tipmost head of
350 the branch, open heads come before closed'''
350 the branch, open heads come before closed'''
351 bt = {}
351 bt = {}
352 for bn, heads in self.branchmap().iteritems():
352 for bn, heads in self.branchmap().iteritems():
353 tip = heads[-1]
353 tip = heads[-1]
354 for h in reversed(heads):
354 for h in reversed(heads):
355 if 'close' not in self.changelog.read(h)[5]:
355 if 'close' not in self.changelog.read(h)[5]:
356 tip = h
356 tip = h
357 break
357 break
358 bt[bn] = tip
358 bt[bn] = tip
359 return bt
359 return bt
360
360
361
361
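branchmap() returns every head of every branch (ordered oldest to newest), while branchtags() keeps a single head per branch, preferring the newest head whose changeset is not marked closed and falling back to the newest head when all of them are. A quick sketch of the relationship, given a localrepository instance repo as in the previous example::

    heads_by_branch = repo.branchmap()   # {'default': [node, ...], ...}
    tip_by_branch = repo.branchtags()    # {'default': node, ...}
    for branch, heads in heads_by_branch.iteritems():
        # the selected tip is the newest head not marked closed, or the
        # newest head if every head of the branch is closed
        assert tip_by_branch[branch] in heads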
362 def _readbranchcache(self):
362 def _readbranchcache(self):
363 partial = {}
363 partial = {}
364 try:
364 try:
365 f = self.opener("branchheads.cache")
365 f = self.opener("branchheads.cache")
366 lines = f.read().split('\n')
366 lines = f.read().split('\n')
367 f.close()
367 f.close()
368 except (IOError, OSError):
368 except (IOError, OSError):
369 return {}, nullid, nullrev
369 return {}, nullid, nullrev
370
370
371 try:
371 try:
372 last, lrev = lines.pop(0).split(" ", 1)
372 last, lrev = lines.pop(0).split(" ", 1)
373 last, lrev = bin(last), int(lrev)
373 last, lrev = bin(last), int(lrev)
374 if lrev >= len(self) or self[lrev].node() != last:
374 if lrev >= len(self) or self[lrev].node() != last:
375 # invalidate the cache
375 # invalidate the cache
376 raise ValueError('invalidating branch cache (tip differs)')
376 raise ValueError('invalidating branch cache (tip differs)')
377 for l in lines:
377 for l in lines:
378 if not l:
378 if not l:
379 continue
379 continue
380 node, label = l.split(" ", 1)
380 node, label = l.split(" ", 1)
381 partial.setdefault(label.strip(), []).append(bin(node))
381 partial.setdefault(label.strip(), []).append(bin(node))
382 except KeyboardInterrupt:
382 except KeyboardInterrupt:
383 raise
383 raise
384 except Exception, inst:
384 except Exception, inst:
385 if self.ui.debugflag:
385 if self.ui.debugflag:
386 self.ui.warn(str(inst), '\n')
386 self.ui.warn(str(inst), '\n')
387 partial, last, lrev = {}, nullid, nullrev
387 partial, last, lrev = {}, nullid, nullrev
388 return partial, last, lrev
388 return partial, last, lrev
389
389
390 def _writebranchcache(self, branches, tip, tiprev):
390 def _writebranchcache(self, branches, tip, tiprev):
391 try:
391 try:
392 f = self.opener("branchheads.cache", "w", atomictemp=True)
392 f = self.opener("branchheads.cache", "w", atomictemp=True)
393 f.write("%s %s\n" % (hex(tip), tiprev))
393 f.write("%s %s\n" % (hex(tip), tiprev))
394 for label, nodes in branches.iteritems():
394 for label, nodes in branches.iteritems():
395 for node in nodes:
395 for node in nodes:
396 f.write("%s %s\n" % (hex(node), label))
396 f.write("%s %s\n" % (hex(node), label))
397 f.rename()
397 f.rename()
398 except (IOError, OSError):
398 except (IOError, OSError):
399 pass
399 pass
400
400
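_readbranchcache() and _writebranchcache() above agree on a small text format for .hg/branchheads.cache: a header naming the cached tip and its revision, then one "node branch" pair per head, repeating the branch name when a branch has several heads. Schematically (real entries use full 40-character hex nodes)::

    <tip node hex> <tip rev>
    <head node hex> default
    <head node hex> default
    <head node hex> stable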
401 def _updatebranchcache(self, partial, start, end):
401 def _updatebranchcache(self, partial, start, end):
402 # collect new branch entries
402 # collect new branch entries
403 newbranches = {}
403 newbranches = {}
404 for r in xrange(start, end):
404 for r in xrange(start, end):
405 c = self[r]
405 c = self[r]
406 newbranches.setdefault(c.branch(), []).append(c.node())
406 newbranches.setdefault(c.branch(), []).append(c.node())
407 # if older branchheads are reachable from new ones, they aren't
407 # if older branchheads are reachable from new ones, they aren't
408 # really branchheads. Note checking parents is insufficient:
408 # really branchheads. Note checking parents is insufficient:
409 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
409 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
410 for branch, newnodes in newbranches.iteritems():
410 for branch, newnodes in newbranches.iteritems():
411 bheads = partial.setdefault(branch, [])
411 bheads = partial.setdefault(branch, [])
412 bheads.extend(newnodes)
412 bheads.extend(newnodes)
413 if len(bheads) < 2:
413 if len(bheads) < 2:
414 continue
414 continue
415 newbheads = []
415 newbheads = []
416 # starting from tip means fewer passes over reachable
416 # starting from tip means fewer passes over reachable
417 while newnodes:
417 while newnodes:
418 latest = newnodes.pop()
418 latest = newnodes.pop()
419 if latest not in bheads:
419 if latest not in bheads:
420 continue
420 continue
421 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
421 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
422 reachable = self.changelog.reachable(latest, minbhrev)
422 reachable = self.changelog.reachable(latest, minbhrev)
423 bheads = [b for b in bheads if b not in reachable]
423 bheads = [b for b in bheads if b not in reachable]
424 newbheads.insert(0, latest)
424 newbheads.insert(0, latest)
425 bheads.extend(newbheads)
425 bheads.extend(newbheads)
426 partial[branch] = bheads
426 partial[branch] = bheads
427
427
428 def lookup(self, key):
428 def lookup(self, key):
429 if isinstance(key, int):
429 if isinstance(key, int):
430 return self.changelog.node(key)
430 return self.changelog.node(key)
431 elif key == '.':
431 elif key == '.':
432 return self.dirstate.parents()[0]
432 return self.dirstate.parents()[0]
433 elif key == 'null':
433 elif key == 'null':
434 return nullid
434 return nullid
435 elif key == 'tip':
435 elif key == 'tip':
436 return self.changelog.tip()
436 return self.changelog.tip()
437 n = self.changelog._match(key)
437 n = self.changelog._match(key)
438 if n:
438 if n:
439 return n
439 return n
440 if key in self.tags():
440 if key in self.tags():
441 return self.tags()[key]
441 return self.tags()[key]
442 if key in self.branchtags():
442 if key in self.branchtags():
443 return self.branchtags()[key]
443 return self.branchtags()[key]
444 n = self.changelog._partialmatch(key)
444 n = self.changelog._partialmatch(key)
445 if n:
445 if n:
446 return n
446 return n
447
447
448 # can't find key, check if it might have come from damaged dirstate
448 # can't find key, check if it might have come from damaged dirstate
449 if key in self.dirstate.parents():
449 if key in self.dirstate.parents():
450 raise error.Abort(_("working directory has unknown parent '%s'!")
450 raise error.Abort(_("working directory has unknown parent '%s'!")
451 % short(key))
451 % short(key))
452 try:
452 try:
453 if len(key) == 20:
453 if len(key) == 20:
454 key = hex(key)
454 key = hex(key)
455 except:
455 except:
456 pass
456 pass
457 raise error.RepoLookupError(_("unknown revision '%s'") % key)
457 raise error.RepoLookupError(_("unknown revision '%s'") % key)
458
458
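lookup() resolves its argument in a fixed order: an integer revision, the special names '.', 'null' and 'tip', an exact node known to the changelog, a tag, a branch tip, and finally an unambiguous hex prefix; anything else raises RepoLookupError. A sketch, assuming a non-empty repository with the usual default branch::

    from mercurial import ui, hg, error
    from mercurial.node import hex

    repo = hg.repository(ui.ui(), '.')
    for key in (0, '.', 'tip', 'default', 'no-such-rev'):
        try:
            repo.ui.write('%s -> %s\n' % (key, hex(repo.lookup(key))))
        except error.RepoLookupError:
            repo.ui.write('%s: unknown revision\n' % key)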
459 def local(self):
459 def local(self):
460 return True
460 return True
461
461
462 def join(self, f):
462 def join(self, f):
463 return os.path.join(self.path, f)
463 return os.path.join(self.path, f)
464
464
465 def wjoin(self, f):
465 def wjoin(self, f):
466 return os.path.join(self.root, f)
466 return os.path.join(self.root, f)
467
467
468 def rjoin(self, f):
468 def rjoin(self, f):
469 return os.path.join(self.root, util.pconvert(f))
469 return os.path.join(self.root, util.pconvert(f))
470
470
471 def file(self, f):
471 def file(self, f):
472 if f[0] == '/':
472 if f[0] == '/':
473 f = f[1:]
473 f = f[1:]
474 return filelog.filelog(self.sopener, f)
474 return filelog.filelog(self.sopener, f)
475
475
476 def changectx(self, changeid):
476 def changectx(self, changeid):
477 return self[changeid]
477 return self[changeid]
478
478
479 def parents(self, changeid=None):
479 def parents(self, changeid=None):
480 '''get list of changectxs for parents of changeid'''
480 '''get list of changectxs for parents of changeid'''
481 return self[changeid].parents()
481 return self[changeid].parents()
482
482
483 def filectx(self, path, changeid=None, fileid=None):
483 def filectx(self, path, changeid=None, fileid=None):
484 """changeid can be a changeset revision, node, or tag.
484 """changeid can be a changeset revision, node, or tag.
485 fileid can be a file revision or node."""
485 fileid can be a file revision or node."""
486 return context.filectx(self, path, changeid, fileid)
486 return context.filectx(self, path, changeid, fileid)
487
487
488 def getcwd(self):
488 def getcwd(self):
489 return self.dirstate.getcwd()
489 return self.dirstate.getcwd()
490
490
491 def pathto(self, f, cwd=None):
491 def pathto(self, f, cwd=None):
492 return self.dirstate.pathto(f, cwd)
492 return self.dirstate.pathto(f, cwd)
493
493
494 def wfile(self, f, mode='r'):
494 def wfile(self, f, mode='r'):
495 return self.wopener(f, mode)
495 return self.wopener(f, mode)
496
496
497 def _link(self, f):
497 def _link(self, f):
498 return os.path.islink(self.wjoin(f))
498 return os.path.islink(self.wjoin(f))
499
499
500 def _filter(self, filter, filename, data):
500 def _filter(self, filter, filename, data):
501 if filter not in self.filterpats:
501 if filter not in self.filterpats:
502 l = []
502 l = []
503 for pat, cmd in self.ui.configitems(filter):
503 for pat, cmd in self.ui.configitems(filter):
504 if cmd == '!':
504 if cmd == '!':
505 continue
505 continue
506 mf = match_.match(self.root, '', [pat])
506 mf = matchmod.match(self.root, '', [pat])
507 fn = None
507 fn = None
508 params = cmd
508 params = cmd
509 for name, filterfn in self._datafilters.iteritems():
509 for name, filterfn in self._datafilters.iteritems():
510 if cmd.startswith(name):
510 if cmd.startswith(name):
511 fn = filterfn
511 fn = filterfn
512 params = cmd[len(name):].lstrip()
512 params = cmd[len(name):].lstrip()
513 break
513 break
514 if not fn:
514 if not fn:
515 fn = lambda s, c, **kwargs: util.filter(s, c)
515 fn = lambda s, c, **kwargs: util.filter(s, c)
516 # Wrap old filters not supporting keyword arguments
516 # Wrap old filters not supporting keyword arguments
517 if not inspect.getargspec(fn)[2]:
517 if not inspect.getargspec(fn)[2]:
518 oldfn = fn
518 oldfn = fn
519 fn = lambda s, c, **kwargs: oldfn(s, c)
519 fn = lambda s, c, **kwargs: oldfn(s, c)
520 l.append((mf, fn, params))
520 l.append((mf, fn, params))
521 self.filterpats[filter] = l
521 self.filterpats[filter] = l
522
522
523 for mf, fn, cmd in self.filterpats[filter]:
523 for mf, fn, cmd in self.filterpats[filter]:
524 if mf(filename):
524 if mf(filename):
525 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
525 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
526 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
526 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
527 break
527 break
528
528
529 return data
529 return data
530
530
531 def adddatafilter(self, name, filter):
531 def adddatafilter(self, name, filter):
532 self._datafilters[name] = filter
532 self._datafilters[name] = filter
533
533
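_filter() above implements the [encode] and [decode] configuration sections: each pattern maps either to a shell command (run through util.filter) or, when the configured value starts with a name registered through adddatafilter(), to an in-process Python filter. A minimal sketch of registering one on an existing localrepository instance repo; the filter name 'upper:' and its behaviour are invented for illustration::

    def upperfilter(s, cmd, **kwargs):
        # receives the file data and the configured command string;
        # _filter also passes ui, repo and filename as keyword arguments
        return s.upper()

    repo.adddatafilter('upper:', upperfilter)
    # with a configuration entry such as
    #   [encode]
    #   **.txt = upper:
    # wread() will now run .txt data through upperfilter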
534 def wread(self, filename):
534 def wread(self, filename):
535 if self._link(filename):
535 if self._link(filename):
536 data = os.readlink(self.wjoin(filename))
536 data = os.readlink(self.wjoin(filename))
537 else:
537 else:
538 data = self.wopener(filename, 'r').read()
538 data = self.wopener(filename, 'r').read()
539 return self._filter("encode", filename, data)
539 return self._filter("encode", filename, data)
540
540
541 def wwrite(self, filename, data, flags):
541 def wwrite(self, filename, data, flags):
542 data = self._filter("decode", filename, data)
542 data = self._filter("decode", filename, data)
543 try:
543 try:
544 os.unlink(self.wjoin(filename))
544 os.unlink(self.wjoin(filename))
545 except OSError:
545 except OSError:
546 pass
546 pass
547 if 'l' in flags:
547 if 'l' in flags:
548 self.wopener.symlink(data, filename)
548 self.wopener.symlink(data, filename)
549 else:
549 else:
550 self.wopener(filename, 'w').write(data)
550 self.wopener(filename, 'w').write(data)
551 if 'x' in flags:
551 if 'x' in flags:
552 util.set_flags(self.wjoin(filename), False, True)
552 util.set_flags(self.wjoin(filename), False, True)
553
553
554 def wwritedata(self, filename, data):
554 def wwritedata(self, filename, data):
555 return self._filter("decode", filename, data)
555 return self._filter("decode", filename, data)
556
556
557 def transaction(self):
557 def transaction(self):
558 tr = self._transref and self._transref() or None
558 tr = self._transref and self._transref() or None
559 if tr and tr.running():
559 if tr and tr.running():
560 return tr.nest()
560 return tr.nest()
561
561
562 # abort here if the journal already exists
562 # abort here if the journal already exists
563 if os.path.exists(self.sjoin("journal")):
563 if os.path.exists(self.sjoin("journal")):
564 raise error.RepoError(
564 raise error.RepoError(
565 _("abandoned transaction found - run hg recover"))
565 _("abandoned transaction found - run hg recover"))
566
566
567 # save dirstate for rollback
567 # save dirstate for rollback
568 try:
568 try:
569 ds = self.opener("dirstate").read()
569 ds = self.opener("dirstate").read()
570 except IOError:
570 except IOError:
571 ds = ""
571 ds = ""
572 self.opener("journal.dirstate", "w").write(ds)
572 self.opener("journal.dirstate", "w").write(ds)
573 self.opener("journal.branch", "w").write(self.dirstate.branch())
573 self.opener("journal.branch", "w").write(self.dirstate.branch())
574
574
575 renames = [(self.sjoin("journal"), self.sjoin("undo")),
575 renames = [(self.sjoin("journal"), self.sjoin("undo")),
576 (self.join("journal.dirstate"), self.join("undo.dirstate")),
576 (self.join("journal.dirstate"), self.join("undo.dirstate")),
577 (self.join("journal.branch"), self.join("undo.branch"))]
577 (self.join("journal.branch"), self.join("undo.branch"))]
578 tr = transaction.transaction(self.ui.warn, self.sopener,
578 tr = transaction.transaction(self.ui.warn, self.sopener,
579 self.sjoin("journal"),
579 self.sjoin("journal"),
580 aftertrans(renames),
580 aftertrans(renames),
581 self.store.createmode)
581 self.store.createmode)
582 self._transref = weakref.ref(tr)
582 self._transref = weakref.ref(tr)
583 return tr
583 return tr
584
584
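transaction() either nests into an already running transaction or opens a new journal, stashing the dirstate and branch as journal.* so that rollback() can restore them later. The calling convention used by commitctx() further down is, roughly::

    lock = repo.lock()
    try:
        tr = repo.transaction()
        try:
            # ... add revisions to the revlogs through tr ...
            tr.close()      # success: the journal becomes the undo data
        finally:
            del tr          # dropping an unclosed transaction aborts it
    finally:
        lock.release()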
585 def recover(self):
585 def recover(self):
586 lock = self.lock()
586 lock = self.lock()
587 try:
587 try:
588 if os.path.exists(self.sjoin("journal")):
588 if os.path.exists(self.sjoin("journal")):
589 self.ui.status(_("rolling back interrupted transaction\n"))
589 self.ui.status(_("rolling back interrupted transaction\n"))
590 transaction.rollback(self.sopener, self.sjoin("journal"),
590 transaction.rollback(self.sopener, self.sjoin("journal"),
591 self.ui.warn)
591 self.ui.warn)
592 self.invalidate()
592 self.invalidate()
593 return True
593 return True
594 else:
594 else:
595 self.ui.warn(_("no interrupted transaction available\n"))
595 self.ui.warn(_("no interrupted transaction available\n"))
596 return False
596 return False
597 finally:
597 finally:
598 lock.release()
598 lock.release()
599
599
600 def rollback(self):
600 def rollback(self):
601 wlock = lock = None
601 wlock = lock = None
602 try:
602 try:
603 wlock = self.wlock()
603 wlock = self.wlock()
604 lock = self.lock()
604 lock = self.lock()
605 if os.path.exists(self.sjoin("undo")):
605 if os.path.exists(self.sjoin("undo")):
606 self.ui.status(_("rolling back last transaction\n"))
606 self.ui.status(_("rolling back last transaction\n"))
607 transaction.rollback(self.sopener, self.sjoin("undo"),
607 transaction.rollback(self.sopener, self.sjoin("undo"),
608 self.ui.warn)
608 self.ui.warn)
609 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
609 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
610 try:
610 try:
611 branch = self.opener("undo.branch").read()
611 branch = self.opener("undo.branch").read()
612 self.dirstate.setbranch(branch)
612 self.dirstate.setbranch(branch)
613 except IOError:
613 except IOError:
614 self.ui.warn(_("Named branch could not be reset, "
614 self.ui.warn(_("Named branch could not be reset, "
615 "current branch still is: %s\n")
615 "current branch still is: %s\n")
616 % encoding.tolocal(self.dirstate.branch()))
616 % encoding.tolocal(self.dirstate.branch()))
617 self.invalidate()
617 self.invalidate()
618 self.dirstate.invalidate()
618 self.dirstate.invalidate()
619 self.destroyed()
619 self.destroyed()
620 else:
620 else:
621 self.ui.warn(_("no rollback information available\n"))
621 self.ui.warn(_("no rollback information available\n"))
622 finally:
622 finally:
623 release(lock, wlock)
623 release(lock, wlock)
624
624
625 def invalidatecaches(self):
625 def invalidatecaches(self):
626 self._tags = None
626 self._tags = None
627 self._tagtypes = None
627 self._tagtypes = None
628 self.nodetagscache = None
628 self.nodetagscache = None
629 self._branchcache = None # in UTF-8
629 self._branchcache = None # in UTF-8
630 self._branchcachetip = None
630 self._branchcachetip = None
631
631
632 def invalidate(self):
632 def invalidate(self):
633 for a in "changelog manifest".split():
633 for a in "changelog manifest".split():
634 if a in self.__dict__:
634 if a in self.__dict__:
635 delattr(self, a)
635 delattr(self, a)
636 self.invalidatecaches()
636 self.invalidatecaches()
637
637
638 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
639 try:
639 try:
640 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 l = lock.lock(lockname, 0, releasefn, desc=desc)
641 except error.LockHeld, inst:
641 except error.LockHeld, inst:
642 if not wait:
642 if not wait:
643 raise
643 raise
644 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 self.ui.warn(_("waiting for lock on %s held by %r\n") %
645 (desc, inst.locker))
645 (desc, inst.locker))
646 # default to 600 seconds timeout
646 # default to 600 seconds timeout
647 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
648 releasefn, desc=desc)
648 releasefn, desc=desc)
649 if acquirefn:
649 if acquirefn:
650 acquirefn()
650 acquirefn()
651 return l
651 return l
652
652
653 def lock(self, wait=True):
653 def lock(self, wait=True):
654 '''Lock the repository store (.hg/store) and return a weak reference
654 '''Lock the repository store (.hg/store) and return a weak reference
655 to the lock. Use this before modifying the store (e.g. committing or
655 to the lock. Use this before modifying the store (e.g. committing or
656 stripping). If you are opening a transaction, get a lock as well.'''
656 stripping). If you are opening a transaction, get a lock as well.'''
657 l = self._lockref and self._lockref()
657 l = self._lockref and self._lockref()
658 if l is not None and l.held:
658 if l is not None and l.held:
659 l.lock()
659 l.lock()
660 return l
660 return l
661
661
662 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
662 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
663 _('repository %s') % self.origroot)
663 _('repository %s') % self.origroot)
664 self._lockref = weakref.ref(l)
664 self._lockref = weakref.ref(l)
665 return l
665 return l
666
666
667 def wlock(self, wait=True):
667 def wlock(self, wait=True):
668 '''Lock the non-store parts of the repository (everything under
668 '''Lock the non-store parts of the repository (everything under
669 .hg except .hg/store) and return a weak reference to the lock.
669 .hg except .hg/store) and return a weak reference to the lock.
670 Use this before modifying files in .hg.'''
670 Use this before modifying files in .hg.'''
671 l = self._wlockref and self._wlockref()
671 l = self._wlockref and self._wlockref()
672 if l is not None and l.held:
672 if l is not None and l.held:
673 l.lock()
673 l.lock()
674 return l
674 return l
675
675
676 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
676 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
677 self.dirstate.invalidate, _('working directory of %s') %
677 self.dirstate.invalidate, _('working directory of %s') %
678 self.origroot)
678 self.origroot)
679 self._wlockref = weakref.ref(l)
679 self._wlockref = weakref.ref(l)
680 return l
680 return l
681
681
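wlock() protects the working-copy side of .hg while lock() protects the store; code that needs both takes the working-directory lock first, as commit() below and rollback() above do. The conventional shape, using the release() helper this file already relies on::

    wlock = lock = None
    try:
        wlock = repo.wlock()    # dirstate and working copy metadata
        lock = repo.lock()      # .hg/store
        # ... modify the store and the dirstate ...
    finally:
        release(lock, wlock)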
682 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
682 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
683 """
683 """
684 commit an individual file as part of a larger transaction
684 commit an individual file as part of a larger transaction
685 """
685 """
686
686
687 fname = fctx.path()
687 fname = fctx.path()
688 text = fctx.data()
688 text = fctx.data()
689 flog = self.file(fname)
689 flog = self.file(fname)
690 fparent1 = manifest1.get(fname, nullid)
690 fparent1 = manifest1.get(fname, nullid)
691 fparent2 = fparent2o = manifest2.get(fname, nullid)
691 fparent2 = fparent2o = manifest2.get(fname, nullid)
692
692
693 meta = {}
693 meta = {}
694 copy = fctx.renamed()
694 copy = fctx.renamed()
695 if copy and copy[0] != fname:
695 if copy and copy[0] != fname:
696 # Mark the new revision of this file as a copy of another
696 # Mark the new revision of this file as a copy of another
697 # file. This copy data will effectively act as a parent
697 # file. This copy data will effectively act as a parent
698 # of this new revision. If this is a merge, the first
698 # of this new revision. If this is a merge, the first
699 # parent will be the nullid (meaning "look up the copy data")
699 # parent will be the nullid (meaning "look up the copy data")
700 # and the second one will be the other parent. For example:
700 # and the second one will be the other parent. For example:
701 #
701 #
702 # 0 --- 1 --- 3 rev1 changes file foo
702 # 0 --- 1 --- 3 rev1 changes file foo
703 # \ / rev2 renames foo to bar and changes it
703 # \ / rev2 renames foo to bar and changes it
704 # \- 2 -/ rev3 should have bar with all changes and
704 # \- 2 -/ rev3 should have bar with all changes and
705 # should record that bar descends from
705 # should record that bar descends from
706 # bar in rev2 and foo in rev1
706 # bar in rev2 and foo in rev1
707 #
707 #
708 # this allows this merge to succeed:
708 # this allows this merge to succeed:
709 #
709 #
710 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
710 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
711 # \ / merging rev3 and rev4 should use bar@rev2
711 # \ / merging rev3 and rev4 should use bar@rev2
712 # \- 2 --- 4 as the merge base
712 # \- 2 --- 4 as the merge base
713 #
713 #
714
714
715 cfname = copy[0]
715 cfname = copy[0]
716 crev = manifest1.get(cfname)
716 crev = manifest1.get(cfname)
717 newfparent = fparent2
717 newfparent = fparent2
718
718
719 if manifest2: # branch merge
719 if manifest2: # branch merge
720 if fparent2 == nullid or crev is None: # copied on remote side
720 if fparent2 == nullid or crev is None: # copied on remote side
721 if cfname in manifest2:
721 if cfname in manifest2:
722 crev = manifest2[cfname]
722 crev = manifest2[cfname]
723 newfparent = fparent1
723 newfparent = fparent1
724
724
725 # find source in nearest ancestor if we've lost track
725 # find source in nearest ancestor if we've lost track
726 if not crev:
726 if not crev:
727 self.ui.debug(" %s: searching for copy revision for %s\n" %
727 self.ui.debug(" %s: searching for copy revision for %s\n" %
728 (fname, cfname))
728 (fname, cfname))
729 for ancestor in self['.'].ancestors():
729 for ancestor in self['.'].ancestors():
730 if cfname in ancestor:
730 if cfname in ancestor:
731 crev = ancestor[cfname].filenode()
731 crev = ancestor[cfname].filenode()
732 break
732 break
733
733
734 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
734 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
735 meta["copy"] = cfname
735 meta["copy"] = cfname
736 meta["copyrev"] = hex(crev)
736 meta["copyrev"] = hex(crev)
737 fparent1, fparent2 = nullid, newfparent
737 fparent1, fparent2 = nullid, newfparent
738 elif fparent2 != nullid:
738 elif fparent2 != nullid:
739 # is one parent an ancestor of the other?
739 # is one parent an ancestor of the other?
740 fparentancestor = flog.ancestor(fparent1, fparent2)
740 fparentancestor = flog.ancestor(fparent1, fparent2)
741 if fparentancestor == fparent1:
741 if fparentancestor == fparent1:
742 fparent1, fparent2 = fparent2, nullid
742 fparent1, fparent2 = fparent2, nullid
743 elif fparentancestor == fparent2:
743 elif fparentancestor == fparent2:
744 fparent2 = nullid
744 fparent2 = nullid
745
745
746 # is the file changed?
746 # is the file changed?
747 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
747 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
748 changelist.append(fname)
748 changelist.append(fname)
749 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
749 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
750
750
751 # are just the flags changed during merge?
751 # are just the flags changed during merge?
752 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
752 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
753 changelist.append(fname)
753 changelist.append(fname)
754
754
755 return fparent1
755 return fparent1
756
756
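When _filecommit() stores a rename it does not record a second file parent; instead it writes copy metadata into the filelog entry and lets the source file stand in, exactly as the comment block above lays out. For the foo -> bar rename in that example the recorded metadata would look roughly like::

    meta = {
        'copy': 'foo',       # path the content was copied/renamed from
        'copyrev': '...',    # hex filenode of foo in rev1 (40 hex digits)
    }
    # and the file parents become (nullid, newfparent): the null first
    # parent means "consult the copy data above"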
757 def commit(self, text="", user=None, date=None, match=None, force=False,
757 def commit(self, text="", user=None, date=None, match=None, force=False,
758 editor=False, extra={}):
758 editor=False, extra={}):
759 """Add a new revision to current repository.
759 """Add a new revision to current repository.
760
760
761 Revision information is gathered from the working directory,
761 Revision information is gathered from the working directory,
762 match can be used to filter the committed files. If editor is
762 match can be used to filter the committed files. If editor is
763 supplied, it is called to get a commit message.
763 supplied, it is called to get a commit message.
764 """
764 """
765
765
766 def fail(f, msg):
766 def fail(f, msg):
767 raise util.Abort('%s: %s' % (f, msg))
767 raise util.Abort('%s: %s' % (f, msg))
768
768
769 if not match:
769 if not match:
770 match = match_.always(self.root, '')
770 match = matchmod.always(self.root, '')
771
771
772 if not force:
772 if not force:
773 vdirs = []
773 vdirs = []
774 match.dir = vdirs.append
774 match.dir = vdirs.append
775 match.bad = fail
775 match.bad = fail
776
776
777 wlock = self.wlock()
777 wlock = self.wlock()
778 try:
778 try:
779 p1, p2 = self.dirstate.parents()
779 p1, p2 = self.dirstate.parents()
780 wctx = self[None]
780 wctx = self[None]
781
781
782 if (not force and p2 != nullid and match and
782 if (not force and p2 != nullid and match and
783 (match.files() or match.anypats())):
783 (match.files() or match.anypats())):
784 raise util.Abort(_('cannot partially commit a merge '
784 raise util.Abort(_('cannot partially commit a merge '
785 '(do not specify files or patterns)'))
785 '(do not specify files or patterns)'))
786
786
787 changes = self.status(match=match, clean=force)
787 changes = self.status(match=match, clean=force)
788 if force:
788 if force:
789 changes[0].extend(changes[6]) # mq may commit unchanged files
789 changes[0].extend(changes[6]) # mq may commit unchanged files
790
790
791 # check subrepos
791 # check subrepos
792 subs = []
792 subs = []
793 removedsubs = set()
793 removedsubs = set()
794 for p in wctx.parents():
794 for p in wctx.parents():
795 removedsubs.update(s for s in p.substate if match(s))
795 removedsubs.update(s for s in p.substate if match(s))
796 for s in wctx.substate:
796 for s in wctx.substate:
797 removedsubs.discard(s)
797 removedsubs.discard(s)
798 if match(s) and wctx.sub(s).dirty():
798 if match(s) and wctx.sub(s).dirty():
799 subs.append(s)
799 subs.append(s)
800 if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
800 if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
801 changes[0].insert(0, '.hgsubstate')
801 changes[0].insert(0, '.hgsubstate')
802
802
803 # make sure all explicit patterns are matched
803 # make sure all explicit patterns are matched
804 if not force and match.files():
804 if not force and match.files():
805 matched = set(changes[0] + changes[1] + changes[2])
805 matched = set(changes[0] + changes[1] + changes[2])
806
806
807 for f in match.files():
807 for f in match.files():
808 if f == '.' or f in matched or f in wctx.substate:
808 if f == '.' or f in matched or f in wctx.substate:
809 continue
809 continue
810 if f in changes[3]: # missing
810 if f in changes[3]: # missing
811 fail(f, _('file not found!'))
811 fail(f, _('file not found!'))
812 if f in vdirs: # visited directory
812 if f in vdirs: # visited directory
813 d = f + '/'
813 d = f + '/'
814 for mf in matched:
814 for mf in matched:
815 if mf.startswith(d):
815 if mf.startswith(d):
816 break
816 break
817 else:
817 else:
818 fail(f, _("no match under directory!"))
818 fail(f, _("no match under directory!"))
819 elif f not in self.dirstate:
819 elif f not in self.dirstate:
820 fail(f, _("file not tracked!"))
820 fail(f, _("file not tracked!"))
821
821
822 if (not force and not extra.get("close") and p2 == nullid
822 if (not force and not extra.get("close") and p2 == nullid
823 and not (changes[0] or changes[1] or changes[2])
823 and not (changes[0] or changes[1] or changes[2])
824 and self[None].branch() == self['.'].branch()):
824 and self[None].branch() == self['.'].branch()):
825 return None
825 return None
826
826
827 ms = merge_.mergestate(self)
827 ms = mergemod.mergestate(self)
828 for f in changes[0]:
828 for f in changes[0]:
829 if f in ms and ms[f] == 'u':
829 if f in ms and ms[f] == 'u':
830 raise util.Abort(_("unresolved merge conflicts "
830 raise util.Abort(_("unresolved merge conflicts "
831 "(see hg resolve)"))
831 "(see hg resolve)"))
832
832
833 cctx = context.workingctx(self, (p1, p2), text, user, date,
833 cctx = context.workingctx(self, (p1, p2), text, user, date,
834 extra, changes)
834 extra, changes)
835 if editor:
835 if editor:
836 cctx._text = editor(self, cctx, subs)
836 cctx._text = editor(self, cctx, subs)
837 edited = (text != cctx._text)
837 edited = (text != cctx._text)
838
838
839 # commit subs
839 # commit subs
840 if subs or removedsubs:
840 if subs or removedsubs:
841 state = wctx.substate.copy()
841 state = wctx.substate.copy()
842 for s in subs:
842 for s in subs:
843 self.ui.status(_('committing subrepository %s\n') % s)
843 self.ui.status(_('committing subrepository %s\n') % s)
844 sr = wctx.sub(s).commit(cctx._text, user, date)
844 sr = wctx.sub(s).commit(cctx._text, user, date)
845 state[s] = (state[s][0], sr)
845 state[s] = (state[s][0], sr)
846 subrepo.writestate(self, state)
846 subrepo.writestate(self, state)
847
847
848 # Save commit message in case this transaction gets rolled back
848 # Save commit message in case this transaction gets rolled back
849 # (e.g. by a pretxncommit hook). Leave the content alone on
849 # (e.g. by a pretxncommit hook). Leave the content alone on
850 # the assumption that the user will use the same editor again.
850 # the assumption that the user will use the same editor again.
851 msgfile = self.opener('last-message.txt', 'wb')
851 msgfile = self.opener('last-message.txt', 'wb')
852 msgfile.write(cctx._text)
852 msgfile.write(cctx._text)
853 msgfile.close()
853 msgfile.close()
854
854
855 try:
855 try:
856 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
856 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
857 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
857 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
858 ret = self.commitctx(cctx, True)
858 ret = self.commitctx(cctx, True)
859 except:
859 except:
860 if edited:
860 if edited:
861 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
861 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
862 self.ui.write(
862 self.ui.write(
863 _('note: commit message saved in %s\n') % msgfn)
863 _('note: commit message saved in %s\n') % msgfn)
864 raise
864 raise
865
865
866 # update dirstate and mergestate
866 # update dirstate and mergestate
867 for f in changes[0] + changes[1]:
867 for f in changes[0] + changes[1]:
868 self.dirstate.normal(f)
868 self.dirstate.normal(f)
869 for f in changes[2]:
869 for f in changes[2]:
870 self.dirstate.forget(f)
870 self.dirstate.forget(f)
871 self.dirstate.setparents(ret)
871 self.dirstate.setparents(ret)
872 ms.reset()
872 ms.reset()
873 finally:
873 finally:
874 wlock.release()
874 wlock.release()
875
875
876 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
876 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
877 return ret
877 return ret
878
878
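A sketch of driving commit() directly, given a localrepository instance repo; the path 'src/foo.py', user name and message are placeholders. match narrows the set of committed files and editor, when given, is a callable returning the final commit message::

    from mercurial import match as matchmod

    m = matchmod.match(repo.root, '', ['src/foo.py'])
    node = repo.commit(text='fix foo', user='Some One <some@example.com>',
                       match=m)
    if node is None:
        repo.ui.write('nothing changed\n')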
879 def commitctx(self, ctx, error=False):
879 def commitctx(self, ctx, error=False):
880 """Add a new revision to current repository.
880 """Add a new revision to current repository.
881 Revision information is passed via the context argument.
881 Revision information is passed via the context argument.
882 """
882 """
883
883
884 tr = lock = None
884 tr = lock = None
885 removed = ctx.removed()
885 removed = ctx.removed()
886 p1, p2 = ctx.p1(), ctx.p2()
886 p1, p2 = ctx.p1(), ctx.p2()
887 m1 = p1.manifest().copy()
887 m1 = p1.manifest().copy()
888 m2 = p2.manifest()
888 m2 = p2.manifest()
889 user = ctx.user()
889 user = ctx.user()
890
890
891 lock = self.lock()
891 lock = self.lock()
892 try:
892 try:
893 tr = self.transaction()
893 tr = self.transaction()
894 trp = weakref.proxy(tr)
894 trp = weakref.proxy(tr)
895
895
896 # check in files
896 # check in files
897 new = {}
897 new = {}
898 changed = []
898 changed = []
899 linkrev = len(self)
899 linkrev = len(self)
900 for f in sorted(ctx.modified() + ctx.added()):
900 for f in sorted(ctx.modified() + ctx.added()):
901 self.ui.note(f + "\n")
901 self.ui.note(f + "\n")
902 try:
902 try:
903 fctx = ctx[f]
903 fctx = ctx[f]
904 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
904 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
905 changed)
905 changed)
906 m1.set(f, fctx.flags())
906 m1.set(f, fctx.flags())
907 except OSError, inst:
907 except OSError, inst:
908 self.ui.warn(_("trouble committing %s!\n") % f)
908 self.ui.warn(_("trouble committing %s!\n") % f)
909 raise
909 raise
910 except IOError, inst:
910 except IOError, inst:
911 errcode = getattr(inst, 'errno', errno.ENOENT)
911 errcode = getattr(inst, 'errno', errno.ENOENT)
912 if error or errcode and errcode != errno.ENOENT:
912 if error or errcode and errcode != errno.ENOENT:
913 self.ui.warn(_("trouble committing %s!\n") % f)
913 self.ui.warn(_("trouble committing %s!\n") % f)
914 raise
914 raise
915 else:
915 else:
916 removed.append(f)
916 removed.append(f)
917
917
918 # update manifest
918 # update manifest
919 m1.update(new)
919 m1.update(new)
920 removed = [f for f in sorted(removed) if f in m1 or f in m2]
920 removed = [f for f in sorted(removed) if f in m1 or f in m2]
921 drop = [f for f in removed if f in m1]
921 drop = [f for f in removed if f in m1]
922 for f in drop:
922 for f in drop:
923 del m1[f]
923 del m1[f]
924 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
924 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
925 p2.manifestnode(), (new, drop))
925 p2.manifestnode(), (new, drop))
926
926
927 # update changelog
927 # update changelog
928 self.changelog.delayupdate()
928 self.changelog.delayupdate()
929 n = self.changelog.add(mn, changed + removed, ctx.description(),
929 n = self.changelog.add(mn, changed + removed, ctx.description(),
930 trp, p1.node(), p2.node(),
930 trp, p1.node(), p2.node(),
931 user, ctx.date(), ctx.extra().copy())
931 user, ctx.date(), ctx.extra().copy())
932 p = lambda: self.changelog.writepending() and self.root or ""
932 p = lambda: self.changelog.writepending() and self.root or ""
933 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
933 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
934 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
934 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
935 parent2=xp2, pending=p)
935 parent2=xp2, pending=p)
936 self.changelog.finalize(trp)
936 self.changelog.finalize(trp)
937 tr.close()
937 tr.close()
938
938
939 if self._branchcache:
939 if self._branchcache:
940 self.branchtags()
940 self.branchtags()
941 return n
941 return n
942 finally:
942 finally:
943 del tr
943 del tr
944 lock.release()
944 lock.release()
945
945
946 def destroyed(self):
946 def destroyed(self):
947 '''Inform the repository that nodes have been destroyed.
947 '''Inform the repository that nodes have been destroyed.
948 Intended for use by strip and rollback, so there's a common
948 Intended for use by strip and rollback, so there's a common
949 place for anything that has to be done after destroying history.'''
949 place for anything that has to be done after destroying history.'''
950 # XXX it might be nice if we could take the list of destroyed
950 # XXX it might be nice if we could take the list of destroyed
951 # nodes, but I don't see an easy way for rollback() to do that
951 # nodes, but I don't see an easy way for rollback() to do that
952
952
953 # Ensure the persistent tag cache is updated. Doing it now
953 # Ensure the persistent tag cache is updated. Doing it now
954 # means that the tag cache only has to worry about destroyed
954 # means that the tag cache only has to worry about destroyed
955 # heads immediately after a strip/rollback. That in turn
955 # heads immediately after a strip/rollback. That in turn
956 # guarantees that "cachetip == currenttip" (comparing both rev
956 # guarantees that "cachetip == currenttip" (comparing both rev
957 # and node) always means no nodes have been added or destroyed.
957 # and node) always means no nodes have been added or destroyed.
958
958
959 # XXX this is suboptimal when qrefresh'ing: we strip the current
959 # XXX this is suboptimal when qrefresh'ing: we strip the current
960 # head, refresh the tag cache, then immediately add a new head.
960 # head, refresh the tag cache, then immediately add a new head.
961 # But I think doing it this way is necessary for the "instant
961 # But I think doing it this way is necessary for the "instant
962 # tag cache retrieval" case to work.
962 # tag cache retrieval" case to work.
963 self.invalidatecaches()
963 self.invalidatecaches()
964
964
965 def walk(self, match, node=None):
965 def walk(self, match, node=None):
966 '''
966 '''
967 walk recursively through the directory tree or a given
967 walk recursively through the directory tree or a given
968 changeset, finding all files matched by the match
968 changeset, finding all files matched by the match
969 function
969 function
970 '''
970 '''
971 return self[node].walk(match)
971 return self[node].walk(match)
972
972
973 def status(self, node1='.', node2=None, match=None,
973 def status(self, node1='.', node2=None, match=None,
974 ignored=False, clean=False, unknown=False):
974 ignored=False, clean=False, unknown=False):
975 """return status of files between two nodes or node and working directory
975 """return status of files between two nodes or node and working directory
976
976
977 If node1 is None, use the first dirstate parent instead.
977 If node1 is None, use the first dirstate parent instead.
978 If node2 is None, compare node1 with working directory.
978 If node2 is None, compare node1 with working directory.
979 """
979 """
980
980
981 def mfmatches(ctx):
981 def mfmatches(ctx):
982 mf = ctx.manifest().copy()
982 mf = ctx.manifest().copy()
983 for fn in mf.keys():
983 for fn in mf.keys():
984 if not match(fn):
984 if not match(fn):
985 del mf[fn]
985 del mf[fn]
986 return mf
986 return mf
987
987
988 if isinstance(node1, context.changectx):
988 if isinstance(node1, context.changectx):
989 ctx1 = node1
989 ctx1 = node1
990 else:
990 else:
991 ctx1 = self[node1]
991 ctx1 = self[node1]
992 if isinstance(node2, context.changectx):
992 if isinstance(node2, context.changectx):
993 ctx2 = node2
993 ctx2 = node2
994 else:
994 else:
995 ctx2 = self[node2]
995 ctx2 = self[node2]
996
996
997 working = ctx2.rev() is None
997 working = ctx2.rev() is None
998 parentworking = working and ctx1 == self['.']
998 parentworking = working and ctx1 == self['.']
999 match = match or match_.always(self.root, self.getcwd())
999 match = match or matchmod.always(self.root, self.getcwd())
1000 listignored, listclean, listunknown = ignored, clean, unknown
1000 listignored, listclean, listunknown = ignored, clean, unknown
1001
1001
1002 # load earliest manifest first for caching reasons
1002 # load earliest manifest first for caching reasons
1003 if not working and ctx2.rev() < ctx1.rev():
1003 if not working and ctx2.rev() < ctx1.rev():
1004 ctx2.manifest()
1004 ctx2.manifest()
1005
1005
1006 if not parentworking:
1006 if not parentworking:
1007 def bad(f, msg):
1007 def bad(f, msg):
1008 if f not in ctx1:
1008 if f not in ctx1:
1009 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1009 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1010 match.bad = bad
1010 match.bad = bad
1011
1011
1012 if working: # we need to scan the working dir
1012 if working: # we need to scan the working dir
1013 subrepos = ctx1.substate.keys()
1013 subrepos = ctx1.substate.keys()
1014 s = self.dirstate.status(match, subrepos, listignored,
1014 s = self.dirstate.status(match, subrepos, listignored,
1015 listclean, listunknown)
1015 listclean, listunknown)
1016 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1016 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1017
1017
1018 # check for any possibly clean files
1018 # check for any possibly clean files
1019 if parentworking and cmp:
1019 if parentworking and cmp:
1020 fixup = []
1020 fixup = []
1021 # do a full compare of any files that might have changed
1021 # do a full compare of any files that might have changed
1022 for f in sorted(cmp):
1022 for f in sorted(cmp):
1023 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1023 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1024 or ctx1[f].cmp(ctx2[f].data())):
1024 or ctx1[f].cmp(ctx2[f].data())):
1025 modified.append(f)
1025 modified.append(f)
1026 else:
1026 else:
1027 fixup.append(f)
1027 fixup.append(f)
1028
1028
1029 if listclean:
1029 if listclean:
1030 clean += fixup
1030 clean += fixup
1031
1031
1032 # update dirstate for files that are actually clean
1032 # update dirstate for files that are actually clean
1033 if fixup:
1033 if fixup:
1034 try:
1034 try:
1035 # updating the dirstate is optional
1035 # updating the dirstate is optional
1036 # so we don't wait on the lock
1036 # so we don't wait on the lock
1037 wlock = self.wlock(False)
1037 wlock = self.wlock(False)
1038 try:
1038 try:
1039 for f in fixup:
1039 for f in fixup:
1040 self.dirstate.normal(f)
1040 self.dirstate.normal(f)
1041 finally:
1041 finally:
1042 wlock.release()
1042 wlock.release()
1043 except error.LockError:
1043 except error.LockError:
1044 pass
1044 pass
1045
1045
1046 if not parentworking:
1046 if not parentworking:
1047 mf1 = mfmatches(ctx1)
1047 mf1 = mfmatches(ctx1)
1048 if working:
1048 if working:
1049 # we are comparing working dir against non-parent
1049 # we are comparing working dir against non-parent
1050 # generate a pseudo-manifest for the working dir
1050 # generate a pseudo-manifest for the working dir
1051 mf2 = mfmatches(self['.'])
1051 mf2 = mfmatches(self['.'])
1052 for f in cmp + modified + added:
1052 for f in cmp + modified + added:
1053 mf2[f] = None
1053 mf2[f] = None
1054 mf2.set(f, ctx2.flags(f))
1054 mf2.set(f, ctx2.flags(f))
1055 for f in removed:
1055 for f in removed:
1056 if f in mf2:
1056 if f in mf2:
1057 del mf2[f]
1057 del mf2[f]
1058 else:
1058 else:
1059 # we are comparing two revisions
1059 # we are comparing two revisions
1060 deleted, unknown, ignored = [], [], []
1060 deleted, unknown, ignored = [], [], []
1061 mf2 = mfmatches(ctx2)
1061 mf2 = mfmatches(ctx2)
1062
1062
1063 modified, added, clean = [], [], []
1063 modified, added, clean = [], [], []
1064 for fn in mf2:
1064 for fn in mf2:
1065 if fn in mf1:
1065 if fn in mf1:
1066 if (mf1.flags(fn) != mf2.flags(fn) or
1066 if (mf1.flags(fn) != mf2.flags(fn) or
1067 (mf1[fn] != mf2[fn] and
1067 (mf1[fn] != mf2[fn] and
1068 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1068 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1069 modified.append(fn)
1069 modified.append(fn)
1070 elif listclean:
1070 elif listclean:
1071 clean.append(fn)
1071 clean.append(fn)
1072 del mf1[fn]
1072 del mf1[fn]
1073 else:
1073 else:
1074 added.append(fn)
1074 added.append(fn)
1075 removed = mf1.keys()
1075 removed = mf1.keys()
1076
1076
1077 r = modified, added, removed, deleted, unknown, ignored, clean
1077 r = modified, added, removed, deleted, unknown, ignored, clean
1078 [l.sort() for l in r]
1078 [l.sort() for l in r]
1079 return r
1079 return r
1080
1080
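status() always hands back the same seven lists in a fixed order, leaving ignored, clean and unknown empty unless they were asked for; a small sketch of consuming it, given a localrepository instance repo::

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(ignored=True, clean=True, unknown=True)
    for f in modified:
        repo.ui.write('M %s\n' % f)
    for f in unknown:
        repo.ui.write('? %s\n' % f)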
1081 def add(self, list):
1081 def add(self, list):
1082 wlock = self.wlock()
1082 wlock = self.wlock()
1083 try:
1083 try:
1084 rejected = []
1084 rejected = []
1085 for f in list:
1085 for f in list:
1086 p = self.wjoin(f)
1086 p = self.wjoin(f)
1087 try:
1087 try:
1088 st = os.lstat(p)
1088 st = os.lstat(p)
1089 except:
1089 except:
1090 self.ui.warn(_("%s does not exist!\n") % f)
1090 self.ui.warn(_("%s does not exist!\n") % f)
1091 rejected.append(f)
1091 rejected.append(f)
1092 continue
1092 continue
1093 if st.st_size > 10000000:
1093 if st.st_size > 10000000:
1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 " performance problems\n"
1095 " performance problems\n"
1096 "(use 'hg revert %s' to unadd the file)\n")
1096 "(use 'hg revert %s' to unadd the file)\n")
1097 % (f, f))
1097 % (f, f))
1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 self.ui.warn(_("%s not added: only files and symlinks "
1099 self.ui.warn(_("%s not added: only files and symlinks "
1100 "supported currently\n") % f)
1100 "supported currently\n") % f)
1101 rejected.append(p)
1101 rejected.append(p)
1102 elif self.dirstate[f] in 'amn':
1102 elif self.dirstate[f] in 'amn':
1103 self.ui.warn(_("%s already tracked!\n") % f)
1103 self.ui.warn(_("%s already tracked!\n") % f)
1104 elif self.dirstate[f] == 'r':
1104 elif self.dirstate[f] == 'r':
1105 self.dirstate.normallookup(f)
1105 self.dirstate.normallookup(f)
1106 else:
1106 else:
1107 self.dirstate.add(f)
1107 self.dirstate.add(f)
1108 return rejected
1108 return rejected
1109 finally:
1109 finally:
1110 wlock.release()
1110 wlock.release()
1111
1111
1112 def forget(self, list):
1112 def forget(self, list):
1113 wlock = self.wlock()
1113 wlock = self.wlock()
1114 try:
1114 try:
1115 for f in list:
1115 for f in list:
1116 if self.dirstate[f] != 'a':
1116 if self.dirstate[f] != 'a':
1117 self.ui.warn(_("%s not added!\n") % f)
1117 self.ui.warn(_("%s not added!\n") % f)
1118 else:
1118 else:
1119 self.dirstate.forget(f)
1119 self.dirstate.forget(f)
1120 finally:
1120 finally:
1121 wlock.release()
1121 wlock.release()
1122
1122
1123 def remove(self, list, unlink=False):
1123 def remove(self, list, unlink=False):
1124 if unlink:
1124 if unlink:
1125 for f in list:
1125 for f in list:
1126 try:
1126 try:
1127 util.unlink(self.wjoin(f))
1127 util.unlink(self.wjoin(f))
1128 except OSError, inst:
1128 except OSError, inst:
1129 if inst.errno != errno.ENOENT:
1129 if inst.errno != errno.ENOENT:
1130 raise
1130 raise
1131 wlock = self.wlock()
1131 wlock = self.wlock()
1132 try:
1132 try:
1133 for f in list:
1133 for f in list:
1134 if unlink and os.path.exists(self.wjoin(f)):
1134 if unlink and os.path.exists(self.wjoin(f)):
1135 self.ui.warn(_("%s still exists!\n") % f)
1135 self.ui.warn(_("%s still exists!\n") % f)
1136 elif self.dirstate[f] == 'a':
1136 elif self.dirstate[f] == 'a':
1137 self.dirstate.forget(f)
1137 self.dirstate.forget(f)
1138 elif f not in self.dirstate:
1138 elif f not in self.dirstate:
1139 self.ui.warn(_("%s not tracked!\n") % f)
1139 self.ui.warn(_("%s not tracked!\n") % f)
1140 else:
1140 else:
1141 self.dirstate.remove(f)
1141 self.dirstate.remove(f)
1142 finally:
1142 finally:
1143 wlock.release()
1143 wlock.release()
1144
1144
1145 def undelete(self, list):
1145 def undelete(self, list):
1146 manifests = [self.manifest.read(self.changelog.read(p)[0])
1146 manifests = [self.manifest.read(self.changelog.read(p)[0])
1147 for p in self.dirstate.parents() if p != nullid]
1147 for p in self.dirstate.parents() if p != nullid]
1148 wlock = self.wlock()
1148 wlock = self.wlock()
1149 try:
1149 try:
1150 for f in list:
1150 for f in list:
1151 if self.dirstate[f] != 'r':
1151 if self.dirstate[f] != 'r':
1152 self.ui.warn(_("%s not removed!\n") % f)
1152 self.ui.warn(_("%s not removed!\n") % f)
1153 else:
1153 else:
1154 m = f in manifests[0] and manifests[0] or manifests[1]
1154 m = f in manifests[0] and manifests[0] or manifests[1]
1155 t = self.file(f).read(m[f])
1155 t = self.file(f).read(m[f])
1156 self.wwrite(f, t, m.flags(f))
1156 self.wwrite(f, t, m.flags(f))
1157 self.dirstate.normal(f)
1157 self.dirstate.normal(f)
1158 finally:
1158 finally:
1159 wlock.release()
1159 wlock.release()
1160
1160
1161 def copy(self, source, dest):
1161 def copy(self, source, dest):
1162 p = self.wjoin(dest)
1162 p = self.wjoin(dest)
1163 if not (os.path.exists(p) or os.path.islink(p)):
1163 if not (os.path.exists(p) or os.path.islink(p)):
1164 self.ui.warn(_("%s does not exist!\n") % dest)
1164 self.ui.warn(_("%s does not exist!\n") % dest)
1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 self.ui.warn(_("copy failed: %s is not a file or a "
1166 self.ui.warn(_("copy failed: %s is not a file or a "
1167 "symbolic link\n") % dest)
1167 "symbolic link\n") % dest)
1168 else:
1168 else:
1169 wlock = self.wlock()
1169 wlock = self.wlock()
1170 try:
1170 try:
1171 if self.dirstate[dest] in '?r':
1171 if self.dirstate[dest] in '?r':
1172 self.dirstate.add(dest)
1172 self.dirstate.add(dest)
1173 self.dirstate.copy(source, dest)
1173 self.dirstate.copy(source, dest)
1174 finally:
1174 finally:
1175 wlock.release()
1175 wlock.release()
1176
1176
1177 def heads(self, start=None):
1177 def heads(self, start=None):
1178 heads = self.changelog.heads(start)
1178 heads = self.changelog.heads(start)
1179 # sort the output in rev descending order
1179 # sort the output in rev descending order
1180 heads = [(-self.changelog.rev(h), h) for h in heads]
1180 heads = [(-self.changelog.rev(h), h) for h in heads]
1181 return [n for (r, n) in sorted(heads)]
1181 return [n for (r, n) in sorted(heads)]
1182
1182
1183 def branchheads(self, branch=None, start=None, closed=False):
1183 def branchheads(self, branch=None, start=None, closed=False):
1184 '''return a (possibly filtered) list of heads for the given branch
1184 '''return a (possibly filtered) list of heads for the given branch
1185
1185
1186 Heads are returned in topological order, from newest to oldest.
1186 Heads are returned in topological order, from newest to oldest.
1187 If branch is None, use the dirstate branch.
1187 If branch is None, use the dirstate branch.
1188 If start is not None, return only heads reachable from start.
1188 If start is not None, return only heads reachable from start.
1189 If closed is True, return heads that are marked as closed as well.
1189 If closed is True, return heads that are marked as closed as well.
1190 '''
1190 '''
1191 if branch is None:
1191 if branch is None:
1192 branch = self[None].branch()
1192 branch = self[None].branch()
1193 branches = self.branchmap()
1193 branches = self.branchmap()
1194 if branch not in branches:
1194 if branch not in branches:
1195 return []
1195 return []
1196 # the cache returns heads ordered lowest to highest
1196 # the cache returns heads ordered lowest to highest
1197 bheads = list(reversed(branches[branch]))
1197 bheads = list(reversed(branches[branch]))
1198 if start is not None:
1198 if start is not None:
1199 # filter out the heads that cannot be reached from startrev
1199 # filter out the heads that cannot be reached from startrev
1200 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1200 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1201 bheads = [h for h in bheads if h in fbheads]
1201 bheads = [h for h in bheads if h in fbheads]
1202 if not closed:
1202 if not closed:
1203 bheads = [h for h in bheads if
1203 bheads = [h for h in bheads if
1204 ('close' not in self.changelog.read(h)[5])]
1204 ('close' not in self.changelog.read(h)[5])]
1205 return bheads
1205 return bheads
1206
1206
1207 def branches(self, nodes):
1207 def branches(self, nodes):
1208 if not nodes:
1208 if not nodes:
1209 nodes = [self.changelog.tip()]
1209 nodes = [self.changelog.tip()]
1210 b = []
1210 b = []
1211 for n in nodes:
1211 for n in nodes:
1212 t = n
1212 t = n
1213 while 1:
1213 while 1:
1214 p = self.changelog.parents(n)
1214 p = self.changelog.parents(n)
1215 if p[1] != nullid or p[0] == nullid:
1215 if p[1] != nullid or p[0] == nullid:
1216 b.append((t, n, p[0], p[1]))
1216 b.append((t, n, p[0], p[1]))
1217 break
1217 break
1218 n = p[0]
1218 n = p[0]
1219 return b
1219 return b
1220
1220
1221 def between(self, pairs):
1221 def between(self, pairs):
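# Rough idea of the loop below: for each (top, bottom) pair walk the
# first-parent chain from top towards bottom and record the nodes that lie
# 1, 2, 4, 8, ... steps away from top (appended when i == f, with f doubling
# each time). These exponentially spaced samples let the caller binary-search
# for the boundary between known and unknown history. Informal summary only.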
1222 r = []
1222 r = []
1223
1223
1224 for top, bottom in pairs:
1224 for top, bottom in pairs:
1225 n, l, i = top, [], 0
1225 n, l, i = top, [], 0
1226 f = 1
1226 f = 1
1227
1227
1228 while n != bottom and n != nullid:
1228 while n != bottom and n != nullid:
1229 p = self.changelog.parents(n)[0]
1229 p = self.changelog.parents(n)[0]
1230 if i == f:
1230 if i == f:
1231 l.append(n)
1231 l.append(n)
1232 f = f * 2
1232 f = f * 2
1233 n = p
1233 n = p
1234 i += 1
1234 i += 1
1235
1235
1236 r.append(l)
1236 r.append(l)
1237
1237
1238 return r
1238 return r
1239
1239
1240 def findincoming(self, remote, base=None, heads=None, force=False):
1240 def findincoming(self, remote, base=None, heads=None, force=False):
1241 """Return list of roots of the subsets of missing nodes from remote
1241 """Return list of roots of the subsets of missing nodes from remote
1242
1242
1243 If base dict is specified, assume that these nodes and their parents
1243 If base dict is specified, assume that these nodes and their parents
1244 exist on the remote side and that no child of a node of base exists
1244 exist on the remote side and that no child of a node of base exists
1245 in both remote and self.
1245 in both remote and self.
1246 Furthermore base will be updated to include the nodes that exist
1246 Furthermore base will be updated to include the nodes that exist
1247 in self and remote but none of whose children exist in both self and remote.
1247 in self and remote but none of whose children exist in both self and remote.
1248 If a list of heads is specified, return only nodes which are heads
1248 If a list of heads is specified, return only nodes which are heads
1249 or ancestors of these heads.
1249 or ancestors of these heads.
1250
1250
1251 All the ancestors of base are in self and in remote.
1251 All the ancestors of base are in self and in remote.
1252 All the descendants of the list returned are missing in self.
1252 All the descendants of the list returned are missing in self.
1253 (and so we know that the rest of the nodes are missing in remote, see
1253 (and so we know that the rest of the nodes are missing in remote, see
1254 outgoing)
1254 outgoing)
1255 """
1255 """
1256 return self.findcommonincoming(remote, base, heads, force)[1]
1256 return self.findcommonincoming(remote, base, heads, force)[1]
1257
1257
1258 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1258 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1259 """Return a tuple (common, missing roots, heads) used to identify
1259 """Return a tuple (common, missing roots, heads) used to identify
1260 missing nodes from remote.
1260 missing nodes from remote.
1261
1261
1262 If base dict is specified, assume that these nodes and their parents
1262 If base dict is specified, assume that these nodes and their parents
1263 exist on the remote side and that no child of a node of base exists
1263 exist on the remote side and that no child of a node of base exists
1264 in both remote and self.
1264 in both remote and self.
1265 Furthermore base will be updated to include the nodes that exist
1265 Furthermore base will be updated to include the nodes that exist
1266 in self and remote but none of whose children exist in both self and remote.
1266 in self and remote but none of whose children exist in both self and remote.
1267 If a list of heads is specified, return only nodes which are heads
1267 If a list of heads is specified, return only nodes which are heads
1268 or ancestors of these heads.
1268 or ancestors of these heads.
1269
1269
1270 All the ancestors of base are in self and in remote.
1270 All the ancestors of base are in self and in remote.
1271 """
1271 """
1272 m = self.changelog.nodemap
1272 m = self.changelog.nodemap
1273 search = []
1273 search = []
1274 fetch = set()
1274 fetch = set()
1275 seen = set()
1275 seen = set()
1276 seenbranch = set()
1276 seenbranch = set()
1277 if base is None:
1277 if base is None:
1278 base = {}
1278 base = {}
1279
1279
1280 if not heads:
1280 if not heads:
1281 heads = remote.heads()
1281 heads = remote.heads()
1282
1282
1283 if self.changelog.tip() == nullid:
1283 if self.changelog.tip() == nullid:
1284 base[nullid] = 1
1284 base[nullid] = 1
1285 if heads != [nullid]:
1285 if heads != [nullid]:
1286 return [nullid], [nullid], list(heads)
1286 return [nullid], [nullid], list(heads)
1287 return [nullid], [], []
1287 return [nullid], [], []
1288
1288
1289 # assume we're closer to the tip than the root
1289 # assume we're closer to the tip than the root
1290 # and start by examining the heads
1290 # and start by examining the heads
1291 self.ui.status(_("searching for changes\n"))
1291 self.ui.status(_("searching for changes\n"))
1292
1292
1293 unknown = []
1293 unknown = []
1294 for h in heads:
1294 for h in heads:
1295 if h not in m:
1295 if h not in m:
1296 unknown.append(h)
1296 unknown.append(h)
1297 else:
1297 else:
1298 base[h] = 1
1298 base[h] = 1
1299
1299
1300 heads = unknown
1300 heads = unknown
1301 if not unknown:
1301 if not unknown:
1302 return base.keys(), [], []
1302 return base.keys(), [], []
1303
1303
1304 req = set(unknown)
1304 req = set(unknown)
1305 reqcnt = 0
1305 reqcnt = 0
1306
1306
1307 # search through remote branches
1307 # search through remote branches
1308 # a 'branch' here is a linear segment of history, with four parts:
1308 # a 'branch' here is a linear segment of history, with four parts:
1309 # head, root, first parent, second parent
1309 # head, root, first parent, second parent
1310 # (a branch always has two parents (or none) by definition)
1310 # (a branch always has two parents (or none) by definition)
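# For example, one element of 'unknown' is a 4-tuple of nodes shaped roughly
# like (head, root, p1-of-root, p2-of-root); nullid parents mark the end of
# history.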
1311 unknown = remote.branches(unknown)
1311 unknown = remote.branches(unknown)
1312 while unknown:
1312 while unknown:
1313 r = []
1313 r = []
1314 while unknown:
1314 while unknown:
1315 n = unknown.pop(0)
1315 n = unknown.pop(0)
1316 if n[0] in seen:
1316 if n[0] in seen:
1317 continue
1317 continue
1318
1318
1319 self.ui.debug("examining %s:%s\n"
1319 self.ui.debug("examining %s:%s\n"
1320 % (short(n[0]), short(n[1])))
1320 % (short(n[0]), short(n[1])))
1321 if n[0] == nullid: # found the end of the branch
1321 if n[0] == nullid: # found the end of the branch
1322 pass
1322 pass
1323 elif n in seenbranch:
1323 elif n in seenbranch:
1324 self.ui.debug("branch already found\n")
1324 self.ui.debug("branch already found\n")
1325 continue
1325 continue
1326 elif n[1] and n[1] in m: # do we know the base?
1326 elif n[1] and n[1] in m: # do we know the base?
1327 self.ui.debug("found incomplete branch %s:%s\n"
1327 self.ui.debug("found incomplete branch %s:%s\n"
1328 % (short(n[0]), short(n[1])))
1328 % (short(n[0]), short(n[1])))
1329 search.append(n[0:2]) # schedule branch range for scanning
1329 search.append(n[0:2]) # schedule branch range for scanning
1330 seenbranch.add(n)
1330 seenbranch.add(n)
1331 else:
1331 else:
1332 if n[1] not in seen and n[1] not in fetch:
1332 if n[1] not in seen and n[1] not in fetch:
1333 if n[2] in m and n[3] in m:
1333 if n[2] in m and n[3] in m:
1334 self.ui.debug("found new changeset %s\n" %
1334 self.ui.debug("found new changeset %s\n" %
1335 short(n[1]))
1335 short(n[1]))
1336 fetch.add(n[1]) # earliest unknown
1336 fetch.add(n[1]) # earliest unknown
1337 for p in n[2:4]:
1337 for p in n[2:4]:
1338 if p in m:
1338 if p in m:
1339 base[p] = 1 # latest known
1339 base[p] = 1 # latest known
1340
1340
1341 for p in n[2:4]:
1341 for p in n[2:4]:
1342 if p not in req and p not in m:
1342 if p not in req and p not in m:
1343 r.append(p)
1343 r.append(p)
1344 req.add(p)
1344 req.add(p)
1345 seen.add(n[0])
1345 seen.add(n[0])
1346
1346
1347 if r:
1347 if r:
1348 reqcnt += 1
1348 reqcnt += 1
1349 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1349 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1350 self.ui.debug("request %d: %s\n" %
1350 self.ui.debug("request %d: %s\n" %
1351 (reqcnt, " ".join(map(short, r))))
1351 (reqcnt, " ".join(map(short, r))))
1352 for p in xrange(0, len(r), 10):
1352 for p in xrange(0, len(r), 10):
1353 for b in remote.branches(r[p:p + 10]):
1353 for b in remote.branches(r[p:p + 10]):
1354 self.ui.debug("received %s:%s\n" %
1354 self.ui.debug("received %s:%s\n" %
1355 (short(b[0]), short(b[1])))
1355 (short(b[0]), short(b[1])))
1356 unknown.append(b)
1356 unknown.append(b)
1357
1357
1358 # do binary search on the branches we found
1358 # do binary search on the branches we found
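# Informal summary of the loop below: each (head, base) pair in 'search' is
# narrowed using the sample points remote.between() returned. The first
# sample we already know locally either becomes a new base directly (when it
# is close enough, f <= 2, with the preceding unknown sample added to fetch)
# or defines a smaller (p, i) interval to search in the next round.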
1359 while search:
1359 while search:
1360 newsearch = []
1360 newsearch = []
1361 reqcnt += 1
1361 reqcnt += 1
1362 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1362 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1363 for n, l in zip(search, remote.between(search)):
1363 for n, l in zip(search, remote.between(search)):
1364 l.append(n[1])
1364 l.append(n[1])
1365 p = n[0]
1365 p = n[0]
1366 f = 1
1366 f = 1
1367 for i in l:
1367 for i in l:
1368 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1368 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1369 if i in m:
1369 if i in m:
1370 if f <= 2:
1370 if f <= 2:
1371 self.ui.debug("found new branch changeset %s\n" %
1371 self.ui.debug("found new branch changeset %s\n" %
1372 short(p))
1372 short(p))
1373 fetch.add(p)
1373 fetch.add(p)
1374 base[i] = 1
1374 base[i] = 1
1375 else:
1375 else:
1376 self.ui.debug("narrowed branch search to %s:%s\n"
1376 self.ui.debug("narrowed branch search to %s:%s\n"
1377 % (short(p), short(i)))
1377 % (short(p), short(i)))
1378 newsearch.append((p, i))
1378 newsearch.append((p, i))
1379 break
1379 break
1380 p, f = i, f * 2
1380 p, f = i, f * 2
1381 search = newsearch
1381 search = newsearch
1382
1382
1383 # sanity check our fetch list
1383 # sanity check our fetch list
1384 for f in fetch:
1384 for f in fetch:
1385 if f in m:
1385 if f in m:
1386 raise error.RepoError(_("already have changeset ")
1386 raise error.RepoError(_("already have changeset ")
1387 + short(f[:4]))
1387 + short(f[:4]))
1388
1388
1389 if base.keys() == [nullid]:
1389 if base.keys() == [nullid]:
1390 if force:
1390 if force:
1391 self.ui.warn(_("warning: repository is unrelated\n"))
1391 self.ui.warn(_("warning: repository is unrelated\n"))
1392 else:
1392 else:
1393 raise util.Abort(_("repository is unrelated"))
1393 raise util.Abort(_("repository is unrelated"))
1394
1394
1395 self.ui.debug("found new changesets starting at " +
1395 self.ui.debug("found new changesets starting at " +
1396 " ".join([short(f) for f in fetch]) + "\n")
1396 " ".join([short(f) for f in fetch]) + "\n")
1397
1397
1398 self.ui.progress(_('searching'), None, unit=_('queries'))
1398 self.ui.progress(_('searching'), None, unit=_('queries'))
1399 self.ui.debug("%d total queries\n" % reqcnt)
1399 self.ui.debug("%d total queries\n" % reqcnt)
1400
1400
1401 return base.keys(), list(fetch), heads
1401 return base.keys(), list(fetch), heads
1402
1402
1403 def findoutgoing(self, remote, base=None, heads=None, force=False):
1403 def findoutgoing(self, remote, base=None, heads=None, force=False):
1404 """Return list of nodes that are roots of subsets not in remote
1404 """Return list of nodes that are roots of subsets not in remote
1405
1405
1406 If base dict is specified, assume that these nodes and their parents
1406 If base dict is specified, assume that these nodes and their parents
1407 exist on the remote side.
1407 exist on the remote side.
1408 If a list of heads is specified, return only nodes which are heads
1408 If a list of heads is specified, return only nodes which are heads
1409 or ancestors of these heads, and return a second element which
1409 or ancestors of these heads, and return a second element which
1410 contains all remote heads which get new children.
1410 contains all remote heads which get new children.
1411 """
1411 """
1412 if base is None:
1412 if base is None:
1413 base = {}
1413 base = {}
1414 self.findincoming(remote, base, heads, force=force)
1414 self.findincoming(remote, base, heads, force=force)
1415
1415
1416 self.ui.debug("common changesets up to "
1416 self.ui.debug("common changesets up to "
1417 + " ".join(map(short, base.keys())) + "\n")
1417 + " ".join(map(short, base.keys())) + "\n")
1418
1418
1419 remain = set(self.changelog.nodemap)
1419 remain = set(self.changelog.nodemap)
1420
1420
1421 # prune everything remote has from the tree
1421 # prune everything remote has from the tree
1422 remain.remove(nullid)
1422 remain.remove(nullid)
1423 remove = base.keys()
1423 remove = base.keys()
1424 while remove:
1424 while remove:
1425 n = remove.pop(0)
1425 n = remove.pop(0)
1426 if n in remain:
1426 if n in remain:
1427 remain.remove(n)
1427 remain.remove(n)
1428 for p in self.changelog.parents(n):
1428 for p in self.changelog.parents(n):
1429 remove.append(p)
1429 remove.append(p)
1430
1430
1431 # find every node whose parents have been pruned
1431 # find every node whose parents have been pruned
1432 subset = []
1432 subset = []
1433 # find every remote head that will get new children
1433 # find every remote head that will get new children
1434 updated_heads = set()
1434 updated_heads = set()
1435 for n in remain:
1435 for n in remain:
1436 p1, p2 = self.changelog.parents(n)
1436 p1, p2 = self.changelog.parents(n)
1437 if p1 not in remain and p2 not in remain:
1437 if p1 not in remain and p2 not in remain:
1438 subset.append(n)
1438 subset.append(n)
1439 if heads:
1439 if heads:
1440 if p1 in heads:
1440 if p1 in heads:
1441 updated_heads.add(p1)
1441 updated_heads.add(p1)
1442 if p2 in heads:
1442 if p2 in heads:
1443 updated_heads.add(p2)
1443 updated_heads.add(p2)
1444
1444
1445 # this is the set of all roots we have to push
1445 # this is the set of all roots we have to push
1446 if heads:
1446 if heads:
1447 return subset, list(updated_heads)
1447 return subset, list(updated_heads)
1448 else:
1448 else:
1449 return subset
1449 return subset
1450
1450
1451 def pull(self, remote, heads=None, force=False):
1451 def pull(self, remote, heads=None, force=False):
1452 lock = self.lock()
1452 lock = self.lock()
1453 try:
1453 try:
1454 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1454 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1455 force=force)
1455 force=force)
1456 if fetch == [nullid]:
1456 if fetch == [nullid]:
1457 self.ui.status(_("requesting all changes\n"))
1457 self.ui.status(_("requesting all changes\n"))
1458
1458
1459 if not fetch:
1459 if not fetch:
1460 self.ui.status(_("no changes found\n"))
1460 self.ui.status(_("no changes found\n"))
1461 return 0
1461 return 0
1462
1462
1463 if heads is None and remote.capable('changegroupsubset'):
1463 if heads is None and remote.capable('changegroupsubset'):
1464 heads = rheads
1464 heads = rheads
1465
1465
1466 if heads is None:
1466 if heads is None:
1467 cg = remote.changegroup(fetch, 'pull')
1467 cg = remote.changegroup(fetch, 'pull')
1468 else:
1468 else:
1469 if not remote.capable('changegroupsubset'):
1469 if not remote.capable('changegroupsubset'):
1470 raise util.Abort(_("Partial pull cannot be done because "
1470 raise util.Abort(_("Partial pull cannot be done because "
1471 "other repository doesn't support "
1471 "other repository doesn't support "
1472 "changegroupsubset."))
1472 "changegroupsubset."))
1473 cg = remote.changegroupsubset(fetch, heads, 'pull')
1473 cg = remote.changegroupsubset(fetch, heads, 'pull')
1474 return self.addchangegroup(cg, 'pull', remote.url())
1474 return self.addchangegroup(cg, 'pull', remote.url())
1475 finally:
1475 finally:
1476 lock.release()
1476 lock.release()
1477
1477
1478 def push(self, remote, force=False, revs=None):
1478 def push(self, remote, force=False, revs=None):
1479 # there are two ways to push to remote repo:
1479 # there are two ways to push to remote repo:
1480 #
1480 #
1481 # addchangegroup assumes local user can lock remote
1481 # addchangegroup assumes local user can lock remote
1482 # repo (local filesystem, old ssh servers).
1482 # repo (local filesystem, old ssh servers).
1483 #
1483 #
1484 # unbundle assumes local user cannot lock remote repo (new ssh
1484 # unbundle assumes local user cannot lock remote repo (new ssh
1485 # servers, http servers).
1485 # servers, http servers).
1486
1486
1487 if remote.capable('unbundle'):
1487 if remote.capable('unbundle'):
1488 return self.push_unbundle(remote, force, revs)
1488 return self.push_unbundle(remote, force, revs)
1489 return self.push_addchangegroup(remote, force, revs)
1489 return self.push_addchangegroup(remote, force, revs)
1490
1490
1491 def prepush(self, remote, force, revs):
1491 def prepush(self, remote, force, revs):
1492 '''Analyze the local and remote repositories and determine which
1492 '''Analyze the local and remote repositories and determine which
1493 changesets need to be pushed to the remote. Return a tuple
1493 changesets need to be pushed to the remote. Return a tuple
1494 (changegroup, remoteheads). changegroup is a readable file-like
1494 (changegroup, remoteheads). changegroup is a readable file-like
1495 object whose read() returns successive changegroup chunks ready to
1495 object whose read() returns successive changegroup chunks ready to
1496 be sent over the wire. remoteheads is the list of remote heads.
1496 be sent over the wire. remoteheads is the list of remote heads.
1497 '''
1497 '''
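# Return conventions used below (derived from this method's code, informal):
#   (None, 0)             push refused (new heads/branches without --force)
#   (None, 1)             nothing to push ("no changes found")
#   (changegroup, heads)  data ready to be sent to the remote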
1498 common = {}
1498 common = {}
1499 remote_heads = remote.heads()
1499 remote_heads = remote.heads()
1500 inc = self.findincoming(remote, common, remote_heads, force=force)
1500 inc = self.findincoming(remote, common, remote_heads, force=force)
1501
1501
1502 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1502 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1503 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1503 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1504
1504
1505 def checkbranch(lheads, rheads, updatelb, branchname=None):
1505 def checkbranch(lheads, rheads, updatelb, branchname=None):
1506 '''
1506 '''
1507 check whether there are more local heads than remote heads on
1507 check whether there are more local heads than remote heads on
1508 a specific branch.
1508 a specific branch.
1509
1509
1510 lheads: local branch heads
1510 lheads: local branch heads
1511 rheads: remote branch heads
1511 rheads: remote branch heads
1512 updatelb: outgoing local branch bases
1512 updatelb: outgoing local branch bases
1513 '''
1513 '''
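# Worked example (illustrative): pushing everything (no revs given) with two
# local heads on a branch that has only one remote head sets warn = 1, which
# produces the "push creates new remote heads" abort message below.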
1514
1514
1515 warn = 0
1515 warn = 0
1516
1516
1517 if not revs and len(lheads) > len(rheads):
1517 if not revs and len(lheads) > len(rheads):
1518 warn = 1
1518 warn = 1
1519 else:
1519 else:
1520 # add local heads involved in the push
1520 # add local heads involved in the push
1521 updatelheads = [self.changelog.heads(x, lheads)
1521 updatelheads = [self.changelog.heads(x, lheads)
1522 for x in updatelb]
1522 for x in updatelb]
1523 newheads = set(sum(updatelheads, [])) & set(lheads)
1523 newheads = set(sum(updatelheads, [])) & set(lheads)
1524
1524
1525 if not newheads:
1525 if not newheads:
1526 return True
1526 return True
1527
1527
1528 # add heads we don't have or that are not involved in the push
1528 # add heads we don't have or that are not involved in the push
1529 for r in rheads:
1529 for r in rheads:
1530 if r in self.changelog.nodemap:
1530 if r in self.changelog.nodemap:
1531 desc = self.changelog.heads(r, heads)
1531 desc = self.changelog.heads(r, heads)
1532 l = [h for h in heads if h in desc]
1532 l = [h for h in heads if h in desc]
1533 if not l:
1533 if not l:
1534 newheads.add(r)
1534 newheads.add(r)
1535 else:
1535 else:
1536 newheads.add(r)
1536 newheads.add(r)
1537 if len(newheads) > len(rheads):
1537 if len(newheads) > len(rheads):
1538 warn = 1
1538 warn = 1
1539
1539
1540 if warn:
1540 if warn:
1541 if branchname is not None:
1541 if branchname is not None:
1542 msg = _("abort: push creates new remote heads"
1542 msg = _("abort: push creates new remote heads"
1543 " on branch '%s'!\n") % branchname
1543 " on branch '%s'!\n") % branchname
1544 else:
1544 else:
1545 msg = _("abort: push creates new remote heads!\n")
1545 msg = _("abort: push creates new remote heads!\n")
1546 self.ui.warn(msg)
1546 self.ui.warn(msg)
1547 if len(lheads) > len(rheads):
1547 if len(lheads) > len(rheads):
1548 self.ui.status(_("(did you forget to merge?"
1548 self.ui.status(_("(did you forget to merge?"
1549 " use push -f to force)\n"))
1549 " use push -f to force)\n"))
1550 else:
1550 else:
1551 self.ui.status(_("(you should pull and merge or"
1551 self.ui.status(_("(you should pull and merge or"
1552 " use push -f to force)\n"))
1552 " use push -f to force)\n"))
1553 return False
1553 return False
1554 return True
1554 return True
1555
1555
1556 if not bases:
1556 if not bases:
1557 self.ui.status(_("no changes found\n"))
1557 self.ui.status(_("no changes found\n"))
1558 return None, 1
1558 return None, 1
1559 elif not force:
1559 elif not force:
1560 # Check for each named branch if we're creating new remote heads.
1560 # Check for each named branch if we're creating new remote heads.
1561 # To be a remote head after push, node must be either:
1561 # To be a remote head after push, node must be either:
1562 # - unknown locally
1562 # - unknown locally
1563 # - a local outgoing head descended from update
1563 # - a local outgoing head descended from update
1564 # - a remote head that's known locally and not
1564 # - a remote head that's known locally and not
1565 # ancestral to an outgoing head
1565 # ancestral to an outgoing head
1566 #
1566 #
1567 # New named branches cannot be created without --force.
1567 # New named branches cannot be created without --force.
1568
1568
1569 if remote_heads != [nullid]:
1569 if remote_heads != [nullid]:
1570 if remote.capable('branchmap'):
1570 if remote.capable('branchmap'):
1571 remotebrheads = remote.branchmap()
1571 remotebrheads = remote.branchmap()
1572
1572
1573 if not revs:
1573 if not revs:
1574 localbrheads = self.branchmap()
1574 localbrheads = self.branchmap()
1575 else:
1575 else:
1576 localbrheads = {}
1576 localbrheads = {}
1577 for n in heads:
1577 for n in heads:
1578 branch = self[n].branch()
1578 branch = self[n].branch()
1579 localbrheads.setdefault(branch, []).append(n)
1579 localbrheads.setdefault(branch, []).append(n)
1580
1580
1581 newbranches = list(set(localbrheads) - set(remotebrheads))
1581 newbranches = list(set(localbrheads) - set(remotebrheads))
1582 if newbranches: # new branch requires --force
1582 if newbranches: # new branch requires --force
1583 branchnames = ', '.join("%s" % b for b in newbranches)
1583 branchnames = ', '.join("%s" % b for b in newbranches)
1584 self.ui.warn(_("abort: push creates "
1584 self.ui.warn(_("abort: push creates "
1585 "new remote branches: %s!\n")
1585 "new remote branches: %s!\n")
1586 % branchnames)
1586 % branchnames)
1587 # propose 'push -b .' in the msg too?
1587 # propose 'push -b .' in the msg too?
1588 self.ui.status(_("(use 'hg push -f' to force)\n"))
1588 self.ui.status(_("(use 'hg push -f' to force)\n"))
1589 return None, 0
1589 return None, 0
1590 for branch, lheads in localbrheads.iteritems():
1590 for branch, lheads in localbrheads.iteritems():
1591 if branch in remotebrheads:
1591 if branch in remotebrheads:
1592 rheads = remotebrheads[branch]
1592 rheads = remotebrheads[branch]
1593 if not checkbranch(lheads, rheads, update, branch):
1593 if not checkbranch(lheads, rheads, update, branch):
1594 return None, 0
1594 return None, 0
1595 else:
1595 else:
1596 if not checkbranch(heads, remote_heads, update):
1596 if not checkbranch(heads, remote_heads, update):
1597 return None, 0
1597 return None, 0
1598
1598
1599 if inc:
1599 if inc:
1600 self.ui.warn(_("note: unsynced remote changes!\n"))
1600 self.ui.warn(_("note: unsynced remote changes!\n"))
1601
1601
1602
1602
1603 if revs is None:
1603 if revs is None:
1604 # use the fast path, no race possible on push
1604 # use the fast path, no race possible on push
1605 nodes = self.changelog.findmissing(common.keys())
1605 nodes = self.changelog.findmissing(common.keys())
1606 cg = self._changegroup(nodes, 'push')
1606 cg = self._changegroup(nodes, 'push')
1607 else:
1607 else:
1608 cg = self.changegroupsubset(update, revs, 'push')
1608 cg = self.changegroupsubset(update, revs, 'push')
1609 return cg, remote_heads
1609 return cg, remote_heads
1610
1610
1611 def push_addchangegroup(self, remote, force, revs):
1611 def push_addchangegroup(self, remote, force, revs):
1612 lock = remote.lock()
1612 lock = remote.lock()
1613 try:
1613 try:
1614 ret = self.prepush(remote, force, revs)
1614 ret = self.prepush(remote, force, revs)
1615 if ret[0] is not None:
1615 if ret[0] is not None:
1616 cg, remote_heads = ret
1616 cg, remote_heads = ret
1617 return remote.addchangegroup(cg, 'push', self.url())
1617 return remote.addchangegroup(cg, 'push', self.url())
1618 return ret[1]
1618 return ret[1]
1619 finally:
1619 finally:
1620 lock.release()
1620 lock.release()
1621
1621
1622 def push_unbundle(self, remote, force, revs):
1622 def push_unbundle(self, remote, force, revs):
1623 # local repo finds heads on server, finds out what revs it
1623 # local repo finds heads on server, finds out what revs it
1624 # must push. once revs transferred, if server finds it has
1624 # must push. once revs transferred, if server finds it has
1625 # different heads (someone else won commit/push race), server
1625 # different heads (someone else won commit/push race), server
1626 # aborts.
1626 # aborts.
1627
1627
1628 ret = self.prepush(remote, force, revs)
1628 ret = self.prepush(remote, force, revs)
1629 if ret[0] is not None:
1629 if ret[0] is not None:
1630 cg, remote_heads = ret
1630 cg, remote_heads = ret
1631 if force:
1631 if force:
1632 remote_heads = ['force']
1632 remote_heads = ['force']
1633 return remote.unbundle(cg, remote_heads, 'push')
1633 return remote.unbundle(cg, remote_heads, 'push')
1634 return ret[1]
1634 return ret[1]
1635
1635
1636 def changegroupinfo(self, nodes, source):
1636 def changegroupinfo(self, nodes, source):
1637 if self.ui.verbose or source == 'bundle':
1637 if self.ui.verbose or source == 'bundle':
1638 self.ui.status(_("%d changesets found\n") % len(nodes))
1638 self.ui.status(_("%d changesets found\n") % len(nodes))
1639 if self.ui.debugflag:
1639 if self.ui.debugflag:
1640 self.ui.debug("list of changesets:\n")
1640 self.ui.debug("list of changesets:\n")
1641 for node in nodes:
1641 for node in nodes:
1642 self.ui.debug("%s\n" % hex(node))
1642 self.ui.debug("%s\n" % hex(node))
1643
1643
1644 def changegroupsubset(self, bases, heads, source, extranodes=None):
1644 def changegroupsubset(self, bases, heads, source, extranodes=None):
1645 """Compute a changegroup consisting of all the nodes that are
1645 """Compute a changegroup consisting of all the nodes that are
1646 descendants of any of the bases and ancestors of any of the heads.
1646 descendants of any of the bases and ancestors of any of the heads.
1647 Return a chunkbuffer object whose read() method will return
1647 Return a chunkbuffer object whose read() method will return
1648 successive changegroup chunks.
1648 successive changegroup chunks.
1649
1649
1650 It is fairly complex as determining which filenodes and which
1650 It is fairly complex as determining which filenodes and which
1651 manifest nodes need to be included for the changeset to be complete
1651 manifest nodes need to be included for the changeset to be complete
1652 is non-trivial.
1652 is non-trivial.
1653
1653
1654 Another wrinkle is doing the reverse, figuring out which changeset in
1654 Another wrinkle is doing the reverse, figuring out which changeset in
1655 the changegroup a particular filenode or manifestnode belongs to.
1655 the changegroup a particular filenode or manifestnode belongs to.
1656
1656
1657 The caller can specify some nodes that must be included in the
1657 The caller can specify some nodes that must be included in the
1658 changegroup using the extranodes argument. It should be a dict
1658 changegroup using the extranodes argument. It should be a dict
1659 where the keys are the filenames (or 1 for the manifest), and the
1659 where the keys are the filenames (or 1 for the manifest), and the
1660 values are lists of (node, linknode) tuples, where node is a wanted
1660 values are lists of (node, linknode) tuples, where node is a wanted
1661 node and linknode is the changelog node that should be transmitted as
1661 node and linknode is the changelog node that should be transmitted as
1662 the linkrev.
1662 the linkrev.
1663 """
1663 """
1664
1664
1665 # Set up some initial variables
1665 # Set up some initial variables
1666 # Make it easy to refer to self.changelog
1666 # Make it easy to refer to self.changelog
1667 cl = self.changelog
1667 cl = self.changelog
1668 # msng is short for missing - compute the list of changesets in this
1668 # msng is short for missing - compute the list of changesets in this
1669 # changegroup.
1669 # changegroup.
1670 if not bases:
1670 if not bases:
1671 bases = [nullid]
1671 bases = [nullid]
1672 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1672 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1673
1673
1674 if extranodes is None:
1674 if extranodes is None:
1675 # can we go through the fast path ?
1675 # can we go through the fast path ?
1676 heads.sort()
1676 heads.sort()
1677 allheads = self.heads()
1677 allheads = self.heads()
1678 allheads.sort()
1678 allheads.sort()
1679 if heads == allheads:
1679 if heads == allheads:
1680 return self._changegroup(msng_cl_lst, source)
1680 return self._changegroup(msng_cl_lst, source)
1681
1681
1682 # slow path
1682 # slow path
1683 self.hook('preoutgoing', throw=True, source=source)
1683 self.hook('preoutgoing', throw=True, source=source)
1684
1684
1685 self.changegroupinfo(msng_cl_lst, source)
1685 self.changegroupinfo(msng_cl_lst, source)
1686 # Some bases may turn out to be superfluous, and some heads may be
1686 # Some bases may turn out to be superfluous, and some heads may be
1687 # too. nodesbetween will return the minimal set of bases and heads
1687 # too. nodesbetween will return the minimal set of bases and heads
1688 # necessary to re-create the changegroup.
1688 # necessary to re-create the changegroup.
1689
1689
1690 # Known heads are the list of heads that it is assumed the recipient
1690 # Known heads are the list of heads that it is assumed the recipient
1691 # of this changegroup will know about.
1691 # of this changegroup will know about.
1692 knownheads = set()
1692 knownheads = set()
1693 # We assume that all parents of bases are known heads.
1693 # We assume that all parents of bases are known heads.
1694 for n in bases:
1694 for n in bases:
1695 knownheads.update(cl.parents(n))
1695 knownheads.update(cl.parents(n))
1696 knownheads.discard(nullid)
1696 knownheads.discard(nullid)
1697 knownheads = list(knownheads)
1697 knownheads = list(knownheads)
1698 if knownheads:
1698 if knownheads:
1699 # Now that we know what heads are known, we can compute which
1699 # Now that we know what heads are known, we can compute which
1700 # changesets are known. The recipient must know about all
1700 # changesets are known. The recipient must know about all
1701 # changesets required to reach the known heads from the null
1701 # changesets required to reach the known heads from the null
1702 # changeset.
1702 # changeset.
1703 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1703 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1704 junk = None
1704 junk = None
1705 # Transform the list into a set.
1705 # Transform the list into a set.
1706 has_cl_set = set(has_cl_set)
1706 has_cl_set = set(has_cl_set)
1707 else:
1707 else:
1708 # If there were no known heads, the recipient cannot be assumed to
1708 # If there were no known heads, the recipient cannot be assumed to
1709 # know about any changesets.
1709 # know about any changesets.
1710 has_cl_set = set()
1710 has_cl_set = set()
1711
1711
1712 # Make it easy to refer to self.manifest
1712 # Make it easy to refer to self.manifest
1713 mnfst = self.manifest
1713 mnfst = self.manifest
1714 # We don't know which manifests are missing yet
1714 # We don't know which manifests are missing yet
1715 msng_mnfst_set = {}
1715 msng_mnfst_set = {}
1716 # Nor do we know which filenodes are missing.
1716 # Nor do we know which filenodes are missing.
1717 msng_filenode_set = {}
1717 msng_filenode_set = {}
1718
1718
1719 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1719 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1720 junk = None
1720 junk = None
1721
1721
1722 # A changeset always belongs to itself, so the changenode lookup
1722 # A changeset always belongs to itself, so the changenode lookup
1723 # function for a changenode is identity.
1723 # function for a changenode is identity.
1724 def identity(x):
1724 def identity(x):
1725 return x
1725 return x
1726
1726
1727 # If we determine that a particular file or manifest node must be a
1727 # If we determine that a particular file or manifest node must be a
1728 # node that the recipient of the changegroup will already have, we can
1728 # node that the recipient of the changegroup will already have, we can
1729 # also assume the recipient will have all the parents. This function
1729 # also assume the recipient will have all the parents. This function
1730 # prunes them from the set of missing nodes.
1730 # prunes them from the set of missing nodes.
1731 def prune_parents(revlog, hasset, msngset):
1731 def prune_parents(revlog, hasset, msngset):
1732 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1732 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1733 msngset.pop(revlog.node(r), None)
1733 msngset.pop(revlog.node(r), None)
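# Example of the effect (illustrative): if the recipient is known to have
# filenode X, every ancestor of X is dropped from msngset, since ancestors
# are implied by possession of X.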
1734
1734
1735 # Use the information collected in collect_manifests_and_files to say
1735 # Use the information collected in collect_manifests_and_files to say
1736 # which changenode any manifestnode belongs to.
1736 # which changenode any manifestnode belongs to.
1737 def lookup_manifest_link(mnfstnode):
1737 def lookup_manifest_link(mnfstnode):
1738 return msng_mnfst_set[mnfstnode]
1738 return msng_mnfst_set[mnfstnode]
1739
1739
1740 # A function generating function that sets up the initial environment
1740 # A function generating function that sets up the initial environment
1741 # for the inner function.
1741 # for the inner function.
1742 def filenode_collector(changedfiles):
1742 def filenode_collector(changedfiles):
1743 # This gathers information from each manifestnode included in the
1743 # This gathers information from each manifestnode included in the
1744 # changegroup about which filenodes the manifest node references
1744 # changegroup about which filenodes the manifest node references
1745 # so we can include those in the changegroup too.
1745 # so we can include those in the changegroup too.
1746 #
1746 #
1747 # It also remembers which changenode each filenode belongs to. It
1747 # It also remembers which changenode each filenode belongs to. It
1748 # does this by assuming that a filenode belongs to the changenode
1748 # does this by assuming that a filenode belongs to the changenode
1749 # that the first manifest referencing it belongs to.
1749 # that the first manifest referencing it belongs to.
1750 def collect_msng_filenodes(mnfstnode):
1750 def collect_msng_filenodes(mnfstnode):
1751 r = mnfst.rev(mnfstnode)
1751 r = mnfst.rev(mnfstnode)
1752 if r - 1 in mnfst.parentrevs(r):
1752 if r - 1 in mnfst.parentrevs(r):
1753 # If the previous rev is one of the parents,
1753 # If the previous rev is one of the parents,
1754 # we only need to see a diff.
1754 # we only need to see a diff.
1755 deltamf = mnfst.readdelta(mnfstnode)
1755 deltamf = mnfst.readdelta(mnfstnode)
1756 # For each line in the delta
1756 # For each line in the delta
1757 for f, fnode in deltamf.iteritems():
1757 for f, fnode in deltamf.iteritems():
1758 f = changedfiles.get(f, None)
1758 f = changedfiles.get(f, None)
1759 # And if the file is in the list of files we care
1759 # And if the file is in the list of files we care
1760 # about.
1760 # about.
1761 if f is not None:
1761 if f is not None:
1762 # Get the changenode this manifest belongs to
1762 # Get the changenode this manifest belongs to
1763 clnode = msng_mnfst_set[mnfstnode]
1763 clnode = msng_mnfst_set[mnfstnode]
1764 # Create the set of filenodes for the file if
1764 # Create the set of filenodes for the file if
1765 # there isn't one already.
1765 # there isn't one already.
1766 ndset = msng_filenode_set.setdefault(f, {})
1766 ndset = msng_filenode_set.setdefault(f, {})
1767 # And set the filenode's changelog node to the
1767 # And set the filenode's changelog node to the
1768 # manifest's if it hasn't been set already.
1768 # manifest's if it hasn't been set already.
1769 ndset.setdefault(fnode, clnode)
1769 ndset.setdefault(fnode, clnode)
1770 else:
1770 else:
1771 # Otherwise we need a full manifest.
1771 # Otherwise we need a full manifest.
1772 m = mnfst.read(mnfstnode)
1772 m = mnfst.read(mnfstnode)
1773 # For every file we care about.
1773 # For every file we care about.
1774 for f in changedfiles:
1774 for f in changedfiles:
1775 fnode = m.get(f, None)
1775 fnode = m.get(f, None)
1776 # If it's in the manifest
1776 # If it's in the manifest
1777 if fnode is not None:
1777 if fnode is not None:
1778 # See comments above.
1778 # See comments above.
1779 clnode = msng_mnfst_set[mnfstnode]
1779 clnode = msng_mnfst_set[mnfstnode]
1780 ndset = msng_filenode_set.setdefault(f, {})
1780 ndset = msng_filenode_set.setdefault(f, {})
1781 ndset.setdefault(fnode, clnode)
1781 ndset.setdefault(fnode, clnode)
1782 return collect_msng_filenodes
1782 return collect_msng_filenodes
1783
1783
1784 # We have a list of filenodes we think we need for a file, let's remove
1784 # We have a list of filenodes we think we need for a file, let's remove
1785 # all those we know the recipient must have.
1785 # all those we know the recipient must have.
1786 def prune_filenodes(f, filerevlog):
1786 def prune_filenodes(f, filerevlog):
1787 msngset = msng_filenode_set[f]
1787 msngset = msng_filenode_set[f]
1788 hasset = set()
1788 hasset = set()
1789 # If a 'missing' filenode thinks it belongs to a changenode we
1789 # If a 'missing' filenode thinks it belongs to a changenode we
1790 # assume the recipient must have, then the recipient must have
1790 # assume the recipient must have, then the recipient must have
1791 # that filenode.
1791 # that filenode.
1792 for n in msngset:
1792 for n in msngset:
1793 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1793 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1794 if clnode in has_cl_set:
1794 if clnode in has_cl_set:
1795 hasset.add(n)
1795 hasset.add(n)
1796 prune_parents(filerevlog, hasset, msngset)
1796 prune_parents(filerevlog, hasset, msngset)
1797
1797
1798 # A function generator function that sets up a context for the
1798 # A function generator function that sets up a context for the
1799 # inner function.
1799 # inner function.
1800 def lookup_filenode_link_func(fname):
1800 def lookup_filenode_link_func(fname):
1801 msngset = msng_filenode_set[fname]
1801 msngset = msng_filenode_set[fname]
1802 # Lookup the changenode the filenode belongs to.
1802 # Lookup the changenode the filenode belongs to.
1803 def lookup_filenode_link(fnode):
1803 def lookup_filenode_link(fnode):
1804 return msngset[fnode]
1804 return msngset[fnode]
1805 return lookup_filenode_link
1805 return lookup_filenode_link
1806
1806
1807 # Add the nodes that were explicitly requested.
1807 # Add the nodes that were explicitly requested.
1808 def add_extra_nodes(name, nodes):
1808 def add_extra_nodes(name, nodes):
1809 if not extranodes or name not in extranodes:
1809 if not extranodes or name not in extranodes:
1810 return
1810 return
1811
1811
1812 for node, linknode in extranodes[name]:
1812 for node, linknode in extranodes[name]:
1813 if node not in nodes:
1813 if node not in nodes:
1814 nodes[node] = linknode
1814 nodes[node] = linknode
1815
1815
1816 # Now that we have all these utility functions to help out and
1816 # Now that we have all these utility functions to help out and
1817 # logically divide up the task, generate the group.
1817 # logically divide up the task, generate the group.
1818 def gengroup():
1818 def gengroup():
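# Wire layout produced by this generator (as emitted below): changelog
# chunks first, then manifest chunks, then for each changed file a chunk
# header carrying the file name followed by its filenode chunks, and
# finally a terminating chunk from changegroup.closechunk().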
1819 # The set of changed files starts empty.
1819 # The set of changed files starts empty.
1820 changedfiles = {}
1820 changedfiles = {}
1821 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1821 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1822
1822
1823 # Create a changenode group generator that will call our functions
1823 # Create a changenode group generator that will call our functions
1824 # back to lookup the owning changenode and collect information.
1824 # back to lookup the owning changenode and collect information.
1825 group = cl.group(msng_cl_lst, identity, collect)
1825 group = cl.group(msng_cl_lst, identity, collect)
1826 cnt = 0
1826 cnt = 0
1827 for chnk in group:
1827 for chnk in group:
1828 yield chnk
1828 yield chnk
1829 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1829 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1830 cnt += 1
1830 cnt += 1
1831 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1831 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1832
1832
1833
1833
1834 # Figure out which manifest nodes (of the ones we think might be
1834 # Figure out which manifest nodes (of the ones we think might be
1835 # part of the changegroup) the recipient must know about and
1835 # part of the changegroup) the recipient must know about and
1836 # remove them from the changegroup.
1836 # remove them from the changegroup.
1837 has_mnfst_set = set()
1837 has_mnfst_set = set()
1838 for n in msng_mnfst_set:
1838 for n in msng_mnfst_set:
1839 # If a 'missing' manifest thinks it belongs to a changenode
1839 # If a 'missing' manifest thinks it belongs to a changenode
1840 # the recipient is assumed to have, obviously the recipient
1840 # the recipient is assumed to have, obviously the recipient
1841 # must have that manifest.
1841 # must have that manifest.
1842 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1842 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1843 if linknode in has_cl_set:
1843 if linknode in has_cl_set:
1844 has_mnfst_set.add(n)
1844 has_mnfst_set.add(n)
1845 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1845 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1846 add_extra_nodes(1, msng_mnfst_set)
1846 add_extra_nodes(1, msng_mnfst_set)
1847 msng_mnfst_lst = msng_mnfst_set.keys()
1847 msng_mnfst_lst = msng_mnfst_set.keys()
1848 # Sort the manifestnodes by revision number.
1848 # Sort the manifestnodes by revision number.
1849 msng_mnfst_lst.sort(key=mnfst.rev)
1849 msng_mnfst_lst.sort(key=mnfst.rev)
1850 # Create a generator for the manifestnodes that calls our lookup
1850 # Create a generator for the manifestnodes that calls our lookup
1851 # and data collection functions back.
1851 # and data collection functions back.
1852 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1852 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1853 filenode_collector(changedfiles))
1853 filenode_collector(changedfiles))
1854 cnt = 0
1854 cnt = 0
1855 for chnk in group:
1855 for chnk in group:
1856 yield chnk
1856 yield chnk
1857 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1857 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1858 cnt += 1
1858 cnt += 1
1859 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1859 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1860
1860
1861 # These are no longer needed, dereference and toss the memory for
1861 # These are no longer needed, dereference and toss the memory for
1862 # them.
1862 # them.
1863 msng_mnfst_lst = None
1863 msng_mnfst_lst = None
1864 msng_mnfst_set.clear()
1864 msng_mnfst_set.clear()
1865
1865
1866 if extranodes:
1866 if extranodes:
1867 for fname in extranodes:
1867 for fname in extranodes:
1868 if isinstance(fname, int):
1868 if isinstance(fname, int):
1869 continue
1869 continue
1870 msng_filenode_set.setdefault(fname, {})
1870 msng_filenode_set.setdefault(fname, {})
1871 changedfiles[fname] = 1
1871 changedfiles[fname] = 1
1872 # Go through all our files in order sorted by name.
1872 # Go through all our files in order sorted by name.
1873 cnt = 0
1873 cnt = 0
1874 for fname in sorted(changedfiles):
1874 for fname in sorted(changedfiles):
1875 filerevlog = self.file(fname)
1875 filerevlog = self.file(fname)
1876 if not len(filerevlog):
1876 if not len(filerevlog):
1877 raise util.Abort(_("empty or missing revlog for %s") % fname)
1877 raise util.Abort(_("empty or missing revlog for %s") % fname)
1878 # Toss out the filenodes that the recipient isn't really
1878 # Toss out the filenodes that the recipient isn't really
1879 # missing.
1879 # missing.
1880 if fname in msng_filenode_set:
1880 if fname in msng_filenode_set:
1881 prune_filenodes(fname, filerevlog)
1881 prune_filenodes(fname, filerevlog)
1882 add_extra_nodes(fname, msng_filenode_set[fname])
1882 add_extra_nodes(fname, msng_filenode_set[fname])
1883 msng_filenode_lst = msng_filenode_set[fname].keys()
1883 msng_filenode_lst = msng_filenode_set[fname].keys()
1884 else:
1884 else:
1885 msng_filenode_lst = []
1885 msng_filenode_lst = []
1886 # If any filenodes are left, generate the group for them,
1886 # If any filenodes are left, generate the group for them,
1887 # otherwise don't bother.
1887 # otherwise don't bother.
1888 if len(msng_filenode_lst) > 0:
1888 if len(msng_filenode_lst) > 0:
1889 yield changegroup.chunkheader(len(fname))
1889 yield changegroup.chunkheader(len(fname))
1890 yield fname
1890 yield fname
1891 # Sort the filenodes by their revision #
1891 # Sort the filenodes by their revision #
1892 msng_filenode_lst.sort(key=filerevlog.rev)
1892 msng_filenode_lst.sort(key=filerevlog.rev)
1893 # Create a group generator and only pass in a changenode
1893 # Create a group generator and only pass in a changenode
1894 # lookup function as we need to collect no information
1894 # lookup function as we need to collect no information
1895 # from filenodes.
1895 # from filenodes.
1896 group = filerevlog.group(msng_filenode_lst,
1896 group = filerevlog.group(msng_filenode_lst,
1897 lookup_filenode_link_func(fname))
1897 lookup_filenode_link_func(fname))
1898 for chnk in group:
1898 for chnk in group:
1899 self.ui.progress(
1899 self.ui.progress(
1900 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1900 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1901 cnt += 1
1901 cnt += 1
1902 yield chnk
1902 yield chnk
1903 if fname in msng_filenode_set:
1903 if fname in msng_filenode_set:
1904 # Don't need this anymore, toss it to free memory.
1904 # Don't need this anymore, toss it to free memory.
1905 del msng_filenode_set[fname]
1905 del msng_filenode_set[fname]
1906 # Signal that no more groups are left.
1906 # Signal that no more groups are left.
1907 yield changegroup.closechunk()
1907 yield changegroup.closechunk()
1908 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1908 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1909
1909
1910 if msng_cl_lst:
1910 if msng_cl_lst:
1911 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1911 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1912
1912
1913 return util.chunkbuffer(gengroup())
1913 return util.chunkbuffer(gengroup())
1914
1914
1915 def changegroup(self, basenodes, source):
1915 def changegroup(self, basenodes, source):
1916 # to avoid a race we use changegroupsubset() (issue1320)
1916 # to avoid a race we use changegroupsubset() (issue1320)
1917 return self.changegroupsubset(basenodes, self.heads(), source)
1917 return self.changegroupsubset(basenodes, self.heads(), source)
1918
1918
1919 def _changegroup(self, nodes, source):
1919 def _changegroup(self, nodes, source):
1920 """Compute the changegroup of all nodes that we have that a recipient
1920 """Compute the changegroup of all nodes that we have that a recipient
1921 doesn't. Return a chunkbuffer object whose read() method will return
1921 doesn't. Return a chunkbuffer object whose read() method will return
1922 successive changegroup chunks.
1922 successive changegroup chunks.
1923
1923
1924 This is much easier than the previous function as we can assume that
1924 This is much easier than the previous function as we can assume that
1925 the recipient has any changenode we aren't sending them.
1925 the recipient has any changenode we aren't sending them.
1926
1926
1927 nodes is the set of nodes to send"""
1927 nodes is the set of nodes to send"""
1928
1928
1929 self.hook('preoutgoing', throw=True, source=source)
1929 self.hook('preoutgoing', throw=True, source=source)
1930
1930
1931 cl = self.changelog
1931 cl = self.changelog
1932 revset = set([cl.rev(n) for n in nodes])
1932 revset = set([cl.rev(n) for n in nodes])
1933 self.changegroupinfo(nodes, source)
1933 self.changegroupinfo(nodes, source)
1934
1934
1935 def identity(x):
1935 def identity(x):
1936 return x
1936 return x
1937
1937
1938 def gennodelst(log):
1938 def gennodelst(log):
1939 for r in log:
1939 for r in log:
1940 if log.linkrev(r) in revset:
1940 if log.linkrev(r) in revset:
1941 yield log.node(r)
1941 yield log.node(r)
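# gennodelst yields the nodes of a revlog whose linkrev falls inside the
# outgoing revset, i.e. the entries introduced by the changesets being sent.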
1942
1942
1943 def lookuprevlink_func(revlog):
1943 def lookuprevlink_func(revlog):
1944 def lookuprevlink(n):
1944 def lookuprevlink(n):
1945 return cl.node(revlog.linkrev(revlog.rev(n)))
1945 return cl.node(revlog.linkrev(revlog.rev(n)))
1946 return lookuprevlink
1946 return lookuprevlink
1947
1947
1948 def gengroup():
1948 def gengroup():
1949 '''yield a sequence of changegroup chunks (strings)'''
1949 '''yield a sequence of changegroup chunks (strings)'''
1950 # construct a list of all changed files
1950 # construct a list of all changed files
1951 changedfiles = {}
1951 changedfiles = {}
1952 mmfs = {}
1952 mmfs = {}
1953 collect = changegroup.collector(cl, mmfs, changedfiles)
1953 collect = changegroup.collector(cl, mmfs, changedfiles)
1954
1954
1955 cnt = 0
1955 cnt = 0
1956 for chnk in cl.group(nodes, identity, collect):
1956 for chnk in cl.group(nodes, identity, collect):
1957 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1957 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1958 cnt += 1
1958 cnt += 1
1959 yield chnk
1959 yield chnk
1960 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1960 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1961
1961
1962 mnfst = self.manifest
1962 mnfst = self.manifest
1963 nodeiter = gennodelst(mnfst)
1963 nodeiter = gennodelst(mnfst)
1964 cnt = 0
1964 cnt = 0
1965 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1965 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1966 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1966 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1967 cnt += 1
1967 cnt += 1
1968 yield chnk
1968 yield chnk
1969 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1969 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1970
1970
1971 cnt = 0
1971 cnt = 0
1972 for fname in sorted(changedfiles):
1972 for fname in sorted(changedfiles):
1973 filerevlog = self.file(fname)
1973 filerevlog = self.file(fname)
1974 if not len(filerevlog):
1974 if not len(filerevlog):
1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1976 nodeiter = gennodelst(filerevlog)
1976 nodeiter = gennodelst(filerevlog)
1977 nodeiter = list(nodeiter)
1977 nodeiter = list(nodeiter)
1978 if nodeiter:
1978 if nodeiter:
1979 yield changegroup.chunkheader(len(fname))
1979 yield changegroup.chunkheader(len(fname))
1980 yield fname
1980 yield fname
1981 lookup = lookuprevlink_func(filerevlog)
1981 lookup = lookuprevlink_func(filerevlog)
1982 for chnk in filerevlog.group(nodeiter, lookup):
1982 for chnk in filerevlog.group(nodeiter, lookup):
1983 self.ui.progress(
1983 self.ui.progress(
1984 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1984 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1985 cnt += 1
1985 cnt += 1
1986 yield chnk
1986 yield chnk
1987 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1987 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1988
1988
1989 yield changegroup.closechunk()
1989 yield changegroup.closechunk()
1990
1990
1991 if nodes:
1991 if nodes:
1992 self.hook('outgoing', node=hex(nodes[0]), source=source)
1992 self.hook('outgoing', node=hex(nodes[0]), source=source)
1993
1993
1994 return util.chunkbuffer(gengroup())
1994 return util.chunkbuffer(gengroup())
1995
1995
1996 def addchangegroup(self, source, srctype, url, emptyok=False):
1996 def addchangegroup(self, source, srctype, url, emptyok=False):
1997 """add changegroup to repo.
1997 """add changegroup to repo.
1998
1998
1999 return values:
1999 return values:
2000 - nothing changed or no source: 0
2000 - nothing changed or no source: 0
2001 - more heads than before: 1+added heads (2..n)
2001 - more heads than before: 1+added heads (2..n)
2002 - fewer heads than before: -1-removed heads (-2..-n)
2002 - fewer heads than before: -1-removed heads (-2..-n)
2003 - number of heads stays the same: 1
2003 - number of heads stays the same: 1
2004 """
2004 """
2005 def csmap(x):
2005 def csmap(x):
2006 self.ui.debug("add changeset %s\n" % short(x))
2006 self.ui.debug("add changeset %s\n" % short(x))
2007 return len(cl)
2007 return len(cl)
2008
2008
2009 def revmap(x):
2009 def revmap(x):
2010 return cl.rev(x)
2010 return cl.rev(x)
2011
2011
2012 if not source:
2012 if not source:
2013 return 0
2013 return 0
2014
2014
2015 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2015 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2016
2016
2017 changesets = files = revisions = 0
2017 changesets = files = revisions = 0
2018
2018
2019 # write changelog data to temp files so concurrent readers will not see
2019 # write changelog data to temp files so concurrent readers will not see
2020 # an inconsistent view
2020 # an inconsistent view
2021 cl = self.changelog
2021 cl = self.changelog
2022 cl.delayupdate()
2022 cl.delayupdate()
2023 oldheads = len(cl.heads())
2023 oldheads = len(cl.heads())
2024
2024
2025 tr = self.transaction()
2025 tr = self.transaction()
2026 try:
2026 try:
2027 trp = weakref.proxy(tr)
2027 trp = weakref.proxy(tr)
2028 # pull off the changeset group
2028 # pull off the changeset group
2029 self.ui.status(_("adding changesets\n"))
2029 self.ui.status(_("adding changesets\n"))
2030 clstart = len(cl)
2030 clstart = len(cl)
2031 class prog(object):
2031 class prog(object):
2032 step = _('changesets')
2032 step = _('changesets')
2033 count = 1
2033 count = 1
2034 ui = self.ui
2034 ui = self.ui
2035 def __call__(self):
2035 def __call__(self):
2036 self.ui.progress(self.step, self.count, unit=_('chunks'))
2036 self.ui.progress(self.step, self.count, unit=_('chunks'))
2037 self.count += 1
2037 self.count += 1
2038 pr = prog()
2038 pr = prog()
2039 chunkiter = changegroup.chunkiter(source, progress=pr)
2039 chunkiter = changegroup.chunkiter(source, progress=pr)
2040 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2040 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2041 raise util.Abort(_("received changelog group is empty"))
2041 raise util.Abort(_("received changelog group is empty"))
2042 clend = len(cl)
2042 clend = len(cl)
2043 changesets = clend - clstart
2043 changesets = clend - clstart
2044 self.ui.progress(_('changesets'), None)
2044 self.ui.progress(_('changesets'), None)
2045
2045
2046 # pull off the manifest group
2046 # pull off the manifest group
2047 self.ui.status(_("adding manifests\n"))
2047 self.ui.status(_("adding manifests\n"))
2048 pr.step = _('manifests')
2048 pr.step = _('manifests')
2049 pr.count = 1
2049 pr.count = 1
2050 chunkiter = changegroup.chunkiter(source, progress=pr)
2050 chunkiter = changegroup.chunkiter(source, progress=pr)
2051 # no need to check for empty manifest group here:
2051 # no need to check for empty manifest group here:
2052 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2052 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2053 # no new manifest will be created and the manifest group will
2053 # no new manifest will be created and the manifest group will
2054 # be empty during the pull
2054 # be empty during the pull
2055 self.manifest.addgroup(chunkiter, revmap, trp)
2055 self.manifest.addgroup(chunkiter, revmap, trp)
2056 self.ui.progress(_('manifests'), None)
2056 self.ui.progress(_('manifests'), None)
2057
2057
2058 needfiles = {}
2058 needfiles = {}
2059 if self.ui.configbool('server', 'validate', default=False):
2059 if self.ui.configbool('server', 'validate', default=False):
2060 # validate incoming csets have their manifests
2060 # validate incoming csets have their manifests
2061 for cset in xrange(clstart, clend):
2061 for cset in xrange(clstart, clend):
2062 mfest = self.changelog.read(self.changelog.node(cset))[0]
2062 mfest = self.changelog.read(self.changelog.node(cset))[0]
2063 mfest = self.manifest.readdelta(mfest)
2063 mfest = self.manifest.readdelta(mfest)
2064 # store file nodes we must see
2064 # store file nodes we must see
2065 for f, n in mfest.iteritems():
2065 for f, n in mfest.iteritems():
2066 needfiles.setdefault(f, set()).add(n)
2066 needfiles.setdefault(f, set()).add(n)
2067
2067
2068 # process the files
2068 # process the files
2069 self.ui.status(_("adding file changes\n"))
2069 self.ui.status(_("adding file changes\n"))
2070 pr.step = _('files')
2070 pr.step = _('files')
2071 pr.count = 1
2071 pr.count = 1
2072 while 1:
2072 while 1:
2073 f = changegroup.getchunk(source)
2073 f = changegroup.getchunk(source)
2074 if not f:
2074 if not f:
2075 break
2075 break
2076 self.ui.debug("adding %s revisions\n" % f)
2076 self.ui.debug("adding %s revisions\n" % f)
2077 fl = self.file(f)
2077 fl = self.file(f)
2078 o = len(fl)
2078 o = len(fl)
2079 chunkiter = changegroup.chunkiter(source, progress=pr)
2079 chunkiter = changegroup.chunkiter(source, progress=pr)
2080 if fl.addgroup(chunkiter, revmap, trp) is None:
2080 if fl.addgroup(chunkiter, revmap, trp) is None:
2081 raise util.Abort(_("received file revlog group is empty"))
2081 raise util.Abort(_("received file revlog group is empty"))
2082 revisions += len(fl) - o
2082 revisions += len(fl) - o
2083 files += 1
2083 files += 1
2084 if f in needfiles:
2084 if f in needfiles:
2085 needs = needfiles[f]
2085 needs = needfiles[f]
2086 for new in xrange(o, len(fl)):
2086 for new in xrange(o, len(fl)):
2087 n = fl.node(new)
2087 n = fl.node(new)
2088 if n in needs:
2088 if n in needs:
2089 needs.remove(n)
2089 needs.remove(n)
2090 if not needs:
2090 if not needs:
2091 del needfiles[f]
2091 del needfiles[f]
2092 self.ui.progress(_('files'), None)
2092 self.ui.progress(_('files'), None)
2093
2093
2094 for f, needs in needfiles.iteritems():
2094 for f, needs in needfiles.iteritems():
2095 fl = self.file(f)
2095 fl = self.file(f)
2096 for n in needs:
2096 for n in needs:
2097 try:
2097 try:
2098 fl.rev(n)
2098 fl.rev(n)
2099 except error.LookupError:
2099 except error.LookupError:
2100 raise util.Abort(
2100 raise util.Abort(
2101 _('missing file data for %s:%s - run hg verify') %
2101 _('missing file data for %s:%s - run hg verify') %
2102 (f, hex(n)))
2102 (f, hex(n)))
2103
2103
2104 newheads = len(cl.heads())
2104 newheads = len(cl.heads())
2105 heads = ""
2105 heads = ""
2106 if oldheads and newheads != oldheads:
2106 if oldheads and newheads != oldheads:
2107 heads = _(" (%+d heads)") % (newheads - oldheads)
2107 heads = _(" (%+d heads)") % (newheads - oldheads)
2108
2108
2109 self.ui.status(_("added %d changesets"
2109 self.ui.status(_("added %d changesets"
2110 " with %d changes to %d files%s\n")
2110 " with %d changes to %d files%s\n")
2111 % (changesets, revisions, files, heads))
2111 % (changesets, revisions, files, heads))
2112
2112
2113 if changesets > 0:
2113 if changesets > 0:
2114 p = lambda: cl.writepending() and self.root or ""
2114 p = lambda: cl.writepending() and self.root or ""
2115 self.hook('pretxnchangegroup', throw=True,
2115 self.hook('pretxnchangegroup', throw=True,
2116 node=hex(cl.node(clstart)), source=srctype,
2116 node=hex(cl.node(clstart)), source=srctype,
2117 url=url, pending=p)
2117 url=url, pending=p)
2118
2118
2119 # make changelog see real files again
2119 # make changelog see real files again
2120 cl.finalize(trp)
2120 cl.finalize(trp)
2121
2121
2122 tr.close()
2122 tr.close()
2123 finally:
2123 finally:
2124 del tr
2124 del tr
2125
2125
2126 if changesets > 0:
2126 if changesets > 0:
2127 # forcefully update the on-disk branch cache
2127 # forcefully update the on-disk branch cache
2128 self.ui.debug("updating the branch cache\n")
2128 self.ui.debug("updating the branch cache\n")
2129 self.branchtags()
2129 self.branchtags()
2130 self.hook("changegroup", node=hex(cl.node(clstart)),
2130 self.hook("changegroup", node=hex(cl.node(clstart)),
2131 source=srctype, url=url)
2131 source=srctype, url=url)
2132
2132
2133 for i in xrange(clstart, clend):
2133 for i in xrange(clstart, clend):
2134 self.hook("incoming", node=hex(cl.node(i)),
2134 self.hook("incoming", node=hex(cl.node(i)),
2135 source=srctype, url=url)
2135 source=srctype, url=url)
2136
2136
2137 # never return 0 here:
2137 # never return 0 here:
2138 if newheads < oldheads:
2138 if newheads < oldheads:
2139 return newheads - oldheads - 1
2139 return newheads - oldheads - 1
2140 else:
2140 else:
2141 return newheads - oldheads + 1
2141 return newheads - oldheads + 1
2142
2142
2143
2143
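The head-counting convention documented in the addchangegroup docstring can be decoded mechanically by callers. Below is a minimal sketch (the helper name is illustrative, not part of the repository API) that turns the integer result back into a human-readable summary::

    def describe_addchangegroup_result(ret):
        # follows the convention documented in addchangegroup above
        if ret == 0:
            return 'nothing changed or no source'
        if ret == 1:
            return 'changesets added, head count unchanged'
        if ret > 1:
            # more heads than before: result is 1 + number of added heads
            return '%d head(s) added' % (ret - 1)
        # fewer heads than before: result is -1 - number of removed heads
        return '%d head(s) removed' % (-ret - 1)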
2144 def stream_in(self, remote):
2144 def stream_in(self, remote):
2145 fp = remote.stream_out()
2145 fp = remote.stream_out()
2146 l = fp.readline()
2146 l = fp.readline()
2147 try:
2147 try:
2148 resp = int(l)
2148 resp = int(l)
2149 except ValueError:
2149 except ValueError:
2150 raise error.ResponseError(
2150 raise error.ResponseError(
2151 _('Unexpected response from remote server:'), l)
2151 _('Unexpected response from remote server:'), l)
2152 if resp == 1:
2152 if resp == 1:
2153 raise util.Abort(_('operation forbidden by server'))
2153 raise util.Abort(_('operation forbidden by server'))
2154 elif resp == 2:
2154 elif resp == 2:
2155 raise util.Abort(_('locking the remote repository failed'))
2155 raise util.Abort(_('locking the remote repository failed'))
2156 elif resp != 0:
2156 elif resp != 0:
2157 raise util.Abort(_('the server sent an unknown error code'))
2157 raise util.Abort(_('the server sent an unknown error code'))
2158 self.ui.status(_('streaming all changes\n'))
2158 self.ui.status(_('streaming all changes\n'))
2159 l = fp.readline()
2159 l = fp.readline()
2160 try:
2160 try:
2161 total_files, total_bytes = map(int, l.split(' ', 1))
2161 total_files, total_bytes = map(int, l.split(' ', 1))
2162 except (ValueError, TypeError):
2162 except (ValueError, TypeError):
2163 raise error.ResponseError(
2163 raise error.ResponseError(
2164 _('Unexpected response from remote server:'), l)
2164 _('Unexpected response from remote server:'), l)
2165 self.ui.status(_('%d files to transfer, %s of data\n') %
2165 self.ui.status(_('%d files to transfer, %s of data\n') %
2166 (total_files, util.bytecount(total_bytes)))
2166 (total_files, util.bytecount(total_bytes)))
2167 start = time.time()
2167 start = time.time()
2168 for i in xrange(total_files):
2168 for i in xrange(total_files):
2169 # XXX doesn't support '\n' or '\r' in filenames
2169 # XXX doesn't support '\n' or '\r' in filenames
2170 l = fp.readline()
2170 l = fp.readline()
2171 try:
2171 try:
2172 name, size = l.split('\0', 1)
2172 name, size = l.split('\0', 1)
2173 size = int(size)
2173 size = int(size)
2174 except (ValueError, TypeError):
2174 except (ValueError, TypeError):
2175 raise error.ResponseError(
2175 raise error.ResponseError(
2176 _('Unexpected response from remote server:'), l)
2176 _('Unexpected response from remote server:'), l)
2177 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2177 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2178 # for backwards compat, name was partially encoded
2178 # for backwards compat, name was partially encoded
2179 ofp = self.sopener(store.decodedir(name), 'w')
2179 ofp = self.sopener(store.decodedir(name), 'w')
2180 for chunk in util.filechunkiter(fp, limit=size):
2180 for chunk in util.filechunkiter(fp, limit=size):
2181 ofp.write(chunk)
2181 ofp.write(chunk)
2182 ofp.close()
2182 ofp.close()
2183 elapsed = time.time() - start
2183 elapsed = time.time() - start
2184 if elapsed <= 0:
2184 if elapsed <= 0:
2185 elapsed = 0.001
2185 elapsed = 0.001
2186 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2186 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2187 (util.bytecount(total_bytes), elapsed,
2187 (util.bytecount(total_bytes), elapsed,
2188 util.bytecount(total_bytes / elapsed)))
2188 util.bytecount(total_bytes / elapsed)))
2189 self.invalidate()
2189 self.invalidate()
2190 return len(self.heads()) + 1
2190 return len(self.heads()) + 1
2191
2191
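For orientation, the stream format consumed by stream_in above is line-oriented: one numeric status line (0 ok, 1 forbidden, 2 remote lock failed), one 'total_files total_bytes' line, then for each file a 'name\0size' header followed by exactly size bytes of store data. A hedged reader sketch for that framing, assuming fp is any file-like object (the helper name is illustrative)::

    def iter_stream_entries(fp):
        # status line: anything other than 0 means the stream was refused
        if int(fp.readline()) != 0:
            raise ValueError('stream refused by server')
        # second line is informational: "<total_files> <total_bytes>"
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for _unused in xrange(total_files):
            # per-file header: "<name>\0<size>\n"
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            # the next `size` bytes are the raw contents of that store file
            yield name, fp.read(size)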
2192 def clone(self, remote, heads=[], stream=False):
2192 def clone(self, remote, heads=[], stream=False):
2193 '''clone remote repository.
2193 '''clone remote repository.
2194
2194
2195 keyword arguments:
2195 keyword arguments:
2196 heads: list of revs to clone (forces use of pull)
2196 heads: list of revs to clone (forces use of pull)
2197 stream: use streaming clone if possible'''
2197 stream: use streaming clone if possible'''
2198
2198
2199 # now, all clients that can request uncompressed clones can
2199 # now, all clients that can request uncompressed clones can
2200 # read repo formats supported by all servers that can serve
2200 # read repo formats supported by all servers that can serve
2201 # them.
2201 # them.
2202
2202
2203 # if revlog format changes, client will have to check version
2203 # if revlog format changes, client will have to check version
2204 # and format flags on "stream" capability, and use
2204 # and format flags on "stream" capability, and use
2205 # uncompressed only if compatible.
2205 # uncompressed only if compatible.
2206
2206
2207 if stream and not heads and remote.capable('stream'):
2207 if stream and not heads and remote.capable('stream'):
2208 return self.stream_in(remote)
2208 return self.stream_in(remote)
2209 return self.pull(remote, heads)
2209 return self.pull(remote, heads)
2210
2210
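A brief usage note for clone(): passing explicit heads always forces the pull-based path, while stream=True only takes effect for a full clone against a server advertising the 'stream' capability. A sketch, assuming repo and remote are already-open repository/peer objects (variable names are illustrative)::

    # full clone; falls back to pull() if the server cannot stream
    repo.clone(remote, stream=True)

    # partial clone limited to specific heads; streaming is ignored here
    # (somenode is a placeholder for a binary changeset id)
    repo.clone(remote, heads=[somenode], stream=True)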
2211 # used to avoid circular references so destructors work
2211 # used to avoid circular references so destructors work
2212 def aftertrans(files):
2212 def aftertrans(files):
2213 renamefiles = [tuple(t) for t in files]
2213 renamefiles = [tuple(t) for t in files]
2214 def a():
2214 def a():
2215 for src, dest in renamefiles:
2215 for src, dest in renamefiles:
2216 util.rename(src, dest)
2216 util.rename(src, dest)
2217 return a
2217 return a
2218
2218
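aftertrans exists so the transaction's post-close callback is a plain closure over (src, dest) pairs rather than a bound method, avoiding a reference cycle back to the repository. A minimal sketch of the intended use, with illustrative journal file names and an illustrative call site (the real wiring lives in the transaction code)::

    renames = [('journal', 'undo'), ('journal.branch', 'undo.branch')]
    callback = aftertrans(renames)
    # ... the transaction commits its journal ...
    callback()   # performs each deferred util.rename(src, dest)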
2219 def instance(ui, path, create):
2219 def instance(ui, path, create):
2220 return localrepository(ui, util.drop_scheme('file', path), create)
2220 return localrepository(ui, util.drop_scheme('file', path), create)
2221
2221
2222 def islocal(path):
2222 def islocal(path):
2223 return True
2223 return True
@@ -1,59 +1,59 b''
1 # osutil.py - pure Python version of osutil.c
1 # osutil.py - pure Python version of osutil.c
2 #
2 #
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import stat as _stat
9 import stat as statmod
10
10
11 posixfile = open
11 posixfile = open
12
12
13 def _mode_to_kind(mode):
13 def _mode_to_kind(mode):
14 if _stat.S_ISREG(mode):
14 if statmod.S_ISREG(mode):
15 return _stat.S_IFREG
15 return statmod.S_IFREG
16 if _stat.S_ISDIR(mode):
16 if statmod.S_ISDIR(mode):
17 return _stat.S_IFDIR
17 return statmod.S_IFDIR
18 if _stat.S_ISLNK(mode):
18 if statmod.S_ISLNK(mode):
19 return _stat.S_IFLNK
19 return statmod.S_IFLNK
20 if _stat.S_ISBLK(mode):
20 if statmod.S_ISBLK(mode):
21 return _stat.S_IFBLK
21 return statmod.S_IFBLK
22 if _stat.S_ISCHR(mode):
22 if statmod.S_ISCHR(mode):
23 return _stat.S_IFCHR
23 return statmod.S_IFCHR
24 if _stat.S_ISFIFO(mode):
24 if statmod.S_ISFIFO(mode):
25 return _stat.S_IFIFO
25 return statmod.S_IFIFO
26 if _stat.S_ISSOCK(mode):
26 if statmod.S_ISSOCK(mode):
27 return _stat.S_IFSOCK
27 return statmod.S_IFSOCK
28 return mode
28 return mode
29
29
30 def listdir(path, stat=False, skip=None):
30 def listdir(path, stat=False, skip=None):
31 '''listdir(path, stat=False) -> list_of_tuples
31 '''listdir(path, stat=False) -> list_of_tuples
32
32
33 Return a sorted list containing information about the entries
33 Return a sorted list containing information about the entries
34 in the directory.
34 in the directory.
35
35
36 If stat is True, each element is a 3-tuple:
36 If stat is True, each element is a 3-tuple:
37
37
38 (name, type, stat object)
38 (name, type, stat object)
39
39
40 Otherwise, each element is a 2-tuple:
40 Otherwise, each element is a 2-tuple:
41
41
42 (name, type)
42 (name, type)
43 '''
43 '''
44 result = []
44 result = []
45 prefix = path
45 prefix = path
46 if not prefix.endswith(os.sep):
46 if not prefix.endswith(os.sep):
47 prefix += os.sep
47 prefix += os.sep
48 names = os.listdir(path)
48 names = os.listdir(path)
49 names.sort()
49 names.sort()
50 for fn in names:
50 for fn in names:
51 st = os.lstat(prefix + fn)
51 st = os.lstat(prefix + fn)
52 if fn == skip and _stat.S_ISDIR(st.st_mode):
52 if fn == skip and statmod.S_ISDIR(st.st_mode):
53 return []
53 return []
54 if stat:
54 if stat:
55 result.append((fn, _mode_to_kind(st.st_mode), st))
55 result.append((fn, _mode_to_kind(st.st_mode), st))
56 else:
56 else:
57 result.append((fn, _mode_to_kind(st.st_mode)))
57 result.append((fn, _mode_to_kind(st.st_mode)))
58 return result
58 return result
59
59
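To illustrate the listdir() contract described in its docstring, the pure-Python implementation can be exercised like this (paths and printed fields are examples only)::

    import stat as statmod

    # names and types only
    for name, kind in listdir('.'):
        if kind == statmod.S_IFDIR:
            print 'directory:', name

    # names, types and full stat results; an entry named '.hg' that is a
    # directory makes listdir() return an empty list immediately
    for name, kind, st in listdir('.', stat=True, skip='.hg'):
        print name, kind, st.st_size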