revlog: use set instead of dict
Benoit Boissinot
r8464:7af92e70 default
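The only functional change in the hunk below is at line 506, inside bundle()'s --base handling: the has collection goes from a dict with dummy values (has = {nullid: None}) to a real set (has = set((nullid,))). A minimal sketch of the idiom, using made-up string keys in place of Mercurial node hashes:

# dict-as-set: the pre-8464 idiom; keys matter, values are throwaway
has = {"nullid": None}
has.update(dict.fromkeys(["n1", "n2"]))   # merge more "reachable" nodes in
print("n1" in has)                        # True - membership test on keys

# plain set: the idiom this commit switches to
has = set(("nullid",))
has.update(["n1", "n2"])                  # set.update() takes any iterable
print("n1" in has)                        # True - same O(1) average lookup

Both forms give constant-time average membership tests; the set drops the unused None values and states the intent (a collection of seen nodes) directly.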
@@ -1,3458 +1,3458 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, textwrap, subprocess, difflib, time
11 import os, re, sys, textwrap, subprocess, difflib, time
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import patch, help, mdiff, tempfile, url, encoding
13 import patch, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 from hgweb import server
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
24 repository.
24 repository.
25
25
26 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
27 undo an add before that, see hg revert.
27 undo an add before that, see hg revert.
28
28
29 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
30 """
30 """
31
31
32 rejected = None
32 rejected = None
33 exacts = {}
33 exacts = {}
34 names = []
34 names = []
35 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
36 m.bad = lambda x,y: True
36 m.bad = lambda x,y: True
37 for abs in repo.walk(m):
37 for abs in repo.walk(m):
38 if m.exact(abs):
38 if m.exact(abs):
39 if ui.verbose:
39 if ui.verbose:
40 ui.status(_('adding %s\n') % m.rel(abs))
40 ui.status(_('adding %s\n') % m.rel(abs))
41 names.append(abs)
41 names.append(abs)
42 exacts[abs] = 1
42 exacts[abs] = 1
43 elif abs not in repo.dirstate:
43 elif abs not in repo.dirstate:
44 ui.status(_('adding %s\n') % m.rel(abs))
44 ui.status(_('adding %s\n') % m.rel(abs))
45 names.append(abs)
45 names.append(abs)
46 if not opts.get('dry_run'):
46 if not opts.get('dry_run'):
47 rejected = repo.add(names)
47 rejected = repo.add(names)
48 rejected = [p for p in rejected if p in exacts]
48 rejected = [p for p in rejected if p in exacts]
49 return rejected and 1 or 0
49 return rejected and 1 or 0
50
50
51 def addremove(ui, repo, *pats, **opts):
51 def addremove(ui, repo, *pats, **opts):
52 """add all new files, delete all missing files
52 """add all new files, delete all missing files
53
53
54 Add all new files and remove all missing files from the
54 Add all new files and remove all missing files from the
55 repository.
55 repository.
56
56
57 New files are ignored if they match any of the patterns in
57 New files are ignored if they match any of the patterns in
58 .hgignore. As with add, these changes take effect at the next
58 .hgignore. As with add, these changes take effect at the next
59 commit.
59 commit.
60
60
61 Use the -s/--similarity option to detect renamed files. With a
61 Use the -s/--similarity option to detect renamed files. With a
62 parameter > 0, this compares every removed file with every added
62 parameter > 0, this compares every removed file with every added
63 file and records those similar enough as renames. This option
63 file and records those similar enough as renames. This option
64 takes a percentage between 0 (disabled) and 100 (files must be
64 takes a percentage between 0 (disabled) and 100 (files must be
65 identical) as its parameter. Detecting renamed files this way can
65 identical) as its parameter. Detecting renamed files this way can
66 be expensive.
66 be expensive.
67 """
67 """
68 try:
68 try:
69 sim = float(opts.get('similarity') or 0)
69 sim = float(opts.get('similarity') or 0)
70 except ValueError:
70 except ValueError:
71 raise util.Abort(_('similarity must be a number'))
71 raise util.Abort(_('similarity must be a number'))
72 if sim < 0 or sim > 100:
72 if sim < 0 or sim > 100:
73 raise util.Abort(_('similarity must be between 0 and 100'))
73 raise util.Abort(_('similarity must be between 0 and 100'))
74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
74 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
75
75
76 def annotate(ui, repo, *pats, **opts):
76 def annotate(ui, repo, *pats, **opts):
77 """show changeset information per file line
77 """show changeset information per file line
78
78
79 List changes in files, showing the revision id responsible for
79 List changes in files, showing the revision id responsible for
80 each line.
80 each line.
81
81
82 This command is useful for discovering who made a change or when
82 This command is useful for discovering who made a change or when
83 a change took place.
83 a change took place.
84
84
85 Without the -a/--text option, annotate will avoid processing files
85 Without the -a/--text option, annotate will avoid processing files
86 it detects as binary. With -a, annotate will generate an
86 it detects as binary. With -a, annotate will generate an
87 annotation anyway, probably with undesirable results.
87 annotation anyway, probably with undesirable results.
88 """
88 """
89 datefunc = ui.quiet and util.shortdate or util.datestr
89 datefunc = ui.quiet and util.shortdate or util.datestr
90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
91
91
92 if not pats:
92 if not pats:
93 raise util.Abort(_('at least one file name or pattern required'))
93 raise util.Abort(_('at least one file name or pattern required'))
94
94
95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
96 ('number', lambda x: str(x[0].rev())),
96 ('number', lambda x: str(x[0].rev())),
97 ('changeset', lambda x: short(x[0].node())),
97 ('changeset', lambda x: short(x[0].node())),
98 ('date', getdate),
98 ('date', getdate),
99 ('follow', lambda x: x[0].path()),
99 ('follow', lambda x: x[0].path()),
100 ]
100 ]
101
101
102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
103 and not opts.get('follow')):
103 and not opts.get('follow')):
104 opts['number'] = 1
104 opts['number'] = 1
105
105
106 linenumber = opts.get('line_number') is not None
106 linenumber = opts.get('line_number') is not None
107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
108 raise util.Abort(_('at least one of -n/-c is required for -l'))
108 raise util.Abort(_('at least one of -n/-c is required for -l'))
109
109
110 funcmap = [func for op, func in opmap if opts.get(op)]
110 funcmap = [func for op, func in opmap if opts.get(op)]
111 if linenumber:
111 if linenumber:
112 lastfunc = funcmap[-1]
112 lastfunc = funcmap[-1]
113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
114
114
115 ctx = repo[opts.get('rev')]
115 ctx = repo[opts.get('rev')]
116
116
117 m = cmdutil.match(repo, pats, opts)
117 m = cmdutil.match(repo, pats, opts)
118 for abs in ctx.walk(m):
118 for abs in ctx.walk(m):
119 fctx = ctx[abs]
119 fctx = ctx[abs]
120 if not opts.get('text') and util.binary(fctx.data()):
120 if not opts.get('text') and util.binary(fctx.data()):
121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
122 continue
122 continue
123
123
124 lines = fctx.annotate(follow=opts.get('follow'),
124 lines = fctx.annotate(follow=opts.get('follow'),
125 linenumber=linenumber)
125 linenumber=linenumber)
126 pieces = []
126 pieces = []
127
127
128 for f in funcmap:
128 for f in funcmap:
129 l = [f(n) for n, dummy in lines]
129 l = [f(n) for n, dummy in lines]
130 if l:
130 if l:
131 ml = max(map(len, l))
131 ml = max(map(len, l))
132 pieces.append(["%*s" % (ml, x) for x in l])
132 pieces.append(["%*s" % (ml, x) for x in l])
133
133
134 if pieces:
134 if pieces:
135 for p, l in zip(zip(*pieces), lines):
135 for p, l in zip(zip(*pieces), lines):
136 ui.write("%s: %s" % (" ".join(p), l[1]))
136 ui.write("%s: %s" % (" ".join(p), l[1]))
137
137
138 def archive(ui, repo, dest, **opts):
138 def archive(ui, repo, dest, **opts):
139 '''create unversioned archive of a repository revision
139 '''create unversioned archive of a repository revision
140
140
141 By default, the revision used is the parent of the working
141 By default, the revision used is the parent of the working
142 directory; use -r/--rev to specify a different revision.
142 directory; use -r/--rev to specify a different revision.
143
143
144 To specify the type of archive to create, use -t/--type. Valid
144 To specify the type of archive to create, use -t/--type. Valid
145 types are:
145 types are:
146
146
147 "files" (default): a directory full of files
147 "files" (default): a directory full of files
148 "tar": tar archive, uncompressed
148 "tar": tar archive, uncompressed
149 "tbz2": tar archive, compressed using bzip2
149 "tbz2": tar archive, compressed using bzip2
150 "tgz": tar archive, compressed using gzip
150 "tgz": tar archive, compressed using gzip
151 "uzip": zip archive, uncompressed
151 "uzip": zip archive, uncompressed
152 "zip": zip archive, compressed using deflate
152 "zip": zip archive, compressed using deflate
153
153
154 The exact name of the destination archive or directory is given
154 The exact name of the destination archive or directory is given
155 using a format string; see 'hg help export' for details.
155 using a format string; see 'hg help export' for details.
156
156
157 Each member added to an archive file has a directory prefix
157 Each member added to an archive file has a directory prefix
158 prepended. Use -p/--prefix to specify a format string for the
158 prepended. Use -p/--prefix to specify a format string for the
159 prefix. The default is the basename of the archive, with suffixes
159 prefix. The default is the basename of the archive, with suffixes
160 removed.
160 removed.
161 '''
161 '''
162
162
163 ctx = repo[opts.get('rev')]
163 ctx = repo[opts.get('rev')]
164 if not ctx:
164 if not ctx:
165 raise util.Abort(_('no working directory: please specify a revision'))
165 raise util.Abort(_('no working directory: please specify a revision'))
166 node = ctx.node()
166 node = ctx.node()
167 dest = cmdutil.make_filename(repo, dest, node)
167 dest = cmdutil.make_filename(repo, dest, node)
168 if os.path.realpath(dest) == repo.root:
168 if os.path.realpath(dest) == repo.root:
169 raise util.Abort(_('repository root cannot be destination'))
169 raise util.Abort(_('repository root cannot be destination'))
170 matchfn = cmdutil.match(repo, [], opts)
170 matchfn = cmdutil.match(repo, [], opts)
171 kind = opts.get('type') or 'files'
171 kind = opts.get('type') or 'files'
172 prefix = opts.get('prefix')
172 prefix = opts.get('prefix')
173 if dest == '-':
173 if dest == '-':
174 if kind == 'files':
174 if kind == 'files':
175 raise util.Abort(_('cannot archive plain files to stdout'))
175 raise util.Abort(_('cannot archive plain files to stdout'))
176 dest = sys.stdout
176 dest = sys.stdout
177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
178 prefix = cmdutil.make_filename(repo, prefix, node)
178 prefix = cmdutil.make_filename(repo, prefix, node)
179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
180 matchfn, prefix)
180 matchfn, prefix)
181
181
182 def backout(ui, repo, node=None, rev=None, **opts):
182 def backout(ui, repo, node=None, rev=None, **opts):
183 '''reverse effect of earlier changeset
183 '''reverse effect of earlier changeset
184
184
185 Commit the backed out changes as a new changeset. The new
185 Commit the backed out changes as a new changeset. The new
186 changeset is a child of the backed out changeset.
186 changeset is a child of the backed out changeset.
187
187
188 If you back out a changeset other than the tip, a new head is
188 If you back out a changeset other than the tip, a new head is
189 created. This head will be the new tip and you should merge this
189 created. This head will be the new tip and you should merge this
190 backout changeset with another head (current one by default).
190 backout changeset with another head (current one by default).
191
191
192 The --merge option remembers the parent of the working directory
192 The --merge option remembers the parent of the working directory
193 before starting the backout, then merges the new head with that
193 before starting the backout, then merges the new head with that
194 changeset afterwards. This saves you from doing the merge by hand.
194 changeset afterwards. This saves you from doing the merge by hand.
195 The result of this merge is not committed, as with a normal merge.
195 The result of this merge is not committed, as with a normal merge.
196
196
197 See \'hg help dates\' for a list of formats valid for -d/--date.
197 See \'hg help dates\' for a list of formats valid for -d/--date.
198 '''
198 '''
199 if rev and node:
199 if rev and node:
200 raise util.Abort(_("please specify just one revision"))
200 raise util.Abort(_("please specify just one revision"))
201
201
202 if not rev:
202 if not rev:
203 rev = node
203 rev = node
204
204
205 if not rev:
205 if not rev:
206 raise util.Abort(_("please specify a revision to backout"))
206 raise util.Abort(_("please specify a revision to backout"))
207
207
208 date = opts.get('date')
208 date = opts.get('date')
209 if date:
209 if date:
210 opts['date'] = util.parsedate(date)
210 opts['date'] = util.parsedate(date)
211
211
212 cmdutil.bail_if_changed(repo)
212 cmdutil.bail_if_changed(repo)
213 node = repo.lookup(rev)
213 node = repo.lookup(rev)
214
214
215 op1, op2 = repo.dirstate.parents()
215 op1, op2 = repo.dirstate.parents()
216 a = repo.changelog.ancestor(op1, node)
216 a = repo.changelog.ancestor(op1, node)
217 if a != node:
217 if a != node:
218 raise util.Abort(_('cannot back out change on a different branch'))
218 raise util.Abort(_('cannot back out change on a different branch'))
219
219
220 p1, p2 = repo.changelog.parents(node)
220 p1, p2 = repo.changelog.parents(node)
221 if p1 == nullid:
221 if p1 == nullid:
222 raise util.Abort(_('cannot back out a change with no parents'))
222 raise util.Abort(_('cannot back out a change with no parents'))
223 if p2 != nullid:
223 if p2 != nullid:
224 if not opts.get('parent'):
224 if not opts.get('parent'):
225 raise util.Abort(_('cannot back out a merge changeset without '
225 raise util.Abort(_('cannot back out a merge changeset without '
226 '--parent'))
226 '--parent'))
227 p = repo.lookup(opts['parent'])
227 p = repo.lookup(opts['parent'])
228 if p not in (p1, p2):
228 if p not in (p1, p2):
229 raise util.Abort(_('%s is not a parent of %s') %
229 raise util.Abort(_('%s is not a parent of %s') %
230 (short(p), short(node)))
230 (short(p), short(node)))
231 parent = p
231 parent = p
232 else:
232 else:
233 if opts.get('parent'):
233 if opts.get('parent'):
234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 raise util.Abort(_('cannot use --parent on non-merge changeset'))
235 parent = p1
235 parent = p1
236
236
237 # the backout should appear on the same branch
237 # the backout should appear on the same branch
238 branch = repo.dirstate.branch()
238 branch = repo.dirstate.branch()
239 hg.clean(repo, node, show_stats=False)
239 hg.clean(repo, node, show_stats=False)
240 repo.dirstate.setbranch(branch)
240 repo.dirstate.setbranch(branch)
241 revert_opts = opts.copy()
241 revert_opts = opts.copy()
242 revert_opts['date'] = None
242 revert_opts['date'] = None
243 revert_opts['all'] = True
243 revert_opts['all'] = True
244 revert_opts['rev'] = hex(parent)
244 revert_opts['rev'] = hex(parent)
245 revert_opts['no_backup'] = None
245 revert_opts['no_backup'] = None
246 revert(ui, repo, **revert_opts)
246 revert(ui, repo, **revert_opts)
247 commit_opts = opts.copy()
247 commit_opts = opts.copy()
248 commit_opts['addremove'] = False
248 commit_opts['addremove'] = False
249 if not commit_opts['message'] and not commit_opts['logfile']:
249 if not commit_opts['message'] and not commit_opts['logfile']:
250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
251 commit_opts['force_editor'] = True
251 commit_opts['force_editor'] = True
252 commit(ui, repo, **commit_opts)
252 commit(ui, repo, **commit_opts)
253 def nice(node):
253 def nice(node):
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
255 ui.status(_('changeset %s backs out changeset %s\n') %
255 ui.status(_('changeset %s backs out changeset %s\n') %
256 (nice(repo.changelog.tip()), nice(node)))
256 (nice(repo.changelog.tip()), nice(node)))
257 if op1 != node:
257 if op1 != node:
258 hg.clean(repo, op1, show_stats=False)
258 hg.clean(repo, op1, show_stats=False)
259 if opts.get('merge'):
259 if opts.get('merge'):
260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
261 hg.merge(repo, hex(repo.changelog.tip()))
261 hg.merge(repo, hex(repo.changelog.tip()))
262 else:
262 else:
263 ui.status(_('the backout changeset is a new head - '
263 ui.status(_('the backout changeset is a new head - '
264 'do not forget to merge\n'))
264 'do not forget to merge\n'))
265 ui.status(_('(use "backout --merge" '
265 ui.status(_('(use "backout --merge" '
266 'if you want to auto-merge)\n'))
266 'if you want to auto-merge)\n'))
267
267
268 def bisect(ui, repo, rev=None, extra=None, command=None,
268 def bisect(ui, repo, rev=None, extra=None, command=None,
269 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 reset=None, good=None, bad=None, skip=None, noupdate=None):
270 """subdivision search of changesets
270 """subdivision search of changesets
271
271
272 This command helps to find changesets which introduce problems. To
272 This command helps to find changesets which introduce problems. To
273 use, mark the earliest changeset you know exhibits the problem as
273 use, mark the earliest changeset you know exhibits the problem as
274 bad, then mark the latest changeset which is free from the problem
274 bad, then mark the latest changeset which is free from the problem
275 as good. Bisect will update your working directory to a revision
275 as good. Bisect will update your working directory to a revision
276 for testing (unless the -U/--noupdate option is specified). Once
276 for testing (unless the -U/--noupdate option is specified). Once
277 you have performed tests, mark the working directory as bad or
277 you have performed tests, mark the working directory as bad or
278 good and bisect will either update to another candidate changeset
278 good and bisect will either update to another candidate changeset
279 or announce that it has found the bad revision.
279 or announce that it has found the bad revision.
280
280
281 As a shortcut, you can also use the revision argument to mark a
281 As a shortcut, you can also use the revision argument to mark a
282 revision as good or bad without checking it out first.
282 revision as good or bad without checking it out first.
283
283
284 If you supply a command, it will be used for automatic bisection.
284 If you supply a command, it will be used for automatic bisection.
285 Its exit status will be used to mark revisions as good or bad:
285 Its exit status will be used to mark revisions as good or bad:
286 status 0 means good, 125 means to skip the revision, 127
286 status 0 means good, 125 means to skip the revision, 127
287 (command not found) will abort the bisection, and any other
287 (command not found) will abort the bisection, and any other
288 non-zero exit status means the revision is bad.
288 non-zero exit status means the revision is bad.
289 """
289 """
290 def print_result(nodes, good):
290 def print_result(nodes, good):
291 displayer = cmdutil.show_changeset(ui, repo, {})
291 displayer = cmdutil.show_changeset(ui, repo, {})
292 if len(nodes) == 1:
292 if len(nodes) == 1:
293 # narrowed it down to a single revision
293 # narrowed it down to a single revision
294 if good:
294 if good:
295 ui.write(_("The first good revision is:\n"))
295 ui.write(_("The first good revision is:\n"))
296 else:
296 else:
297 ui.write(_("The first bad revision is:\n"))
297 ui.write(_("The first bad revision is:\n"))
298 displayer.show(repo[nodes[0]])
298 displayer.show(repo[nodes[0]])
299 else:
299 else:
300 # multiple possible revisions
300 # multiple possible revisions
301 if good:
301 if good:
302 ui.write(_("Due to skipped revisions, the first "
302 ui.write(_("Due to skipped revisions, the first "
303 "good revision could be any of:\n"))
303 "good revision could be any of:\n"))
304 else:
304 else:
305 ui.write(_("Due to skipped revisions, the first "
305 ui.write(_("Due to skipped revisions, the first "
306 "bad revision could be any of:\n"))
306 "bad revision could be any of:\n"))
307 for n in nodes:
307 for n in nodes:
308 displayer.show(repo[n])
308 displayer.show(repo[n])
309
309
310 def check_state(state, interactive=True):
310 def check_state(state, interactive=True):
311 if not state['good'] or not state['bad']:
311 if not state['good'] or not state['bad']:
312 if (good or bad or skip or reset) and interactive:
312 if (good or bad or skip or reset) and interactive:
313 return
313 return
314 if not state['good']:
314 if not state['good']:
315 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 raise util.Abort(_('cannot bisect (no known good revisions)'))
316 else:
316 else:
317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 raise util.Abort(_('cannot bisect (no known bad revisions)'))
318 return True
318 return True
319
319
320 # backward compatibility
320 # backward compatibility
321 if rev in "good bad reset init".split():
321 if rev in "good bad reset init".split():
322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
323 cmd, rev, extra = rev, extra, None
323 cmd, rev, extra = rev, extra, None
324 if cmd == "good":
324 if cmd == "good":
325 good = True
325 good = True
326 elif cmd == "bad":
326 elif cmd == "bad":
327 bad = True
327 bad = True
328 else:
328 else:
329 reset = True
329 reset = True
330 elif extra or good + bad + skip + reset + bool(command) > 1:
330 elif extra or good + bad + skip + reset + bool(command) > 1:
331 raise util.Abort(_('incompatible arguments'))
331 raise util.Abort(_('incompatible arguments'))
332
332
333 if reset:
333 if reset:
334 p = repo.join("bisect.state")
334 p = repo.join("bisect.state")
335 if os.path.exists(p):
335 if os.path.exists(p):
336 os.unlink(p)
336 os.unlink(p)
337 return
337 return
338
338
339 state = hbisect.load_state(repo)
339 state = hbisect.load_state(repo)
340
340
341 if command:
341 if command:
342 commandpath = util.find_exe(command)
342 commandpath = util.find_exe(command)
343 changesets = 1
343 changesets = 1
344 try:
344 try:
345 while changesets:
345 while changesets:
346 # update state
346 # update state
347 status = subprocess.call([commandpath])
347 status = subprocess.call([commandpath])
348 if status == 125:
348 if status == 125:
349 transition = "skip"
349 transition = "skip"
350 elif status == 0:
350 elif status == 0:
351 transition = "good"
351 transition = "good"
352 # status < 0 means process was killed
352 # status < 0 means process was killed
353 elif status == 127:
353 elif status == 127:
354 raise util.Abort(_("failed to execute %s") % command)
354 raise util.Abort(_("failed to execute %s") % command)
355 elif status < 0:
355 elif status < 0:
356 raise util.Abort(_("%s killed") % command)
356 raise util.Abort(_("%s killed") % command)
357 else:
357 else:
358 transition = "bad"
358 transition = "bad"
359 node = repo.lookup(rev or '.')
359 node = repo.lookup(rev or '.')
360 state[transition].append(node)
360 state[transition].append(node)
361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
361 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
362 check_state(state, interactive=False)
362 check_state(state, interactive=False)
363 # bisect
363 # bisect
364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
364 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 # update to next check
365 # update to next check
366 cmdutil.bail_if_changed(repo)
366 cmdutil.bail_if_changed(repo)
367 hg.clean(repo, nodes[0], show_stats=False)
367 hg.clean(repo, nodes[0], show_stats=False)
368 finally:
368 finally:
369 hbisect.save_state(repo, state)
369 hbisect.save_state(repo, state)
370 return print_result(nodes, not status)
370 return print_result(nodes, not status)
371
371
372 # update state
372 # update state
373 node = repo.lookup(rev or '.')
373 node = repo.lookup(rev or '.')
374 if good:
374 if good:
375 state['good'].append(node)
375 state['good'].append(node)
376 elif bad:
376 elif bad:
377 state['bad'].append(node)
377 state['bad'].append(node)
378 elif skip:
378 elif skip:
379 state['skip'].append(node)
379 state['skip'].append(node)
380
380
381 hbisect.save_state(repo, state)
381 hbisect.save_state(repo, state)
382
382
383 if not check_state(state):
383 if not check_state(state):
384 return
384 return
385
385
386 # actually bisect
386 # actually bisect
387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
387 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 if changesets == 0:
388 if changesets == 0:
389 print_result(nodes, good)
389 print_result(nodes, good)
390 else:
390 else:
391 assert len(nodes) == 1 # only a single node can be tested next
391 assert len(nodes) == 1 # only a single node can be tested next
392 node = nodes[0]
392 node = nodes[0]
393 # compute the approximate number of remaining tests
393 # compute the approximate number of remaining tests
394 tests, size = 0, 2
394 tests, size = 0, 2
395 while size <= changesets:
395 while size <= changesets:
396 tests, size = tests + 1, size * 2
396 tests, size = tests + 1, size * 2
397 rev = repo.changelog.rev(node)
397 rev = repo.changelog.rev(node)
398 ui.write(_("Testing changeset %s:%s "
398 ui.write(_("Testing changeset %s:%s "
399 "(%s changesets remaining, ~%s tests)\n")
399 "(%s changesets remaining, ~%s tests)\n")
400 % (rev, short(node), changesets, tests))
400 % (rev, short(node), changesets, tests))
401 if not noupdate:
401 if not noupdate:
402 cmdutil.bail_if_changed(repo)
402 cmdutil.bail_if_changed(repo)
403 return hg.clean(repo, node)
403 return hg.clean(repo, node)
404
404
405 def branch(ui, repo, label=None, **opts):
405 def branch(ui, repo, label=None, **opts):
406 """set or show the current branch name
406 """set or show the current branch name
407
407
408 With no argument, show the current branch name. With one argument,
408 With no argument, show the current branch name. With one argument,
409 set the working directory branch name (the branch does not exist
409 set the working directory branch name (the branch does not exist
410 in the repository until the next commit). It is recommended to use
410 in the repository until the next commit). It is recommended to use
411 the 'default' branch as your primary development branch.
411 the 'default' branch as your primary development branch.
412
412
413 Unless -f/--force is specified, branch will not let you set a
413 Unless -f/--force is specified, branch will not let you set a
414 branch name that shadows an existing branch.
414 branch name that shadows an existing branch.
415
415
416 Use -C/--clean to reset the working directory branch to that of
416 Use -C/--clean to reset the working directory branch to that of
417 the parent of the working directory, negating a previous branch
417 the parent of the working directory, negating a previous branch
418 change.
418 change.
419
419
420 Use the command 'hg update' to switch to an existing branch.
420 Use the command 'hg update' to switch to an existing branch.
421 """
421 """
422
422
423 if opts.get('clean'):
423 if opts.get('clean'):
424 label = repo[None].parents()[0].branch()
424 label = repo[None].parents()[0].branch()
425 repo.dirstate.setbranch(label)
425 repo.dirstate.setbranch(label)
426 ui.status(_('reset working directory to branch %s\n') % label)
426 ui.status(_('reset working directory to branch %s\n') % label)
427 elif label:
427 elif label:
428 if not opts.get('force') and label in repo.branchtags():
428 if not opts.get('force') and label in repo.branchtags():
429 if label not in [p.branch() for p in repo.parents()]:
429 if label not in [p.branch() for p in repo.parents()]:
430 raise util.Abort(_('a branch of the same name already exists'
430 raise util.Abort(_('a branch of the same name already exists'
431 ' (use --force to override)'))
431 ' (use --force to override)'))
432 repo.dirstate.setbranch(encoding.fromlocal(label))
432 repo.dirstate.setbranch(encoding.fromlocal(label))
433 ui.status(_('marked working directory as branch %s\n') % label)
433 ui.status(_('marked working directory as branch %s\n') % label)
434 else:
434 else:
435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
435 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
436
436
437 def branches(ui, repo, active=False):
437 def branches(ui, repo, active=False):
438 """list repository named branches
438 """list repository named branches
439
439
440 List the repository's named branches, indicating which ones are
440 List the repository's named branches, indicating which ones are
441 inactive. If active is specified, only show active branches.
441 inactive. If active is specified, only show active branches.
442
442
443 A branch is considered active if it contains repository heads.
443 A branch is considered active if it contains repository heads.
444
444
445 Use the command 'hg update' to switch to an existing branch.
445 Use the command 'hg update' to switch to an existing branch.
446 """
446 """
447 hexfunc = ui.debugflag and hex or short
447 hexfunc = ui.debugflag and hex or short
448 activebranches = [encoding.tolocal(repo[n].branch())
448 activebranches = [encoding.tolocal(repo[n].branch())
449 for n in repo.heads(closed=False)]
449 for n in repo.heads(closed=False)]
450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
450 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
451 for tag, node in repo.branchtags().items()],
451 for tag, node in repo.branchtags().items()],
452 reverse=True)
452 reverse=True)
453
453
454 for isactive, node, tag in branches:
454 for isactive, node, tag in branches:
455 if (not active) or isactive:
455 if (not active) or isactive:
456 if ui.quiet:
456 if ui.quiet:
457 ui.write("%s\n" % tag)
457 ui.write("%s\n" % tag)
458 else:
458 else:
459 hn = repo.lookup(node)
459 hn = repo.lookup(node)
460 if isactive:
460 if isactive:
461 notice = ''
461 notice = ''
462 elif hn not in repo.branchheads(tag, closed=False):
462 elif hn not in repo.branchheads(tag, closed=False):
463 notice = ' (closed)'
463 notice = ' (closed)'
464 else:
464 else:
465 notice = ' (inactive)'
465 notice = ' (inactive)'
466 rev = str(node).rjust(31 - encoding.colwidth(tag))
466 rev = str(node).rjust(31 - encoding.colwidth(tag))
467 data = tag, rev, hexfunc(hn), notice
467 data = tag, rev, hexfunc(hn), notice
468 ui.write("%s %s:%s%s\n" % data)
468 ui.write("%s %s:%s%s\n" % data)
469
469
470 def bundle(ui, repo, fname, dest=None, **opts):
470 def bundle(ui, repo, fname, dest=None, **opts):
471 """create a changegroup file
471 """create a changegroup file
472
472
473 Generate a compressed changegroup file collecting changesets not
473 Generate a compressed changegroup file collecting changesets not
474 known to be in another repository.
474 known to be in another repository.
475
475
476 If no destination repository is specified the destination is
476 If no destination repository is specified the destination is
477 assumed to have all the nodes specified by one or more --base
477 assumed to have all the nodes specified by one or more --base
478 parameters. To create a bundle containing all changesets, use
478 parameters. To create a bundle containing all changesets, use
479 -a/--all (or --base null). To change the compression method
479 -a/--all (or --base null). To change the compression method
480 applied, use the -t/--type option (by default, bundles are
480 applied, use the -t/--type option (by default, bundles are
481 compressed using bz2).
481 compressed using bz2).
482
482
483 The bundle file can then be transferred using conventional means
483 The bundle file can then be transferred using conventional means
484 and applied to another repository with the unbundle or pull
484 and applied to another repository with the unbundle or pull
485 command. This is useful when direct push and pull are not
485 command. This is useful when direct push and pull are not
486 available or when exporting an entire repository is undesirable.
486 available or when exporting an entire repository is undesirable.
487
487
488 Applying bundles preserves all changeset contents including
488 Applying bundles preserves all changeset contents including
489 permissions, copy/rename information, and revision history.
489 permissions, copy/rename information, and revision history.
490 """
490 """
491 revs = opts.get('rev') or None
491 revs = opts.get('rev') or None
492 if revs:
492 if revs:
493 revs = [repo.lookup(rev) for rev in revs]
493 revs = [repo.lookup(rev) for rev in revs]
494 if opts.get('all'):
494 if opts.get('all'):
495 base = ['null']
495 base = ['null']
496 else:
496 else:
497 base = opts.get('base')
497 base = opts.get('base')
498 if base:
498 if base:
499 if dest:
499 if dest:
500 raise util.Abort(_("--base is incompatible with specifying "
500 raise util.Abort(_("--base is incompatible with specifying "
501 "a destination"))
501 "a destination"))
502 base = [repo.lookup(rev) for rev in base]
502 base = [repo.lookup(rev) for rev in base]
503 # create the right base
503 # create the right base
504 # XXX: nodesbetween / changegroup* should be "fixed" instead
504 # XXX: nodesbetween / changegroup* should be "fixed" instead
505 o = []
505 o = []
506 has = {nullid: None}
506 has = set((nullid,))
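# reviewer note: the "p not in has" test below behaves identically with a
# set, and set.update() accepts any iterable, so reachable() results can
# still be merged in; the set just avoids storing a dummy None per node.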
507 for n in base:
507 for n in base:
508 has.update(repo.changelog.reachable(n))
508 has.update(repo.changelog.reachable(n))
509 if revs:
509 if revs:
510 visit = list(revs)
510 visit = list(revs)
511 else:
511 else:
512 visit = repo.changelog.heads()
512 visit = repo.changelog.heads()
513 seen = {}
513 seen = {}
514 while visit:
514 while visit:
515 n = visit.pop(0)
515 n = visit.pop(0)
516 parents = [p for p in repo.changelog.parents(n) if p not in has]
516 parents = [p for p in repo.changelog.parents(n) if p not in has]
517 if len(parents) == 0:
517 if len(parents) == 0:
518 o.insert(0, n)
518 o.insert(0, n)
519 else:
519 else:
520 for p in parents:
520 for p in parents:
521 if p not in seen:
521 if p not in seen:
522 seen[p] = 1
522 seen[p] = 1
523 visit.append(p)
523 visit.append(p)
524 else:
524 else:
525 dest, revs, checkout = hg.parseurl(
525 dest, revs, checkout = hg.parseurl(
526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
526 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
527 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
528 o = repo.findoutgoing(other, force=opts.get('force'))
528 o = repo.findoutgoing(other, force=opts.get('force'))
529
529
530 if revs:
530 if revs:
531 cg = repo.changegroupsubset(o, revs, 'bundle')
531 cg = repo.changegroupsubset(o, revs, 'bundle')
532 else:
532 else:
533 cg = repo.changegroup(o, 'bundle')
533 cg = repo.changegroup(o, 'bundle')
534
534
535 bundletype = opts.get('type', 'bzip2').lower()
535 bundletype = opts.get('type', 'bzip2').lower()
536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
536 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
537 bundletype = btypes.get(bundletype)
537 bundletype = btypes.get(bundletype)
538 if bundletype not in changegroup.bundletypes:
538 if bundletype not in changegroup.bundletypes:
539 raise util.Abort(_('unknown bundle type specified with --type'))
539 raise util.Abort(_('unknown bundle type specified with --type'))
540
540
541 changegroup.writebundle(cg, fname, bundletype)
541 changegroup.writebundle(cg, fname, bundletype)
542
542
543 def cat(ui, repo, file1, *pats, **opts):
543 def cat(ui, repo, file1, *pats, **opts):
544 """output the current or given revision of files
544 """output the current or given revision of files
545
545
546 Print the specified files as they were at the given revision. If
546 Print the specified files as they were at the given revision. If
547 no revision is given, the parent of the working directory is used,
547 no revision is given, the parent of the working directory is used,
548 or tip if no revision is checked out.
548 or tip if no revision is checked out.
549
549
550 Output may be to a file, in which case the name of the file is
550 Output may be to a file, in which case the name of the file is
551 given using a format string. The formatting rules are the same as
551 given using a format string. The formatting rules are the same as
552 for the export command, with the following additions:
552 for the export command, with the following additions:
553
553
554 %s basename of file being printed
554 %s basename of file being printed
555 %d dirname of file being printed, or '.' if in repository root
555 %d dirname of file being printed, or '.' if in repository root
556 %p root-relative path name of file being printed
556 %p root-relative path name of file being printed
557 """
557 """
558 ctx = repo[opts.get('rev')]
558 ctx = repo[opts.get('rev')]
559 err = 1
559 err = 1
560 m = cmdutil.match(repo, (file1,) + pats, opts)
560 m = cmdutil.match(repo, (file1,) + pats, opts)
561 for abs in ctx.walk(m):
561 for abs in ctx.walk(m):
562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
562 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
563 data = ctx[abs].data()
563 data = ctx[abs].data()
564 if opts.get('decode'):
564 if opts.get('decode'):
565 data = repo.wwritedata(abs, data)
565 data = repo.wwritedata(abs, data)
566 fp.write(data)
566 fp.write(data)
567 err = 0
567 err = 0
568 return err
568 return err
569
569
570 def clone(ui, source, dest=None, **opts):
570 def clone(ui, source, dest=None, **opts):
571 """make a copy of an existing repository
571 """make a copy of an existing repository
572
572
573 Create a copy of an existing repository in a new directory.
573 Create a copy of an existing repository in a new directory.
574
574
575 If no destination directory name is specified, it defaults to the
575 If no destination directory name is specified, it defaults to the
576 basename of the source.
576 basename of the source.
577
577
578 The location of the source is added to the new repository's
578 The location of the source is added to the new repository's
579 .hg/hgrc file, as the default to be used for future pulls.
579 .hg/hgrc file, as the default to be used for future pulls.
580
580
581 If you use the -r/--rev option to clone up to a specific revision,
581 If you use the -r/--rev option to clone up to a specific revision,
582 no subsequent revisions (including subsequent tags) will be
582 no subsequent revisions (including subsequent tags) will be
583 present in the cloned repository. This option implies --pull, even
583 present in the cloned repository. This option implies --pull, even
584 on local repositories.
584 on local repositories.
585
585
586 By default, clone will check out the head of the 'default' branch.
586 By default, clone will check out the head of the 'default' branch.
587 If the -U/--noupdate option is used, the new clone will contain
587 If the -U/--noupdate option is used, the new clone will contain
588 only a repository (.hg) and no working copy (the working copy
588 only a repository (.hg) and no working copy (the working copy
589 parent is the null revision).
589 parent is the null revision).
590
590
591 See 'hg help urls' for valid source format details.
591 See 'hg help urls' for valid source format details.
592
592
593 It is possible to specify an ssh:// URL as the destination, but no
593 It is possible to specify an ssh:// URL as the destination, but no
594 .hg/hgrc and working directory will be created on the remote side.
594 .hg/hgrc and working directory will be created on the remote side.
595 Look at the help text for URLs for important details about ssh://
595 Look at the help text for URLs for important details about ssh://
596 URLs.
596 URLs.
597
597
598 For efficiency, hardlinks are used for cloning whenever the source
598 For efficiency, hardlinks are used for cloning whenever the source
599 and destination are on the same filesystem (note this applies only
599 and destination are on the same filesystem (note this applies only
600 to the repository data, not to the checked out files). Some
600 to the repository data, not to the checked out files). Some
601 filesystems, such as AFS, implement hardlinking incorrectly, but
601 filesystems, such as AFS, implement hardlinking incorrectly, but
602 do not report errors. In these cases, use the --pull option to
602 do not report errors. In these cases, use the --pull option to
603 avoid hardlinking.
603 avoid hardlinking.
604
604
605 In some cases, you can clone repositories and checked out files
605 In some cases, you can clone repositories and checked out files
606 using full hardlinks with
606 using full hardlinks with
607
607
608 $ cp -al REPO REPOCLONE
608 $ cp -al REPO REPOCLONE
609
609
610 This is the fastest way to clone, but it is not always safe. The
610 This is the fastest way to clone, but it is not always safe. The
611 operation is not atomic (making sure REPO is not modified during
611 operation is not atomic (making sure REPO is not modified during
612 the operation is up to you) and you have to make sure your editor
612 the operation is up to you) and you have to make sure your editor
613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
613 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
614 this is not compatible with certain extensions that place their
614 this is not compatible with certain extensions that place their
615 metadata under the .hg directory, such as mq.
615 metadata under the .hg directory, such as mq.
616
616
617 """
617 """
618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
618 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
619 pull=opts.get('pull'),
619 pull=opts.get('pull'),
620 stream=opts.get('uncompressed'),
620 stream=opts.get('uncompressed'),
621 rev=opts.get('rev'),
621 rev=opts.get('rev'),
622 update=not opts.get('noupdate'))
622 update=not opts.get('noupdate'))
623
623
624 def commit(ui, repo, *pats, **opts):
624 def commit(ui, repo, *pats, **opts):
625 """commit the specified files or all outstanding changes
625 """commit the specified files or all outstanding changes
626
626
627 Commit changes to the given files into the repository. Unlike a
627 Commit changes to the given files into the repository. Unlike a
628 centralized RCS, this operation is a local operation. See hg push
628 centralized RCS, this operation is a local operation. See hg push
629 for means to actively distribute your changes.
629 for means to actively distribute your changes.
630
630
631 If a list of files is omitted, all changes reported by "hg status"
631 If a list of files is omitted, all changes reported by "hg status"
632 will be committed.
632 will be committed.
633
633
634 If you are committing the result of a merge, do not provide any
634 If you are committing the result of a merge, do not provide any
635 file names or -I/-X filters.
635 file names or -I/-X filters.
636
636
637 If no commit message is specified, the configured editor is
637 If no commit message is specified, the configured editor is
638 started to prompt you for a message.
638 started to prompt you for a message.
639
639
640 See 'hg help dates' for a list of formats valid for -d/--date.
640 See 'hg help dates' for a list of formats valid for -d/--date.
641 """
641 """
642 extra = {}
642 extra = {}
643 if opts.get('close_branch'):
643 if opts.get('close_branch'):
644 extra['close'] = 1
644 extra['close'] = 1
645 e = cmdutil.commiteditor
645 e = cmdutil.commiteditor
646 if opts.get('force_editor'):
646 if opts.get('force_editor'):
647 e = cmdutil.commitforceeditor
647 e = cmdutil.commitforceeditor
648
648
649 def commitfunc(ui, repo, message, match, opts):
649 def commitfunc(ui, repo, message, match, opts):
650 return repo.commit(match.files(), message, opts.get('user'),
650 return repo.commit(match.files(), message, opts.get('user'),
651 opts.get('date'), match, editor=e, extra=extra)
651 opts.get('date'), match, editor=e, extra=extra)
652
652
653 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
653 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
654 if not node:
654 if not node:
655 return
655 return
656 cl = repo.changelog
656 cl = repo.changelog
657 rev = cl.rev(node)
657 rev = cl.rev(node)
658 parents = cl.parentrevs(rev)
658 parents = cl.parentrevs(rev)
659 if rev - 1 in parents:
659 if rev - 1 in parents:
660 # one of the parents was the old tip
660 # one of the parents was the old tip
661 pass
661 pass
662 elif (parents == (nullrev, nullrev) or
662 elif (parents == (nullrev, nullrev) or
663 len(cl.heads(cl.node(parents[0]))) > 1 and
663 len(cl.heads(cl.node(parents[0]))) > 1 and
664 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
664 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
665 ui.status(_('created new head\n'))
665 ui.status(_('created new head\n'))
666
666
667 if ui.debugflag:
667 if ui.debugflag:
668 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
668 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
669 elif ui.verbose:
669 elif ui.verbose:
670 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
670 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
671
671
672 def copy(ui, repo, *pats, **opts):
672 def copy(ui, repo, *pats, **opts):
673 """mark files as copied for the next commit
673 """mark files as copied for the next commit
674
674
675 Mark dest as having copies of source files. If dest is a
675 Mark dest as having copies of source files. If dest is a
676 directory, copies are put in that directory. If dest is a file,
676 directory, copies are put in that directory. If dest is a file,
677 the source must be a single file.
677 the source must be a single file.
678
678
679 By default, this command copies the contents of files as they
679 By default, this command copies the contents of files as they
680 stand in the working directory. If invoked with -A/--after, the
680 stand in the working directory. If invoked with -A/--after, the
681 operation is recorded, but no copying is performed.
681 operation is recorded, but no copying is performed.
682
682
683 This command takes effect with the next commit. To undo a copy
683 This command takes effect with the next commit. To undo a copy
684 before that, see hg revert.
684 before that, see hg revert.
685 """
685 """
686 wlock = repo.wlock(False)
686 wlock = repo.wlock(False)
687 try:
687 try:
688 return cmdutil.copy(ui, repo, pats, opts)
688 return cmdutil.copy(ui, repo, pats, opts)
689 finally:
689 finally:
690 wlock.release()
690 wlock.release()
691
691
692 def debugancestor(ui, repo, *args):
692 def debugancestor(ui, repo, *args):
693 """find the ancestor revision of two revisions in a given index"""
693 """find the ancestor revision of two revisions in a given index"""
694 if len(args) == 3:
694 if len(args) == 3:
695 index, rev1, rev2 = args
695 index, rev1, rev2 = args
696 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
696 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
697 lookup = r.lookup
697 lookup = r.lookup
698 elif len(args) == 2:
698 elif len(args) == 2:
699 if not repo:
699 if not repo:
700 raise util.Abort(_("There is no Mercurial repository here "
700 raise util.Abort(_("There is no Mercurial repository here "
701 "(.hg not found)"))
701 "(.hg not found)"))
702 rev1, rev2 = args
702 rev1, rev2 = args
703 r = repo.changelog
703 r = repo.changelog
704 lookup = repo.lookup
704 lookup = repo.lookup
705 else:
705 else:
706 raise util.Abort(_('either two or three arguments required'))
706 raise util.Abort(_('either two or three arguments required'))
707 a = r.ancestor(lookup(rev1), lookup(rev2))
707 a = r.ancestor(lookup(rev1), lookup(rev2))
708 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
708 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
709
709
710 def debugcommands(ui, cmd='', *args):
710 def debugcommands(ui, cmd='', *args):
711 for cmd, vals in sorted(table.iteritems()):
711 for cmd, vals in sorted(table.iteritems()):
712 cmd = cmd.split('|')[0].strip('^')
712 cmd = cmd.split('|')[0].strip('^')
713 opts = ', '.join([i[1] for i in vals[1]])
713 opts = ', '.join([i[1] for i in vals[1]])
714 ui.write('%s: %s\n' % (cmd, opts))
714 ui.write('%s: %s\n' % (cmd, opts))
715
715
716 def debugcomplete(ui, cmd='', **opts):
716 def debugcomplete(ui, cmd='', **opts):
717 """returns the completion list associated with the given command"""
717 """returns the completion list associated with the given command"""
718
718
719 if opts.get('options'):
719 if opts.get('options'):
720 options = []
720 options = []
721 otables = [globalopts]
721 otables = [globalopts]
722 if cmd:
722 if cmd:
723 aliases, entry = cmdutil.findcmd(cmd, table, False)
723 aliases, entry = cmdutil.findcmd(cmd, table, False)
724 otables.append(entry[1])
724 otables.append(entry[1])
725 for t in otables:
725 for t in otables:
726 for o in t:
726 for o in t:
727 if o[0]:
727 if o[0]:
728 options.append('-%s' % o[0])
728 options.append('-%s' % o[0])
729 options.append('--%s' % o[1])
729 options.append('--%s' % o[1])
730 ui.write("%s\n" % "\n".join(options))
730 ui.write("%s\n" % "\n".join(options))
731 return
731 return
732
732
733 cmdlist = cmdutil.findpossible(cmd, table)
733 cmdlist = cmdutil.findpossible(cmd, table)
734 if ui.verbose:
734 if ui.verbose:
735 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
735 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
736 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
736 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
737
737
738 def debugfsinfo(ui, path = "."):
738 def debugfsinfo(ui, path = "."):
739 file('.debugfsinfo', 'w').write('')
739 file('.debugfsinfo', 'w').write('')
740 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
740 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
741 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
741 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
742 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
742 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
743 and 'yes' or 'no'))
743 and 'yes' or 'no'))
744 os.unlink('.debugfsinfo')
744 os.unlink('.debugfsinfo')
745
745
746 def debugrebuildstate(ui, repo, rev="tip"):
746 def debugrebuildstate(ui, repo, rev="tip"):
747 """rebuild the dirstate as it would look like for the given revision"""
747 """rebuild the dirstate as it would look like for the given revision"""
748 ctx = repo[rev]
748 ctx = repo[rev]
749 wlock = repo.wlock()
749 wlock = repo.wlock()
750 try:
750 try:
751 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
751 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
752 finally:
752 finally:
753 wlock.release()
753 wlock.release()
754
754
755 def debugcheckstate(ui, repo):
755 def debugcheckstate(ui, repo):
756 """validate the correctness of the current dirstate"""
756 """validate the correctness of the current dirstate"""
757 parent1, parent2 = repo.dirstate.parents()
757 parent1, parent2 = repo.dirstate.parents()
758 m1 = repo[parent1].manifest()
758 m1 = repo[parent1].manifest()
759 m2 = repo[parent2].manifest()
759 m2 = repo[parent2].manifest()
760 errors = 0
760 errors = 0
761 for f in repo.dirstate:
761 for f in repo.dirstate:
762 state = repo.dirstate[f]
762 state = repo.dirstate[f]
763 if state in "nr" and f not in m1:
763 if state in "nr" and f not in m1:
764 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
764 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
765 errors += 1
765 errors += 1
766 if state in "a" and f in m1:
766 if state in "a" and f in m1:
767 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
767 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
768 errors += 1
768 errors += 1
769 if state in "m" and f not in m1 and f not in m2:
769 if state in "m" and f not in m1 and f not in m2:
770 ui.warn(_("%s in state %s, but not in either manifest\n") %
770 ui.warn(_("%s in state %s, but not in either manifest\n") %
771 (f, state))
771 (f, state))
772 errors += 1
772 errors += 1
773 for f in m1:
773 for f in m1:
774 state = repo.dirstate[f]
774 state = repo.dirstate[f]
775 if state not in "nrm":
775 if state not in "nrm":
776 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
776 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
777 errors += 1
777 errors += 1
778 if errors:
778 if errors:
779 error = _(".hg/dirstate inconsistent with current parent's manifest")
779 error = _(".hg/dirstate inconsistent with current parent's manifest")
780 raise util.Abort(error)
780 raise util.Abort(error)
781
781
782 def showconfig(ui, repo, *values, **opts):
782 def showconfig(ui, repo, *values, **opts):
783 """show combined config settings from all hgrc files
783 """show combined config settings from all hgrc files
784
784
785 With no args, print names and values of all config items.
785 With no args, print names and values of all config items.
786
786
787 With one arg of the form section.name, print just the value of
787 With one arg of the form section.name, print just the value of
788 that config item.
788 that config item.
789
789
790 With multiple args, print names and values of all config items
790 With multiple args, print names and values of all config items
791 with matching section names.
791 with matching section names.
792
792
793 With the --debug flag, the source (filename and line number) is
793 With the --debug flag, the source (filename and line number) is
794 printed for each config item.
794 printed for each config item.
795 """
795 """
796
796
797 untrusted = bool(opts.get('untrusted'))
797 untrusted = bool(opts.get('untrusted'))
798 if values:
798 if values:
799 if len([v for v in values if '.' in v]) > 1:
799 if len([v for v in values if '.' in v]) > 1:
800 raise util.Abort(_('only one config item permitted'))
800 raise util.Abort(_('only one config item permitted'))
801 for section, name, value in ui.walkconfig(untrusted=untrusted):
801 for section, name, value in ui.walkconfig(untrusted=untrusted):
802 sectname = section + '.' + name
802 sectname = section + '.' + name
803 if values:
803 if values:
804 for v in values:
804 for v in values:
805 if v == section:
805 if v == section:
806 ui.debug('%s: ' %
806 ui.debug('%s: ' %
807 ui.configsource(section, name, untrusted))
807 ui.configsource(section, name, untrusted))
808 ui.write('%s=%s\n' % (sectname, value))
808 ui.write('%s=%s\n' % (sectname, value))
809 elif v == sectname:
809 elif v == sectname:
810 ui.debug('%s: ' %
810 ui.debug('%s: ' %
811 ui.configsource(section, name, untrusted))
811 ui.configsource(section, name, untrusted))
812 ui.write(value, '\n')
812 ui.write(value, '\n')
813 else:
813 else:
814 ui.debug('%s: ' %
814 ui.debug('%s: ' %
815 ui.configsource(section, name, untrusted))
815 ui.configsource(section, name, untrusted))
816 ui.write('%s=%s\n' % (sectname, value))
816 ui.write('%s=%s\n' % (sectname, value))
817
817
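# Illustrative sketch (not part of commands.py): the matching rules showconfig
# applies above, over a plain {(section, name): value} dict. A query equal to
# a section name selects every item in that section in "name=value" form; a
# query equal to "section.name" selects the bare value of that single item.
def _showconfig_sketch(items, queries):
    out = []
    for (section, name), value in sorted(items.items()):
        sectname = section + '.' + name
        if not queries:
            out.append('%s=%s' % (sectname, value))
            continue
        for q in queries:
            if q == section:
                out.append('%s=%s' % (sectname, value))
            elif q == sectname:
                out.append(value)
    return out

cfg = {('ui', 'username'): 'alice', ('ui', 'verbose'): 'true'}
assert _showconfig_sketch(cfg, ['ui.username']) == ['alice']
assert _showconfig_sketch(cfg, ['ui']) == ['ui.username=alice', 'ui.verbose=true']
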
818 def debugsetparents(ui, repo, rev1, rev2=None):
818 def debugsetparents(ui, repo, rev1, rev2=None):
819 """manually set the parents of the current working directory
819 """manually set the parents of the current working directory
820
820
821 This is useful for writing repository conversion tools, but should
821 This is useful for writing repository conversion tools, but should
822 be used with care.
822 be used with care.
823 """
823 """
824
824
825 if not rev2:
825 if not rev2:
826 rev2 = hex(nullid)
826 rev2 = hex(nullid)
827
827
828 wlock = repo.wlock()
828 wlock = repo.wlock()
829 try:
829 try:
830 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
830 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
831 finally:
831 finally:
832 wlock.release()
832 wlock.release()
833
833
834 def debugstate(ui, repo, nodates=None):
834 def debugstate(ui, repo, nodates=None):
835 """show the contents of the current dirstate"""
835 """show the contents of the current dirstate"""
836 timestr = ""
836 timestr = ""
837 showdate = not nodates
837 showdate = not nodates
838 for file_, ent in sorted(repo.dirstate._map.iteritems()):
838 for file_, ent in sorted(repo.dirstate._map.iteritems()):
839 if showdate:
839 if showdate:
840 if ent[3] == -1:
840 if ent[3] == -1:
841 # Pad or slice to locale representation
841 # Pad or slice to locale representation
842 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
842 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
843 timestr = 'unset'
843 timestr = 'unset'
844 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
844 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
845 else:
845 else:
846 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
846 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
847 if ent[1] & 020000:
847 if ent[1] & 020000:
848 mode = 'lnk'
848 mode = 'lnk'
849 else:
849 else:
850 mode = '%3o' % (ent[1] & 0777)
850 mode = '%3o' % (ent[1] & 0777)
851 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
851 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
852 for f in repo.dirstate.copies():
852 for f in repo.dirstate.copies():
853 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
853 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
854
854
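# Illustrative sketch (not part of commands.py): how one dirstate entry tuple
# (state, mode, size, mtime) is rendered by debugstate above -- 'lnk' when the
# symlink mode bit (020000) is set, the low nine permission bits in octal
# otherwise, and an mtime of -1 padded out as 'unset'.
import time

def _fmtentry_sketch(state, mode, size, mtime):
    width = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
    if mtime == -1:
        timestr = 'unset'.ljust(width)
    else:
        timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
    if mode & 0o20000:
        modestr = 'lnk'
    else:
        modestr = '%3o' % (mode & 0o777)
    return "%c %s %10d %s" % (state, modestr, size, timestr)

assert 'unset' in _fmtentry_sketch('n', 0o100644, 12, -1)
assert _fmtentry_sketch('n', 0o120777, 0, -1).split()[1] == 'lnk'
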
855 def debugdata(ui, file_, rev):
855 def debugdata(ui, file_, rev):
856 """dump the contents of a data file revision"""
856 """dump the contents of a data file revision"""
857 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
857 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
858 try:
858 try:
859 ui.write(r.revision(r.lookup(rev)))
859 ui.write(r.revision(r.lookup(rev)))
860 except KeyError:
860 except KeyError:
861 raise util.Abort(_('invalid revision identifier %s') % rev)
861 raise util.Abort(_('invalid revision identifier %s') % rev)
862
862
863 def debugdate(ui, date, range=None, **opts):
863 def debugdate(ui, date, range=None, **opts):
864 """parse and display a date"""
864 """parse and display a date"""
865 if opts["extended"]:
865 if opts["extended"]:
866 d = util.parsedate(date, util.extendeddateformats)
866 d = util.parsedate(date, util.extendeddateformats)
867 else:
867 else:
868 d = util.parsedate(date)
868 d = util.parsedate(date)
869 ui.write("internal: %s %s\n" % d)
869 ui.write("internal: %s %s\n" % d)
870 ui.write("standard: %s\n" % util.datestr(d))
870 ui.write("standard: %s\n" % util.datestr(d))
871 if range:
871 if range:
872 m = util.matchdate(range)
872 m = util.matchdate(range)
873 ui.write("match: %s\n" % m(d[0]))
873 ui.write("match: %s\n" % m(d[0]))
874
874
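# Illustrative sketch (not part of commands.py): the "internal" form printed
# by debugdate above is a (unixtime, tzoffset) pair, and a date range is just
# a predicate over the unixtime. This standalone version uses only the time
# and calendar modules rather than Mercurial's util.parsedate/util.matchdate.
import calendar, time

def _parsedate_sketch(s, fmt="%Y-%m-%d %H:%M:%S"):
    # naive parse: treat the input as UTC, so the offset component is 0
    return calendar.timegm(time.strptime(s, fmt)), 0

def _matchdate_sketch(start, end):
    lo = _parsedate_sketch(start)[0]
    hi = _parsedate_sketch(end)[0]
    return lambda when: lo <= when <= hi

d = _parsedate_sketch("2009-04-26 12:00:00")
assert _matchdate_sketch("2009-01-01 00:00:00", "2009-12-31 23:59:59")(d[0])
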
875 def debugindex(ui, file_):
875 def debugindex(ui, file_):
876 """dump the contents of an index file"""
876 """dump the contents of an index file"""
877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
877 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
878 ui.write(" rev offset length base linkrev"
878 ui.write(" rev offset length base linkrev"
879 " nodeid p1 p2\n")
879 " nodeid p1 p2\n")
880 for i in r:
880 for i in r:
881 node = r.node(i)
881 node = r.node(i)
882 try:
882 try:
883 pp = r.parents(node)
883 pp = r.parents(node)
884 except:
884 except:
885 pp = [nullid, nullid]
885 pp = [nullid, nullid]
886 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
886 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
887 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
887 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
888 short(node), short(pp[0]), short(pp[1])))
888 short(node), short(pp[0]), short(pp[1])))
889
889
890 def debugindexdot(ui, file_):
890 def debugindexdot(ui, file_):
891 """dump an index DAG as a .dot file"""
891 """dump an index DAG as a .dot file"""
892 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
892 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
893 ui.write("digraph G {\n")
893 ui.write("digraph G {\n")
894 for i in r:
894 for i in r:
895 node = r.node(i)
895 node = r.node(i)
896 pp = r.parents(node)
896 pp = r.parents(node)
897 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
897 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
898 if pp[1] != nullid:
898 if pp[1] != nullid:
899 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
899 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
900 ui.write("}\n")
900 ui.write("}\n")
901
901
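# Illustrative sketch (not part of commands.py): the same DAG-to-dot emission
# as debugindexdot above, but over a plain {rev: (p1rev, p2rev)} mapping with
# -1 playing the role of the null revision.
def _indexdot_sketch(parents):
    lines = ["digraph G {"]
    for rev in sorted(parents):
        p1, p2 = parents[rev]
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# Revisions 1 and 2 branch off 0 and are merged in revision 3:
assert "\t1 -> 3" in _indexdot_sketch({0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, 2)})
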
902 def debuginstall(ui):
902 def debuginstall(ui):
903 '''test Mercurial installation'''
903 '''test Mercurial installation'''
904
904
905 def writetemp(contents):
905 def writetemp(contents):
906 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
906 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
907 f = os.fdopen(fd, "wb")
907 f = os.fdopen(fd, "wb")
908 f.write(contents)
908 f.write(contents)
909 f.close()
909 f.close()
910 return name
910 return name
911
911
912 problems = 0
912 problems = 0
913
913
914 # encoding
914 # encoding
915 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
915 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
916 try:
916 try:
917 encoding.fromlocal("test")
917 encoding.fromlocal("test")
918 except util.Abort, inst:
918 except util.Abort, inst:
919 ui.write(" %s\n" % inst)
919 ui.write(" %s\n" % inst)
920 ui.write(_(" (check that your locale is properly set)\n"))
920 ui.write(_(" (check that your locale is properly set)\n"))
921 problems += 1
921 problems += 1
922
922
923 # compiled modules
923 # compiled modules
924 ui.status(_("Checking extensions...\n"))
924 ui.status(_("Checking extensions...\n"))
925 try:
925 try:
926 import bdiff, mpatch, base85
926 import bdiff, mpatch, base85
927 except Exception, inst:
927 except Exception, inst:
928 ui.write(" %s\n" % inst)
928 ui.write(" %s\n" % inst)
929 ui.write(_(" One or more extensions could not be found"))
929 ui.write(_(" One or more extensions could not be found"))
930 ui.write(_(" (check that you compiled the extensions)\n"))
930 ui.write(_(" (check that you compiled the extensions)\n"))
931 problems += 1
931 problems += 1
932
932
933 # templates
933 # templates
934 ui.status(_("Checking templates...\n"))
934 ui.status(_("Checking templates...\n"))
935 try:
935 try:
936 import templater
936 import templater
937 templater.templater(templater.templatepath("map-cmdline.default"))
937 templater.templater(templater.templatepath("map-cmdline.default"))
938 except Exception, inst:
938 except Exception, inst:
939 ui.write(" %s\n" % inst)
939 ui.write(" %s\n" % inst)
940 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
940 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
941 problems += 1
941 problems += 1
942
942
943 # patch
943 # patch
944 ui.status(_("Checking patch...\n"))
944 ui.status(_("Checking patch...\n"))
945 patchproblems = 0
945 patchproblems = 0
946 a = "1\n2\n3\n4\n"
946 a = "1\n2\n3\n4\n"
947 b = "1\n2\n3\ninsert\n4\n"
947 b = "1\n2\n3\ninsert\n4\n"
948 fa = writetemp(a)
948 fa = writetemp(a)
949 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
949 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
950 os.path.basename(fa))
950 os.path.basename(fa))
951 fd = writetemp(d)
951 fd = writetemp(d)
952
952
953 files = {}
953 files = {}
954 try:
954 try:
955 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
955 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
956 except util.Abort, e:
956 except util.Abort, e:
957 ui.write(_(" patch call failed:\n"))
957 ui.write(_(" patch call failed:\n"))
958 ui.write(" " + str(e) + "\n")
958 ui.write(" " + str(e) + "\n")
959 patchproblems += 1
959 patchproblems += 1
960 else:
960 else:
961 if list(files) != [os.path.basename(fa)]:
961 if list(files) != [os.path.basename(fa)]:
962 ui.write(_(" unexpected patch output!\n"))
962 ui.write(_(" unexpected patch output!\n"))
963 patchproblems += 1
963 patchproblems += 1
964 a = file(fa).read()
964 a = file(fa).read()
965 if a != b:
965 if a != b:
966 ui.write(_(" patch test failed!\n"))
966 ui.write(_(" patch test failed!\n"))
967 patchproblems += 1
967 patchproblems += 1
968
968
969 if patchproblems:
969 if patchproblems:
970 if ui.config('ui', 'patch'):
970 if ui.config('ui', 'patch'):
971 ui.write(_(" (Current patch tool may be incompatible with patch,"
971 ui.write(_(" (Current patch tool may be incompatible with patch,"
972 " or misconfigured. Please check your .hgrc file)\n"))
972 " or misconfigured. Please check your .hgrc file)\n"))
973 else:
973 else:
974 ui.write(_(" Internal patcher failure, please report this error"
974 ui.write(_(" Internal patcher failure, please report this error"
975 " to http://www.selenic.com/mercurial/bts\n"))
975 " to http://www.selenic.com/mercurial/bts\n"))
976 problems += patchproblems
976 problems += patchproblems
977
977
978 os.unlink(fa)
978 os.unlink(fa)
979 os.unlink(fd)
979 os.unlink(fd)
980
980
981 # editor
981 # editor
982 ui.status(_("Checking commit editor...\n"))
982 ui.status(_("Checking commit editor...\n"))
983 editor = ui.geteditor()
983 editor = ui.geteditor()
984 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
984 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
985 if not cmdpath:
985 if not cmdpath:
986 if editor == 'vi':
986 if editor == 'vi':
987 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
987 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
988 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
989 else:
989 else:
990 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
990 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
991 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
991 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
992 problems += 1
992 problems += 1
993
993
994 # check username
994 # check username
995 ui.status(_("Checking username...\n"))
995 ui.status(_("Checking username...\n"))
996 user = os.environ.get("HGUSER")
996 user = os.environ.get("HGUSER")
997 if user is None:
997 if user is None:
998 user = ui.config("ui", "username")
998 user = ui.config("ui", "username")
999 if user is None:
999 if user is None:
1000 user = os.environ.get("EMAIL")
1000 user = os.environ.get("EMAIL")
1001 if not user:
1001 if not user:
1002 ui.warn(" ")
1002 ui.warn(" ")
1003 ui.username()
1003 ui.username()
1004 ui.write(_(" (specify a username in your .hgrc file)\n"))
1004 ui.write(_(" (specify a username in your .hgrc file)\n"))
1005
1005
1006 if not problems:
1006 if not problems:
1007 ui.status(_("No problems detected\n"))
1007 ui.status(_("No problems detected\n"))
1008 else:
1008 else:
1009 ui.write(_("%s problems detected,"
1009 ui.write(_("%s problems detected,"
1010 " please check your install!\n") % problems)
1010 " please check your install!\n") % problems)
1011
1011
1012 return problems
1012 return problems
1013
1013
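# Illustrative sketch (not part of commands.py): the writetemp helper used by
# the patch self-test above simply drops bytes into a named temporary file and
# returns its path; the caller is responsible for unlinking it afterwards.
import os, tempfile

def _writetemp_sketch(contents, prefix="hg-debuginstall-"):
    fd, name = tempfile.mkstemp(prefix=prefix)
    f = os.fdopen(fd, "wb")
    try:
        f.write(contents)
    finally:
        f.close()
    return name

path = _writetemp_sketch(b"1\n2\n3\n4\n")
data = open(path, "rb").read()
os.unlink(path)
assert data == b"1\n2\n3\n4\n"
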
1014 def debugrename(ui, repo, file1, *pats, **opts):
1014 def debugrename(ui, repo, file1, *pats, **opts):
1015 """dump rename information"""
1015 """dump rename information"""
1016
1016
1017 ctx = repo[opts.get('rev')]
1017 ctx = repo[opts.get('rev')]
1018 m = cmdutil.match(repo, (file1,) + pats, opts)
1018 m = cmdutil.match(repo, (file1,) + pats, opts)
1019 for abs in ctx.walk(m):
1019 for abs in ctx.walk(m):
1020 fctx = ctx[abs]
1020 fctx = ctx[abs]
1021 o = fctx.filelog().renamed(fctx.filenode())
1021 o = fctx.filelog().renamed(fctx.filenode())
1022 rel = m.rel(abs)
1022 rel = m.rel(abs)
1023 if o:
1023 if o:
1024 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1024 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1025 else:
1025 else:
1026 ui.write(_("%s not renamed\n") % rel)
1026 ui.write(_("%s not renamed\n") % rel)
1027
1027
1028 def debugwalk(ui, repo, *pats, **opts):
1028 def debugwalk(ui, repo, *pats, **opts):
1029 """show how files match on given patterns"""
1029 """show how files match on given patterns"""
1030 m = cmdutil.match(repo, pats, opts)
1030 m = cmdutil.match(repo, pats, opts)
1031 items = list(repo.walk(m))
1031 items = list(repo.walk(m))
1032 if not items:
1032 if not items:
1033 return
1033 return
1034 fmt = 'f %%-%ds %%-%ds %%s' % (
1034 fmt = 'f %%-%ds %%-%ds %%s' % (
1035 max([len(abs) for abs in items]),
1035 max([len(abs) for abs in items]),
1036 max([len(m.rel(abs)) for abs in items]))
1036 max([len(m.rel(abs)) for abs in items]))
1037 for abs in items:
1037 for abs in items:
1038 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1038 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1039 ui.write("%s\n" % line.rstrip())
1039 ui.write("%s\n" % line.rstrip())
1040
1040
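# Illustrative sketch (not part of commands.py): the column sizing used by
# debugwalk above -- both name columns are padded to the widest entry so the
# trailing 'exact' flag lines up.
def _walkfmt_sketch(pairs):
    fmt = 'f %%-%ds %%-%ds %%s' % (max(len(a) for a, r in pairs),
                                   max(len(r) for a, r in pairs))
    return [(fmt % (a, r, 'exact')).rstrip() for a, r in pairs]

assert _walkfmt_sketch([('dir/file.py', 'file.py')]) == ['f dir/file.py file.py exact']
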
1041 def diff(ui, repo, *pats, **opts):
1041 def diff(ui, repo, *pats, **opts):
1042 """diff repository (or selected files)
1042 """diff repository (or selected files)
1043
1043
1044 Show differences between revisions for the specified files.
1044 Show differences between revisions for the specified files.
1045
1045
1046 Differences between files are shown using the unified diff format.
1046 Differences between files are shown using the unified diff format.
1047
1047
1048 NOTE: diff may generate unexpected results for merges, as it will
1048 NOTE: diff may generate unexpected results for merges, as it will
1049 default to comparing against the working directory's first parent
1049 default to comparing against the working directory's first parent
1050 changeset if no revisions are specified.
1050 changeset if no revisions are specified.
1051
1051
1052 When two revision arguments are given, then changes are shown
1052 When two revision arguments are given, then changes are shown
1053 between those revisions. If only one revision is specified then
1053 between those revisions. If only one revision is specified then
1054 that revision is compared to the working directory, and, when no
1054 that revision is compared to the working directory, and, when no
1055 revisions are specified, the working directory files are compared
1055 revisions are specified, the working directory files are compared
1056 to its parent.
1056 to its parent.
1057
1057
1058 Without the -a/--text option, diff will avoid generating diffs of
1058 Without the -a/--text option, diff will avoid generating diffs of
1059 files it detects as binary. With -a, diff will generate a diff
1059 files it detects as binary. With -a, diff will generate a diff
1060 anyway, probably with undesirable results.
1060 anyway, probably with undesirable results.
1061
1061
1062 Use the -g/--git option to generate diffs in the git extended diff
1062 Use the -g/--git option to generate diffs in the git extended diff
1063 format. For more information, read 'hg help diffs'.
1063 format. For more information, read 'hg help diffs'.
1064 """
1064 """
1065
1065
1066 revs = opts.get('rev')
1066 revs = opts.get('rev')
1067 change = opts.get('change')
1067 change = opts.get('change')
1068
1068
1069 if revs and change:
1069 if revs and change:
1070 msg = _('cannot specify --rev and --change at the same time')
1070 msg = _('cannot specify --rev and --change at the same time')
1071 raise util.Abort(msg)
1071 raise util.Abort(msg)
1072 elif change:
1072 elif change:
1073 node2 = repo.lookup(change)
1073 node2 = repo.lookup(change)
1074 node1 = repo[node2].parents()[0].node()
1074 node1 = repo[node2].parents()[0].node()
1075 else:
1075 else:
1076 node1, node2 = cmdutil.revpair(repo, revs)
1076 node1, node2 = cmdutil.revpair(repo, revs)
1077
1077
1078 m = cmdutil.match(repo, pats, opts)
1078 m = cmdutil.match(repo, pats, opts)
1079 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1079 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1080 for chunk in it:
1080 for chunk in it:
1081 repo.ui.write(chunk)
1081 repo.ui.write(chunk)
1082
1082
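# Illustrative sketch (not part of commands.py): the endpoint selection the
# diff command performs above, reduced to plain values. 'firstparent' stands
# in for looking up a changeset's first parent, and None stands for the
# working directory (or its parent when both endpoints default).
def _diffendpoints_sketch(revs, change, firstparent):
    if revs and change:
        raise ValueError("cannot specify --rev and --change at the same time")
    if change:
        return firstparent(change), change
    if not revs:
        return None, None          # working directory vs. its parent
    if len(revs) == 1:
        return revs[0], None       # one revision vs. the working directory
    return revs[0], revs[1]

assert _diffendpoints_sketch([], "5", lambda r: "4") == ("4", "5")
assert _diffendpoints_sketch(["2", "7"], None, lambda r: None) == ("2", "7")
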
1083 def export(ui, repo, *changesets, **opts):
1083 def export(ui, repo, *changesets, **opts):
1084 """dump the header and diffs for one or more changesets
1084 """dump the header and diffs for one or more changesets
1085
1085
1086 Print the changeset header and diffs for one or more revisions.
1086 Print the changeset header and diffs for one or more revisions.
1087
1087
1088 The information shown in the changeset header is: author,
1088 The information shown in the changeset header is: author,
1089 changeset hash, parent(s) and commit comment.
1089 changeset hash, parent(s) and commit comment.
1090
1090
1091 NOTE: export may generate unexpected diff output for merge
1091 NOTE: export may generate unexpected diff output for merge
1092 changesets, as it will compare the merge changeset against its
1092 changesets, as it will compare the merge changeset against its
1093 first parent only.
1093 first parent only.
1094
1094
1095 Output may be to a file, in which case the name of the file is
1095 Output may be to a file, in which case the name of the file is
1096 given using a format string. The formatting rules are as follows:
1096 given using a format string. The formatting rules are as follows:
1097
1097
1098 %% literal "%" character
1098 %% literal "%" character
1099 %H changeset hash (40 hexadecimal digits)
1099 %H changeset hash (40 hexadecimal digits)
1100 %N number of patches being generated
1100 %N number of patches being generated
1101 %R changeset revision number
1101 %R changeset revision number
1102 %b basename of the exporting repository
1102 %b basename of the exporting repository
1103 %h short-form changeset hash (12 hexadecimal digits)
1103 %h short-form changeset hash (12 hexadecimal digits)
1104 %n zero-padded sequence number, starting at 1
1104 %n zero-padded sequence number, starting at 1
1105 %r zero-padded changeset revision number
1105 %r zero-padded changeset revision number
1106
1106
1107 Without the -a/--text option, export will avoid generating diffs
1107 Without the -a/--text option, export will avoid generating diffs
1108 of files it detects as binary. With -a, export will generate a
1108 of files it detects as binary. With -a, export will generate a
1109 diff anyway, probably with undesirable results.
1109 diff anyway, probably with undesirable results.
1110
1110
1111 Use the -g/--git option to generate diffs in the git extended diff
1111 Use the -g/--git option to generate diffs in the git extended diff
1112 format. Read the diffs help topic for more information.
1112 format. Read the diffs help topic for more information.
1113
1113
1114 With the --switch-parent option, the diff will be against the
1114 With the --switch-parent option, the diff will be against the
1115 second parent. This can be useful for reviewing a merge.
1115 second parent. This can be useful for reviewing a merge.
1116 """
1116 """
1117 if not changesets:
1117 if not changesets:
1118 raise util.Abort(_("export requires at least one changeset"))
1118 raise util.Abort(_("export requires at least one changeset"))
1119 revs = cmdutil.revrange(repo, changesets)
1119 revs = cmdutil.revrange(repo, changesets)
1120 if len(revs) > 1:
1120 if len(revs) > 1:
1121 ui.note(_('exporting patches:\n'))
1121 ui.note(_('exporting patches:\n'))
1122 else:
1122 else:
1123 ui.note(_('exporting patch:\n'))
1123 ui.note(_('exporting patch:\n'))
1124 patch.export(repo, revs, template=opts.get('output'),
1124 patch.export(repo, revs, template=opts.get('output'),
1125 switch_parent=opts.get('switch_parent'),
1125 switch_parent=opts.get('switch_parent'),
1126 opts=patch.diffopts(ui, opts))
1126 opts=patch.diffopts(ui, opts))
1127
1127
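# Illustrative sketch (not part of commands.py): how the export filename
# format string documented above expands for a single changeset. The node,
# revision, sequence and total values here are hypothetical stand-ins for
# what patch.export supplies.
def _expandtemplate_sketch(template, node, rev, seqno, total, reponame):
    width = len(str(total))
    expansions = {
        '%': '%',
        'H': node,                      # full 40-digit hash
        'h': node[:12],                 # short 12-digit hash
        'R': str(rev),
        'r': str(rev).zfill(width),     # zero-padded revision number
        'N': str(total),
        'n': str(seqno).zfill(width),   # zero-padded sequence number
        'b': reponame,
    }
    out, i = [], 0
    while i < len(template):
        c = template[i]
        if c == '%' and i + 1 < len(template):
            out.append(expansions.get(template[i + 1], '%' + template[i + 1]))
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

node = '1234567890abcdef1234567890abcdef12345678'
assert _expandtemplate_sketch('%b-%r-%h.patch', node, 7, 1, 12, 'hg') == \
    'hg-07-1234567890ab.patch'
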
1128 def grep(ui, repo, pattern, *pats, **opts):
1128 def grep(ui, repo, pattern, *pats, **opts):
1129 """search for a pattern in specified files and revisions
1129 """search for a pattern in specified files and revisions
1130
1130
1131 Search revisions of files for a regular expression.
1131 Search revisions of files for a regular expression.
1132
1132
1133 This command behaves differently from Unix grep. It only accepts
1133 This command behaves differently from Unix grep. It only accepts
1134 Python/Perl regexps. It searches repository history, not the
1134 Python/Perl regexps. It searches repository history, not the
1135 working directory. It always prints the revision number in which a
1135 working directory. It always prints the revision number in which a
1136 match appears.
1136 match appears.
1137
1137
1138 By default, grep only prints output for the first revision of a
1138 By default, grep only prints output for the first revision of a
1139 file in which it finds a match. To get it to print every revision
1139 file in which it finds a match. To get it to print every revision
1140 that contains a change in match status ("-" for a match that
1140 that contains a change in match status ("-" for a match that
1141 becomes a non-match, or "+" for a non-match that becomes a match),
1141 becomes a non-match, or "+" for a non-match that becomes a match),
1142 use the --all flag.
1142 use the --all flag.
1143 """
1143 """
1144 reflags = 0
1144 reflags = 0
1145 if opts.get('ignore_case'):
1145 if opts.get('ignore_case'):
1146 reflags |= re.I
1146 reflags |= re.I
1147 try:
1147 try:
1148 regexp = re.compile(pattern, reflags)
1148 regexp = re.compile(pattern, reflags)
1149 except Exception, inst:
1149 except Exception, inst:
1150 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1150 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1151 return None
1151 return None
1152 sep, eol = ':', '\n'
1152 sep, eol = ':', '\n'
1153 if opts.get('print0'):
1153 if opts.get('print0'):
1154 sep = eol = '\0'
1154 sep = eol = '\0'
1155
1155
1156 fcache = {}
1156 fcache = {}
1157 forder = []
1157 forder = []
1158 def getfile(fn):
1158 def getfile(fn):
1159 if fn not in fcache:
1159 if fn not in fcache:
1160 if len(fcache) > 20:
1160 if len(fcache) > 20:
1161 del fcache[forder.pop(0)]
1161 del fcache[forder.pop(0)]
1162 fcache[fn] = repo.file(fn)
1162 fcache[fn] = repo.file(fn)
1163 else:
1163 else:
1164 forder.remove(fn)
1164 forder.remove(fn)
1165
1165
1166 forder.append(fn)
1166 forder.append(fn)
1167 return fcache[fn]
1167 return fcache[fn]
1168
1168
1169 def matchlines(body):
1169 def matchlines(body):
1170 begin = 0
1170 begin = 0
1171 linenum = 0
1171 linenum = 0
1172 while True:
1172 while True:
1173 match = regexp.search(body, begin)
1173 match = regexp.search(body, begin)
1174 if not match:
1174 if not match:
1175 break
1175 break
1176 mstart, mend = match.span()
1176 mstart, mend = match.span()
1177 linenum += body.count('\n', begin, mstart) + 1
1177 linenum += body.count('\n', begin, mstart) + 1
1178 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1178 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1179 begin = body.find('\n', mend) + 1 or len(body)
1179 begin = body.find('\n', mend) + 1 or len(body)
1180 lend = begin - 1
1180 lend = begin - 1
1181 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1181 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1182
1182
1183 class linestate(object):
1183 class linestate(object):
1184 def __init__(self, line, linenum, colstart, colend):
1184 def __init__(self, line, linenum, colstart, colend):
1185 self.line = line
1185 self.line = line
1186 self.linenum = linenum
1186 self.linenum = linenum
1187 self.colstart = colstart
1187 self.colstart = colstart
1188 self.colend = colend
1188 self.colend = colend
1189
1189
1190 def __hash__(self):
1190 def __hash__(self):
1191 return hash((self.linenum, self.line))
1191 return hash((self.linenum, self.line))
1192
1192
1193 def __eq__(self, other):
1193 def __eq__(self, other):
1194 return self.line == other.line
1194 return self.line == other.line
1195
1195
1196 matches = {}
1196 matches = {}
1197 copies = {}
1197 copies = {}
1198 def grepbody(fn, rev, body):
1198 def grepbody(fn, rev, body):
1199 matches[rev].setdefault(fn, [])
1199 matches[rev].setdefault(fn, [])
1200 m = matches[rev][fn]
1200 m = matches[rev][fn]
1201 for lnum, cstart, cend, line in matchlines(body):
1201 for lnum, cstart, cend, line in matchlines(body):
1202 s = linestate(line, lnum, cstart, cend)
1202 s = linestate(line, lnum, cstart, cend)
1203 m.append(s)
1203 m.append(s)
1204
1204
1205 def difflinestates(a, b):
1205 def difflinestates(a, b):
1206 sm = difflib.SequenceMatcher(None, a, b)
1206 sm = difflib.SequenceMatcher(None, a, b)
1207 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1207 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1208 if tag == 'insert':
1208 if tag == 'insert':
1209 for i in xrange(blo, bhi):
1209 for i in xrange(blo, bhi):
1210 yield ('+', b[i])
1210 yield ('+', b[i])
1211 elif tag == 'delete':
1211 elif tag == 'delete':
1212 for i in xrange(alo, ahi):
1212 for i in xrange(alo, ahi):
1213 yield ('-', a[i])
1213 yield ('-', a[i])
1214 elif tag == 'replace':
1214 elif tag == 'replace':
1215 for i in xrange(alo, ahi):
1215 for i in xrange(alo, ahi):
1216 yield ('-', a[i])
1216 yield ('-', a[i])
1217 for i in xrange(blo, bhi):
1217 for i in xrange(blo, bhi):
1218 yield ('+', b[i])
1218 yield ('+', b[i])
1219
1219
1220 prev = {}
1220 prev = {}
1221 def display(fn, rev, states, prevstates):
1221 def display(fn, rev, states, prevstates):
1222 datefunc = ui.quiet and util.shortdate or util.datestr
1222 datefunc = ui.quiet and util.shortdate or util.datestr
1223 found = False
1223 found = False
1224 filerevmatches = {}
1224 filerevmatches = {}
1225 r = prev.get(fn, -1)
1225 r = prev.get(fn, -1)
1226 if opts.get('all'):
1226 if opts.get('all'):
1227 iter = difflinestates(states, prevstates)
1227 iter = difflinestates(states, prevstates)
1228 else:
1228 else:
1229 iter = [('', l) for l in prevstates]
1229 iter = [('', l) for l in prevstates]
1230 for change, l in iter:
1230 for change, l in iter:
1231 cols = [fn, str(r)]
1231 cols = [fn, str(r)]
1232 if opts.get('line_number'):
1232 if opts.get('line_number'):
1233 cols.append(str(l.linenum))
1233 cols.append(str(l.linenum))
1234 if opts.get('all'):
1234 if opts.get('all'):
1235 cols.append(change)
1235 cols.append(change)
1236 if opts.get('user'):
1236 if opts.get('user'):
1237 cols.append(ui.shortuser(get(r)[1]))
1237 cols.append(ui.shortuser(get(r)[1]))
1238 if opts.get('date'):
1238 if opts.get('date'):
1239 cols.append(datefunc(get(r)[2]))
1239 cols.append(datefunc(get(r)[2]))
1240 if opts.get('files_with_matches'):
1240 if opts.get('files_with_matches'):
1241 c = (fn, r)
1241 c = (fn, r)
1242 if c in filerevmatches:
1242 if c in filerevmatches:
1243 continue
1243 continue
1244 filerevmatches[c] = 1
1244 filerevmatches[c] = 1
1245 else:
1245 else:
1246 cols.append(l.line)
1246 cols.append(l.line)
1247 ui.write(sep.join(cols), eol)
1247 ui.write(sep.join(cols), eol)
1248 found = True
1248 found = True
1249 return found
1249 return found
1250
1250
1251 fstate = {}
1251 fstate = {}
1252 skip = {}
1252 skip = {}
1253 get = util.cachefunc(lambda r: repo[r].changeset())
1253 get = util.cachefunc(lambda r: repo[r].changeset())
1254 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1254 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1255 found = False
1255 found = False
1256 follow = opts.get('follow')
1256 follow = opts.get('follow')
1257 for st, rev, fns in changeiter:
1257 for st, rev, fns in changeiter:
1258 if st == 'window':
1258 if st == 'window':
1259 matches.clear()
1259 matches.clear()
1260 elif st == 'add':
1260 elif st == 'add':
1261 ctx = repo[rev]
1261 ctx = repo[rev]
1262 matches[rev] = {}
1262 matches[rev] = {}
1263 for fn in fns:
1263 for fn in fns:
1264 if fn in skip:
1264 if fn in skip:
1265 continue
1265 continue
1266 try:
1266 try:
1267 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1267 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1268 fstate.setdefault(fn, [])
1268 fstate.setdefault(fn, [])
1269 if follow:
1269 if follow:
1270 copied = getfile(fn).renamed(ctx.filenode(fn))
1270 copied = getfile(fn).renamed(ctx.filenode(fn))
1271 if copied:
1271 if copied:
1272 copies.setdefault(rev, {})[fn] = copied[0]
1272 copies.setdefault(rev, {})[fn] = copied[0]
1273 except error.LookupError:
1273 except error.LookupError:
1274 pass
1274 pass
1275 elif st == 'iter':
1275 elif st == 'iter':
1276 for fn, m in sorted(matches[rev].items()):
1276 for fn, m in sorted(matches[rev].items()):
1277 copy = copies.get(rev, {}).get(fn)
1277 copy = copies.get(rev, {}).get(fn)
1278 if fn in skip:
1278 if fn in skip:
1279 if copy:
1279 if copy:
1280 skip[copy] = True
1280 skip[copy] = True
1281 continue
1281 continue
1282 if fn in prev or fstate[fn]:
1282 if fn in prev or fstate[fn]:
1283 r = display(fn, rev, m, fstate[fn])
1283 r = display(fn, rev, m, fstate[fn])
1284 found = found or r
1284 found = found or r
1285 if r and not opts.get('all'):
1285 if r and not opts.get('all'):
1286 skip[fn] = True
1286 skip[fn] = True
1287 if copy:
1287 if copy:
1288 skip[copy] = True
1288 skip[copy] = True
1289 fstate[fn] = m
1289 fstate[fn] = m
1290 if copy:
1290 if copy:
1291 fstate[copy] = m
1291 fstate[copy] = m
1292 prev[fn] = rev
1292 prev[fn] = rev
1293
1293
1294 for fn, state in sorted(fstate.items()):
1294 for fn, state in sorted(fstate.items()):
1295 if fn in skip:
1295 if fn in skip:
1296 continue
1296 continue
1297 if fn not in copies.get(prev[fn], {}):
1297 if fn not in copies.get(prev[fn], {}):
1298 found = display(fn, rev, {}, state) or found
1298 found = display(fn, rev, {}, state) or found
1299 return (not found and 1) or 0
1299 return (not found and 1) or 0
1300
1300
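# Illustrative sketch (not part of commands.py): the '+'/'-' markers used by
# 'grep --all' above come from diffing the matched lines of two revisions of
# a file, exactly as difflinestates does. The same idea on bare strings:
import difflib

def _difflinestates_sketch(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield ('-', line)
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield ('+', line)

old = ['foo = 1', 'bar = 2']
new = ['bar = 2', 'foo = 3']
assert ('-', 'foo = 1') in list(_difflinestates_sketch(old, new))
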
1301 def heads(ui, repo, *branchrevs, **opts):
1301 def heads(ui, repo, *branchrevs, **opts):
1302 """show current repository heads or show branch heads
1302 """show current repository heads or show branch heads
1303
1303
1304 With no arguments, show all repository head changesets.
1304 With no arguments, show all repository head changesets.
1305
1305
1306 If branch names or revisions are given, this will show the heads of
1306 If branch names or revisions are given, this will show the heads of
1307 the specified branches, or of the branches those revisions belong
1307 the specified branches, or of the branches those revisions belong
1308 to.
1308 to.
1309
1309
1310 Repository "heads" are changesets that don't have child
1310 Repository "heads" are changesets that don't have child
1311 changesets. They are where development generally takes place and
1311 changesets. They are where development generally takes place and
1312 are the usual targets for update and merge operations.
1312 are the usual targets for update and merge operations.
1313
1313
1314 Branch heads are changesets that have a given branch tag, but have
1314 Branch heads are changesets that have a given branch tag, but have
1315 no child changesets with that tag. They are usually where
1315 no child changesets with that tag. They are usually where
1316 development on the given branch takes place.
1316 development on the given branch takes place.
1317 """
1317 """
1318 if opts.get('rev'):
1318 if opts.get('rev'):
1319 start = repo.lookup(opts['rev'])
1319 start = repo.lookup(opts['rev'])
1320 else:
1320 else:
1321 start = None
1321 start = None
1322 closed = not opts.get('active')
1322 closed = not opts.get('active')
1323 if not branchrevs:
1323 if not branchrevs:
1324 # Assume we're looking repo-wide heads if no revs were specified.
1324 # Assume we're looking repo-wide heads if no revs were specified.
1325 heads = repo.heads(start, closed=closed)
1325 heads = repo.heads(start, closed=closed)
1326 else:
1326 else:
1327 heads = []
1327 heads = []
1328 visitedset = set()
1328 visitedset = set()
1329 for branchrev in branchrevs:
1329 for branchrev in branchrevs:
1330 branch = repo[branchrev].branch()
1330 branch = repo[branchrev].branch()
1331 if branch in visitedset:
1331 if branch in visitedset:
1332 continue
1332 continue
1333 visitedset.add(branch)
1333 visitedset.add(branch)
1334 bheads = repo.branchheads(branch, start, closed=closed)
1334 bheads = repo.branchheads(branch, start, closed=closed)
1335 if not bheads:
1335 if not bheads:
1336 if branch != branchrev:
1336 if branch != branchrev:
1337 ui.warn(_("no changes on branch %s containing %s are "
1337 ui.warn(_("no changes on branch %s containing %s are "
1338 "reachable from %s\n")
1338 "reachable from %s\n")
1339 % (branch, branchrev, opts.get('rev')))
1339 % (branch, branchrev, opts.get('rev')))
1340 else:
1340 else:
1341 ui.warn(_("no changes on branch %s are reachable from %s\n")
1341 ui.warn(_("no changes on branch %s are reachable from %s\n")
1342 % (branch, opts.get('rev')))
1342 % (branch, opts.get('rev')))
1343 heads.extend(bheads)
1343 heads.extend(bheads)
1344 if not heads:
1344 if not heads:
1345 return 1
1345 return 1
1346 displayer = cmdutil.show_changeset(ui, repo, opts)
1346 displayer = cmdutil.show_changeset(ui, repo, opts)
1347 for n in heads:
1347 for n in heads:
1348 displayer.show(repo[n])
1348 displayer.show(repo[n])
1349
1349
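# Illustrative sketch (not part of commands.py): "heads" as described above
# are simply changesets with no children. Given a plain {rev: (p1, p2)}
# mapping (with -1 for the null revision), they can be found like this:
def _heads_sketch(parents):
    has_child = set()
    for rev, (p1, p2) in parents.items():
        has_child.add(p1)
        has_child.add(p2)
    return sorted(rev for rev in parents if rev not in has_child)

# Linear history 0-1-2 plus a second head 3 branched off revision 1:
assert _heads_sketch({0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}) == [2, 3]
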
1350 def help_(ui, name=None, with_version=False):
1350 def help_(ui, name=None, with_version=False):
1351 """show help for a given topic or a help overview
1351 """show help for a given topic or a help overview
1352
1352
1353 With no arguments, print a list of commands and short help.
1353 With no arguments, print a list of commands and short help.
1354
1354
1355 Given a topic, extension, or command name, print help for that
1355 Given a topic, extension, or command name, print help for that
1356 topic."""
1356 topic."""
1357 option_lists = []
1357 option_lists = []
1358
1358
1359 def addglobalopts(aliases):
1359 def addglobalopts(aliases):
1360 if ui.verbose:
1360 if ui.verbose:
1361 option_lists.append((_("global options:"), globalopts))
1361 option_lists.append((_("global options:"), globalopts))
1362 if name == 'shortlist':
1362 if name == 'shortlist':
1363 option_lists.append((_('use "hg help" for the full list '
1363 option_lists.append((_('use "hg help" for the full list '
1364 'of commands'), ()))
1364 'of commands'), ()))
1365 else:
1365 else:
1366 if name == 'shortlist':
1366 if name == 'shortlist':
1367 msg = _('use "hg help" for the full list of commands '
1367 msg = _('use "hg help" for the full list of commands '
1368 'or "hg -v" for details')
1368 'or "hg -v" for details')
1369 elif aliases:
1369 elif aliases:
1370 msg = _('use "hg -v help%s" to show aliases and '
1370 msg = _('use "hg -v help%s" to show aliases and '
1371 'global options') % (name and " " + name or "")
1371 'global options') % (name and " " + name or "")
1372 else:
1372 else:
1373 msg = _('use "hg -v help %s" to show global options') % name
1373 msg = _('use "hg -v help %s" to show global options') % name
1374 option_lists.append((msg, ()))
1374 option_lists.append((msg, ()))
1375
1375
1376 def helpcmd(name):
1376 def helpcmd(name):
1377 if with_version:
1377 if with_version:
1378 version_(ui)
1378 version_(ui)
1379 ui.write('\n')
1379 ui.write('\n')
1380
1380
1381 try:
1381 try:
1382 aliases, i = cmdutil.findcmd(name, table, False)
1382 aliases, i = cmdutil.findcmd(name, table, False)
1383 except error.AmbiguousCommand, inst:
1383 except error.AmbiguousCommand, inst:
1384 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1384 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1385 helplist(_('list of commands:\n\n'), select)
1385 helplist(_('list of commands:\n\n'), select)
1386 return
1386 return
1387
1387
1388 # synopsis
1388 # synopsis
1389 if len(i) > 2:
1389 if len(i) > 2:
1390 if i[2].startswith('hg'):
1390 if i[2].startswith('hg'):
1391 ui.write("%s\n" % i[2])
1391 ui.write("%s\n" % i[2])
1392 else:
1392 else:
1393 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1393 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1394 else:
1394 else:
1395 ui.write('hg %s\n' % aliases[0])
1395 ui.write('hg %s\n' % aliases[0])
1396
1396
1397 # aliases
1397 # aliases
1398 if not ui.quiet and len(aliases) > 1:
1398 if not ui.quiet and len(aliases) > 1:
1399 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1399 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1400
1400
1401 # description
1401 # description
1402 doc = gettext(i[0].__doc__)
1402 doc = gettext(i[0].__doc__)
1403 if not doc:
1403 if not doc:
1404 doc = _("(no help text available)")
1404 doc = _("(no help text available)")
1405 if ui.quiet:
1405 if ui.quiet:
1406 doc = doc.splitlines(0)[0]
1406 doc = doc.splitlines(0)[0]
1407 ui.write("\n%s\n" % doc.rstrip())
1407 ui.write("\n%s\n" % doc.rstrip())
1408
1408
1409 if not ui.quiet:
1409 if not ui.quiet:
1410 # options
1410 # options
1411 if i[1]:
1411 if i[1]:
1412 option_lists.append((_("options:\n"), i[1]))
1412 option_lists.append((_("options:\n"), i[1]))
1413
1413
1414 addglobalopts(False)
1414 addglobalopts(False)
1415
1415
1416 def helplist(header, select=None):
1416 def helplist(header, select=None):
1417 h = {}
1417 h = {}
1418 cmds = {}
1418 cmds = {}
1419 for c, e in table.iteritems():
1419 for c, e in table.iteritems():
1420 f = c.split("|", 1)[0]
1420 f = c.split("|", 1)[0]
1421 if select and not select(f):
1421 if select and not select(f):
1422 continue
1422 continue
1423 if (not select and name != 'shortlist' and
1423 if (not select and name != 'shortlist' and
1424 e[0].__module__ != __name__):
1424 e[0].__module__ != __name__):
1425 continue
1425 continue
1426 if name == "shortlist" and not f.startswith("^"):
1426 if name == "shortlist" and not f.startswith("^"):
1427 continue
1427 continue
1428 f = f.lstrip("^")
1428 f = f.lstrip("^")
1429 if not ui.debugflag and f.startswith("debug"):
1429 if not ui.debugflag and f.startswith("debug"):
1430 continue
1430 continue
1431 doc = gettext(e[0].__doc__)
1431 doc = gettext(e[0].__doc__)
1432 if not doc:
1432 if not doc:
1433 doc = _("(no help text available)")
1433 doc = _("(no help text available)")
1434 h[f] = doc.splitlines(0)[0].rstrip()
1434 h[f] = doc.splitlines(0)[0].rstrip()
1435 cmds[f] = c.lstrip("^")
1435 cmds[f] = c.lstrip("^")
1436
1436
1437 if not h:
1437 if not h:
1438 ui.status(_('no commands defined\n'))
1438 ui.status(_('no commands defined\n'))
1439 return
1439 return
1440
1440
1441 ui.status(header)
1441 ui.status(header)
1442 fns = sorted(h)
1442 fns = sorted(h)
1443 m = max(map(len, fns))
1443 m = max(map(len, fns))
1444 for f in fns:
1444 for f in fns:
1445 if ui.verbose:
1445 if ui.verbose:
1446 commands = cmds[f].replace("|",", ")
1446 commands = cmds[f].replace("|",", ")
1447 ui.write(" %s:\n %s\n"%(commands, h[f]))
1447 ui.write(" %s:\n %s\n"%(commands, h[f]))
1448 else:
1448 else:
1449 ui.write(' %-*s %s\n' % (m, f, h[f]))
1449 ui.write(' %-*s %s\n' % (m, f, h[f]))
1450
1450
1451 exts = list(extensions.extensions())
1451 exts = list(extensions.extensions())
1452 if exts and name != 'shortlist':
1452 if exts and name != 'shortlist':
1453 ui.write(_('\nenabled extensions:\n\n'))
1453 ui.write(_('\nenabled extensions:\n\n'))
1454 maxlength = 0
1454 maxlength = 0
1455 exthelps = []
1455 exthelps = []
1456 for ename, ext in exts:
1456 for ename, ext in exts:
1457 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1457 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1458 ename = ename.split('.')[-1]
1458 ename = ename.split('.')[-1]
1459 maxlength = max(len(ename), maxlength)
1459 maxlength = max(len(ename), maxlength)
1460 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1460 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1461 for ename, text in exthelps:
1461 for ename, text in exthelps:
1462 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1462 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1463
1463
1464 if not ui.quiet:
1464 if not ui.quiet:
1465 addglobalopts(True)
1465 addglobalopts(True)
1466
1466
1467 def helptopic(name):
1467 def helptopic(name):
1468 for names, header, doc in help.helptable:
1468 for names, header, doc in help.helptable:
1469 if name in names:
1469 if name in names:
1470 break
1470 break
1471 else:
1471 else:
1472 raise error.UnknownCommand(name)
1472 raise error.UnknownCommand(name)
1473
1473
1474 # description
1474 # description
1475 if not doc:
1475 if not doc:
1476 doc = _("(no help text available)")
1476 doc = _("(no help text available)")
1477 if hasattr(doc, '__call__'):
1477 if hasattr(doc, '__call__'):
1478 doc = doc()
1478 doc = doc()
1479
1479
1480 ui.write("%s\n" % header)
1480 ui.write("%s\n" % header)
1481 ui.write("%s\n" % doc.rstrip())
1481 ui.write("%s\n" % doc.rstrip())
1482
1482
1483 def helpext(name):
1483 def helpext(name):
1484 try:
1484 try:
1485 mod = extensions.find(name)
1485 mod = extensions.find(name)
1486 except KeyError:
1486 except KeyError:
1487 raise error.UnknownCommand(name)
1487 raise error.UnknownCommand(name)
1488
1488
1489 doc = gettext(mod.__doc__) or _('no help text available')
1489 doc = gettext(mod.__doc__) or _('no help text available')
1490 doc = doc.splitlines(0)
1490 doc = doc.splitlines(0)
1491 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1491 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1492 for d in doc[1:]:
1492 for d in doc[1:]:
1493 ui.write(d, '\n')
1493 ui.write(d, '\n')
1494
1494
1495 ui.status('\n')
1495 ui.status('\n')
1496
1496
1497 try:
1497 try:
1498 ct = mod.cmdtable
1498 ct = mod.cmdtable
1499 except AttributeError:
1499 except AttributeError:
1500 ct = {}
1500 ct = {}
1501
1501
1502 modcmds = set([c.split('|', 1)[0] for c in ct])
1502 modcmds = set([c.split('|', 1)[0] for c in ct])
1503 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1503 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1504
1504
1505 if name and name != 'shortlist':
1505 if name and name != 'shortlist':
1506 i = None
1506 i = None
1507 for f in (helptopic, helpcmd, helpext):
1507 for f in (helptopic, helpcmd, helpext):
1508 try:
1508 try:
1509 f(name)
1509 f(name)
1510 i = None
1510 i = None
1511 break
1511 break
1512 except error.UnknownCommand, inst:
1512 except error.UnknownCommand, inst:
1513 i = inst
1513 i = inst
1514 if i:
1514 if i:
1515 raise i
1515 raise i
1516
1516
1517 else:
1517 else:
1518 # program name
1518 # program name
1519 if ui.verbose or with_version:
1519 if ui.verbose or with_version:
1520 version_(ui)
1520 version_(ui)
1521 else:
1521 else:
1522 ui.status(_("Mercurial Distributed SCM\n"))
1522 ui.status(_("Mercurial Distributed SCM\n"))
1523 ui.status('\n')
1523 ui.status('\n')
1524
1524
1525 # list of commands
1525 # list of commands
1526 if name == "shortlist":
1526 if name == "shortlist":
1527 header = _('basic commands:\n\n')
1527 header = _('basic commands:\n\n')
1528 else:
1528 else:
1529 header = _('list of commands:\n\n')
1529 header = _('list of commands:\n\n')
1530
1530
1531 helplist(header)
1531 helplist(header)
1532
1532
1533 # list all option lists
1533 # list all option lists
1534 opt_output = []
1534 opt_output = []
1535 for title, options in option_lists:
1535 for title, options in option_lists:
1536 opt_output.append(("\n%s" % title, None))
1536 opt_output.append(("\n%s" % title, None))
1537 for shortopt, longopt, default, desc in options:
1537 for shortopt, longopt, default, desc in options:
1538 if "DEPRECATED" in desc and not ui.verbose: continue
1538 if "DEPRECATED" in desc and not ui.verbose: continue
1539 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1539 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1540 longopt and " --%s" % longopt),
1540 longopt and " --%s" % longopt),
1541 "%s%s" % (desc,
1541 "%s%s" % (desc,
1542 default
1542 default
1543 and _(" (default: %s)") % default
1543 and _(" (default: %s)") % default
1544 or "")))
1544 or "")))
1545
1545
1546 if not name:
1546 if not name:
1547 ui.write(_("\nadditional help topics:\n\n"))
1547 ui.write(_("\nadditional help topics:\n\n"))
1548 topics = []
1548 topics = []
1549 for names, header, doc in help.helptable:
1549 for names, header, doc in help.helptable:
1550 names = [(-len(name), name) for name in names]
1550 names = [(-len(name), name) for name in names]
1551 names.sort()
1551 names.sort()
1552 topics.append((names[0][1], header))
1552 topics.append((names[0][1], header))
1553 topics_len = max([len(s[0]) for s in topics])
1553 topics_len = max([len(s[0]) for s in topics])
1554 for t, desc in topics:
1554 for t, desc in topics:
1555 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1555 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1556
1556
1557 if opt_output:
1557 if opt_output:
1558 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1558 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1559 for first, second in opt_output:
1559 for first, second in opt_output:
1560 if second:
1560 if second:
1561 # wrap descriptions at 70 characters, just like the
1561 # wrap descriptions at 70 characters, just like the
1562 # main help texts
1562 # main help texts
1563 second = textwrap.wrap(second, width=70 - opts_len - 3)
1563 second = textwrap.wrap(second, width=70 - opts_len - 3)
1564 pad = '\n' + ' ' * (opts_len + 3)
1564 pad = '\n' + ' ' * (opts_len + 3)
1565 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1565 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1566 else:
1566 else:
1567 ui.write("%s\n" % first)
1567 ui.write("%s\n" % first)
1568
1568
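# Illustrative sketch (not part of commands.py): the option-list wrapping done
# at the end of help_ above -- each description wraps at 70 columns and the
# continuation lines are padded so they line up after the option column.
import textwrap

def _optline_sketch(first, second, opts_len):
    wrapped = textwrap.wrap(second, width=70 - opts_len - 3)
    pad = '\n' + ' ' * (opts_len + 3)
    return " %-*s %s" % (opts_len, first, pad.join(wrapped))

line = _optline_sketch("-v --verbose", "enable additional output", 20)
assert line.startswith(" -v --verbose")
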
1569 def identify(ui, repo, source=None,
1569 def identify(ui, repo, source=None,
1570 rev=None, num=None, id=None, branch=None, tags=None):
1570 rev=None, num=None, id=None, branch=None, tags=None):
1571 """identify the working copy or specified revision
1571 """identify the working copy or specified revision
1572
1572
1573 With no revision, print a summary of the current state of the
1573 With no revision, print a summary of the current state of the
1574 repository.
1574 repository.
1575
1575
1576 With a path, do a lookup in another repository.
1576 With a path, do a lookup in another repository.
1577
1577
1578 This summary identifies the repository state using one or two
1578 This summary identifies the repository state using one or two
1579 parent hash identifiers, followed by a "+" if there are
1579 parent hash identifiers, followed by a "+" if there are
1580 uncommitted changes in the working directory, a list of tags for
1580 uncommitted changes in the working directory, a list of tags for
1581 this revision and a branch name for non-default branches.
1581 this revision and a branch name for non-default branches.
1582 """
1582 """
1583
1583
1584 if not repo and not source:
1584 if not repo and not source:
1585 raise util.Abort(_("There is no Mercurial repository here "
1585 raise util.Abort(_("There is no Mercurial repository here "
1586 "(.hg not found)"))
1586 "(.hg not found)"))
1587
1587
1588 hexfunc = ui.debugflag and hex or short
1588 hexfunc = ui.debugflag and hex or short
1589 default = not (num or id or branch or tags)
1589 default = not (num or id or branch or tags)
1590 output = []
1590 output = []
1591
1591
1592 revs = []
1592 revs = []
1593 if source:
1593 if source:
1594 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1594 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1595 repo = hg.repository(ui, source)
1595 repo = hg.repository(ui, source)
1596
1596
1597 if not repo.local():
1597 if not repo.local():
1598 if not rev and revs:
1598 if not rev and revs:
1599 rev = revs[0]
1599 rev = revs[0]
1600 if not rev:
1600 if not rev:
1601 rev = "tip"
1601 rev = "tip"
1602 if num or branch or tags:
1602 if num or branch or tags:
1603 raise util.Abort(
1603 raise util.Abort(
1604 "can't query remote revision number, branch, or tags")
1604 "can't query remote revision number, branch, or tags")
1605 output = [hexfunc(repo.lookup(rev))]
1605 output = [hexfunc(repo.lookup(rev))]
1606 elif not rev:
1606 elif not rev:
1607 ctx = repo[None]
1607 ctx = repo[None]
1608 parents = ctx.parents()
1608 parents = ctx.parents()
1609 changed = False
1609 changed = False
1610 if default or id or num:
1610 if default or id or num:
1611 changed = ctx.files() + ctx.deleted()
1611 changed = ctx.files() + ctx.deleted()
1612 if default or id:
1612 if default or id:
1613 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1613 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1614 (changed) and "+" or "")]
1614 (changed) and "+" or "")]
1615 if num:
1615 if num:
1616 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1616 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1617 (changed) and "+" or ""))
1617 (changed) and "+" or ""))
1618 else:
1618 else:
1619 ctx = repo[rev]
1619 ctx = repo[rev]
1620 if default or id:
1620 if default or id:
1621 output = [hexfunc(ctx.node())]
1621 output = [hexfunc(ctx.node())]
1622 if num:
1622 if num:
1623 output.append(str(ctx.rev()))
1623 output.append(str(ctx.rev()))
1624
1624
1625 if repo.local() and default and not ui.quiet:
1625 if repo.local() and default and not ui.quiet:
1626 b = encoding.tolocal(ctx.branch())
1626 b = encoding.tolocal(ctx.branch())
1627 if b != 'default':
1627 if b != 'default':
1628 output.append("(%s)" % b)
1628 output.append("(%s)" % b)
1629
1629
1630 # multiple tags for a single parent separated by '/'
1630 # multiple tags for a single parent separated by '/'
1631 t = "/".join(ctx.tags())
1631 t = "/".join(ctx.tags())
1632 if t:
1632 if t:
1633 output.append(t)
1633 output.append(t)
1634
1634
1635 if branch:
1635 if branch:
1636 output.append(encoding.tolocal(ctx.branch()))
1636 output.append(encoding.tolocal(ctx.branch()))
1637
1637
1638 if tags:
1638 if tags:
1639 output.extend(ctx.tags())
1639 output.extend(ctx.tags())
1640
1640
1641 ui.write("%s\n" % ' '.join(output))
1641 ui.write("%s\n" % ' '.join(output))
1642
1642
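# Illustrative sketch (not part of commands.py): how the default identify
# output described above is assembled from plain values -- short parent
# hashes joined with '+', a trailing '+' for local modifications, the branch
# in parentheses when it is not 'default', and any tags joined by '/'.
def _identify_sketch(parenthashes, dirty, branch, tags):
    output = ['+'.join(h[:12] for h in parenthashes) + (dirty and '+' or '')]
    if branch != 'default':
        output.append('(%s)' % branch)
    if tags:
        output.append('/'.join(tags))
    return ' '.join(output)

assert _identify_sketch(['1234567890abcdef' * 2 + '12345678'], True,
                        'stable', ['tip']) == '1234567890ab+ (stable) tip'
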
1643 def import_(ui, repo, patch1, *patches, **opts):
1643 def import_(ui, repo, patch1, *patches, **opts):
1644 """import an ordered set of patches
1644 """import an ordered set of patches
1645
1645
1646 Import a list of patches and commit them individually.
1646 Import a list of patches and commit them individually.
1647
1647
1648 If there are outstanding changes in the working directory, import
1648 If there are outstanding changes in the working directory, import
1649 will abort unless given the -f/--force flag.
1649 will abort unless given the -f/--force flag.
1650
1650
1651 You can import a patch straight from a mail message. Even patches
1651 You can import a patch straight from a mail message. Even patches
1652 as attachments work (body part must be type text/plain or
1652 as attachments work (body part must be type text/plain or
1653 text/x-patch to be used). The From and Subject headers of the email
1653 text/x-patch to be used). The From and Subject headers of the email
1654 message are used as the default committer and commit message. All
1654 message are used as the default committer and commit message. All
1655 text/plain body parts before the first diff are added to the commit
1655 text/plain body parts before the first diff are added to the commit
1656 message.
1656 message.
1657
1657
1658 If the imported patch was generated by hg export, user and
1658 If the imported patch was generated by hg export, user and
1659 description from patch override values from message headers and
1659 description from patch override values from message headers and
1660 body. Values given on command line with -m/--message and -u/--user
1660 body. Values given on command line with -m/--message and -u/--user
1661 override these.
1661 override these.
1662
1662
1663 If --exact is specified, import will set the working directory to
1663 If --exact is specified, import will set the working directory to
1664 the parent of each patch before applying it, and will abort if the
1664 the parent of each patch before applying it, and will abort if the
1665 resulting changeset has a different ID than the one recorded in
1665 resulting changeset has a different ID than the one recorded in
1666 the patch. This may happen due to character set problems or other
1666 the patch. This may happen due to character set problems or other
1667 deficiencies in the text patch format.
1667 deficiencies in the text patch format.
1668
1668
1669 With -s/--similarity, hg will attempt to discover renames and
1669 With -s/--similarity, hg will attempt to discover renames and
1670 copies in the patch in the same way as 'addremove'.
1670 copies in the patch in the same way as 'addremove'.
1671
1671
1672 To read a patch from standard input, use patch name "-". See 'hg
1672 To read a patch from standard input, use patch name "-". See 'hg
1673 help dates' for a list of formats valid for -d/--date.
1673 help dates' for a list of formats valid for -d/--date.
1674 """
1674 """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))

    if opts.get('exact') or not opts.get('force'):
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                pf = sys.stdin
            else:
                ui.status(_("applying %s\n") % p)
                pf = url.open(ui, pf)
            data = patch.extract(ui, pf)
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except error.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                files=files)
                finally:
                    files = patch.updatedir(ui, repo, files, similarity=sim/100.)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date,
                                    editor=cmdutil.commiteditor)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backs up an up-to-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        release(lock, wlock)

def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a
    pull was requested.

    For a remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
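    # Illustrative use of --bundle as described above (the bundle file and
    # URL are hypothetical):
    #   hg incoming --bundle incoming.hg https://example.com/repo
    #   hg pull incoming.hg    # reuse the saved bundle instead of re-downloading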
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
                                                       force=opts["force"])
    if not incoming:
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)

            if revs is None and other.capable('changegroupsubset'):
                revs = rheads

            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)

def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    See 'hg help urls' for more information.
    """
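    # Illustrative usage (directory name and ssh URL are hypothetical):
    #   hg init                # initialize the current directory
    #   hg init project        # create ./project and initialize it
    #   hg init ssh://user@host/path/to/repo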
    hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)

def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain white space as multiple filenames.
    """
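    # Illustrative use of the -0/xargs combination described above (the
    # pattern and grep target are hypothetical):
    #   hg locate "*.py"
    #   hg locate -0 "*.py" | xargs -0 grep -l TODO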
    end = opts.get('print0') and '\0' or '\n'
    rev = opts.get('rev') or None

    ret = 1
    m = cmdutil.match(repo, pats, opts, default='relglob')
    m.bad = lambda x,y: False
    for abs in repo[rev].walk(m):
        if not rev and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), end)
        else:
            ui.write(((pats and m.rel(abs)) or abs), end)
        ret = 0

    return ret

def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p/--patch may generate unexpected diff output for merge
    changesets, as it will only compare the merge changeset against
    its first parent. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """
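    # Illustrative usage (file name, date and keyword are hypothetical):
    #   hg log -f mercurial/commands.py   # follow history across renames
    #   hg log -d ">2009-01-01" -k bug    # filter by date and keyword
    #   hg log -v -p -r 100:tip           # verbose output with patches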

    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts.get('keyword'):
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            if opts['user']:
                changes = get(rev)
                if not [k for k in opts['user'] if k in changes[1]]:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(context.changectx(repo, rev), copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1

def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if none is checked out.

    With -v flag, print file permissions, symlink and executable bits.
    With --debug flag, print file revision hashes.
    """
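    # Illustrative usage (the revision number is hypothetical):
    #   hg manifest             # list files in the working directory's parent
    #   hg manifest -v -r 100   # add 644/755, '@' (symlink) and '*' (exec) markers
    #   hg manifest --debug     # prefix each name with its file revision hash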

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
    ctx = repo[node]
    for f in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[f]))
        if ui.verbose:
            ui.write(decor[ctx.flags(f)])
        ui.write("%s\n" % f)

def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The contents of the current working directory are updated with all
    changes made in the requested revision since the last common
    predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates are allowed. The next commit has two parents.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision to merge with must be provided.
    """
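    # Illustrative session (the revision number is hypothetical):
    #   hg merge            # merge the only other head of the current branch
    #   hg merge 1234       # merge an explicitly named revision
    #   hg commit -m merge  # the resulting commit has two parents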

    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == bheads[0] and bheads[-1] or bheads[0]

    if opts.get('show'):
        p1 = repo['.']
        p2 = repo[node]
        common = p1.ancestor(p2)
        roots, heads = [common.node()], [p2.node()]
        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
            displayer.show(repo[node])
        return 0

    return hg.merge(repo, node, force=opts.get('force'))

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for valid destination format details.
    """
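    # Illustrative usage (the path alias is hypothetical):
    #   hg outgoing                # compare with the default push location
    #   hg outgoing -r tip mirror  # limit to ancestors of a given revision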
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        ctx = repo[None]

    if file_:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])

def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    See 'hg help urls' for more information.
    """
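    # Illustrative [paths] section for an hgrc file (alias names and URLs
    # are hypothetical):
    #   [paths]
    #   default = https://example.com/main
    #   staging = ssh://hg@example.com/staging
    # 'hg paths' would list both entries; 'hg paths staging' prints one URL.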
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % url.hidepassword(path))
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, url.hidepassword(path)))

def postincoming(ui, repo, modheads, optupdate, checkout):
    if modheads == 0:
        return
    if optupdate:
        if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
            return hg.update(repo, checkout)
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to the local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Use hg incoming if you want to see what will be added by the next
    pull without actually adding the changes to the repository.

    If SOURCE is omitted, the 'default' path will be used.
    See 'hg help urls' for more information.
    """
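    # Illustrative usage (the path alias and revision are hypothetical):
    #   hg pull                  # pull from the 'default' path
    #   hg pull -u               # pull, then update the working directory
    #   hg pull -r 1.2 staging   # pull only ancestors of revision 1.2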
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It moves changes from
    the current repository to a different one. If the destination is
    local this is identical to a pull in that directory from the
    current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to pull and merge before pushing.

    If -r/--rev is used, the named revision and all its ancestors will
    be pushed to the remote repository.

    Look at the help text for URLs for important details about ssh://
    URLs. If DESTINATION is omitted, a default path will be used.
    See 'hg help urls' for more information.
    """
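    # Illustrative usage (the path alias and revision are hypothetical):
    #   hg push                     # push to 'default-push' or 'default'
    #   hg push -r stable upstream  # push a revision and all of its ancestors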
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts.get('force'), revs=revs)
    return r == 0

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. -A/--after can be used to remove only
    files that have already been deleted, -f/--force can be used to
    force deletion, and -Af can be used to remove files from the next
    revision without deleting them.

    The following table details the behavior of remove for different
    file states (columns) and option combinations (rows). The file
    states are Added, Clean, Modified and Missing (as reported by hg
    status). The actions are Warn, Remove (from branch) and Delete
    (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """
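    # Illustrative invocations matching the table above (file names are
    # hypothetical):
    #   hg remove obsolete.py        # delete from disk and schedule removal
    #   hg remove -A missing.py      # record removal of an already-deleted file
    #   hg remove -Af keep-local.py  # remove from the next revision, keep on disk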

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    repo.remove(remove, unlink=not after)

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()

def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to
    resolve all unresolved files, use the -a/--all switch.

    If a conflict is resolved manually, please note that the changes
    will be overwritten if the merge is retried with resolve. The
    -m/--mark switch should be used to mark the file as resolved.

    This command will also allow listing resolved files and manually
    marking and unmarking files as resolved. All files must be marked
    as resolved before new commits are permitted.

    The codes used to show the status of files are:
      U = unresolved
      R = resolved
    """
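    # Illustrative usage (the file name is hypothetical):
    #   hg resolve -l             # list files as U (unresolved) or R (resolved)
    #   hg resolve --all          # retry the file merges for unresolved files
    #   hg resolve -m conflict.c  # mark a hand-edited file as resolved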

    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if m(f):
            if show:
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

def revert(ui, repo, *pats, **opts):
    """restore individual files or directories to an earlier state

    (Use update -r to check out earlier revisions; revert does not
    change the working directory parents.)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r/--rev option, revert the given files or directories
    to their contents as of a specific revision. This can be helpful
    to "roll back" some or all of an earlier change. See 'hg help
    dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable mode
    of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """
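    # Illustrative usage (file name and revision are hypothetical):
    #   hg revert hello.c            # contents from the working dir's parent
    #   hg revert -r 42 hello.c      # contents as of revision 42
    #   hg revert --all --no-backup  # revert everything, skip .orig backups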

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
2620 normal = repo.dirstate.normal
2621 for f in undelete[0]:
2621 for f in undelete[0]:
2622 checkout(f)
2622 checkout(f)
2623 normal(f)
2623 normal(f)
2624
2624
2625 finally:
2625 finally:
2626 wlock.release()
2626 wlock.release()
2627
2627
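# Illustrative sketch, not part of commands.py: the dispatch table used by
# revert above pairs each status set with the action to take when the file
# is (or is not) present in the target manifest, plus two backup flags. The
# helper below reduces that lookup to its essentials; all names are made up.
def _pick_revert_action_sketch(path, disptable, in_target_manifest):
    for fileset, hit_action, miss_action, backup_hit, backup_miss in disptable:
        if path not in fileset:
            continue
        if in_target_manifest:
            return hit_action, backup_hit
        if miss_action is not None:
            return miss_action, backup_miss
        return None, False
    return None, False
# Example: a modified file that still exists in the target manifest gets
# reverted, and the working copy is saved as a .orig backup first:
#   _pick_revert_action_sketch('a.txt', (({'a.txt'}, 'revert', 'remove', True, True),), True)
#   -> ('revert', True)
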
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.
    """

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    baseui = repo and repo.baseui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            baseui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != baseui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = server.create_server(baseui, repo)

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)

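# Illustrative sketch, not part of commands.py: cmdutil.service() above is
# handed two callbacks -- initfn to set the server up while still in the
# foreground (so configuration errors surface early) and runfn to block and
# serve. Any object with that shape will do; the names here are made up and
# the httpd argument is assumed to expose serve_forever().
class _sketch_service(object):
    def __init__(self, httpd):
        self.httpd = httpd
    def init(self):
        pass                        # bind sockets, install signal handlers, ...
    def run(self):
        self.httpd.serve_forever()  # block until the process is told to stop
# Wiring it up would mirror the call above:
#   svc = _sketch_service(some_httpd)
#   cmdutil.service(opts, initfn=svc.init, runfn=svc.run)
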
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or the
    source of a copy/move operation are not listed unless -c/--clean,
    -i/--ignored, -C/--copies or -A/--all is given. Unless options
    described with "show only ..." are given, the options -mardu are
    used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = missing (deleted by non-hg command, but still tracked)
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))

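# Illustrative sketch, not part of commands.py: repo.status() returns one
# file list per state, in the same order as `states`, so zipping the state
# names, their one-letter codes and those lists yields (state, code, files)
# triples ready for printing. A standalone rendering of that idea:
def _format_status_sketch(states, codes, filelists, show):
    lines = []
    for state, code, files in zip(states, codes, filelists):
        if state not in show:
            continue
        for f in files:
            lines.append("%s %s" % (code, f))
    return lines
# Example: only 'modified' is requested, so only the M line survives.
#   _format_status_sketch(['modified', 'added'], 'MA',
#                         [['a.txt'], ['b.txt']], show=['modified'])
#   -> ['M a.txt']
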
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    if len(names) != len(set(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                if expectedtype == 'global':
                    raise util.Abort(_('tag \'%s\' is not a global tag') % n)
                else:
                    raise util.Abort(_('tag \'%s\' is not a local tag') % n)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)

def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    """

    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in reversed(repo.tagslist()):
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            r = " ?:%s" % hn
        else:
            spaces = " " * (30 - encoding.colwidth(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))

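# Illustrative sketch, not part of commands.py: tags() pads the name column
# with encoding.colwidth() rather than len(), because a tag name's byte
# length is a poor guide to how many terminal cells it occupies (UTF-8
# sequences are several bytes per glyph, and wide CJK glyphs take two
# cells). A plain-ASCII approximation of the same padding:
def _pad_column_sketch(name, width=30, colwidth=len):
    return name + " " * max(1, width - colwidth(name))
# Example:
#   _pad_column_sketch("v1.0") + "42:abcdef123456"
#   -> 'v1.0', padded with 26 spaces, followed by '42:abcdef123456'
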
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])

def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = repo.lock()
    try:
        for fname in fnames:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        lock.release()

    return postincoming(ui, repo, modheads, opts.get('update'), None)

def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified
    revision, or the tip of the current branch if none is specified.
    Use null as the revision to remove the working copy (like 'hg
    clone -U').

    When the working directory contains no uncommitted changes, it
    will be replaced by the state of the requested revision from the
    repository. When the requested revision is on a different branch,
    the working directory will additionally be switched to that
    branch.

    When there are uncommitted changes, use option -C/--clean to
    discard them, forcibly replacing the state of the working
    directory with the requested revision.

    When there are uncommitted changes and option -C/--clean is not
    used, and the parent revision and requested revision are on the
    same branch, and one of them is an ancestor of the other, then the
    new working directory will contain the requested revision merged
    with the uncommitted changes. Otherwise, the update will fail with
    a suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use
    revert.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record the specified user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]

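# Illustrative sketch, not part of commands.py: every entry in the option
# lists above is a (shortname, longname, default, help) tuple, and the type
# of the default hints at how the flag behaves (None or a bool for a simple
# switch, '' for a flag that takes a value, [] for one that may be repeated).
# A tiny consumer of that structure, with made-up formatting:
def _describe_opts_sketch(optlist):
    for short, name, default, desc in optlist:
        if short:
            flag = "-%s/--%s" % (short, name)
        else:
            flag = "--%s" % name
        if isinstance(default, list):
            kind = "list"
        elif default is None or isinstance(default, bool):
            kind = "flag"
        else:
            kind = "value"
        yield "%-28s %-5s %s" % (flag, kind, desc)
# Example (Python 2, like the rest of this module):
#   for line in _describe_opts_sketch(walkopts):
#       print line
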
table = {
    "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
    "addremove":
        (addremove, similarityopts + walkopts + dryrunopts,
         _('[OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('[OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('[OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('c', 'command', '', _('use command to check changeset state')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("[-gbsr] [-c CMD] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch')),
          ('C', 'clean', None, _('reset branch name to parent branch name'))],
         _('[-fC] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('[-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('[OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('[OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('', 'close-branch', None,
           _('mark a branch as closed, hiding it from the branch list')),
         ] + walkopts + commitopts + commitopts2,
         _('[OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, []),
    "debugcommands": (debugcommands, [], _('[COMMAND]')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('[-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
    "debugindex": (debugindex, [], _('FILE')),
    "debugindexdot": (debugindexdot, [], _('FILE')),
    "debuginstall": (debuginstall, []),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('[-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('[-r REV] FILE')),
    "debugsetparents":
        (debugsetparents, [], _('REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('[OPTION]...')),
    "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('c', 'change', '', _('change made by revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('[OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revisions that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('[OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of REV')),
          ('a', 'active', False,
           _('show only the active heads from open branches')),
         ] + templateopts,
         _('[-r REV] [REV]...')),
    "help": (help_, [], _('[TOPIC]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified revision')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('[-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same '
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2 + similarityopts,
         _('[OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('[-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('[-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at REV')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('[OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revisions matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revisions where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('u', 'user', [], _('revisions committed by user')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('[OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('[-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
          ('S', 'show', None,
           _('review revisions to merge (no merge is performed)'))],
         _('[-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified revision')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('[NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, []),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('[OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('a', 'all', None, _('remerge all unresolved files')),
          ('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))]
          + walkopts,
         _('[OPTION]... [FILE]...')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, []),
    "root": (root, []),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3387 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3388 ('E', 'errorlog', '', _('name of error log file to write to')),
3388 ('E', 'errorlog', '', _('name of error log file to write to')),
3389 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3389 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3390 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3390 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3391 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3391 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3392 ('n', 'name', '',
3392 ('n', 'name', '',
3393 _('name to show in web pages (default: working directory)')),
3393 _('name to show in web pages (default: working directory)')),
3394 ('', 'webdir-conf', '', _('name of the webdir config file'
3394 ('', 'webdir-conf', '', _('name of the webdir config file'
3395 ' (serve more than one repository)')),
3395 ' (serve more than one repository)')),
3396 ('', 'pid-file', '', _('name of file to write process ID to')),
3396 ('', 'pid-file', '', _('name of file to write process ID to')),
3397 ('', 'stdio', None, _('for remote clients')),
3397 ('', 'stdio', None, _('for remote clients')),
3398 ('t', 'templates', '', _('web templates to use')),
3398 ('t', 'templates', '', _('web templates to use')),
3399 ('', 'style', '', _('template style to use')),
3399 ('', 'style', '', _('template style to use')),
3400 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3400 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3401 ('', 'certificate', '', _('SSL certificate file'))],
3401 ('', 'certificate', '', _('SSL certificate file'))],
3402 _('[OPTION]...')),
3402 _('[OPTION]...')),
3403 "showconfig|debugconfig":
3403 "showconfig|debugconfig":
3404 (showconfig,
3404 (showconfig,
3405 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3405 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3406 _('[-u] [NAME]...')),
3406 _('[-u] [NAME]...')),
3407 "^status|st":
3407 "^status|st":
3408 (status,
3408 (status,
3409 [('A', 'all', None, _('show status of all files')),
3409 [('A', 'all', None, _('show status of all files')),
3410 ('m', 'modified', None, _('show only modified files')),
3410 ('m', 'modified', None, _('show only modified files')),
3411 ('a', 'added', None, _('show only added files')),
3411 ('a', 'added', None, _('show only added files')),
3412 ('r', 'removed', None, _('show only removed files')),
3412 ('r', 'removed', None, _('show only removed files')),
3413 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3413 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3414 ('c', 'clean', None, _('show only files without changes')),
3414 ('c', 'clean', None, _('show only files without changes')),
3415 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3415 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3416 ('i', 'ignored', None, _('show only ignored files')),
3416 ('i', 'ignored', None, _('show only ignored files')),
3417 ('n', 'no-status', None, _('hide status prefix')),
3417 ('n', 'no-status', None, _('hide status prefix')),
3418 ('C', 'copies', None, _('show source of copied files')),
3418 ('C', 'copies', None, _('show source of copied files')),
3419 ('0', 'print0', None,
3419 ('0', 'print0', None,
3420 _('end filenames with NUL, for use with xargs')),
3420 _('end filenames with NUL, for use with xargs')),
3421 ('', 'rev', [], _('show difference from revision')),
3421 ('', 'rev', [], _('show difference from revision')),
3422 ] + walkopts,
3422 ] + walkopts,
3423 _('[OPTION]... [FILE]...')),
3423 _('[OPTION]... [FILE]...')),
3424 "tag":
3424 "tag":
3425 (tag,
3425 (tag,
3426 [('f', 'force', None, _('replace existing tag')),
3426 [('f', 'force', None, _('replace existing tag')),
3427 ('l', 'local', None, _('make the tag local')),
3427 ('l', 'local', None, _('make the tag local')),
3428 ('r', 'rev', '', _('revision to tag')),
3428 ('r', 'rev', '', _('revision to tag')),
3429 ('', 'remove', None, _('remove a tag')),
3429 ('', 'remove', None, _('remove a tag')),
3430 # -l/--local is already there, commitopts cannot be used
3430 # -l/--local is already there, commitopts cannot be used
3431 ('m', 'message', '', _('use <text> as commit message')),
3431 ('m', 'message', '', _('use <text> as commit message')),
3432 ] + commitopts2,
3432 ] + commitopts2,
3433 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3433 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3434 "tags": (tags, []),
3434 "tags": (tags, []),
3435 "tip":
3435 "tip":
3436 (tip,
3436 (tip,
3437 [('p', 'patch', None, _('show patch')),
3437 [('p', 'patch', None, _('show patch')),
3438 ('g', 'git', None, _('use git extended diff format')),
3438 ('g', 'git', None, _('use git extended diff format')),
3439 ] + templateopts,
3439 ] + templateopts,
3440 _('[-p]')),
3440 _('[-p]')),
3441 "unbundle":
3441 "unbundle":
3442 (unbundle,
3442 (unbundle,
3443 [('u', 'update', None,
3443 [('u', 'update', None,
3444 _('update to new tip if changesets were unbundled'))],
3444 _('update to new tip if changesets were unbundled'))],
3445 _('[-u] FILE...')),
3445 _('[-u] FILE...')),
3446 "^update|up|checkout|co":
3446 "^update|up|checkout|co":
3447 (update,
3447 (update,
3448 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3448 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3449 ('d', 'date', '', _('tipmost revision matching date')),
3449 ('d', 'date', '', _('tipmost revision matching date')),
3450 ('r', 'rev', '', _('revision'))],
3450 ('r', 'rev', '', _('revision'))],
3451 _('[-C] [-d DATE] [[-r] REV]')),
3451 _('[-C] [-d DATE] [[-r] REV]')),
3452 "verify": (verify, []),
3452 "verify": (verify, []),
3453 "version": (version_, []),
3453 "version": (version_, []),
3454 }
3454 }
3455
3455
3456 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3456 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3457 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3457 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3458 optionalrepo = ("identify paths serve showconfig debugancestor")
3458 optionalrepo = ("identify paths serve showconfig debugancestor")
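The table above maps each command name to a (function, options, synopsis) tuple: aliases are separated by '|', a leading '^' marks commands listed in the short help, and every option is a (short, long, default, help) 4-tuple consumed by the option parser. A minimal sketch of one such entry, using a hypothetical "frob" command that is not part of commands.py:

def frob(ui, repo, *pats, **opts):
    """example command body: parsed options arrive in **opts keyed by long name"""
    if not opts.get('dry_run'):
        ui.write("frobbing %d file(s)\n" % len(pats))

exampletable = {
    "^frob|fr":                  # '^' = shown in the short help list, '|' separates aliases
        (frob,                   # function implementing the command
         [('n', 'dry-run', None, 'do not actually frob')],   # (short, long, default, help)
         '[OPTION]... FILE...'),                             # synopsis shown in help output
}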
@@ -1,1390 +1,1389
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 """Storage back-end for Mercurial.
8 """Storage back-end for Mercurial.
9
9
10 This provides efficient delta storage with O(1) retrieve and append
10 This provides efficient delta storage with O(1) retrieve and append
11 and O(changes) merge between branches.
11 and O(changes) merge between branches.
12 """
12 """
13
13
14 # import stuff from node for others to import from revlog
14 # import stuff from node for others to import from revlog
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
16 from i18n import _
16 from i18n import _
17 import changegroup, ancestor, mdiff, parsers, error, util
17 import changegroup, ancestor, mdiff, parsers, error, util
18 import struct, zlib, errno
18 import struct, zlib, errno
19
19
20 _pack = struct.pack
20 _pack = struct.pack
21 _unpack = struct.unpack
21 _unpack = struct.unpack
22 _compress = zlib.compress
22 _compress = zlib.compress
23 _decompress = zlib.decompress
23 _decompress = zlib.decompress
24 _sha = util.sha1
24 _sha = util.sha1
25
25
26 # revlog flags
26 # revlog flags
27 REVLOGV0 = 0
27 REVLOGV0 = 0
28 REVLOGNG = 1
28 REVLOGNG = 1
29 REVLOGNGINLINEDATA = (1 << 16)
29 REVLOGNGINLINEDATA = (1 << 16)
30 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
30 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
31 REVLOG_DEFAULT_FORMAT = REVLOGNG
31 REVLOG_DEFAULT_FORMAT = REVLOGNG
32 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
32 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
33
33
34 _prereadsize = 1048576
34 _prereadsize = 1048576
35
35
36 RevlogError = error.RevlogError
36 RevlogError = error.RevlogError
37 LookupError = error.LookupError
37 LookupError = error.LookupError
38
38
39 def getoffset(q):
39 def getoffset(q):
40 return int(q >> 16)
40 return int(q >> 16)
41
41
42 def gettype(q):
42 def gettype(q):
43 return int(q & 0xFFFF)
43 return int(q & 0xFFFF)
44
44
45 def offset_type(offset, type):
45 def offset_type(offset, type):
46 return long(long(offset) << 16 | type)
46 return long(long(offset) << 16 | type)
47
47
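getoffset, gettype and offset_type above pack a byte offset and a 16-bit type/flags field into a single integer: the offset occupies the high bits and the type the low 16. A quick standalone round trip (plain ints; the original wraps the result in long() only because it targets Python 2):

def offset_type(offset, type):
    return (offset << 16) | type

def getoffset(q):
    return int(q >> 16)

def gettype(q):
    return int(q & 0xFFFF)

q = offset_type(1048576, 3)     # offset of 1 MiB with type/flags value 3
assert getoffset(q) == 1048576
assert gettype(q) == 3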
48 nullhash = _sha(nullid)
48 nullhash = _sha(nullid)
49
49
50 def hash(text, p1, p2):
50 def hash(text, p1, p2):
51 """generate a hash from the given text and its parent hashes
51 """generate a hash from the given text and its parent hashes
52
52
53 This hash combines both the current file contents and its history
53 This hash combines both the current file contents and its history
54 in a manner that makes it easy to distinguish nodes with the same
54 in a manner that makes it easy to distinguish nodes with the same
55 content in the revision graph.
55 content in the revision graph.
56 """
56 """
57 # As of now, if one of the parent nodes is null, p2 is null
57 # As of now, if one of the parent nodes is null, p2 is null
58 if p2 == nullid:
58 if p2 == nullid:
59 # deep copy of a hash is faster than creating one
59 # deep copy of a hash is faster than creating one
60 s = nullhash.copy()
60 s = nullhash.copy()
61 s.update(p1)
61 s.update(p1)
62 else:
62 else:
63 # none of the parent nodes are nullid
63 # none of the parent nodes are nullid
64 l = [p1, p2]
64 l = [p1, p2]
65 l.sort()
65 l.sort()
66 s = _sha(l[0])
66 s = _sha(l[0])
67 s.update(l[1])
67 s.update(l[1])
68 s.update(text)
68 s.update(text)
69 return s.digest()
69 return s.digest()
70
70
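hash() above mixes the parent nodeids into the SHA-1 so that identical file contents with different histories get different nodes; sorting the parents makes the result independent of parent order, and the p2 == nullid branch is just the sorted case with a precomputed prefix, since nullid (twenty zero bytes) always sorts first. A standalone equivalent using hashlib directly (Python 3 bytes, illustrative only):

import hashlib

nullid = b"\0" * 20

def nodehash(text, p1, p2=nullid):
    """sha1(min(p1,p2) + max(p1,p2) + text), the same scheme as hash() above"""
    a, b = sorted([p1, p2])
    s = hashlib.sha1(a)
    s.update(b)
    s.update(text)
    return s.digest()

root = nodehash(b"hello\n", nullid)    # a root revision: both parents null
child = nodehash(b"hello\n", root)     # same text, different history
assert root != child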
71 def compress(text):
71 def compress(text):
72 """ generate a possibly-compressed representation of text """
72 """ generate a possibly-compressed representation of text """
73 if not text:
73 if not text:
74 return ("", text)
74 return ("", text)
75 l = len(text)
75 l = len(text)
76 bin = None
76 bin = None
77 if l < 44:
77 if l < 44:
78 pass
78 pass
79 elif l > 1000000:
79 elif l > 1000000:
80 # zlib makes an internal copy, thus doubling memory usage for
80 # zlib makes an internal copy, thus doubling memory usage for
81 # large files, so let's do this in pieces
81 # large files, so let's do this in pieces
82 z = zlib.compressobj()
82 z = zlib.compressobj()
83 p = []
83 p = []
84 pos = 0
84 pos = 0
85 while pos < l:
85 while pos < l:
86 pos2 = pos + 2**20
86 pos2 = pos + 2**20
87 p.append(z.compress(text[pos:pos2]))
87 p.append(z.compress(text[pos:pos2]))
88 pos = pos2
88 pos = pos2
89 p.append(z.flush())
89 p.append(z.flush())
90 if sum(map(len, p)) < l:
90 if sum(map(len, p)) < l:
91 bin = "".join(p)
91 bin = "".join(p)
92 else:
92 else:
93 bin = _compress(text)
93 bin = _compress(text)
94 if bin is None or len(bin) > l:
94 if bin is None or len(bin) > l:
95 if text[0] == '\0':
95 if text[0] == '\0':
96 return ("", text)
96 return ("", text)
97 return ('u', text)
97 return ('u', text)
98 return ("", bin)
98 return ("", bin)
99
99
100 def decompress(bin):
100 def decompress(bin):
101 """ decompress the given input """
101 """ decompress the given input """
102 if not bin:
102 if not bin:
103 return bin
103 return bin
104 t = bin[0]
104 t = bin[0]
105 if t == '\0':
105 if t == '\0':
106 return bin
106 return bin
107 if t == 'x':
107 if t == 'x':
108 return _decompress(bin)
108 return _decompress(bin)
109 if t == 'u':
109 if t == 'u':
110 return bin[1:]
110 return bin[1:]
111 raise RevlogError(_("unknown compression type %r") % t)
111 raise RevlogError(_("unknown compression type %r") % t)
112
112
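compress() and decompress() above agree on a one-byte tag: zlib output (which begins with 'x' under the default settings) is stored as-is, a leading 'u' means the text is stored uncompressed, and an empty header or a leading '\0' means the data is already in its final form. The real compress() additionally skips texts shorter than 44 bytes and feeds texts over 1 MB to zlib in 1 MB pieces to avoid doubling memory use; the sketch below shows only the tagging convention (Python 3 bytes):

import zlib

def compress_sketch(text):
    if not text:
        return text
    bin = zlib.compress(text)            # zlib output starts with b'x'
    if len(bin) > len(text):             # compression did not pay off
        if text.startswith(b"\0"):
            return text                  # a leading '\0' already marks it as literal
        return b"u" + text
    return bin

def decompress_sketch(bin):
    if not bin or bin.startswith(b"\0"):
        return bin
    if bin.startswith(b"x"):
        return zlib.decompress(bin)
    if bin.startswith(b"u"):
        return bin[1:]
    raise ValueError("unknown compression type %r" % bin[:1])

data = b"spam " * 200
assert decompress_sketch(compress_sketch(data)) == data
assert decompress_sketch(compress_sketch(b"")) == b""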
113 class lazyparser(object):
113 class lazyparser(object):
114 """
114 """
115 this class avoids the need to parse the entirety of large indices
115 this class avoids the need to parse the entirety of large indices
116 """
116 """
117
117
118 # lazyparser is not safe to use on windows if the win32 extensions are
118 # lazyparser is not safe to use on windows if the win32 extensions are
119 # not available. It keeps a file handle open, which makes it impossible
119 # not available. It keeps a file handle open, which makes it impossible
120 # to break hardlinks on local cloned repos.
120 # to break hardlinks on local cloned repos.
121
121
122 def __init__(self, dataf, size):
122 def __init__(self, dataf, size):
123 self.dataf = dataf
123 self.dataf = dataf
124 self.s = struct.calcsize(indexformatng)
124 self.s = struct.calcsize(indexformatng)
125 self.datasize = size
125 self.datasize = size
126 self.l = size/self.s
126 self.l = size/self.s
127 self.index = [None] * self.l
127 self.index = [None] * self.l
128 self.map = {nullid: nullrev}
128 self.map = {nullid: nullrev}
129 self.allmap = 0
129 self.allmap = 0
130 self.all = 0
130 self.all = 0
131 self.mapfind_count = 0
131 self.mapfind_count = 0
132
132
133 def loadmap(self):
133 def loadmap(self):
134 """
134 """
135 during a commit, we need to make sure the rev being added is
135 during a commit, we need to make sure the rev being added is
136 not a duplicate. This requires loading the entire index,
136 not a duplicate. This requires loading the entire index,
137 which is fairly slow. loadmap can load up just the node map,
137 which is fairly slow. loadmap can load up just the node map,
138 which takes much less time.
138 which takes much less time.
139 """
139 """
140 if self.allmap:
140 if self.allmap:
141 return
141 return
142 end = self.datasize
142 end = self.datasize
143 self.allmap = 1
143 self.allmap = 1
144 cur = 0
144 cur = 0
145 count = 0
145 count = 0
146 blocksize = self.s * 256
146 blocksize = self.s * 256
147 self.dataf.seek(0)
147 self.dataf.seek(0)
148 while cur < end:
148 while cur < end:
149 data = self.dataf.read(blocksize)
149 data = self.dataf.read(blocksize)
150 off = 0
150 off = 0
151 for x in xrange(256):
151 for x in xrange(256):
152 n = data[off + ngshaoffset:off + ngshaoffset + 20]
152 n = data[off + ngshaoffset:off + ngshaoffset + 20]
153 self.map[n] = count
153 self.map[n] = count
154 count += 1
154 count += 1
155 if count >= self.l:
155 if count >= self.l:
156 break
156 break
157 off += self.s
157 off += self.s
158 cur += blocksize
158 cur += blocksize
159
159
160 def loadblock(self, blockstart, blocksize, data=None):
160 def loadblock(self, blockstart, blocksize, data=None):
161 if self.all:
161 if self.all:
162 return
162 return
163 if data is None:
163 if data is None:
164 self.dataf.seek(blockstart)
164 self.dataf.seek(blockstart)
165 if blockstart + blocksize > self.datasize:
165 if blockstart + blocksize > self.datasize:
166 # the revlog may have grown since we've started running,
166 # the revlog may have grown since we've started running,
167 # but we don't have space in self.index for more entries.
167 # but we don't have space in self.index for more entries.
168 # limit blocksize so that we don't get too much data.
168 # limit blocksize so that we don't get too much data.
169 blocksize = max(self.datasize - blockstart, 0)
169 blocksize = max(self.datasize - blockstart, 0)
170 data = self.dataf.read(blocksize)
170 data = self.dataf.read(blocksize)
171 lend = len(data) / self.s
171 lend = len(data) / self.s
172 i = blockstart / self.s
172 i = blockstart / self.s
173 off = 0
173 off = 0
174 # lazyindex supports __delitem__
174 # lazyindex supports __delitem__
175 if lend > len(self.index) - i:
175 if lend > len(self.index) - i:
176 lend = len(self.index) - i
176 lend = len(self.index) - i
177 for x in xrange(lend):
177 for x in xrange(lend):
178 if self.index[i + x] == None:
178 if self.index[i + x] == None:
179 b = data[off : off + self.s]
179 b = data[off : off + self.s]
180 self.index[i + x] = b
180 self.index[i + x] = b
181 n = b[ngshaoffset:ngshaoffset + 20]
181 n = b[ngshaoffset:ngshaoffset + 20]
182 self.map[n] = i + x
182 self.map[n] = i + x
183 off += self.s
183 off += self.s
184
184
185 def findnode(self, node):
185 def findnode(self, node):
186 """search backwards through the index file for a specific node"""
186 """search backwards through the index file for a specific node"""
187 if self.allmap:
187 if self.allmap:
188 return None
188 return None
189
189
190 # hg log will cause many searches for the manifest
190 # hg log will cause many searches for the manifest
191 # nodes. After we get called a few times, just load the whole
191 # nodes. After we get called a few times, just load the whole
192 # thing.
192 # thing.
193 if self.mapfind_count > 8:
193 if self.mapfind_count > 8:
194 self.loadmap()
194 self.loadmap()
195 if node in self.map:
195 if node in self.map:
196 return node
196 return node
197 return None
197 return None
198 self.mapfind_count += 1
198 self.mapfind_count += 1
199 last = self.l - 1
199 last = self.l - 1
200 while self.index[last] != None:
200 while self.index[last] != None:
201 if last == 0:
201 if last == 0:
202 self.all = 1
202 self.all = 1
203 self.allmap = 1
203 self.allmap = 1
204 return None
204 return None
205 last -= 1
205 last -= 1
206 end = (last + 1) * self.s
206 end = (last + 1) * self.s
207 blocksize = self.s * 256
207 blocksize = self.s * 256
208 while end >= 0:
208 while end >= 0:
209 start = max(end - blocksize, 0)
209 start = max(end - blocksize, 0)
210 self.dataf.seek(start)
210 self.dataf.seek(start)
211 data = self.dataf.read(end - start)
211 data = self.dataf.read(end - start)
212 findend = end - start
212 findend = end - start
213 while True:
213 while True:
214 # we're searching backwards, so we have to make sure
214 # we're searching backwards, so we have to make sure
215 # we don't find a changeset where this node is a parent
215 # we don't find a changeset where this node is a parent
216 off = data.find(node, 0, findend)
216 off = data.find(node, 0, findend)
217 findend = off
217 findend = off
218 if off >= 0:
218 if off >= 0:
219 i = off / self.s
219 i = off / self.s
220 off = i * self.s
220 off = i * self.s
221 n = data[off + ngshaoffset:off + ngshaoffset + 20]
221 n = data[off + ngshaoffset:off + ngshaoffset + 20]
222 if n == node:
222 if n == node:
223 self.map[n] = i + start / self.s
223 self.map[n] = i + start / self.s
224 return node
224 return node
225 else:
225 else:
226 break
226 break
227 end -= blocksize
227 end -= blocksize
228 return None
228 return None
229
229
230 def loadindex(self, i=None, end=None):
230 def loadindex(self, i=None, end=None):
231 if self.all:
231 if self.all:
232 return
232 return
233 all = False
233 all = False
234 if i == None:
234 if i == None:
235 blockstart = 0
235 blockstart = 0
236 blocksize = (65536 / self.s) * self.s
236 blocksize = (65536 / self.s) * self.s
237 end = self.datasize
237 end = self.datasize
238 all = True
238 all = True
239 else:
239 else:
240 if end:
240 if end:
241 blockstart = i * self.s
241 blockstart = i * self.s
242 end = end * self.s
242 end = end * self.s
243 blocksize = end - blockstart
243 blocksize = end - blockstart
244 else:
244 else:
245 blockstart = (i & ~1023) * self.s
245 blockstart = (i & ~1023) * self.s
246 blocksize = self.s * 1024
246 blocksize = self.s * 1024
247 end = blockstart + blocksize
247 end = blockstart + blocksize
248 while blockstart < end:
248 while blockstart < end:
249 self.loadblock(blockstart, blocksize)
249 self.loadblock(blockstart, blocksize)
250 blockstart += blocksize
250 blockstart += blocksize
251 if all:
251 if all:
252 self.all = True
252 self.all = True
253
253
254 class lazyindex(object):
254 class lazyindex(object):
255 """a lazy version of the index array"""
255 """a lazy version of the index array"""
256 def __init__(self, parser):
256 def __init__(self, parser):
257 self.p = parser
257 self.p = parser
258 def __len__(self):
258 def __len__(self):
259 return len(self.p.index)
259 return len(self.p.index)
260 def load(self, pos):
260 def load(self, pos):
261 if pos < 0:
261 if pos < 0:
262 pos += len(self.p.index)
262 pos += len(self.p.index)
263 self.p.loadindex(pos)
263 self.p.loadindex(pos)
264 return self.p.index[pos]
264 return self.p.index[pos]
265 def __getitem__(self, pos):
265 def __getitem__(self, pos):
266 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
266 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
267 def __setitem__(self, pos, item):
267 def __setitem__(self, pos, item):
268 self.p.index[pos] = _pack(indexformatng, *item)
268 self.p.index[pos] = _pack(indexformatng, *item)
269 def __delitem__(self, pos):
269 def __delitem__(self, pos):
270 del self.p.index[pos]
270 del self.p.index[pos]
271 def insert(self, pos, e):
271 def insert(self, pos, e):
272 self.p.index.insert(pos, _pack(indexformatng, *e))
272 self.p.index.insert(pos, _pack(indexformatng, *e))
273 def append(self, e):
273 def append(self, e):
274 self.p.index.append(_pack(indexformatng, *e))
274 self.p.index.append(_pack(indexformatng, *e))
275
275
276 class lazymap(object):
276 class lazymap(object):
277 """a lazy version of the node map"""
277 """a lazy version of the node map"""
278 def __init__(self, parser):
278 def __init__(self, parser):
279 self.p = parser
279 self.p = parser
280 def load(self, key):
280 def load(self, key):
281 n = self.p.findnode(key)
281 n = self.p.findnode(key)
282 if n == None:
282 if n == None:
283 raise KeyError(key)
283 raise KeyError(key)
284 def __contains__(self, key):
284 def __contains__(self, key):
285 if key in self.p.map:
285 if key in self.p.map:
286 return True
286 return True
287 self.p.loadmap()
287 self.p.loadmap()
288 return key in self.p.map
288 return key in self.p.map
289 def __iter__(self):
289 def __iter__(self):
290 yield nullid
290 yield nullid
291 for i in xrange(self.p.l):
291 for i in xrange(self.p.l):
292 ret = self.p.index[i]
292 ret = self.p.index[i]
293 if not ret:
293 if not ret:
294 self.p.loadindex(i)
294 self.p.loadindex(i)
295 ret = self.p.index[i]
295 ret = self.p.index[i]
296 if isinstance(ret, str):
296 if isinstance(ret, str):
297 ret = _unpack(indexformatng, ret)
297 ret = _unpack(indexformatng, ret)
298 yield ret[7]
298 yield ret[7]
299 def __getitem__(self, key):
299 def __getitem__(self, key):
300 try:
300 try:
301 return self.p.map[key]
301 return self.p.map[key]
302 except KeyError:
302 except KeyError:
303 try:
303 try:
304 self.load(key)
304 self.load(key)
305 return self.p.map[key]
305 return self.p.map[key]
306 except KeyError:
306 except KeyError:
307 raise KeyError("node " + hex(key))
307 raise KeyError("node " + hex(key))
308 def __setitem__(self, key, val):
308 def __setitem__(self, key, val):
309 self.p.map[key] = val
309 self.p.map[key] = val
310 def __delitem__(self, key):
310 def __delitem__(self, key):
311 del self.p.map[key]
311 del self.p.map[key]
312
312
313 indexformatv0 = ">4l20s20s20s"
313 indexformatv0 = ">4l20s20s20s"
314 v0shaoffset = 56
314 v0shaoffset = 56
315
315
316 class revlogoldio(object):
316 class revlogoldio(object):
317 def __init__(self):
317 def __init__(self):
318 self.size = struct.calcsize(indexformatv0)
318 self.size = struct.calcsize(indexformatv0)
319
319
320 def parseindex(self, fp, data, inline):
320 def parseindex(self, fp, data, inline):
321 s = self.size
321 s = self.size
322 index = []
322 index = []
323 nodemap = {nullid: nullrev}
323 nodemap = {nullid: nullrev}
324 n = off = 0
324 n = off = 0
325 if len(data) < _prereadsize:
325 if len(data) < _prereadsize:
326 data += fp.read() # read the rest
326 data += fp.read() # read the rest
327 l = len(data)
327 l = len(data)
328 while off + s <= l:
328 while off + s <= l:
329 cur = data[off:off + s]
329 cur = data[off:off + s]
330 off += s
330 off += s
331 e = _unpack(indexformatv0, cur)
331 e = _unpack(indexformatv0, cur)
332 # transform to revlogv1 format
332 # transform to revlogv1 format
333 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
333 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
334 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
334 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
335 index.append(e2)
335 index.append(e2)
336 nodemap[e[6]] = n
336 nodemap[e[6]] = n
337 n += 1
337 n += 1
338
338
339 return index, nodemap, None
339 return index, nodemap, None
340
340
341 def packentry(self, entry, node, version, rev):
341 def packentry(self, entry, node, version, rev):
342 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
342 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
343 node(entry[5]), node(entry[6]), entry[7])
343 node(entry[5]), node(entry[6]), entry[7])
344 return _pack(indexformatv0, *e2)
344 return _pack(indexformatv0, *e2)
345
345
346 # index ng:
346 # index ng:
347 # 6 bytes offset
347 # 6 bytes offset
348 # 2 bytes flags
348 # 2 bytes flags
349 # 4 bytes compressed length
349 # 4 bytes compressed length
350 # 4 bytes uncompressed length
350 # 4 bytes uncompressed length
351 # 4 bytes: base rev
351 # 4 bytes: base rev
352 # 4 bytes link rev
352 # 4 bytes link rev
353 # 4 bytes parent 1 rev
353 # 4 bytes parent 1 rev
354 # 4 bytes parent 2 rev
354 # 4 bytes parent 2 rev
355 # 32 bytes: nodeid (20 bytes used, 12 bytes of padding)
355 # 32 bytes: nodeid (20 bytes used, 12 bytes of padding)
356 indexformatng = ">Qiiiiii20s12x"
356 indexformatng = ">Qiiiiii20s12x"
357 ngshaoffset = 32
357 ngshaoffset = 32
358 versionformat = ">I"
358 versionformat = ">I"
359
359
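The block above documents the 64-byte RevlogNG index entry: ">Qiiiiii20s12x" packs the 48-bit offset and 16-bit flags into one unsigned 64-bit field (see offset_type), six signed 32-bit ints (compressed length, uncompressed length, base rev, link rev and the two parent revs), the 20-byte nodeid, and 12 bytes of padding, which is why ngshaoffset is 32 (8 + 6*4). A quick check with struct (standalone sketch; the field values are made up):

import struct

indexformatng = ">Qiiiiii20s12x"
assert struct.calcsize(indexformatng) == 64

nodeid = b"\xaa" * 20
entry = ((0 << 16) | 0,      # offset/flags, as produced by offset_type(0, 0)
         11, 23,             # compressed length, uncompressed length
         0, 5,               # base rev, link rev
         3, -1,              # parent 1 rev, parent 2 rev (-1 == nullrev)
         nodeid)
packed = struct.pack(indexformatng, *entry)
assert packed[32:52] == nodeid                      # nodeid lives at ngshaoffset = 32
assert struct.unpack(indexformatng, packed) == entry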
360 class revlogio(object):
360 class revlogio(object):
361 def __init__(self):
361 def __init__(self):
362 self.size = struct.calcsize(indexformatng)
362 self.size = struct.calcsize(indexformatng)
363
363
364 def parseindex(self, fp, data, inline):
364 def parseindex(self, fp, data, inline):
365 try:
365 try:
366 size = len(data)
366 size = len(data)
367 if size == _prereadsize:
367 if size == _prereadsize:
368 size = util.fstat(fp).st_size
368 size = util.fstat(fp).st_size
369 except AttributeError:
369 except AttributeError:
370 size = 0
370 size = 0
371
371
372 if util.openhardlinks() and not inline and size > _prereadsize:
372 if util.openhardlinks() and not inline and size > _prereadsize:
373 # big index, let's parse it on demand
373 # big index, let's parse it on demand
374 parser = lazyparser(fp, size)
374 parser = lazyparser(fp, size)
375 index = lazyindex(parser)
375 index = lazyindex(parser)
376 nodemap = lazymap(parser)
376 nodemap = lazymap(parser)
377 e = list(index[0])
377 e = list(index[0])
378 type = gettype(e[0])
378 type = gettype(e[0])
379 e[0] = offset_type(0, type)
379 e[0] = offset_type(0, type)
380 index[0] = e
380 index[0] = e
381 return index, nodemap, None
381 return index, nodemap, None
382
382
383 # call the C implementation to parse the index data
383 # call the C implementation to parse the index data
384 index, nodemap, cache = parsers.parse_index(data, inline)
384 index, nodemap, cache = parsers.parse_index(data, inline)
385 return index, nodemap, cache
385 return index, nodemap, cache
386
386
387 def packentry(self, entry, node, version, rev):
387 def packentry(self, entry, node, version, rev):
388 p = _pack(indexformatng, *entry)
388 p = _pack(indexformatng, *entry)
389 if rev == 0:
389 if rev == 0:
390 p = _pack(versionformat, version) + p[4:]
390 p = _pack(versionformat, version) + p[4:]
391 return p
391 return p
392
392
393 class revlog(object):
393 class revlog(object):
394 """
394 """
395 the underlying revision storage object
395 the underlying revision storage object
396
396
397 A revlog consists of two parts, an index and the revision data.
397 A revlog consists of two parts, an index and the revision data.
398
398
399 The index is a file with a fixed record size containing
399 The index is a file with a fixed record size containing
400 information on each revision, including its nodeid (hash), the
400 information on each revision, including its nodeid (hash), the
401 nodeids of its parents, the position and offset of its data within
401 nodeids of its parents, the position and offset of its data within
402 the data file, and the revision it's based on. Finally, each entry
402 the data file, and the revision it's based on. Finally, each entry
403 contains a linkrev entry that can serve as a pointer to external
403 contains a linkrev entry that can serve as a pointer to external
404 data.
404 data.
405
405
406 The revision data itself is a linear collection of data chunks.
406 The revision data itself is a linear collection of data chunks.
407 Each chunk represents a revision and is usually represented as a
407 Each chunk represents a revision and is usually represented as a
408 delta against the previous chunk. To bound lookup time, runs of
408 delta against the previous chunk. To bound lookup time, runs of
409 deltas are limited to about 2 times the length of the original
409 deltas are limited to about 2 times the length of the original
410 version data. This makes retrieval of a version proportional to
410 version data. This makes retrieval of a version proportional to
411 its size, or O(1) relative to the number of revisions.
411 its size, or O(1) relative to the number of revisions.
412
412
413 Both pieces of the revlog are written to in an append-only
413 Both pieces of the revlog are written to in an append-only
414 fashion, which means we never need to rewrite a file to insert or
414 fashion, which means we never need to rewrite a file to insert or
415 remove data, and can use some simple techniques to avoid the need
415 remove data, and can use some simple techniques to avoid the need
416 for locking while reading.
416 for locking while reading.
417 """
417 """
418 def __init__(self, opener, indexfile):
418 def __init__(self, opener, indexfile):
419 """
419 """
420 create a revlog object
420 create a revlog object
421
421
422 opener is a function that abstracts the file opening operation
422 opener is a function that abstracts the file opening operation
423 and can be used to implement COW semantics or the like.
423 and can be used to implement COW semantics or the like.
424 """
424 """
425 self.indexfile = indexfile
425 self.indexfile = indexfile
426 self.datafile = indexfile[:-2] + ".d"
426 self.datafile = indexfile[:-2] + ".d"
427 self.opener = opener
427 self.opener = opener
428 self._cache = None
428 self._cache = None
429 self._chunkcache = (0, '')
429 self._chunkcache = (0, '')
430 self.nodemap = {nullid: nullrev}
430 self.nodemap = {nullid: nullrev}
431 self.index = []
431 self.index = []
432
432
433 v = REVLOG_DEFAULT_VERSION
433 v = REVLOG_DEFAULT_VERSION
434 if hasattr(opener, "defversion"):
434 if hasattr(opener, "defversion"):
435 v = opener.defversion
435 v = opener.defversion
436 if v & REVLOGNG:
436 if v & REVLOGNG:
437 v |= REVLOGNGINLINEDATA
437 v |= REVLOGNGINLINEDATA
438
438
439 i = ''
439 i = ''
440 try:
440 try:
441 f = self.opener(self.indexfile)
441 f = self.opener(self.indexfile)
442 i = f.read(_prereadsize)
442 i = f.read(_prereadsize)
443 if len(i) > 0:
443 if len(i) > 0:
444 v = struct.unpack(versionformat, i[:4])[0]
444 v = struct.unpack(versionformat, i[:4])[0]
445 except IOError, inst:
445 except IOError, inst:
446 if inst.errno != errno.ENOENT:
446 if inst.errno != errno.ENOENT:
447 raise
447 raise
448
448
449 self.version = v
449 self.version = v
450 self._inline = v & REVLOGNGINLINEDATA
450 self._inline = v & REVLOGNGINLINEDATA
451 flags = v & ~0xFFFF
451 flags = v & ~0xFFFF
452 fmt = v & 0xFFFF
452 fmt = v & 0xFFFF
453 if fmt == REVLOGV0 and flags:
453 if fmt == REVLOGV0 and flags:
454 raise RevlogError(_("index %s unknown flags %#04x for format v0")
454 raise RevlogError(_("index %s unknown flags %#04x for format v0")
455 % (self.indexfile, flags >> 16))
455 % (self.indexfile, flags >> 16))
456 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
456 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
457 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
457 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
458 % (self.indexfile, flags >> 16))
458 % (self.indexfile, flags >> 16))
459 elif fmt > REVLOGNG:
459 elif fmt > REVLOGNG:
460 raise RevlogError(_("index %s unknown format %d")
460 raise RevlogError(_("index %s unknown format %d")
461 % (self.indexfile, fmt))
461 % (self.indexfile, fmt))
462
462
463 self._io = revlogio()
463 self._io = revlogio()
464 if self.version == REVLOGV0:
464 if self.version == REVLOGV0:
465 self._io = revlogoldio()
465 self._io = revlogoldio()
466 if i:
466 if i:
467 try:
467 try:
468 d = self._io.parseindex(f, i, self._inline)
468 d = self._io.parseindex(f, i, self._inline)
469 except (ValueError, IndexError), e:
469 except (ValueError, IndexError), e:
470 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
470 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
471 self.index, self.nodemap, self._chunkcache = d
471 self.index, self.nodemap, self._chunkcache = d
472 if not self._chunkcache:
472 if not self._chunkcache:
473 self._chunkcache = (0, '')
473 self._chunkcache = (0, '')
474
474
475 # add the magic null revision at -1 (if it hasn't been done already)
475 # add the magic null revision at -1 (if it hasn't been done already)
476 if (self.index == [] or isinstance(self.index, lazyindex) or
476 if (self.index == [] or isinstance(self.index, lazyindex) or
477 self.index[-1][7] != nullid) :
477 self.index[-1][7] != nullid) :
478 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
478 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
479
479
480 def _loadindex(self, start, end):
480 def _loadindex(self, start, end):
481 """load a block of indexes all at once from the lazy parser"""
481 """load a block of indexes all at once from the lazy parser"""
482 if isinstance(self.index, lazyindex):
482 if isinstance(self.index, lazyindex):
483 self.index.p.loadindex(start, end)
483 self.index.p.loadindex(start, end)
484
484
485 def _loadindexmap(self):
485 def _loadindexmap(self):
486 """loads both the map and the index from the lazy parser"""
486 """loads both the map and the index from the lazy parser"""
487 if isinstance(self.index, lazyindex):
487 if isinstance(self.index, lazyindex):
488 p = self.index.p
488 p = self.index.p
489 p.loadindex()
489 p.loadindex()
490 self.nodemap = p.map
490 self.nodemap = p.map
491
491
492 def _loadmap(self):
492 def _loadmap(self):
493 """loads the map from the lazy parser"""
493 """loads the map from the lazy parser"""
494 if isinstance(self.nodemap, lazymap):
494 if isinstance(self.nodemap, lazymap):
495 self.nodemap.p.loadmap()
495 self.nodemap.p.loadmap()
496 self.nodemap = self.nodemap.p.map
496 self.nodemap = self.nodemap.p.map
497
497
498 def tip(self):
498 def tip(self):
499 return self.node(len(self.index) - 2)
499 return self.node(len(self.index) - 2)
500 def __len__(self):
500 def __len__(self):
501 return len(self.index) - 1
501 return len(self.index) - 1
502 def __iter__(self):
502 def __iter__(self):
503 for i in xrange(len(self)):
503 for i in xrange(len(self)):
504 yield i
504 yield i
505 def rev(self, node):
505 def rev(self, node):
506 try:
506 try:
507 return self.nodemap[node]
507 return self.nodemap[node]
508 except KeyError:
508 except KeyError:
509 raise LookupError(node, self.indexfile, _('no node'))
509 raise LookupError(node, self.indexfile, _('no node'))
510 def node(self, rev):
510 def node(self, rev):
511 return self.index[rev][7]
511 return self.index[rev][7]
512 def linkrev(self, rev):
512 def linkrev(self, rev):
513 return self.index[rev][4]
513 return self.index[rev][4]
514 def parents(self, node):
514 def parents(self, node):
515 i = self.index
515 i = self.index
516 d = i[self.rev(node)]
516 d = i[self.rev(node)]
517 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
517 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
518 def parentrevs(self, rev):
518 def parentrevs(self, rev):
519 return self.index[rev][5:7]
519 return self.index[rev][5:7]
520 def start(self, rev):
520 def start(self, rev):
521 return int(self.index[rev][0] >> 16)
521 return int(self.index[rev][0] >> 16)
522 def end(self, rev):
522 def end(self, rev):
523 return self.start(rev) + self.length(rev)
523 return self.start(rev) + self.length(rev)
524 def length(self, rev):
524 def length(self, rev):
525 return self.index[rev][1]
525 return self.index[rev][1]
526 def base(self, rev):
526 def base(self, rev):
527 return self.index[rev][3]
527 return self.index[rev][3]
528
528
529 def size(self, rev):
529 def size(self, rev):
530 """return the length of the uncompressed text for a given revision"""
530 """return the length of the uncompressed text for a given revision"""
531 l = self.index[rev][2]
531 l = self.index[rev][2]
532 if l >= 0:
532 if l >= 0:
533 return l
533 return l
534
534
535 t = self.revision(self.node(rev))
535 t = self.revision(self.node(rev))
536 return len(t)
536 return len(t)
537
537
538 # alternate implementation. The advantage of this code is that it
538 # alternate implementation. The advantage of this code is that it
539 # will be faster for a single revision. But the results are not
539 # will be faster for a single revision. But the results are not
540 # cached, so finding the size of every revision will be slower.
540 # cached, so finding the size of every revision will be slower.
541 """
541 """
542 if self.cache and self.cache[1] == rev:
542 if self.cache and self.cache[1] == rev:
543 return len(self.cache[2])
543 return len(self.cache[2])
544
544
545 base = self.base(rev)
545 base = self.base(rev)
546 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
546 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
547 base = self.cache[1]
547 base = self.cache[1]
548 text = self.cache[2]
548 text = self.cache[2]
549 else:
549 else:
550 text = self.revision(self.node(base))
550 text = self.revision(self.node(base))
551
551
552 l = len(text)
552 l = len(text)
553 for x in xrange(base + 1, rev + 1):
553 for x in xrange(base + 1, rev + 1):
554 l = mdiff.patchedsize(l, self.chunk(x))
554 l = mdiff.patchedsize(l, self.chunk(x))
555 return l
555 return l
556 """
556 """
557
557
558 def reachable(self, node, stop=None):
558 def reachable(self, node, stop=None):
559 """return a hash of all nodes ancestral to a given node, including
559 """return the set of all nodes ancestral to a given node, including
560 the node itself, stopping when stop is matched"""
560 the node itself, stopping when stop is matched"""
561 reachable = {}
561 reachable = set((node,))
562 visit = [node]
562 visit = [node]
563 reachable[node] = 1
564 if stop:
563 if stop:
565 stopn = self.rev(stop)
564 stopn = self.rev(stop)
566 else:
565 else:
567 stopn = 0
566 stopn = 0
568 while visit:
567 while visit:
569 n = visit.pop(0)
568 n = visit.pop(0)
570 if n == stop:
569 if n == stop:
571 continue
570 continue
572 if n == nullid:
571 if n == nullid:
573 continue
572 continue
574 for p in self.parents(n):
573 for p in self.parents(n):
575 if self.rev(p) < stopn:
574 if self.rev(p) < stopn:
576 continue
575 continue
577 if p not in reachable:
576 if p not in reachable:
578 reachable[p] = 1
577 reachable.add(p)
579 visit.append(p)
578 visit.append(p)
580 return reachable
579 return reachable
581
580
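reachable() is one of the places this changeset switches from a dict used as a set to a real set: it walks parents breadth-first from node, adding each newly seen parent, and stops descending past stop (the real method also prunes parents whose revision number is below stop's; this sketch keeps only the node check). The same traversal over a plain parent map, with hypothetical data rather than a revlog:

def reachable_sketch(parents, node, stop=None):
    """parents maps node -> (p1, p2); returns ancestors of node, node included"""
    reachable = set([node])
    visit = [node]
    while visit:
        n = visit.pop(0)
        if n == stop:
            continue
        for p in parents[n]:
            if p is not None and p not in reachable:
                reachable.add(p)
                visit.append(p)
    return reachable

# tiny linear history: a <- b <- c
parents = {"a": (None, None), "b": ("a", None), "c": ("b", None)}
assert reachable_sketch(parents, "c") == set(["a", "b", "c"])
assert reachable_sketch(parents, "c", stop="b") == set(["b", "c"])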
582 def ancestors(self, *revs):
581 def ancestors(self, *revs):
583 'Generate the ancestors of revs using a breadth-first visit'
582 'Generate the ancestors of revs using a breadth-first visit'
584 visit = list(revs)
583 visit = list(revs)
585 seen = set([nullrev])
584 seen = set([nullrev])
586 while visit:
585 while visit:
587 for parent in self.parentrevs(visit.pop(0)):
586 for parent in self.parentrevs(visit.pop(0)):
588 if parent not in seen:
587 if parent not in seen:
589 visit.append(parent)
588 visit.append(parent)
590 seen.add(parent)
589 seen.add(parent)
591 yield parent
590 yield parent
592
591
593 def descendants(self, *revs):
592 def descendants(self, *revs):
594 'Generate the descendants of revs in topological order'
593 'Generate the descendants of revs in topological order'
595 seen = set(revs)
594 seen = set(revs)
596 for i in xrange(min(revs) + 1, len(self)):
595 for i in xrange(min(revs) + 1, len(self)):
597 for x in self.parentrevs(i):
596 for x in self.parentrevs(i):
598 if x != nullrev and x in seen:
597 if x != nullrev and x in seen:
599 seen.add(i)
598 seen.add(i)
600 yield i
599 yield i
601 break
600 break
602
601
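ancestors() above yields every ancestor revision breadth-first, seeding seen with nullrev so the walk stops at the root, while descendants() scans forward from min(revs) and yields any revision one of whose parents has already been seen. The same two patterns over a plain list of parent-rev pairs (hypothetical data; nullrev is -1):

nullrev = -1

def ancestors_sketch(parentrevs, *revs):
    visit = list(revs)
    seen = set([nullrev])
    while visit:
        for parent in parentrevs[visit.pop(0)]:
            if parent not in seen:
                visit.append(parent)
                seen.add(parent)
                yield parent

def descendants_sketch(parentrevs, *revs):
    seen = set(revs)
    for i in range(min(revs) + 1, len(parentrevs)):
        for x in parentrevs[i]:
            if x != nullrev and x in seen:
                seen.add(i)
                yield i
                break

# four revisions: 0 <- 1, 0 <- 2, and 3 merges 1 and 2
parentrevs = [(-1, -1), (0, -1), (0, -1), (1, 2)]
assert list(ancestors_sketch(parentrevs, 3)) == [1, 2, 0]
assert list(descendants_sketch(parentrevs, 1)) == [3]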
603 def findmissing(self, common=None, heads=None):
602 def findmissing(self, common=None, heads=None):
604 '''
603 '''
605 returns the topologically sorted list of nodes from the set:
604 returns the topologically sorted list of nodes from the set:
606 missing = (ancestors(heads) \ ancestors(common))
605 missing = (ancestors(heads) \ ancestors(common))
607
606
608 where ancestors() is the set of ancestors from heads, heads included
607 where ancestors() is the set of ancestors from heads, heads included
609
608
610 if heads is None, the heads of the revlog are used
609 if heads is None, the heads of the revlog are used
611 if common is None, nullid is assumed to be a common node
610 if common is None, nullid is assumed to be a common node
612 '''
611 '''
613 if common is None:
612 if common is None:
614 common = [nullid]
613 common = [nullid]
615 if heads is None:
614 if heads is None:
616 heads = self.heads()
615 heads = self.heads()
617
616
618 common = [self.rev(n) for n in common]
617 common = [self.rev(n) for n in common]
619 heads = [self.rev(n) for n in heads]
618 heads = [self.rev(n) for n in heads]
620
619
621 # we want the ancestors, but inclusive
620 # we want the ancestors, but inclusive
622 has = set(self.ancestors(*common))
621 has = set(self.ancestors(*common))
623 has.add(nullrev)
622 has.add(nullrev)
624 has.update(common)
623 has.update(common)
625
624
626 # take all ancestors from heads that aren't in has
625 # take all ancestors from heads that aren't in has
627 missing = set()
626 missing = set()
628 visit = [r for r in heads if r not in has]
627 visit = [r for r in heads if r not in has]
629 while visit:
628 while visit:
630 r = visit.pop(0)
629 r = visit.pop(0)
631 if r in missing:
630 if r in missing:
632 continue
631 continue
633 else:
632 else:
634 missing.add(r)
633 missing.add(r)
635 for p in self.parentrevs(r):
634 for p in self.parentrevs(r):
636 if p not in has:
635 if p not in has:
637 visit.append(p)
636 visit.append(p)
638 missing = list(missing)
637 missing = list(missing)
639 missing.sort()
638 missing.sort()
640 return [self.node(r) for r in missing]
639 return [self.node(r) for r in missing]
641
640
642 def nodesbetween(self, roots=None, heads=None):
641 def nodesbetween(self, roots=None, heads=None):
643 """Return a tuple containing three elements. Elements 1 and 2 contain
642 """Return a tuple containing three elements. Elements 1 and 2 contain
644 a final list bases and heads after all the unreachable ones have been
643 a final list bases and heads after all the unreachable ones have been
645 pruned. Element 0 contains a topologically sorted list of all
644 pruned. Element 0 contains a topologically sorted list of all
646
645
647 nodes that satisfy these constraints:
646 nodes that satisfy these constraints:
648 1. All nodes must be descended from a node in roots (the nodes on
647 1. All nodes must be descended from a node in roots (the nodes on
649 roots are considered descended from themselves).
648 roots are considered descended from themselves).
650 2. All nodes must also be ancestors of a node in heads (the nodes in
649 2. All nodes must also be ancestors of a node in heads (the nodes in
651 heads are considered to be their own ancestors).
650 heads are considered to be their own ancestors).
652
651
653 If roots is unspecified, nullid is assumed as the only root.
652 If roots is unspecified, nullid is assumed as the only root.
654 If heads is unspecified, it is taken to be the output of the
653 If heads is unspecified, it is taken to be the output of the
655 heads method (i.e. a list of all nodes in the repository that
654 heads method (i.e. a list of all nodes in the repository that
656 have no children)."""
655 have no children)."""
657 nonodes = ([], [], [])
656 nonodes = ([], [], [])
658 if roots is not None:
657 if roots is not None:
659 roots = list(roots)
658 roots = list(roots)
660 if not roots:
659 if not roots:
661 return nonodes
660 return nonodes
662 lowestrev = min([self.rev(n) for n in roots])
661 lowestrev = min([self.rev(n) for n in roots])
663 else:
662 else:
664 roots = [nullid] # Everybody's a descendent of nullid
663 roots = [nullid] # Everybody's a descendent of nullid
665 lowestrev = nullrev
664 lowestrev = nullrev
666 if (lowestrev == nullrev) and (heads is None):
665 if (lowestrev == nullrev) and (heads is None):
667 # We want _all_ the nodes!
666 # We want _all_ the nodes!
668 return ([self.node(r) for r in self], [nullid], list(self.heads()))
667 return ([self.node(r) for r in self], [nullid], list(self.heads()))
669 if heads is None:
668 if heads is None:
670 # All nodes are ancestors, so the latest ancestor is the last
669 # All nodes are ancestors, so the latest ancestor is the last
671 # node.
670 # node.
672 highestrev = len(self) - 1
671 highestrev = len(self) - 1
673 # Set ancestors to None to signal that every node is an ancestor.
672 # Set ancestors to None to signal that every node is an ancestor.
674 ancestors = None
673 ancestors = None
675 # Set heads to an empty dictionary for later discovery of heads
674 # Set heads to an empty dictionary for later discovery of heads
676 heads = {}
675 heads = {}
677 else:
676 else:
678 heads = list(heads)
677 heads = list(heads)
679 if not heads:
678 if not heads:
680 return nonodes
679 return nonodes
681 ancestors = {}
680 ancestors = set()
682 # Turn heads into a dictionary so we can remove 'fake' heads.
681 # Turn heads into a dictionary so we can remove 'fake' heads.
683 # Also, later we will be using it to filter out the heads we can't
682 # Also, later we will be using it to filter out the heads we can't
684 # find from roots.
683 # find from roots.
685 heads = dict.fromkeys(heads, 0)
684 heads = dict.fromkeys(heads, 0)
686 # Start at the top and keep marking parents until we're done.
685 # Start at the top and keep marking parents until we're done.
687 nodestotag = set(heads)
686 nodestotag = set(heads)
688 # Remember where the top was so we can use it as a limit later.
687 # Remember where the top was so we can use it as a limit later.
689 highestrev = max([self.rev(n) for n in nodestotag])
688 highestrev = max([self.rev(n) for n in nodestotag])
690 while nodestotag:
689 while nodestotag:
691 # grab a node to tag
690 # grab a node to tag
692 n = nodestotag.pop()
691 n = nodestotag.pop()
693 # Never tag nullid
692 # Never tag nullid
694 if n == nullid:
693 if n == nullid:
695 continue
694 continue
696 # A node's revision number represents its place in a
695 # A node's revision number represents its place in a
697 # topologically sorted list of nodes.
696 # topologically sorted list of nodes.
698 r = self.rev(n)
697 r = self.rev(n)
699 if r >= lowestrev:
698 if r >= lowestrev:
700 if n not in ancestors:
699 if n not in ancestors:
701 # If we are possibly a descendent of one of the roots
700 # If we are possibly a descendent of one of the roots
702 # and we haven't already been marked as an ancestor
701 # and we haven't already been marked as an ancestor
703 ancestors[n] = 1 # Mark as ancestor
702 ancestors.add(n) # Mark as ancestor
704 # Add non-nullid parents to list of nodes to tag.
703 # Add non-nullid parents to list of nodes to tag.
705 nodestotag.update([p for p in self.parents(n) if
704 nodestotag.update([p for p in self.parents(n) if
706 p != nullid])
705 p != nullid])
707 elif n in heads: # We've seen it before, is it a fake head?
706 elif n in heads: # We've seen it before, is it a fake head?
708 # So it is, real heads should not be the ancestors of
707 # So it is, real heads should not be the ancestors of
709 # any other heads.
708 # any other heads.
710 heads.pop(n)
709 heads.pop(n)
711 if not ancestors:
710 if not ancestors:
712 return nonodes
711 return nonodes
713 # Now that we have our set of ancestors, we want to remove any
712 # Now that we have our set of ancestors, we want to remove any
714 # roots that are not ancestors.
713 # roots that are not ancestors.
715
714
716 # If one of the roots was nullid, everything is included anyway.
715 # If one of the roots was nullid, everything is included anyway.
717 if lowestrev > nullrev:
716 if lowestrev > nullrev:
718 # But, since we weren't, let's recompute the lowest rev to not
717 # But, since we weren't, let's recompute the lowest rev to not
719 # include roots that aren't ancestors.
718 # include roots that aren't ancestors.
720
719
721 # Filter out roots that aren't ancestors of heads
720 # Filter out roots that aren't ancestors of heads
722 roots = [n for n in roots if n in ancestors]
721 roots = [n for n in roots if n in ancestors]
723 # Recompute the lowest revision
722 # Recompute the lowest revision
724 if roots:
723 if roots:
725 lowestrev = min([self.rev(n) for n in roots])
724 lowestrev = min([self.rev(n) for n in roots])
726 else:
725 else:
727 # No more roots? Return empty list
726 # No more roots? Return empty list
728 return nonodes
727 return nonodes
729 else:
728 else:
730 # We are descending from nullid, and don't need to care about
729 # We are descending from nullid, and don't need to care about
731 # any other roots.
730 # any other roots.
732 lowestrev = nullrev
731 lowestrev = nullrev
733 roots = [nullid]
732 roots = [nullid]
734 # Transform our roots list into a set.
733 # Transform our roots list into a set.
735 descendents = set(roots)
734 descendents = set(roots)
736 # Also, keep the original roots so we can filter out roots that aren't
735 # Also, keep the original roots so we can filter out roots that aren't
737 # 'real' roots (i.e. are descended from other roots).
736 # 'real' roots (i.e. are descended from other roots).
738 roots = descendents.copy()
737 roots = descendents.copy()
739 # Our topologically sorted list of output nodes.
738 # Our topologically sorted list of output nodes.
740 orderedout = []
739 orderedout = []
741 # Don't start at nullid since we don't want nullid in our output list,
740 # Don't start at nullid since we don't want nullid in our output list,
742 # and if nullid shows up in descendents, empty parents will look like
741 # and if nullid shows up in descendents, empty parents will look like
743 # they're descendents.
742 # they're descendents.
744 for r in xrange(max(lowestrev, 0), highestrev + 1):
743 for r in xrange(max(lowestrev, 0), highestrev + 1):
745 n = self.node(r)
744 n = self.node(r)
746 isdescendent = False
745 isdescendent = False
747 if lowestrev == nullrev: # Everybody is a descendent of nullid
746 if lowestrev == nullrev: # Everybody is a descendent of nullid
748 isdescendent = True
747 isdescendent = True
749 elif n in descendents:
748 elif n in descendents:
750 # n is already a descendent
749 # n is already a descendent
751 isdescendent = True
750 isdescendent = True
752 # This check only needs to be done here because all the roots
751 # This check only needs to be done here because all the roots
753 # will start being marked as descendents before the loop.
752 # will start being marked as descendents before the loop.
754 if n in roots:
753 if n in roots:
755 # If n was a root, check if it's a 'real' root.
754 # If n was a root, check if it's a 'real' root.
756 p = tuple(self.parents(n))
755 p = tuple(self.parents(n))
757 # If any of its parents are descendents, it's not a root.
756 # If any of its parents are descendents, it's not a root.
758 if (p[0] in descendents) or (p[1] in descendents):
757 if (p[0] in descendents) or (p[1] in descendents):
759 roots.remove(n)
758 roots.remove(n)
760 else:
759 else:
761 p = tuple(self.parents(n))
760 p = tuple(self.parents(n))
762 # A node is a descendent if either of its parents are
761 # A node is a descendent if either of its parents are
763 # descendents. (We seeded the descendents set with the roots
762 # descendents. (We seeded the descendents set with the roots
764 # up there, remember?)
763 # up there, remember?)
765 if (p[0] in descendents) or (p[1] in descendents):
764 if (p[0] in descendents) or (p[1] in descendents):
766 descendents.add(n)
765 descendents.add(n)
767 isdescendent = True
766 isdescendent = True
768 if isdescendent and ((ancestors is None) or (n in ancestors)):
767 if isdescendent and ((ancestors is None) or (n in ancestors)):
769 # Only include nodes that are both descendents and ancestors.
768 # Only include nodes that are both descendents and ancestors.
770 orderedout.append(n)
769 orderedout.append(n)
771 if (ancestors is not None) and (n in heads):
770 if (ancestors is not None) and (n in heads):
772 # We're trying to figure out which heads are reachable
771 # We're trying to figure out which heads are reachable
773 # from roots.
772 # from roots.
774 # Mark this head as having been reached
773 # Mark this head as having been reached
775 heads[n] = 1
774 heads[n] = 1
776 elif ancestors is None:
775 elif ancestors is None:
777 # Otherwise, we're trying to discover the heads.
776 # Otherwise, we're trying to discover the heads.
778 # Assume this is a head because if it isn't, the next step
777 # Assume this is a head because if it isn't, the next step
779 # will eventually remove it.
778 # will eventually remove it.
780 heads[n] = 1
779 heads[n] = 1
781 # But, obviously its parents aren't.
780 # But, obviously its parents aren't.
782 for p in self.parents(n):
781 for p in self.parents(n):
783 heads.pop(p, None)
782 heads.pop(p, None)
784 heads = [n for n in heads.iterkeys() if heads[n] != 0]
783 heads = [n for n in heads.iterkeys() if heads[n] != 0]
785 roots = list(roots)
784 roots = list(roots)
786 assert orderedout
785 assert orderedout
787 assert roots
786 assert roots
788 assert heads
787 assert heads
789 return (orderedout, roots, heads)
788 return (orderedout, roots, heads)
790
789
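A note on why the single pass above is enough: revision numbers in a revlog are a topological order (a parent always has a lower number than its children), so walking revs in increasing order and marking a node as a descendent as soon as either parent is marked never misses anything. A minimal standalone sketch of that sweep, assuming a toy parents mapping from rev to a (p1, p2) pair with -1 meaning no parent, rather than the real revlog API:

    def descendants_of(roots, parents, tiprev):
        """Revs reachable from roots by child edges, in one pass.

        parents maps rev -> (p1, p2), with -1 for a missing parent;
        a single pass works because rev order is topological.
        """
        marked = set(roots)
        for r in range(min(roots), tiprev + 1):
            p1, p2 = parents[r]
            if p1 in marked or p2 in marked:
                marked.add(r)
        return marked

    # toy history: 0 <- 1 <- 2, with 3 branching off 1
    toy = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    assert descendants_of(set([1]), toy, 3) == set([1, 2, 3])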
791 def heads(self, start=None, stop=None):
790 def heads(self, start=None, stop=None):
792 """return the list of all nodes that have no children
791 """return the list of all nodes that have no children
793
792
794 if start is specified, only heads that are descendants of
793 if start is specified, only heads that are descendants of
795 start will be returned
794 start will be returned
796 if stop is specified, the revs in stop are treated as if
795 if stop is specified, the revs in stop are treated as if
797 they had no children
796 they had no children
798 """
797 """
799 if start is None and stop is None:
798 if start is None and stop is None:
800 count = len(self)
799 count = len(self)
801 if not count:
800 if not count:
802 return [nullid]
801 return [nullid]
803 ishead = [1] * (count + 1)
802 ishead = [1] * (count + 1)
804 index = self.index
803 index = self.index
805 for r in xrange(count):
804 for r in xrange(count):
806 e = index[r]
805 e = index[r]
807 ishead[e[5]] = ishead[e[6]] = 0
806 ishead[e[5]] = ishead[e[6]] = 0
808 return [self.node(r) for r in xrange(count) if ishead[r]]
807 return [self.node(r) for r in xrange(count) if ishead[r]]
809
808
810 if start is None:
809 if start is None:
811 start = nullid
810 start = nullid
812 if stop is None:
811 if stop is None:
813 stop = []
812 stop = []
814 stoprevs = set([self.rev(n) for n in stop])
813 stoprevs = set([self.rev(n) for n in stop])
815 startrev = self.rev(start)
814 startrev = self.rev(start)
816 reachable = {startrev: 1}
815 reachable = set((startrev,))
817 heads = {startrev: 1}
816 heads = set((startrev,))
818
817
819 parentrevs = self.parentrevs
818 parentrevs = self.parentrevs
820 for r in xrange(startrev + 1, len(self)):
819 for r in xrange(startrev + 1, len(self)):
821 for p in parentrevs(r):
820 for p in parentrevs(r):
822 if p in reachable:
821 if p in reachable:
823 if r not in stoprevs:
822 if r not in stoprevs:
824 reachable[r] = 1
823 reachable.add(r)
825 heads[r] = 1
824 heads.add(r)
826 if p in heads and p not in stoprevs:
825 if p in heads and p not in stoprevs:
827 del heads[p]
826 heads.remove(p)
828
827
829 return [self.node(r) for r in heads]
828 return [self.node(r) for r in heads]
830
829
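The no-argument branch of heads never builds a child list: every rev starts out flagged as a head and the flag is cleared for anything that appears as a parent (index fields 5 and 6 hold the two parent revs, and the flag list is one slot longer than the revlog so nullrev, -1, harmlessly hits the spare slot). A rough standalone equivalent, again over a hypothetical parents mapping instead of the real index tuples:

    def all_heads(parents, count):
        """Revs that never appear as anyone's parent.

        parents maps rev -> (p1, p2); -1 stands for the null revision.
        """
        ishead = [True] * count
        for r in range(count):
            for p in parents[r]:
                if p != -1:            # ignore the null revision
                    ishead[p] = False
        return [r for r in range(count) if ishead[r]]

    toy = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    assert all_heads(toy, 4) == [2, 3]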
831 def children(self, node):
830 def children(self, node):
832 """find the children of a given node"""
831 """find the children of a given node"""
833 c = []
832 c = []
834 p = self.rev(node)
833 p = self.rev(node)
835 for r in range(p + 1, len(self)):
834 for r in range(p + 1, len(self)):
836 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
835 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
837 if prevs:
836 if prevs:
838 for pr in prevs:
837 for pr in prevs:
839 if pr == p:
838 if pr == p:
840 c.append(self.node(r))
839 c.append(self.node(r))
841 elif p == nullrev:
840 elif p == nullrev:
842 c.append(self.node(r))
841 c.append(self.node(r))
843 return c
842 return c
844
843
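There is no child index to consult, so children simply scans every higher-numbered rev and keeps the ones whose parent tuple mentions the rev in question (the branch above also special-cases the null revision as the parent of root revs, which this toy version leaves out):

    def children_of(rev, parents, count):
        """Revs whose parent tuple mentions rev; parents: rev -> (p1, p2)."""
        return [r for r in range(rev + 1, count) if rev in parents[r]]

    toy = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    assert children_of(1, toy, 4) == [2, 3]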
845 def _match(self, id):
844 def _match(self, id):
846 if isinstance(id, (long, int)):
845 if isinstance(id, (long, int)):
847 # rev
846 # rev
848 return self.node(id)
847 return self.node(id)
849 if len(id) == 20:
848 if len(id) == 20:
850 # possibly a binary node
849 # possibly a binary node
851 # odds of a binary node being all hex in ASCII are 1 in 10**25
850 # odds of a binary node being all hex in ASCII are 1 in 10**25
852 try:
851 try:
853 node = id
852 node = id
854 self.rev(node) # quick search the index
853 self.rev(node) # quick search the index
855 return node
854 return node
856 except LookupError:
855 except LookupError:
857 pass # may be partial hex id
856 pass # may be partial hex id
858 try:
857 try:
859 # str(rev)
858 # str(rev)
860 rev = int(id)
859 rev = int(id)
861 if str(rev) != id:
860 if str(rev) != id:
862 raise ValueError
861 raise ValueError
863 if rev < 0:
862 if rev < 0:
864 rev = len(self) + rev
863 rev = len(self) + rev
865 if rev < 0 or rev >= len(self):
864 if rev < 0 or rev >= len(self):
866 raise ValueError
865 raise ValueError
867 return self.node(rev)
866 return self.node(rev)
868 except (ValueError, OverflowError):
867 except (ValueError, OverflowError):
869 pass
868 pass
870 if len(id) == 40:
869 if len(id) == 40:
871 try:
870 try:
872 # a full hex nodeid?
871 # a full hex nodeid?
873 node = bin(id)
872 node = bin(id)
874 self.rev(node)
873 self.rev(node)
875 return node
874 return node
876 except (TypeError, LookupError):
875 except (TypeError, LookupError):
877 pass
876 pass
878
877
879 def _partialmatch(self, id):
878 def _partialmatch(self, id):
880 if len(id) < 40:
879 if len(id) < 40:
881 try:
880 try:
882 # hex(node)[:...]
881 # hex(node)[:...]
883 l = len(id) / 2 # grab an even number of digits
882 l = len(id) / 2 # grab an even number of digits
884 bin_id = bin(id[:l*2])
883 bin_id = bin(id[:l*2])
885 nl = [n for n in self.nodemap if n[:l] == bin_id]
884 nl = [n for n in self.nodemap if n[:l] == bin_id]
886 nl = [n for n in nl if hex(n).startswith(id)]
885 nl = [n for n in nl if hex(n).startswith(id)]
887 if len(nl) > 0:
886 if len(nl) > 0:
888 if len(nl) == 1:
887 if len(nl) == 1:
889 return nl[0]
888 return nl[0]
890 raise LookupError(id, self.indexfile,
889 raise LookupError(id, self.indexfile,
891 _('ambiguous identifier'))
890 _('ambiguous identifier'))
892 return None
891 return None
893 except TypeError:
892 except TypeError:
894 pass
893 pass
895
894
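_partialmatch can only turn an even number of hex digits into bytes, so it first compares that truncated binary prefix against every node in the nodemap, then re-filters on the full hex string so an odd trailing digit still narrows the match. A self-contained sketch of the same two-step filter, using binascii in place of Mercurial's bin and hex helpers:

    import binascii

    def partial_match(prefix, nodes):
        """Resolve a short hex prefix against a list of binary node ids.

        Returns the unique match, None when nothing matches, and raises
        on ambiguity, mirroring the precedence sketched above.
        """
        l = len(prefix) // 2                       # whole bytes only
        bin_prefix = binascii.unhexlify(prefix[:l * 2])
        candidates = [n for n in nodes if n[:l] == bin_prefix]
        candidates = [n for n in candidates
                      if binascii.hexlify(n).decode('ascii').startswith(prefix)]
        if not candidates:
            return None
        if len(candidates) > 1:
            raise ValueError('ambiguous identifier: %s' % prefix)
        return candidates[0]

    nodes = [binascii.unhexlify('ab' * 20), binascii.unhexlify('ac' * 20)]
    assert partial_match('aba', nodes) == nodes[0]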
896 def lookup(self, id):
895 def lookup(self, id):
897 """locate a node based on:
896 """locate a node based on:
898 - revision number or str(revision number)
897 - revision number or str(revision number)
899 - nodeid or subset of hex nodeid
898 - nodeid or subset of hex nodeid
900 """
899 """
901 n = self._match(id)
900 n = self._match(id)
902 if n is not None:
901 if n is not None:
903 return n
902 return n
904 n = self._partialmatch(id)
903 n = self._partialmatch(id)
905 if n:
904 if n:
906 return n
905 return n
907
906
908 raise LookupError(id, self.indexfile, _('no match found'))
907 raise LookupError(id, self.indexfile, _('no match found'))
909
908
910 def cmp(self, node, text):
909 def cmp(self, node, text):
911 """compare text with a given file revision"""
910 """compare text with a given file revision"""
912 p1, p2 = self.parents(node)
911 p1, p2 = self.parents(node)
913 return hash(text, p1, p2) != node
912 return hash(text, p1, p2) != node
914
913
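cmp can answer "has this text changed?" without reconstructing the stored revision, because a node id commits to both the content and the parents. The sketch below assumes the usual revlog convention (the hash helper itself lives earlier in this file, outside this hunk): SHA-1 over the two parent ids in sorted order followed by the text.

    import hashlib

    def node_hash(text, p1, p2):
        """Node id as sha1(min(p1, p2) + max(p1, p2) + text), per revlog convention."""
        s = hashlib.sha1(min(p1, p2))
        s.update(max(p1, p2))
        s.update(text)
        return s.digest()

    def changed(text, p1, p2, node):
        # mirrors cmp: True means the text differs from the stored revision
        return node_hash(text, p1, p2) != node

    nullid = b'\x00' * 20
    n = node_hash(b'hello\n', nullid, nullid)
    assert not changed(b'hello\n', nullid, nullid, n)
    assert changed(b'bye\n', nullid, nullid, n)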
915 def _addchunk(self, offset, data):
914 def _addchunk(self, offset, data):
916 o, d = self._chunkcache
915 o, d = self._chunkcache
917 # try to add to existing cache
916 # try to add to existing cache
918 if o + len(d) == offset and len(d) + len(data) < _prereadsize:
917 if o + len(d) == offset and len(d) + len(data) < _prereadsize:
919 self._chunkcache = o, d + data
918 self._chunkcache = o, d + data
920 else:
919 else:
921 self._chunkcache = offset, data
920 self._chunkcache = offset, data
922
921
923 def _loadchunk(self, offset, length, df=None):
922 def _loadchunk(self, offset, length, df=None):
924 if not df:
923 if not df:
925 if self._inline:
924 if self._inline:
926 df = self.opener(self.indexfile)
925 df = self.opener(self.indexfile)
927 else:
926 else:
928 df = self.opener(self.datafile)
927 df = self.opener(self.datafile)
929
928
930 readahead = max(65536, length)
929 readahead = max(65536, length)
931 df.seek(offset)
930 df.seek(offset)
932 d = df.read(readahead)
931 d = df.read(readahead)
933 self._addchunk(offset, d)
932 self._addchunk(offset, d)
934 if readahead > length:
933 if readahead > length:
935 return d[:length]
934 return d[:length]
936 return d
935 return d
937
936
938 def _getchunk(self, offset, length, df=None):
937 def _getchunk(self, offset, length, df=None):
939 o, d = self._chunkcache
938 o, d = self._chunkcache
940 l = len(d)
939 l = len(d)
941
940
942 # is it in the cache?
941 # is it in the cache?
943 cachestart = offset - o
942 cachestart = offset - o
944 cacheend = cachestart + length
943 cacheend = cachestart + length
945 if cachestart >= 0 and cacheend <= l:
944 if cachestart >= 0 and cacheend <= l:
946 if cachestart == 0 and cacheend == l:
945 if cachestart == 0 and cacheend == l:
947 return d # avoid a copy
946 return d # avoid a copy
948 return d[cachestart:cacheend]
947 return d[cachestart:cacheend]
949
948
950 return self._loadchunk(offset, length, df)
949 return self._loadchunk(offset, length, df)
951
950
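The three helpers above form a single-window read cache: _loadchunk reads at least 64KB starting at the requested offset, _addchunk glues new data onto the cached window only when it is exactly contiguous (and the window stays under the preread limit), and _getchunk serves a slice straight from the window whenever the request falls inside it. A trimmed-down standalone version over an ordinary file object, with the limit passed in instead of the module-level _prereadsize:

    class ChunkCache(object):
        """Offset-based readahead cache, loosely modelled on _chunkcache."""

        def __init__(self, fileobj, readahead=65536, maxwindow=1048576):
            self._f = fileobj
            self._readahead = readahead
            self._maxwindow = maxwindow
            self._cache = (0, b'')                 # (offset, data)

        def read(self, offset, length):
            o, d = self._cache
            start, end = offset - o, offset - o + length
            if start >= 0 and end <= len(d):
                return d[start:end]                # served from the window
            self._f.seek(offset)
            data = self._f.read(max(self._readahead, length))
            # grow the window only when the new data is contiguous with it
            if o + len(d) == offset and len(d) + len(data) < self._maxwindow:
                self._cache = (o, d + data)
            else:
                self._cache = (offset, data)
            return data[:length]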
952 def _prime(self, startrev, endrev, df):
951 def _prime(self, startrev, endrev, df):
953 start = self.start(startrev)
952 start = self.start(startrev)
954 end = self.end(endrev)
953 end = self.end(endrev)
955 if self._inline:
954 if self._inline:
956 start += (startrev + 1) * self._io.size
955 start += (startrev + 1) * self._io.size
957 end += (startrev + 1) * self._io.size
956 end += (startrev + 1) * self._io.size
958 self._loadchunk(start, end - start, df)
957 self._loadchunk(start, end - start, df)
959
958
960 def chunk(self, rev, df=None):
959 def chunk(self, rev, df=None):
961 start, length = self.start(rev), self.length(rev)
960 start, length = self.start(rev), self.length(rev)
962 if self._inline:
961 if self._inline:
963 start += (rev + 1) * self._io.size
962 start += (rev + 1) * self._io.size
964 return decompress(self._getchunk(start, length, df))
963 return decompress(self._getchunk(start, length, df))
965
964
966 def revdiff(self, rev1, rev2):
965 def revdiff(self, rev1, rev2):
967 """return or calculate a delta between two revisions"""
966 """return or calculate a delta between two revisions"""
968 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
967 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
969 return self.chunk(rev2)
968 return self.chunk(rev2)
970
969
971 return mdiff.textdiff(self.revision(self.node(rev1)),
970 return mdiff.textdiff(self.revision(self.node(rev1)),
972 self.revision(self.node(rev2)))
971 self.revision(self.node(rev2)))
973
972
974 def revision(self, node):
973 def revision(self, node):
975 """return an uncompressed revision of a given node"""
974 """return an uncompressed revision of a given node"""
976 if node == nullid:
975 if node == nullid:
977 return ""
976 return ""
978 if self._cache and self._cache[0] == node:
977 if self._cache and self._cache[0] == node:
979 return str(self._cache[2])
978 return str(self._cache[2])
980
979
981 # look up what we need to read
980 # look up what we need to read
982 text = None
981 text = None
983 rev = self.rev(node)
982 rev = self.rev(node)
984 base = self.base(rev)
983 base = self.base(rev)
985
984
986 # check rev flags
985 # check rev flags
987 if self.index[rev][0] & 0xFFFF:
986 if self.index[rev][0] & 0xFFFF:
988 raise RevlogError(_('incompatible revision flag %x') %
987 raise RevlogError(_('incompatible revision flag %x') %
989 (self.index[rev][0] & 0xFFFF))
988 (self.index[rev][0] & 0xFFFF))
990
989
991 df = None
990 df = None
992
991
993 # do we have useful data cached?
992 # do we have useful data cached?
994 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
993 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
995 base = self._cache[1]
994 base = self._cache[1]
996 text = str(self._cache[2])
995 text = str(self._cache[2])
997 self._loadindex(base, rev + 1)
996 self._loadindex(base, rev + 1)
998 if not self._inline and rev > base + 1:
997 if not self._inline and rev > base + 1:
999 df = self.opener(self.datafile)
998 df = self.opener(self.datafile)
1000 self._prime(base, rev, df)
999 self._prime(base, rev, df)
1001 else:
1000 else:
1002 self._loadindex(base, rev + 1)
1001 self._loadindex(base, rev + 1)
1003 if not self._inline and rev > base:
1002 if not self._inline and rev > base:
1004 df = self.opener(self.datafile)
1003 df = self.opener(self.datafile)
1005 self._prime(base, rev, df)
1004 self._prime(base, rev, df)
1006 text = self.chunk(base, df=df)
1005 text = self.chunk(base, df=df)
1007
1006
1008 bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
1007 bins = [self.chunk(r, df) for r in xrange(base + 1, rev + 1)]
1009 text = mdiff.patches(text, bins)
1008 text = mdiff.patches(text, bins)
1010 p1, p2 = self.parents(node)
1009 p1, p2 = self.parents(node)
1011 if node != hash(text, p1, p2):
1010 if node != hash(text, p1, p2):
1012 raise RevlogError(_("integrity check failed on %s:%d")
1011 raise RevlogError(_("integrity check failed on %s:%d")
1013 % (self.datafile, rev))
1012 % (self.datafile, rev))
1014
1013
1015 self._cache = (node, rev, text)
1014 self._cache = (node, rev, text)
1016 return text
1015 return text
1017
1016
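Stripped of caching and file handling, revision is "take the full text stored at the base of the delta chain, apply every delta from base+1 up to the requested rev, then verify the hash". A minimal sketch of that reconstruction loop, with a hypothetical apply_patch callable standing in for mdiff.patches (the real code hands the whole list of deltas to mdiff.patches in one call so they can be folded together):

    def reconstruct(base_text, deltas, apply_patch):
        """Apply a delta chain to its base snapshot, oldest delta first."""
        text = base_text
        for d in deltas:
            text = apply_patch(text, d)
        return text

    # toy "delta" format: each patch simply replaces the whole text
    assert reconstruct(b'v1', [b'v2', b'v3'], lambda t, d: d) == b'v3'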
1018 def checkinlinesize(self, tr, fp=None):
1017 def checkinlinesize(self, tr, fp=None):
1019 if not self._inline or (self.start(-2) + self.length(-2)) < 131072:
1018 if not self._inline or (self.start(-2) + self.length(-2)) < 131072:
1020 return
1019 return
1021
1020
1022 trinfo = tr.find(self.indexfile)
1021 trinfo = tr.find(self.indexfile)
1023 if trinfo is None:
1022 if trinfo is None:
1024 raise RevlogError(_("%s not found in the transaction")
1023 raise RevlogError(_("%s not found in the transaction")
1025 % self.indexfile)
1024 % self.indexfile)
1026
1025
1027 trindex = trinfo[2]
1026 trindex = trinfo[2]
1028 dataoff = self.start(trindex)
1027 dataoff = self.start(trindex)
1029
1028
1030 tr.add(self.datafile, dataoff)
1029 tr.add(self.datafile, dataoff)
1031
1030
1032 if fp:
1031 if fp:
1033 fp.flush()
1032 fp.flush()
1034 fp.close()
1033 fp.close()
1035
1034
1036 df = self.opener(self.datafile, 'w')
1035 df = self.opener(self.datafile, 'w')
1037 try:
1036 try:
1038 calc = self._io.size
1037 calc = self._io.size
1039 for r in self:
1038 for r in self:
1040 start = self.start(r) + (r + 1) * calc
1039 start = self.start(r) + (r + 1) * calc
1041 length = self.length(r)
1040 length = self.length(r)
1042 d = self._getchunk(start, length)
1041 d = self._getchunk(start, length)
1043 df.write(d)
1042 df.write(d)
1044 finally:
1043 finally:
1045 df.close()
1044 df.close()
1046
1045
1047 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1046 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1048 self.version &= ~(REVLOGNGINLINEDATA)
1047 self.version &= ~(REVLOGNGINLINEDATA)
1049 self._inline = False
1048 self._inline = False
1050 for i in self:
1049 for i in self:
1051 e = self._io.packentry(self.index[i], self.node, self.version, i)
1050 e = self._io.packentry(self.index[i], self.node, self.version, i)
1052 fp.write(e)
1051 fp.write(e)
1053
1052
1054 # if we don't call rename, the temp file will never replace the
1053 # if we don't call rename, the temp file will never replace the
1055 # real index
1054 # real index
1056 fp.rename()
1055 fp.rename()
1057
1056
1058 tr.replace(self.indexfile, trindex * calc)
1057 tr.replace(self.indexfile, trindex * calc)
1059 self._chunkcache = (0, '')
1058 self._chunkcache = (0, '')
1060
1059
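In an inline revlog the data chunks sit interleaved with the index entries in the .i file, so the physical position of rev r's chunk is its logical data offset plus one index-entry size for every entry written up to and including it; that is the start(r) + (r + 1) * self._io.size arithmetic used above when the chunks are copied out into a separate .d file. A small illustration with made-up sizes:

    def inline_chunk_offset(logical_start, rev, entry_size):
        """Physical offset of rev's data inside an inline .i file.

        logical_start plays the role of self.start(rev) and entry_size
        the role of self._io.size.
        """
        return logical_start + (rev + 1) * entry_size

    # rev 2 whose data logically starts at byte 300, with 64-byte entries:
    # three index entries (for revs 0, 1 and 2) precede its chunk on disk
    assert inline_chunk_offset(300, 2, 64) == 300 + 3 * 64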
1061 def addrevision(self, text, transaction, link, p1, p2, d=None):
1060 def addrevision(self, text, transaction, link, p1, p2, d=None):
1062 """add a revision to the log
1061 """add a revision to the log
1063
1062
1064 text - the revision data to add
1063 text - the revision data to add
1065 transaction - the transaction object used for rollback
1064 transaction - the transaction object used for rollback
1066 link - the linkrev data to add
1065 link - the linkrev data to add
1067 p1, p2 - the parent nodeids of the revision
1066 p1, p2 - the parent nodeids of the revision
1068 d - an optional precomputed delta
1067 d - an optional precomputed delta
1069 """
1068 """
1070 dfh = None
1069 dfh = None
1071 if not self._inline:
1070 if not self._inline:
1072 dfh = self.opener(self.datafile, "a")
1071 dfh = self.opener(self.datafile, "a")
1073 ifh = self.opener(self.indexfile, "a+")
1072 ifh = self.opener(self.indexfile, "a+")
1074 try:
1073 try:
1075 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1074 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1076 finally:
1075 finally:
1077 if dfh:
1076 if dfh:
1078 dfh.close()
1077 dfh.close()
1079 ifh.close()
1078 ifh.close()
1080
1079
1081 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1080 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1082 node = hash(text, p1, p2)
1081 node = hash(text, p1, p2)
1083 if node in self.nodemap:
1082 if node in self.nodemap:
1084 return node
1083 return node
1085
1084
1086 curr = len(self)
1085 curr = len(self)
1087 prev = curr - 1
1086 prev = curr - 1
1088 base = self.base(prev)
1087 base = self.base(prev)
1089 offset = self.end(prev)
1088 offset = self.end(prev)
1090
1089
1091 if curr:
1090 if curr:
1092 if not d:
1091 if not d:
1093 ptext = self.revision(self.node(prev))
1092 ptext = self.revision(self.node(prev))
1094 d = mdiff.textdiff(ptext, text)
1093 d = mdiff.textdiff(ptext, text)
1095 data = compress(d)
1094 data = compress(d)
1096 l = len(data[1]) + len(data[0])
1095 l = len(data[1]) + len(data[0])
1097 dist = l + offset - self.start(base)
1096 dist = l + offset - self.start(base)
1098
1097
1099 # full versions are inserted when the needed deltas
1098 # full versions are inserted when the needed deltas
1100 # become comparable to the uncompressed text
1099 # become comparable to the uncompressed text
1101 if not curr or dist > len(text) * 2:
1100 if not curr or dist > len(text) * 2:
1102 data = compress(text)
1101 data = compress(text)
1103 l = len(data[1]) + len(data[0])
1102 l = len(data[1]) + len(data[0])
1104 base = curr
1103 base = curr
1105
1104
1106 e = (offset_type(offset, 0), l, len(text),
1105 e = (offset_type(offset, 0), l, len(text),
1107 base, link, self.rev(p1), self.rev(p2), node)
1106 base, link, self.rev(p1), self.rev(p2), node)
1108 self.index.insert(-1, e)
1107 self.index.insert(-1, e)
1109 self.nodemap[node] = curr
1108 self.nodemap[node] = curr
1110
1109
1111 entry = self._io.packentry(e, self.node, self.version, curr)
1110 entry = self._io.packentry(e, self.node, self.version, curr)
1112 if not self._inline:
1111 if not self._inline:
1113 transaction.add(self.datafile, offset)
1112 transaction.add(self.datafile, offset)
1114 transaction.add(self.indexfile, curr * len(entry))
1113 transaction.add(self.indexfile, curr * len(entry))
1115 if data[0]:
1114 if data[0]:
1116 dfh.write(data[0])
1115 dfh.write(data[0])
1117 dfh.write(data[1])
1116 dfh.write(data[1])
1118 dfh.flush()
1117 dfh.flush()
1119 ifh.write(entry)
1118 ifh.write(entry)
1120 else:
1119 else:
1121 offset += curr * self._io.size
1120 offset += curr * self._io.size
1122 transaction.add(self.indexfile, offset, curr)
1121 transaction.add(self.indexfile, offset, curr)
1123 ifh.write(entry)
1122 ifh.write(entry)
1124 ifh.write(data[0])
1123 ifh.write(data[0])
1125 ifh.write(data[1])
1124 ifh.write(data[1])
1126 self.checkinlinesize(transaction, ifh)
1125 self.checkinlinesize(transaction, ifh)
1127
1126
1128 self._cache = (node, curr, text)
1127 self._cache = (node, curr, text)
1129 return node
1128 return node
1130
1129
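The interesting decision in _addrevision is when to abandon the delta chain: apart from the very first revision, which is always stored in full, a fresh snapshot is written whenever the bytes needed to reach this revision from the chain base would exceed twice the size of the new text, which keeps reconstruction in revision bounded. A sketch of just that policy, with hypothetical compress and make_delta callables standing in for the real helpers:

    def choose_storage(text, prev_text, chain_bytes, compress, make_delta):
        """Return ('delta' | 'full', payload) using the 2x-size rule.

        chain_bytes is the compressed data already in the current chain,
        i.e. the dist computed above minus the new delta itself.
        """
        delta = compress(make_delta(prev_text, text))
        if chain_bytes + len(delta) > 2 * len(text):
            return 'full', compress(text)          # start a new chain
        return 'delta', delta

    # toy callables: identity "compression", delta is the new text verbatim
    kind, _ = choose_storage(b'ab', b'a', 100,
                             compress=lambda d: d,
                             make_delta=lambda old, new: new)
    assert kind == 'full'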
1131 def ancestor(self, a, b):
1130 def ancestor(self, a, b):
1132 """calculate the least common ancestor of nodes a and b"""
1131 """calculate the least common ancestor of nodes a and b"""
1133
1132
1134 def parents(rev):
1133 def parents(rev):
1135 return [p for p in self.parentrevs(rev) if p != nullrev]
1134 return [p for p in self.parentrevs(rev) if p != nullrev]
1136
1135
1137 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1136 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1138 if c is None:
1137 if c is None:
1139 return nullid
1138 return nullid
1140
1139
1141 return self.node(c)
1140 return self.node(c)
1142
1141
1143 def group(self, nodelist, lookup, infocollect=None):
1142 def group(self, nodelist, lookup, infocollect=None):
1144 """calculate a delta group
1143 """calculate a delta group
1145
1144
1146 Given a list of changeset revs, return a set of deltas and
1145 Given a list of changeset revs, return a set of deltas and
1147 metadata corresponding to nodes. The first delta is
1146 metadata corresponding to nodes. The first delta is
1148 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1147 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1149 have this parent, as it has all history before these
1148 have this parent, as it has all history before these
1150 changesets. Here parent means parent[0].
1149 changesets. Here parent means parent[0].
1151 """
1150 """
1152
1151
1153 # if we don't have any revisions touched by these changesets, bail
1152 # if we don't have any revisions touched by these changesets, bail
1154 if not nodelist:
1153 if not nodelist:
1155 yield changegroup.closechunk()
1154 yield changegroup.closechunk()
1156 return
1155 return
1157
1156
1158 revs = [self.rev(n) for n in nodelist]
1157 revs = [self.rev(n) for n in nodelist]
1159
1158
1160 # add the parent of the first rev
1159 # add the parent of the first rev
1161 p = self.parentrevs(revs[0])[0]
1160 p = self.parentrevs(revs[0])[0]
1162 revs.insert(0, p)
1161 revs.insert(0, p)
1163
1162
1164 # build deltas
1163 # build deltas
1165 for d in xrange(0, len(revs) - 1):
1164 for d in xrange(0, len(revs) - 1):
1166 a, b = revs[d], revs[d + 1]
1165 a, b = revs[d], revs[d + 1]
1167 nb = self.node(b)
1166 nb = self.node(b)
1168
1167
1169 if infocollect is not None:
1168 if infocollect is not None:
1170 infocollect(nb)
1169 infocollect(nb)
1171
1170
1172 p = self.parents(nb)
1171 p = self.parents(nb)
1173 meta = nb + p[0] + p[1] + lookup(nb)
1172 meta = nb + p[0] + p[1] + lookup(nb)
1174 if a == -1:
1173 if a == -1:
1175 d = self.revision(nb)
1174 d = self.revision(nb)
1176 meta += mdiff.trivialdiffheader(len(d))
1175 meta += mdiff.trivialdiffheader(len(d))
1177 else:
1176 else:
1178 d = self.revdiff(a, b)
1177 d = self.revdiff(a, b)
1179 yield changegroup.chunkheader(len(meta) + len(d))
1178 yield changegroup.chunkheader(len(meta) + len(d))
1180 yield meta
1179 yield meta
1181 if len(d) > 2**20:
1180 if len(d) > 2**20:
1182 pos = 0
1181 pos = 0
1183 while pos < len(d):
1182 while pos < len(d):
1184 pos2 = pos + 2 ** 18
1183 pos2 = pos + 2 ** 18
1185 yield d[pos:pos2]
1184 yield d[pos:pos2]
1186 pos = pos2
1185 pos = pos2
1187 else:
1186 else:
1188 yield d
1187 yield d
1189
1188
1190 yield changegroup.closechunk()
1189 yield changegroup.closechunk()
1191
1190
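Each entry group puts on the wire is a length-prefixed chunk whose payload is 80 bytes of metadata (node, both parents, and the node the linkrev is looked up from) followed by the delta, and deltas over 1MB are streamed out in 256KB slices instead of one big string. A sketch of that framing; the 4-byte big-endian length prefix that counts itself is how this sketch models changegroup.chunkheader, stated here as an assumption since that helper is defined elsewhere:

    import struct

    def frame_chunk(meta, delta, piece=2 ** 18):
        """Yield the wire framing for one delta-group entry."""
        yield struct.pack('>l', 4 + len(meta) + len(delta))
        yield meta
        if len(delta) > 2 ** 20:                   # stream big deltas in slices
            for pos in range(0, len(delta), piece):
                yield delta[pos:pos + piece]
        else:
            yield delta

    frames = list(frame_chunk(b'M' * 80, b'D' * 10))
    assert frames[0] == struct.pack('>l', 94)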
1192 def addgroup(self, revs, linkmapper, transaction):
1191 def addgroup(self, revs, linkmapper, transaction):
1193 """
1192 """
1194 add a delta group
1193 add a delta group
1195
1194
1196 Given a set of deltas, add them to the revision log. The
1195 Given a set of deltas, add them to the revision log. The
1197 first delta is against its parent, which should be in our
1196 first delta is against its parent, which should be in our
1198 log; the rest are against the previous delta.
1197 log; the rest are against the previous delta.
1199 """
1198 """
1200
1199
1201 # track the base of the current delta log
1200 # track the base of the current delta log
1202 r = len(self)
1201 r = len(self)
1203 t = r - 1
1202 t = r - 1
1204 node = None
1203 node = None
1205
1204
1206 base = prev = nullrev
1205 base = prev = nullrev
1207 start = end = textlen = 0
1206 start = end = textlen = 0
1208 if r:
1207 if r:
1209 end = self.end(t)
1208 end = self.end(t)
1210
1209
1211 ifh = self.opener(self.indexfile, "a+")
1210 ifh = self.opener(self.indexfile, "a+")
1212 isize = r * self._io.size
1211 isize = r * self._io.size
1213 if self._inline:
1212 if self._inline:
1214 transaction.add(self.indexfile, end + isize, r)
1213 transaction.add(self.indexfile, end + isize, r)
1215 dfh = None
1214 dfh = None
1216 else:
1215 else:
1217 transaction.add(self.indexfile, isize, r)
1216 transaction.add(self.indexfile, isize, r)
1218 transaction.add(self.datafile, end)
1217 transaction.add(self.datafile, end)
1219 dfh = self.opener(self.datafile, "a")
1218 dfh = self.opener(self.datafile, "a")
1220
1219
1221 try:
1220 try:
1222 # loop through our set of deltas
1221 # loop through our set of deltas
1223 chain = None
1222 chain = None
1224 for chunk in revs:
1223 for chunk in revs:
1225 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1224 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1226 link = linkmapper(cs)
1225 link = linkmapper(cs)
1227 if node in self.nodemap:
1226 if node in self.nodemap:
1228 # this can happen if two branches make the same change
1227 # this can happen if two branches make the same change
1229 chain = node
1228 chain = node
1230 continue
1229 continue
1231 delta = buffer(chunk, 80)
1230 delta = buffer(chunk, 80)
1232 del chunk
1231 del chunk
1233
1232
1234 for p in (p1, p2):
1233 for p in (p1, p2):
1235 if p not in self.nodemap:
1234 if p not in self.nodemap:
1236 raise LookupError(p, self.indexfile, _('unknown parent'))
1235 raise LookupError(p, self.indexfile, _('unknown parent'))
1237
1236
1238 if not chain:
1237 if not chain:
1239 # retrieve the parent revision of the delta chain
1238 # retrieve the parent revision of the delta chain
1240 chain = p1
1239 chain = p1
1241 if chain not in self.nodemap:
1240 if chain not in self.nodemap:
1242 raise LookupError(chain, self.indexfile, _('unknown base'))
1241 raise LookupError(chain, self.indexfile, _('unknown base'))
1243
1242
1244 # full versions are inserted when the needed deltas become
1243 # full versions are inserted when the needed deltas become
1245 # comparable to the uncompressed text or when the previous
1244 # comparable to the uncompressed text or when the previous
1246 # version is not the one we have a delta against. We use
1245 # version is not the one we have a delta against. We use
1247 # the size of the previous full rev as a proxy for the
1246 # the size of the previous full rev as a proxy for the
1248 # current size.
1247 # current size.
1249
1248
1250 if chain == prev:
1249 if chain == prev:
1251 cdelta = compress(delta)
1250 cdelta = compress(delta)
1252 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1251 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1253 textlen = mdiff.patchedsize(textlen, delta)
1252 textlen = mdiff.patchedsize(textlen, delta)
1254
1253
1255 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1254 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1256 # flush our writes here so we can read it in revision
1255 # flush our writes here so we can read it in revision
1257 if dfh:
1256 if dfh:
1258 dfh.flush()
1257 dfh.flush()
1259 ifh.flush()
1258 ifh.flush()
1260 text = self.revision(chain)
1259 text = self.revision(chain)
1261 if len(text) == 0:
1260 if len(text) == 0:
1262 # skip over trivial delta header
1261 # skip over trivial delta header
1263 text = buffer(delta, 12)
1262 text = buffer(delta, 12)
1264 else:
1263 else:
1265 text = mdiff.patches(text, [delta])
1264 text = mdiff.patches(text, [delta])
1266 del delta
1265 del delta
1267 chk = self._addrevision(text, transaction, link, p1, p2, None,
1266 chk = self._addrevision(text, transaction, link, p1, p2, None,
1268 ifh, dfh)
1267 ifh, dfh)
1269 if not dfh and not self._inline:
1268 if not dfh and not self._inline:
1270 # addrevision switched from inline to conventional
1269 # addrevision switched from inline to conventional
1271 # reopen the index
1270 # reopen the index
1272 dfh = self.opener(self.datafile, "a")
1271 dfh = self.opener(self.datafile, "a")
1273 ifh = self.opener(self.indexfile, "a")
1272 ifh = self.opener(self.indexfile, "a")
1274 if chk != node:
1273 if chk != node:
1275 raise RevlogError(_("consistency error adding group"))
1274 raise RevlogError(_("consistency error adding group"))
1276 textlen = len(text)
1275 textlen = len(text)
1277 else:
1276 else:
1278 e = (offset_type(end, 0), cdeltalen, textlen, base,
1277 e = (offset_type(end, 0), cdeltalen, textlen, base,
1279 link, self.rev(p1), self.rev(p2), node)
1278 link, self.rev(p1), self.rev(p2), node)
1280 self.index.insert(-1, e)
1279 self.index.insert(-1, e)
1281 self.nodemap[node] = r
1280 self.nodemap[node] = r
1282 entry = self._io.packentry(e, self.node, self.version, r)
1281 entry = self._io.packentry(e, self.node, self.version, r)
1283 if self._inline:
1282 if self._inline:
1284 ifh.write(entry)
1283 ifh.write(entry)
1285 ifh.write(cdelta[0])
1284 ifh.write(cdelta[0])
1286 ifh.write(cdelta[1])
1285 ifh.write(cdelta[1])
1287 self.checkinlinesize(transaction, ifh)
1286 self.checkinlinesize(transaction, ifh)
1288 if not self._inline:
1287 if not self._inline:
1289 dfh = self.opener(self.datafile, "a")
1288 dfh = self.opener(self.datafile, "a")
1290 ifh = self.opener(self.indexfile, "a")
1289 ifh = self.opener(self.indexfile, "a")
1291 else:
1290 else:
1292 dfh.write(cdelta[0])
1291 dfh.write(cdelta[0])
1293 dfh.write(cdelta[1])
1292 dfh.write(cdelta[1])
1294 ifh.write(entry)
1293 ifh.write(entry)
1295
1294
1296 t, r, chain, prev = r, r + 1, node, node
1295 t, r, chain, prev = r, r + 1, node, node
1297 base = self.base(t)
1296 base = self.base(t)
1298 start = self.start(base)
1297 start = self.start(base)
1299 end = self.end(t)
1298 end = self.end(t)
1300 finally:
1299 finally:
1301 if dfh:
1300 if dfh:
1302 dfh.close()
1301 dfh.close()
1303 ifh.close()
1302 ifh.close()
1304
1303
1305 return node
1304 return node
1306
1305
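On the receiving side each chunk payload begins with four 20-byte node ids (the new node, its two parents, and the changeset node used for the linkrev) and everything from byte 80 onwards is the delta; addgroup peels the header off with struct.unpack and keeps the remainder as a buffer to avoid a copy. A tiny standalone parse of the same layout:

    import struct

    def parse_group_chunk(payload):
        """Split a delta-group chunk payload into its node header and delta."""
        node, p1, p2, cs = struct.unpack('20s20s20s20s', payload[:80])
        return node, p1, p2, cs, payload[80:]

    payload = b'N' * 20 + b'P' * 20 + b'Q' * 20 + b'C' * 20 + b'delta-bytes'
    node, p1, p2, cs, delta = parse_group_chunk(payload)
    assert node == b'N' * 20 and delta == b'delta-bytes'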
1307 def strip(self, minlink, transaction):
1306 def strip(self, minlink, transaction):
1308 """truncate the revlog on the first revision with a linkrev >= minlink
1307 """truncate the revlog on the first revision with a linkrev >= minlink
1309
1308
1310 This function is called when we're stripping revision minlink and
1309 This function is called when we're stripping revision minlink and
1311 its descendants from the repository.
1310 its descendants from the repository.
1312
1311
1313 We have to remove all revisions with linkrev >= minlink, because
1312 We have to remove all revisions with linkrev >= minlink, because
1314 the equivalent changelog revisions will be renumbered after the
1313 the equivalent changelog revisions will be renumbered after the
1315 strip.
1314 strip.
1316
1315
1317 So we truncate the revlog on the first of these revisions, and
1316 So we truncate the revlog on the first of these revisions, and
1318 trust that the caller has saved the revisions that shouldn't be
1317 trust that the caller has saved the revisions that shouldn't be
1319 removed and that it'll re-add them after this truncation.
1318 removed and that it'll re-add them after this truncation.
1320 """
1319 """
1321 if len(self) == 0:
1320 if len(self) == 0:
1322 return
1321 return
1323
1322
1324 if isinstance(self.index, lazyindex):
1323 if isinstance(self.index, lazyindex):
1325 self._loadindexmap()
1324 self._loadindexmap()
1326
1325
1327 for rev in self:
1326 for rev in self:
1328 if self.index[rev][4] >= minlink:
1327 if self.index[rev][4] >= minlink:
1329 break
1328 break
1330 else:
1329 else:
1331 return
1330 return
1332
1331
1333 # first truncate the files on disk
1332 # first truncate the files on disk
1334 end = self.start(rev)
1333 end = self.start(rev)
1335 if not self._inline:
1334 if not self._inline:
1336 transaction.add(self.datafile, end)
1335 transaction.add(self.datafile, end)
1337 end = rev * self._io.size
1336 end = rev * self._io.size
1338 else:
1337 else:
1339 end += rev * self._io.size
1338 end += rev * self._io.size
1340
1339
1341 transaction.add(self.indexfile, end)
1340 transaction.add(self.indexfile, end)
1342
1341
1343 # then reset internal state in memory to forget those revisions
1342 # then reset internal state in memory to forget those revisions
1344 self._cache = None
1343 self._cache = None
1345 self._chunkcache = (0, '')
1344 self._chunkcache = (0, '')
1346 for x in xrange(rev, len(self)):
1345 for x in xrange(rev, len(self)):
1347 del self.nodemap[self.node(x)]
1346 del self.nodemap[self.node(x)]
1348
1347
1349 del self.index[rev:-1]
1348 del self.index[rev:-1]
1350
1349
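The only arithmetic in strip is deciding where to cut each file: a separate data file is truncated at the first stripped revision's data offset and the index at rev * entry size, while an inline revlog adds the two together because index entries and data share one file. A small illustration with made-up numbers (entry_size plays the role of self._io.size):

    def truncation_points(first_stripped_rev, data_start, entry_size, inline):
        """Return (index_truncate_at, data_truncate_at or None) for a strip."""
        if inline:
            return data_start + first_stripped_rev * entry_size, None
        return first_stripped_rev * entry_size, data_start

    # stripping from rev 5, whose data starts at byte 4096, with 64-byte entries
    assert truncation_points(5, 4096, 64, inline=False) == (320, 4096)
    assert truncation_points(5, 4096, 64, inline=True) == (4416, None)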
1351 def checksize(self):
1350 def checksize(self):
1352 expected = 0
1351 expected = 0
1353 if len(self):
1352 if len(self):
1354 expected = max(0, self.end(len(self) - 1))
1353 expected = max(0, self.end(len(self) - 1))
1355
1354
1356 try:
1355 try:
1357 f = self.opener(self.datafile)
1356 f = self.opener(self.datafile)
1358 f.seek(0, 2)
1357 f.seek(0, 2)
1359 actual = f.tell()
1358 actual = f.tell()
1360 dd = actual - expected
1359 dd = actual - expected
1361 except IOError, inst:
1360 except IOError, inst:
1362 if inst.errno != errno.ENOENT:
1361 if inst.errno != errno.ENOENT:
1363 raise
1362 raise
1364 dd = 0
1363 dd = 0
1365
1364
1366 try:
1365 try:
1367 f = self.opener(self.indexfile)
1366 f = self.opener(self.indexfile)
1368 f.seek(0, 2)
1367 f.seek(0, 2)
1369 actual = f.tell()
1368 actual = f.tell()
1370 s = self._io.size
1369 s = self._io.size
1371 i = max(0, actual / s)
1370 i = max(0, actual / s)
1372 di = actual - (i * s)
1371 di = actual - (i * s)
1373 if self._inline:
1372 if self._inline:
1374 databytes = 0
1373 databytes = 0
1375 for r in self:
1374 for r in self:
1376 databytes += max(0, self.length(r))
1375 databytes += max(0, self.length(r))
1377 dd = 0
1376 dd = 0
1378 di = actual - len(self) * s - databytes
1377 di = actual - len(self) * s - databytes
1379 except IOError, inst:
1378 except IOError, inst:
1380 if inst.errno != errno.ENOENT:
1379 if inst.errno != errno.ENOENT:
1381 raise
1380 raise
1382 di = 0
1381 di = 0
1383
1382
1384 return (dd, di)
1383 return (dd, di)
1385
1384
1386 def files(self):
1385 def files(self):
1387 res = [ self.indexfile ]
1386 res = [ self.indexfile ]
1388 if not self._inline:
1387 if not self._inline:
1389 res.append(self.datafile)
1388 res.append(self.datafile)
1390 return res
1389 return res