pychecker: remove unused local variables
Benoit Boissinot
r9679:a1943c2a default
@@ -1,3677 +1,3676 @@
 # commands.py - command processing for mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2, incorporated herein by reference.
 
 from node import hex, nullid, nullrev, short
 from lock import release
 from i18n import _, gettext
 import os, re, sys, difflib, time, tempfile
 import hg, util, revlog, bundlerepo, extensions, copies, error
 import patch, help, mdiff, url, encoding
 import archival, changegroup, cmdutil, sshserver, hbisect
 from hgweb import server
 import merge as merge_
 import minirst
 
 # Commands start here, listed alphabetically
 
 def add(ui, repo, *pats, **opts):
     """add the specified files on the next commit
 
     Schedule files to be version controlled and added to the
     repository.
 
     The files will be added to the repository at the next commit. To
     undo an add before that, see hg forget.
 
     If no names are given, add all files to the repository.
     """
 
     bad = []
-    exacts = {}
     names = []
     m = cmdutil.match(repo, pats, opts)
     oldbad = m.bad
     m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
 
     for f in repo.walk(m):
         exact = m.exact(f)
         if exact or f not in repo.dirstate:
             names.append(f)
             if ui.verbose or not exact:
                 ui.status(_('adding %s\n') % m.rel(f))
     if not opts.get('dry_run'):
         bad += [f for f in repo.add(names) if f in m.files()]
     return bad and 1 or 0
 
 def addremove(ui, repo, *pats, **opts):
     """add all new files, delete all missing files
 
     Add all new files and remove all missing files from the
     repository.
 
     New files are ignored if they match any of the patterns in
     .hgignore. As with add, these changes take effect at the next
     commit.
 
     Use the -s/--similarity option to detect renamed files. With a
     parameter greater than 0, this compares every removed file with
     every added file and records those similar enough as renames. This
     option takes a percentage between 0 (disabled) and 100 (files must
     be identical) as its parameter. Detecting renamed files this way
     can be expensive.
     """
     try:
         sim = float(opts.get('similarity') or 0)
     except ValueError:
         raise util.Abort(_('similarity must be a number'))
     if sim < 0 or sim > 100:
         raise util.Abort(_('similarity must be between 0 and 100'))
     return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
 
 def annotate(ui, repo, *pats, **opts):
     """show changeset information by line for each file
 
     List changes in files, showing the revision id responsible for
     each line
 
     This command is useful for discovering when a change was made and
     by whom.
 
     Without the -a/--text option, annotate will avoid processing files
     it detects as binary. With -a, annotate will annotate the file
     anyway, although the results will probably be neither useful
     nor desirable.
     """
     datefunc = ui.quiet and util.shortdate or util.datestr
     getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
 
     if not pats:
         raise util.Abort(_('at least one filename or pattern is required'))
 
     opmap = [('user', lambda x: ui.shortuser(x[0].user())),
              ('number', lambda x: str(x[0].rev())),
              ('changeset', lambda x: short(x[0].node())),
              ('date', getdate),
              ('follow', lambda x: x[0].path()),
             ]
 
     if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
         and not opts.get('follow')):
         opts['number'] = 1
 
     linenumber = opts.get('line_number') is not None
     if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
         raise util.Abort(_('at least one of -n/-c is required for -l'))
 
     funcmap = [func for op, func in opmap if opts.get(op)]
     if linenumber:
         lastfunc = funcmap[-1]
         funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
 
     ctx = repo[opts.get('rev')]
 
     m = cmdutil.match(repo, pats, opts)
     for abs in ctx.walk(m):
         fctx = ctx[abs]
         if not opts.get('text') and util.binary(fctx.data()):
             ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
             continue
 
         lines = fctx.annotate(follow=opts.get('follow'),
                               linenumber=linenumber)
         pieces = []
 
         for f in funcmap:
             l = [f(n) for n, dummy in lines]
             if l:
                 ml = max(map(len, l))
                 pieces.append(["%*s" % (ml, x) for x in l])
 
         if pieces:
             for p, l in zip(zip(*pieces), lines):
                 ui.write("%s: %s" % (" ".join(p), l[1]))
 
 def archive(ui, repo, dest, **opts):
     '''create an unversioned archive of a repository revision
 
     By default, the revision used is the parent of the working
     directory; use -r/--rev to specify a different revision.
 
     To specify the type of archive to create, use -t/--type. Valid
     types are::
 
       "files" (default): a directory full of files
       "tar": tar archive, uncompressed
       "tbz2": tar archive, compressed using bzip2
       "tgz": tar archive, compressed using gzip
       "uzip": zip archive, uncompressed
       "zip": zip archive, compressed using deflate
 
     The exact name of the destination archive or directory is given
     using a format string; see 'hg help export' for details.
 
     Each member added to an archive file has a directory prefix
     prepended. Use -p/--prefix to specify a format string for the
     prefix. The default is the basename of the archive, with suffixes
     removed.
     '''
 
     ctx = repo[opts.get('rev')]
     if not ctx:
         raise util.Abort(_('no working directory: please specify a revision'))
     node = ctx.node()
     dest = cmdutil.make_filename(repo, dest, node)
     if os.path.realpath(dest) == repo.root:
         raise util.Abort(_('repository root cannot be destination'))
     matchfn = cmdutil.match(repo, [], opts)
     kind = opts.get('type') or 'files'
     prefix = opts.get('prefix')
     if dest == '-':
         if kind == 'files':
             raise util.Abort(_('cannot archive plain files to stdout'))
         dest = sys.stdout
         if not prefix: prefix = os.path.basename(repo.root) + '-%h'
     prefix = cmdutil.make_filename(repo, prefix, node)
     archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                      matchfn, prefix)
 
 def backout(ui, repo, node=None, rev=None, **opts):
     '''reverse effect of earlier changeset
 
     Commit the backed out changes as a new changeset. The new
     changeset is a child of the backed out changeset.
 
     If you backout a changeset other than the tip, a new head is
     created. This head will be the new tip and you should merge this
     backout changeset with another head.
 
     The --merge option remembers the parent of the working directory
     before starting the backout, then merges the new head with that
     changeset afterwards. This saves you from doing the merge by hand.
     The result of this merge is not committed, as with a normal merge.
 
     See 'hg help dates' for a list of formats valid for -d/--date.
     '''
     if rev and node:
         raise util.Abort(_("please specify just one revision"))
 
     if not rev:
         rev = node
 
     if not rev:
         raise util.Abort(_("please specify a revision to backout"))
 
     date = opts.get('date')
     if date:
         opts['date'] = util.parsedate(date)
 
     cmdutil.bail_if_changed(repo)
     node = repo.lookup(rev)
 
     op1, op2 = repo.dirstate.parents()
     a = repo.changelog.ancestor(op1, node)
     if a != node:
         raise util.Abort(_('cannot backout change on a different branch'))
 
     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
         raise util.Abort(_('cannot backout a change with no parents'))
     if p2 != nullid:
         if not opts.get('parent'):
             raise util.Abort(_('cannot backout a merge changeset without '
                                '--parent'))
         p = repo.lookup(opts['parent'])
         if p not in (p1, p2):
             raise util.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
         parent = p
     else:
         if opts.get('parent'):
             raise util.Abort(_('cannot use --parent on non-merge changeset'))
         parent = p1
 
     # the backout should appear on the same branch
     branch = repo.dirstate.branch()
     hg.clean(repo, node, show_stats=False)
     repo.dirstate.setbranch(branch)
     revert_opts = opts.copy()
     revert_opts['date'] = None
     revert_opts['all'] = True
     revert_opts['rev'] = hex(parent)
     revert_opts['no_backup'] = None
     revert(ui, repo, **revert_opts)
     commit_opts = opts.copy()
     commit_opts['addremove'] = False
     if not commit_opts['message'] and not commit_opts['logfile']:
         # we don't translate commit messages
         commit_opts['message'] = "Backed out changeset %s" % short(node)
         commit_opts['force_editor'] = True
     commit(ui, repo, **commit_opts)
     def nice(node):
         return '%d:%s' % (repo.changelog.rev(node), short(node))
     ui.status(_('changeset %s backs out changeset %s\n') %
               (nice(repo.changelog.tip()), nice(node)))
     if op1 != node:
         hg.clean(repo, op1, show_stats=False)
         if opts.get('merge'):
             ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
             hg.merge(repo, hex(repo.changelog.tip()))
         else:
             ui.status(_('the backout changeset is a new head - '
                         'do not forget to merge\n'))
             ui.status(_('(use "backout --merge" '
                         'if you want to auto-merge)\n'))
 
 def bisect(ui, repo, rev=None, extra=None, command=None,
            reset=None, good=None, bad=None, skip=None, noupdate=None):
     """subdivision search of changesets
 
     This command helps to find changesets which introduce problems. To
     use, mark the earliest changeset you know exhibits the problem as
     bad, then mark the latest changeset which is free from the problem
     as good. Bisect will update your working directory to a revision
     for testing (unless the -U/--noupdate option is specified). Once
     you have performed tests, mark the working directory as good or
     bad, and bisect will either update to another candidate changeset
     or announce that it has found the bad revision.
 
     As a shortcut, you can also use the revision argument to mark a
     revision as good or bad without checking it out first.
 
     If you supply a command, it will be used for automatic bisection.
     Its exit status will be used to mark revisions as good or bad:
     status 0 means good, 125 means to skip the revision, 127
     (command not found) will abort the bisection, and any other
     non-zero exit status means the revision is bad.
     """
     def print_result(nodes, good):
         displayer = cmdutil.show_changeset(ui, repo, {})
         if len(nodes) == 1:
             # narrowed it down to a single revision
             if good:
                 ui.write(_("The first good revision is:\n"))
             else:
                 ui.write(_("The first bad revision is:\n"))
             displayer.show(repo[nodes[0]])
         else:
             # multiple possible revisions
             if good:
                 ui.write(_("Due to skipped revisions, the first "
                            "good revision could be any of:\n"))
             else:
                 ui.write(_("Due to skipped revisions, the first "
                            "bad revision could be any of:\n"))
             for n in nodes:
                 displayer.show(repo[n])
 
     def check_state(state, interactive=True):
         if not state['good'] or not state['bad']:
             if (good or bad or skip or reset) and interactive:
                 return
             if not state['good']:
                 raise util.Abort(_('cannot bisect (no known good revisions)'))
             else:
                 raise util.Abort(_('cannot bisect (no known bad revisions)'))
         return True
 
     # backward compatibility
     if rev in "good bad reset init".split():
         ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
         cmd, rev, extra = rev, extra, None
         if cmd == "good":
             good = True
         elif cmd == "bad":
             bad = True
         else:
             reset = True
     elif extra or good + bad + skip + reset + bool(command) > 1:
         raise util.Abort(_('incompatible arguments'))
 
     if reset:
         p = repo.join("bisect.state")
         if os.path.exists(p):
             os.unlink(p)
         return
 
     state = hbisect.load_state(repo)
 
     if command:
         changesets = 1
         try:
             while changesets:
                 # update state
                 status = util.system(command)
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
                     transition = "good"
                 # status < 0 means process was killed
                 elif status == 127:
                     raise util.Abort(_("failed to execute %s") % command)
                 elif status < 0:
                     raise util.Abort(_("%s killed") % command)
                 else:
                     transition = "bad"
                 ctx = repo[rev or '.']
                 state[transition].append(ctx.node())
                 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
                 check_state(state, interactive=False)
                 # bisect
                 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                 # update to next check
                 cmdutil.bail_if_changed(repo)
                 hg.clean(repo, nodes[0], show_stats=False)
         finally:
             hbisect.save_state(repo, state)
         return print_result(nodes, good)
 
     # update state
     node = repo.lookup(rev or '.')
     if good:
         state['good'].append(node)
     elif bad:
         state['bad'].append(node)
     elif skip:
         state['skip'].append(node)
 
     hbisect.save_state(repo, state)
 
     if not check_state(state):
         return
 
     # actually bisect
     nodes, changesets, good = hbisect.bisect(repo.changelog, state)
     if changesets == 0:
         print_result(nodes, good)
     else:
         assert len(nodes) == 1 # only a single node can be tested next
         node = nodes[0]
         # compute the approximate number of remaining tests
         tests, size = 0, 2
         while size <= changesets:
             tests, size = tests + 1, size * 2
         rev = repo.changelog.rev(node)
         ui.write(_("Testing changeset %d:%s "
                    "(%d changesets remaining, ~%d tests)\n")
                  % (rev, short(node), changesets, tests))
         if not noupdate:
             cmdutil.bail_if_changed(repo)
             return hg.clean(repo, node)
 
 def branch(ui, repo, label=None, **opts):
     """set or show the current branch name
 
     With no argument, show the current branch name. With one argument,
     set the working directory branch name (the branch will not exist
     in the repository until the next commit). Standard practice
     recommends that primary development take place on the 'default'
     branch.
 
     Unless -f/--force is specified, branch will not let you set a
     branch name that already exists, even if it's inactive.
 
     Use -C/--clean to reset the working directory branch to that of
     the parent of the working directory, negating a previous branch
     change.
 
     Use the command 'hg update' to switch to an existing branch. Use
     'hg commit --close-branch' to mark this branch as closed.
     """
 
     if opts.get('clean'):
         label = repo[None].parents()[0].branch()
         repo.dirstate.setbranch(label)
         ui.status(_('reset working directory to branch %s\n') % label)
     elif label:
         if not opts.get('force') and label in repo.branchtags():
             if label not in [p.branch() for p in repo.parents()]:
                 raise util.Abort(_('a branch of the same name already exists'
                                    ' (use --force to override)'))
         repo.dirstate.setbranch(encoding.fromlocal(label))
         ui.status(_('marked working directory as branch %s\n') % label)
     else:
         ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
 
 def branches(ui, repo, active=False, closed=False):
     """list repository named branches
 
     List the repository's named branches, indicating which ones are
     inactive. If -c/--closed is specified, also list branches which have
     been marked closed (see hg commit --close-branch).
 
     If -a/--active is specified, only show active branches. A branch
     is considered active if it contains repository heads.
 
     Use the command 'hg update' to switch to an existing branch.
     """
 
     hexfunc = ui.debugflag and hex or short
     activebranches = [repo[n].branch() for n in repo.heads()]
     def testactive(tag, node):
         realhead = tag in activebranches
         open = node in repo.branchheads(tag, closed=False)
         return realhead and open
     branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
                           for tag, node in repo.branchtags().items()],
                       reverse=True)
 
     for isactive, node, tag in branches:
         if (not active) or isactive:
             encodedtag = encoding.tolocal(tag)
             if ui.quiet:
                 ui.write("%s\n" % encodedtag)
             else:
                 hn = repo.lookup(node)
                 if isactive:
                     notice = ''
                 elif hn not in repo.branchheads(tag, closed=False):
                     if not closed:
                         continue
                     notice = ' (closed)'
                 else:
                     notice = ' (inactive)'
                 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
                 data = encodedtag, rev, hexfunc(hn), notice
                 ui.write("%s %s:%s%s\n" % data)
 
 def bundle(ui, repo, fname, dest=None, **opts):
     """create a changegroup file
 
     Generate a compressed changegroup file collecting changesets not
     known to be in another repository.
 
     If no destination repository is specified the destination is
     assumed to have all the nodes specified by one or more --base
     parameters. To create a bundle containing all changesets, use
     -a/--all (or --base null).
 
     You can change compression method with the -t/--type option.
     The available compression methods are: none, bzip2, and
     gzip (by default, bundles are compressed using bzip2).
 
     The bundle file can then be transferred using conventional means
     and applied to another repository with the unbundle or pull
     command. This is useful when direct push and pull are not
     available or when exporting an entire repository is undesirable.
 
     Applying bundles preserves all changeset contents including
     permissions, copy/rename information, and revision history.
     """
     revs = opts.get('rev') or None
     if revs:
         revs = [repo.lookup(rev) for rev in revs]
     if opts.get('all'):
         base = ['null']
     else:
         base = opts.get('base')
     if base:
         if dest:
             raise util.Abort(_("--base is incompatible with specifying "
                                "a destination"))
         base = [repo.lookup(rev) for rev in base]
         # create the right base
         # XXX: nodesbetween / changegroup* should be "fixed" instead
         o = []
         has = set((nullid,))
         for n in base:
             has.update(repo.changelog.reachable(n))
         if revs:
             visit = list(revs)
         else:
             visit = repo.changelog.heads()
         seen = {}
         while visit:
             n = visit.pop(0)
             parents = [p for p in repo.changelog.parents(n) if p not in has]
             if len(parents) == 0:
                 o.insert(0, n)
             else:
                 for p in parents:
                     if p not in seen:
                         seen[p] = 1
                         visit.append(p)
     else:
         dest, revs, checkout = hg.parseurl(
             ui.expandpath(dest or 'default-push', dest or 'default'), revs)
         other = hg.repository(cmdutil.remoteui(repo, opts), dest)
         o = repo.findoutgoing(other, force=opts.get('force'))
 
     if revs:
         cg = repo.changegroupsubset(o, revs, 'bundle')
     else:
         cg = repo.changegroup(o, 'bundle')
 
     bundletype = opts.get('type', 'bzip2').lower()
     btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
     bundletype = btypes.get(bundletype)
     if bundletype not in changegroup.bundletypes:
         raise util.Abort(_('unknown bundle type specified with --type'))
 
     changegroup.writebundle(cg, fname, bundletype)
 
 def cat(ui, repo, file1, *pats, **opts):
     """output the current or given revision of files
 
     Print the specified files as they were at the given revision. If
     no revision is given, the parent of the working directory is used,
     or tip if no revision is checked out.
 
     Output may be to a file, in which case the name of the file is
     given using a format string. The formatting rules are the same as
     for the export command, with the following additions::
 
       %s   basename of file being printed
       %d   dirname of file being printed, or '.' if in repository root
       %p   root-relative path name of file being printed
     """
     ctx = repo[opts.get('rev')]
     err = 1
     m = cmdutil.match(repo, (file1,) + pats, opts)
     for abs in ctx.walk(m):
         fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
         data = ctx[abs].data()
         if opts.get('decode'):
             data = repo.wwritedata(abs, data)
         fp.write(data)
         err = 0
     return err
 
 def clone(ui, source, dest=None, **opts):
     """make a copy of an existing repository
 
     Create a copy of an existing repository in a new directory.
 
     If no destination directory name is specified, it defaults to the
     basename of the source.
 
     The location of the source is added to the new repository's
     .hg/hgrc file, as the default to be used for future pulls.
 
     If you use the -r/--rev option to clone up to a specific revision,
     no subsequent revisions (including subsequent tags) will be
     present in the cloned repository. This option implies --pull, even
     on local repositories.
 
     By default, clone will check out the head of the 'default' branch.
     If the -U/--noupdate option is used, the new clone will contain
     only a repository (.hg) and no working copy (the working copy
     parent is the null revision).
 
     See 'hg help urls' for valid source format details.
 
     It is possible to specify an ssh:// URL as the destination, but no
     .hg/hgrc and working directory will be created on the remote side.
     Please see 'hg help urls' for important details about ssh:// URLs.
 
     For efficiency, hardlinks are used for cloning whenever the source
     and destination are on the same filesystem (note this applies only
     to the repository data, not to the checked out files). Some
     filesystems, such as AFS, implement hardlinking incorrectly, but
     do not report errors. In these cases, use the --pull option to
     avoid hardlinking.
 
     In some cases, you can clone repositories and checked out files
     using full hardlinks with ::
 
       $ cp -al REPO REPOCLONE
 
     This is the fastest way to clone, but it is not always safe. The
     operation is not atomic (making sure REPO is not modified during
     the operation is up to you) and you have to make sure your editor
     breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
     this is not compatible with certain extensions that place their
     metadata under the .hg directory, such as mq.
     """
     hg.clone(cmdutil.remoteui(ui, opts), source, dest,
              pull=opts.get('pull'),
              stream=opts.get('uncompressed'),
              rev=opts.get('rev'),
              update=not opts.get('noupdate'))
 
 def commit(ui, repo, *pats, **opts):
     """commit the specified files or all outstanding changes
 
     Commit changes to the given files into the repository. Unlike a
     centralized RCS, this operation is a local operation. See hg push
     for a way to actively distribute your changes.
 
     If a list of files is omitted, all changes reported by "hg status"
     will be committed.
 
     If you are committing the result of a merge, do not provide any
     filenames or -I/-X filters.
 
     If no commit message is specified, the configured editor is
     started to prompt you for a message.
 
     See 'hg help dates' for a list of formats valid for -d/--date.
     """
     extra = {}
     if opts.get('close_branch'):
         extra['close'] = 1
     e = cmdutil.commiteditor
     if opts.get('force_editor'):
         e = cmdutil.commitforceeditor
 
     def commitfunc(ui, repo, message, match, opts):
         return repo.commit(message, opts.get('user'), opts.get('date'), match,
                            editor=e, extra=extra)
 
     node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
     if not node:
         ui.status(_("nothing changed\n"))
         return
     cl = repo.changelog
     rev = cl.rev(node)
     parents = cl.parentrevs(rev)
     if rev - 1 in parents:
         # one of the parents was the old tip
         pass
     elif (parents == (nullrev, nullrev) or
           len(cl.heads(cl.node(parents[0]))) > 1 and
           (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
         ui.status(_('created new head\n'))
 
     if ui.debugflag:
         ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
     elif ui.verbose:
         ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))
 
 def copy(ui, repo, *pats, **opts):
     """mark files as copied for the next commit
 
     Mark dest as having copies of source files. If dest is a
     directory, copies are put in that directory. If dest is a file,
     the source must be a single file.
 
     By default, this command copies the contents of files as they
     exist in the working directory. If invoked with -A/--after, the
     operation is recorded, but no copying is performed.
 
     This command takes effect with the next commit. To undo a copy
     before that, see hg revert.
     """
     wlock = repo.wlock(False)
     try:
         return cmdutil.copy(ui, repo, pats, opts)
     finally:
         wlock.release()
 
 def debugancestor(ui, repo, *args):
     """find the ancestor revision of two revisions in a given index"""
     if len(args) == 3:
         index, rev1, rev2 = args
         r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
         lookup = r.lookup
     elif len(args) == 2:
         if not repo:
             raise util.Abort(_("There is no Mercurial repository here "
                                "(.hg not found)"))
         rev1, rev2 = args
         r = repo.changelog
         lookup = repo.lookup
     else:
         raise util.Abort(_('either two or three arguments required'))
     a = r.ancestor(lookup(rev1), lookup(rev2))
     ui.write("%d:%s\n" % (r.rev(a), hex(a)))
 
 def debugcommands(ui, cmd='', *args):
     for cmd, vals in sorted(table.iteritems()):
         cmd = cmd.split('|')[0].strip('^')
         opts = ', '.join([i[1] for i in vals[1]])
         ui.write('%s: %s\n' % (cmd, opts))
 
 def debugcomplete(ui, cmd='', **opts):
     """returns the completion list associated with the given command"""
 
     if opts.get('options'):
         options = []
         otables = [globalopts]
         if cmd:
             aliases, entry = cmdutil.findcmd(cmd, table, False)
             otables.append(entry[1])
         for t in otables:
             for o in t:
                 if o[0]:
                     options.append('-%s' % o[0])
                 options.append('--%s' % o[1])
         ui.write("%s\n" % "\n".join(options))
         return
 
     cmdlist = cmdutil.findpossible(cmd, table)
     if ui.verbose:
         cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
     ui.write("%s\n" % "\n".join(sorted(cmdlist)))
 
 def debugfsinfo(ui, path = "."):
     open('.debugfsinfo', 'w').write('')
     ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
     ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
     ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                 and 'yes' or 'no'))
     os.unlink('.debugfsinfo')
 
 def debugrebuildstate(ui, repo, rev="tip"):
     """rebuild the dirstate as it would look like for the given revision"""
     ctx = repo[rev]
     wlock = repo.wlock()
     try:
         repo.dirstate.rebuild(ctx.node(), ctx.manifest())
     finally:
         wlock.release()
 
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
     parent1, parent2 = repo.dirstate.parents()
     m1 = repo[parent1].manifest()
     m2 = repo[parent2].manifest()
     errors = 0
     for f in repo.dirstate:
         state = repo.dirstate[f]
         if state in "nr" and f not in m1:
             ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
             errors += 1
         if state in "a" and f in m1:
             ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
             errors += 1
         if state in "m" and f not in m1 and f not in m2:
             ui.warn(_("%s in state %s, but not in either manifest\n") %
                     (f, state))
             errors += 1
     for f in m1:
         state = repo.dirstate[f]
         if state not in "nrm":
             ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
             errors += 1
     if errors:
         error = _(".hg/dirstate inconsistent with current parent's manifest")
         raise util.Abort(error)
 
794 def showconfig(ui, repo, *values, **opts):
793 def showconfig(ui, repo, *values, **opts):
795 """show combined config settings from all hgrc files
794 """show combined config settings from all hgrc files
796
795
797 With no arguments, print names and values of all config items.
796 With no arguments, print names and values of all config items.
798
797
799 With one argument of the form section.name, print just the value
798 With one argument of the form section.name, print just the value
800 of that config item.
799 of that config item.
801
800
802 With multiple arguments, print names and values of all config
801 With multiple arguments, print names and values of all config
803 items with matching section names.
802 items with matching section names.
804
803
805 With --debug, the source (filename and line number) is printed
804 With --debug, the source (filename and line number) is printed
806 for each config item.
805 for each config item.
807 """
806 """
808
807
809 untrusted = bool(opts.get('untrusted'))
808 untrusted = bool(opts.get('untrusted'))
810 if values:
809 if values:
811 if len([v for v in values if '.' in v]) > 1:
810 if len([v for v in values if '.' in v]) > 1:
812 raise util.Abort(_('only one config item permitted'))
811 raise util.Abort(_('only one config item permitted'))
813 for section, name, value in ui.walkconfig(untrusted=untrusted):
812 for section, name, value in ui.walkconfig(untrusted=untrusted):
814 sectname = section + '.' + name
813 sectname = section + '.' + name
815 if values:
814 if values:
816 for v in values:
815 for v in values:
817 if v == section:
816 if v == section:
818 ui.debug('%s: ' %
817 ui.debug('%s: ' %
819 ui.configsource(section, name, untrusted))
818 ui.configsource(section, name, untrusted))
820 ui.write('%s=%s\n' % (sectname, value))
819 ui.write('%s=%s\n' % (sectname, value))
821 elif v == sectname:
820 elif v == sectname:
822 ui.debug('%s: ' %
821 ui.debug('%s: ' %
823 ui.configsource(section, name, untrusted))
822 ui.configsource(section, name, untrusted))
824 ui.write(value, '\n')
823 ui.write(value, '\n')
825 else:
824 else:
826 ui.debug('%s: ' %
825 ui.debug('%s: ' %
827 ui.configsource(section, name, untrusted))
826 ui.configsource(section, name, untrusted))
828 ui.write('%s=%s\n' % (sectname, value))
827 ui.write('%s=%s\n' % (sectname, value))
829
828
830 def debugsetparents(ui, repo, rev1, rev2=None):
829 def debugsetparents(ui, repo, rev1, rev2=None):
831 """manually set the parents of the current working directory
830 """manually set the parents of the current working directory
832
831
833 This is useful for writing repository conversion tools, but should
832 This is useful for writing repository conversion tools, but should
834 be used with care.
833 be used with care.
835 """
834 """
836
835
837 if not rev2:
836 if not rev2:
838 rev2 = hex(nullid)
837 rev2 = hex(nullid)
839
838
840 wlock = repo.wlock()
839 wlock = repo.wlock()
841 try:
840 try:
842 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
841 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
843 finally:
842 finally:
844 wlock.release()
843 wlock.release()

def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    for file_, ent in sorted(repo.dirstate._map.iteritems()):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

def debugsub(ui, repo, rev=None):
    if rev == '':
        rev = None
    for k,v in sorted(repo[rev].substate.items()):
        ui.write('path %s\n' % k)
        ui.write(' source %s\n' % v[0])
        ui.write(' revision %s\n' % v[1])

def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write("internal: %s %s\n" % d)
    ui.write("standard: %s\n" % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write("match: %s\n" % m(d[0]))

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev"
             " nodeid p1 p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except:
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a graphviz dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = open(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://mercurial.selenic.com/bts/\n"))
        problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems

def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read 'hg help diffs'.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = repo.lookup(change)
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    if stat:
        opts['unified'] = '0'
    diffopts = patch.diffopts(ui, opts)

    m = cmdutil.match(repo, pats, opts)
    it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
    if stat:
        width = ui.interactive() and util.termwidth() or 80
        ui.write(patch.diffstat(util.iterlines(it), width=width,
                                git=diffopts.git))
    else:
        for chunk in it:
            ui.write(chunk)

def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows::

      %% literal "%" character
      %H changeset hash (40 bytes of hexadecimal)
      %N number of patches being generated
      %R changeset revision number
      %b basename of the exporting repository
      %h short-form changeset hash (12 bytes of hexadecimal)
      %n zero-padded sequence number, starting at 1
      %r zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See 'hg help diffs' for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))

def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see hg add.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))

    for f in forget:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.remove(forget, unlink=False)

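# Note on the grep implementation below: 'matches' caches linestate lists per
# revision and file, 'copies' records renames observed while following file
# history, and 'skip' marks files whose match has already been reported so
# they are not printed again for further revisions unless --all is given.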
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(rev)]
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(ctx.user()))
            if opts.get('date'):
                cols.append(datefunc(ctx.date()))
            if opts.get('files_with_matches'):
                c = (fn, rev)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    skip = {}
    revfiles = {}
    matchfn = cmdutil.match(repo, pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        rev = ctx.rev()
        pctx = ctx.parents()[0]
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.parents()[0].rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        del matches[rev]
        del revfiles[rev]

def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    Repository "heads" are changesets with no child changesets. They are
    where development generally takes place and are the usual targets
    for update and merge operations.

    If one or more REV is given, the "branch heads" will be shown for
    the named branch associated with the specified changeset(s).

    Branch heads are changesets on a named branch with no descendants on
    the same branch. A branch head could be a "true" (repository) head,
    or it could be the last changeset on that branch before it was
    merged into another branch, or it could be the last changeset on the
    branch before a new branch was created. If none of the branch heads
    are true heads, the branch is considered inactive.

    If -c/--closed is specified, also show branch heads marked closed
    (see hg commit --close-branch).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.
    """
    if opts.get('rev'):
        start = repo.lookup(opts['rev'])
    else:
        start = None
    closed = opts.get('closed')
    hideinactive, _heads = opts.get('active'), None
    if not branchrevs:
        if closed:
            raise error.Abort(_('you must specify a branch to use --closed'))
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        if hideinactive:
            _heads = repo.heads(start)
        heads = []
        visitedset = set()
        for branchrev in branchrevs:
            branch = repo[encoding.fromlocal(branchrev)].branch()
            encodedbranch = encoding.tolocal(branch)
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start, closed=closed)
            if not bheads:
                if not opts.get('rev'):
                    ui.warn(_("no open branch heads on branch %s\n") % encodedbranch)
                elif branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (encodedbranch, branchrev, opts.get('rev')))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (encodedbranch, opts.get('rev')))
            if hideinactive:
                bheads = [bhead for bhead in bheads if bhead in _heads]
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(repo[n])

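# help_ resolves the given name by trying, in order, a help topic, a command,
# and an extension; the first resolver that does not raise UnknownCommand
# handles the request.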
def help_(ui, name=None, with_version=False):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic."""
    option_lists = []
    textwidth = util.termwidth() - 2

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(name, table, False)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        if len(i) > 2:
            if i[2].startswith('hg'):
                ui.write("%s\n" % i[2])
            else:
                ui.write('hg %s %s\n' % (aliases[0], i[2]))
        else:
            ui.write('hg %s\n' % aliases[0])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = gettext(i[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if ui.quiet:
            doc = doc.splitlines()[0]
        ui.write("\n%s\n" % minirst.format(doc, textwidth))

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))

        if name != 'shortlist':
            exts, maxlength = extensions.enabled()
            text = help.listexts(_('enabled extensions:'), exts, maxlength)
            if text:
                ui.write("\n%s\n" % minirst.format(text, textwidth))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if hasattr(doc, '__call__'):
            doc = doc()

        ui.write("%s\n\n" % header)
        ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))

    def helpext(name):
        try:
            mod = extensions.find(name)
        except KeyError:
            raise error.UnknownCommand(name)

        doc = gettext(mod.__doc__) or _('no help text available')
        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
        if tail:
            ui.write(minirst.format(tail, textwidth))
            ui.status('\n\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = set([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.__contains__)

    if name and name != 'shortlist':
        i = None
        for f in (helptopic, helpcmd, helpext):
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if _("DEPRECATED") in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if not name:
        ui.write(_("\nadditional help topics:\n\n"))
        topics = []
        for names, header, doc in help.helptable:
            topics.append((sorted(names, key=len, reverse=True)[0], header))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                second = util.wrap(second, opts_len + 3)
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)

def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    This summary identifies the repository state using one or two
    parent hash identifiers, followed by a "+" if there are
    uncommitted changes in the working directory, a list of tags for
    this revision and a branch name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    default = not (num or id or branch or tags)
    output = []

    revs = []
    if source:
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        repo = hg.repository(ui, source)

    if not repo.local():
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(repo.lookup(rev))]
    elif not rev:
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if repo.local() and default and not ui.quiet:
        b = encoding.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(encoding.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1719
1718
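# --- illustrative sketch (not part of commands.py) ----------------------
# The identify summary described in the docstring above can be assembled
# from plain values; the helper name and the sample data below are
# hypothetical and only mirror the formatting rules shown in the function.
def _identify_summary_sketch(parent_hashes, dirty, tags, branch):
    # one or two short parent hashes joined by '+', plus a trailing '+'
    # when the working directory holds uncommitted changes
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if branch != 'default':        # branch shown only when non-default
        out.append('(%s)' % branch)
    if tags:                       # multiple tags separated by '/'
        out.append('/'.join(tags))
    return ' '.join(out)

print(_identify_summary_sketch(['a1b2c3d4e5f6'], True, ['tip'], 'stable'))
# -> a1b2c3d4e5f6+ (stable) tip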
1720 def import_(ui, repo, patch1, *patches, **opts):
1719 def import_(ui, repo, patch1, *patches, **opts):
1721 """import an ordered set of patches
1720 """import an ordered set of patches
1722
1721
1723 Import a list of patches and commit them individually (unless
1722 Import a list of patches and commit them individually (unless
1724 --no-commit is specified).
1723 --no-commit is specified).
1725
1724
1726 If there are outstanding changes in the working directory, import
1725 If there are outstanding changes in the working directory, import
1727 will abort unless given the -f/--force flag.
1726 will abort unless given the -f/--force flag.
1728
1727
1729 You can import a patch straight from a mail message. Even patches
1728 You can import a patch straight from a mail message. Even patches
1730 as attachments work (to use the body part, it must have type
1729 as attachments work (to use the body part, it must have type
1731 text/plain or text/x-patch). From and Subject headers of the email
1730 text/plain or text/x-patch). From and Subject headers of the email
1732 message are used as the default committer and commit message. All
1731 message are used as the default committer and commit message. All
1733 text/plain body parts before the first diff are added to the commit
1732 text/plain body parts before the first diff are added to the commit
1734 message.
1733 message.
1735
1734
1736 If the imported patch was generated by hg export, user and
1735 If the imported patch was generated by hg export, user and
1737 description from patch override values from message headers and
1736 description from patch override values from message headers and
1738 body. Values given on command line with -m/--message and -u/--user
1737 body. Values given on command line with -m/--message and -u/--user
1739 override these.
1738 override these.
1740
1739
1741 If --exact is specified, import will set the working directory to
1740 If --exact is specified, import will set the working directory to
1742 the parent of each patch before applying it, and will abort if the
1741 the parent of each patch before applying it, and will abort if the
1743 resulting changeset has a different ID than the one recorded in
1742 resulting changeset has a different ID than the one recorded in
1744 the patch. This may happen due to character set problems or other
1743 the patch. This may happen due to character set problems or other
1745 deficiencies in the text patch format.
1744 deficiencies in the text patch format.
1746
1745
1747 With -s/--similarity, hg will attempt to discover renames and
1746 With -s/--similarity, hg will attempt to discover renames and
1748 copies in the patch in the same way as 'addremove'.
1747 copies in the patch in the same way as 'addremove'.
1749
1748
1750 To read a patch from standard input, use "-" as the patch name. If
1749 To read a patch from standard input, use "-" as the patch name. If
1751 a URL is specified, the patch will be downloaded from it.
1750 a URL is specified, the patch will be downloaded from it.
1752 See 'hg help dates' for a list of formats valid for -d/--date.
1751 See 'hg help dates' for a list of formats valid for -d/--date.
1753 """
1752 """
1754 patches = (patch1,) + patches
1753 patches = (patch1,) + patches
1755
1754
1756 date = opts.get('date')
1755 date = opts.get('date')
1757 if date:
1756 if date:
1758 opts['date'] = util.parsedate(date)
1757 opts['date'] = util.parsedate(date)
1759
1758
1760 try:
1759 try:
1761 sim = float(opts.get('similarity') or 0)
1760 sim = float(opts.get('similarity') or 0)
1762 except ValueError:
1761 except ValueError:
1763 raise util.Abort(_('similarity must be a number'))
1762 raise util.Abort(_('similarity must be a number'))
1764 if sim < 0 or sim > 100:
1763 if sim < 0 or sim > 100:
1765 raise util.Abort(_('similarity must be between 0 and 100'))
1764 raise util.Abort(_('similarity must be between 0 and 100'))
1766
1765
1767 if opts.get('exact') or not opts.get('force'):
1766 if opts.get('exact') or not opts.get('force'):
1768 cmdutil.bail_if_changed(repo)
1767 cmdutil.bail_if_changed(repo)
1769
1768
1770 d = opts["base"]
1769 d = opts["base"]
1771 strip = opts["strip"]
1770 strip = opts["strip"]
1772 wlock = lock = None
1771 wlock = lock = None
1773 try:
1772 try:
1774 wlock = repo.wlock()
1773 wlock = repo.wlock()
1775 lock = repo.lock()
1774 lock = repo.lock()
1776 for p in patches:
1775 for p in patches:
1777 pf = os.path.join(d, p)
1776 pf = os.path.join(d, p)
1778
1777
1779 if pf == '-':
1778 if pf == '-':
1780 ui.status(_("applying patch from stdin\n"))
1779 ui.status(_("applying patch from stdin\n"))
1781 pf = sys.stdin
1780 pf = sys.stdin
1782 else:
1781 else:
1783 ui.status(_("applying %s\n") % p)
1782 ui.status(_("applying %s\n") % p)
1784 pf = url.open(ui, pf)
1783 pf = url.open(ui, pf)
1785 data = patch.extract(ui, pf)
1784 data = patch.extract(ui, pf)
1786 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1785 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1787
1786
1788 if tmpname is None:
1787 if tmpname is None:
1789 raise util.Abort(_('no diffs found'))
1788 raise util.Abort(_('no diffs found'))
1790
1789
1791 try:
1790 try:
1792 cmdline_message = cmdutil.logmessage(opts)
1791 cmdline_message = cmdutil.logmessage(opts)
1793 if cmdline_message:
1792 if cmdline_message:
1794 # pickup the cmdline msg
1793 # pickup the cmdline msg
1795 message = cmdline_message
1794 message = cmdline_message
1796 elif message:
1795 elif message:
1797 # pickup the patch msg
1796 # pickup the patch msg
1798 message = message.strip()
1797 message = message.strip()
1799 else:
1798 else:
1800 # launch the editor
1799 # launch the editor
1801 message = None
1800 message = None
1802 ui.debug('message:\n%s\n' % message)
1801 ui.debug('message:\n%s\n' % message)
1803
1802
1804 wp = repo.parents()
1803 wp = repo.parents()
1805 if opts.get('exact'):
1804 if opts.get('exact'):
1806 if not nodeid or not p1:
1805 if not nodeid or not p1:
1807 raise util.Abort(_('not a Mercurial patch'))
1806 raise util.Abort(_('not a Mercurial patch'))
1808 p1 = repo.lookup(p1)
1807 p1 = repo.lookup(p1)
1809 p2 = repo.lookup(p2 or hex(nullid))
1808 p2 = repo.lookup(p2 or hex(nullid))
1810
1809
1811 if p1 != wp[0].node():
1810 if p1 != wp[0].node():
1812 hg.clean(repo, p1)
1811 hg.clean(repo, p1)
1813 repo.dirstate.setparents(p1, p2)
1812 repo.dirstate.setparents(p1, p2)
1814 elif p2:
1813 elif p2:
1815 try:
1814 try:
1816 p1 = repo.lookup(p1)
1815 p1 = repo.lookup(p1)
1817 p2 = repo.lookup(p2)
1816 p2 = repo.lookup(p2)
1818 if p1 == wp[0].node():
1817 if p1 == wp[0].node():
1819 repo.dirstate.setparents(p1, p2)
1818 repo.dirstate.setparents(p1, p2)
1820 except error.RepoError:
1819 except error.RepoError:
1821 pass
1820 pass
1822 if opts.get('exact') or opts.get('import_branch'):
1821 if opts.get('exact') or opts.get('import_branch'):
1823 repo.dirstate.setbranch(branch or 'default')
1822 repo.dirstate.setbranch(branch or 'default')
1824
1823
1825 files = {}
1824 files = {}
1826 try:
1825 try:
1827 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1826 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1828 files=files, eolmode=None)
1827 files=files, eolmode=None)
1829 finally:
1828 finally:
1830 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1829 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1831 if not opts.get('no_commit'):
1830 if not opts.get('no_commit'):
1832 m = cmdutil.matchfiles(repo, files or [])
1831 m = cmdutil.matchfiles(repo, files or [])
1833 n = repo.commit(message, opts.get('user') or user,
1832 n = repo.commit(message, opts.get('user') or user,
1834 opts.get('date') or date, match=m,
1833 opts.get('date') or date, match=m,
1835 editor=cmdutil.commiteditor)
1834 editor=cmdutil.commiteditor)
1836 if opts.get('exact'):
1835 if opts.get('exact'):
1837 if hex(n) != nodeid:
1836 if hex(n) != nodeid:
1838 repo.rollback()
1837 repo.rollback()
1839 raise util.Abort(_('patch is damaged'
1838 raise util.Abort(_('patch is damaged'
1840 ' or loses information'))
1839 ' or loses information'))
1841 # Force a dirstate write so that the next transaction
1840 # Force a dirstate write so that the next transaction
1842 # backs up an up-to-date file.
1841 # backs up an up-to-date file.
1843 repo.dirstate.write()
1842 repo.dirstate.write()
1844 finally:
1843 finally:
1845 os.unlink(tmpname)
1844 os.unlink(tmpname)
1846 finally:
1845 finally:
1847 release(lock, wlock)
1846 release(lock, wlock)
1848
1847
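# --- illustrative sketch (not part of commands.py) ----------------------
# A standalone rendering of the -s/--similarity handling above, using a
# hypothetical opts dict; the real command passes the resulting ratio on
# to patch.updatedir().
def _parse_similarity_sketch(opts):
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0            # updatedir expects a 0..1 ratio

print(_parse_similarity_sketch({'similarity': '90'}))   # -> 0.9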
1849 def incoming(ui, repo, source="default", **opts):
1848 def incoming(ui, repo, source="default", **opts):
1850 """show new changesets found in source
1849 """show new changesets found in source
1851
1850
1852 Show new changesets found in the specified path/URL or the default
1851 Show new changesets found in the specified path/URL or the default
1853 pull location. These are the changesets that would have been pulled
1852 pull location. These are the changesets that would have been pulled
1854 if a pull had been requested at the time you issued this command.
1853 if a pull had been requested at the time you issued this command.
1855
1854
1856 For remote repositories, using --bundle avoids downloading the
1855 For remote repositories, using --bundle avoids downloading the
1857 changesets twice if the incoming is followed by a pull.
1856 changesets twice if the incoming is followed by a pull.
1858
1857
1859 See pull for valid source format details.
1858 See pull for valid source format details.
1860 """
1859 """
1861 limit = cmdutil.loglimit(opts)
1860 limit = cmdutil.loglimit(opts)
1862 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1861 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1863 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1862 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1864 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1863 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1865 if revs:
1864 if revs:
1866 revs = [other.lookup(rev) for rev in revs]
1865 revs = [other.lookup(rev) for rev in revs]
1867 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1866 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1868 force=opts["force"])
1867 force=opts["force"])
1869 if not incoming:
1868 if not incoming:
1870 try:
1869 try:
1871 os.unlink(opts["bundle"])
1870 os.unlink(opts["bundle"])
1872 except:
1871 except:
1873 pass
1872 pass
1874 ui.status(_("no changes found\n"))
1873 ui.status(_("no changes found\n"))
1875 return 1
1874 return 1
1876
1875
1877 cleanup = None
1876 cleanup = None
1878 try:
1877 try:
1879 fname = opts["bundle"]
1878 fname = opts["bundle"]
1880 if fname or not other.local():
1879 if fname or not other.local():
1881 # create a bundle (uncompressed if other repo is not local)
1880 # create a bundle (uncompressed if other repo is not local)
1882
1881
1883 if revs is None and other.capable('changegroupsubset'):
1882 if revs is None and other.capable('changegroupsubset'):
1884 revs = rheads
1883 revs = rheads
1885
1884
1886 if revs is None:
1885 if revs is None:
1887 cg = other.changegroup(incoming, "incoming")
1886 cg = other.changegroup(incoming, "incoming")
1888 else:
1887 else:
1889 cg = other.changegroupsubset(incoming, revs, 'incoming')
1888 cg = other.changegroupsubset(incoming, revs, 'incoming')
1890 bundletype = other.local() and "HG10BZ" or "HG10UN"
1889 bundletype = other.local() and "HG10BZ" or "HG10UN"
1891 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1890 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1892 # keep written bundle?
1891 # keep written bundle?
1893 if opts["bundle"]:
1892 if opts["bundle"]:
1894 cleanup = None
1893 cleanup = None
1895 if not other.local():
1894 if not other.local():
1896 # use the created uncompressed bundlerepo
1895 # use the created uncompressed bundlerepo
1897 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1896 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1898
1897
1899 o = other.changelog.nodesbetween(incoming, revs)[0]
1898 o = other.changelog.nodesbetween(incoming, revs)[0]
1900 if opts.get('newest_first'):
1899 if opts.get('newest_first'):
1901 o.reverse()
1900 o.reverse()
1902 displayer = cmdutil.show_changeset(ui, other, opts)
1901 displayer = cmdutil.show_changeset(ui, other, opts)
1903 count = 0
1902 count = 0
1904 for n in o:
1903 for n in o:
1905 if count >= limit:
1904 if count >= limit:
1906 break
1905 break
1907 parents = [p for p in other.changelog.parents(n) if p != nullid]
1906 parents = [p for p in other.changelog.parents(n) if p != nullid]
1908 if opts.get('no_merges') and len(parents) == 2:
1907 if opts.get('no_merges') and len(parents) == 2:
1909 continue
1908 continue
1910 count += 1
1909 count += 1
1911 displayer.show(other[n])
1910 displayer.show(other[n])
1912 finally:
1911 finally:
1913 if hasattr(other, 'close'):
1912 if hasattr(other, 'close'):
1914 other.close()
1913 other.close()
1915 if cleanup:
1914 if cleanup:
1916 os.unlink(cleanup)
1915 os.unlink(cleanup)
1917
1916
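# --- illustrative sketch (not part of commands.py) ----------------------
# The bundle written above is compressed only when the peer is a local
# repository; this hypothetical helper just restates that choice.
def _choose_bundletype_sketch(peer_is_local):
    # HG10BZ = bzip2-compressed bundle, HG10UN = uncompressed bundle
    return peer_is_local and "HG10BZ" or "HG10UN"

print(_choose_bundletype_sketch(True))    # -> HG10BZ
print(_choose_bundletype_sketch(False))   # -> HG10UN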
1918 def init(ui, dest=".", **opts):
1917 def init(ui, dest=".", **opts):
1919 """create a new repository in the given directory
1918 """create a new repository in the given directory
1920
1919
1921 Initialize a new repository in the given directory. If the given
1920 Initialize a new repository in the given directory. If the given
1922 directory does not exist, it will be created.
1921 directory does not exist, it will be created.
1923
1922
1924 If no directory is given, the current directory is used.
1923 If no directory is given, the current directory is used.
1925
1924
1926 It is possible to specify an ssh:// URL as the destination.
1925 It is possible to specify an ssh:// URL as the destination.
1927 See 'hg help urls' for more information.
1926 See 'hg help urls' for more information.
1928 """
1927 """
1929 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1928 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1930
1929
1931 def locate(ui, repo, *pats, **opts):
1930 def locate(ui, repo, *pats, **opts):
1932 """locate files matching specific patterns
1931 """locate files matching specific patterns
1933
1932
1934 Print files under Mercurial control in the working directory whose
1933 Print files under Mercurial control in the working directory whose
1935 names match the given patterns.
1934 names match the given patterns.
1936
1935
1937 By default, this command searches all directories in the working
1936 By default, this command searches all directories in the working
1938 directory. To search just the current directory and its
1937 directory. To search just the current directory and its
1939 subdirectories, use "--include .".
1938 subdirectories, use "--include .".
1940
1939
1941 If no patterns are given to match, this command prints the names
1940 If no patterns are given to match, this command prints the names
1942 of all files under Mercurial control in the working directory.
1941 of all files under Mercurial control in the working directory.
1943
1942
1944 If you want to feed the output of this command into the "xargs"
1943 If you want to feed the output of this command into the "xargs"
1945 command, use the -0 option to both this command and "xargs". This
1944 command, use the -0 option to both this command and "xargs". This
1946 will avoid the problem of "xargs" treating single filenames that
1945 will avoid the problem of "xargs" treating single filenames that
1947 contain whitespace as multiple filenames.
1946 contain whitespace as multiple filenames.
1948 """
1947 """
1949 end = opts.get('print0') and '\0' or '\n'
1948 end = opts.get('print0') and '\0' or '\n'
1950 rev = opts.get('rev') or None
1949 rev = opts.get('rev') or None
1951
1950
1952 ret = 1
1951 ret = 1
1953 m = cmdutil.match(repo, pats, opts, default='relglob')
1952 m = cmdutil.match(repo, pats, opts, default='relglob')
1954 m.bad = lambda x,y: False
1953 m.bad = lambda x,y: False
1955 for abs in repo[rev].walk(m):
1954 for abs in repo[rev].walk(m):
1956 if not rev and abs not in repo.dirstate:
1955 if not rev and abs not in repo.dirstate:
1957 continue
1956 continue
1958 if opts.get('fullpath'):
1957 if opts.get('fullpath'):
1959 ui.write(repo.wjoin(abs), end)
1958 ui.write(repo.wjoin(abs), end)
1960 else:
1959 else:
1961 ui.write(((pats and m.rel(abs)) or abs), end)
1960 ui.write(((pats and m.rel(abs)) or abs), end)
1962 ret = 0
1961 ret = 0
1963
1962
1964 return ret
1963 return ret
1965
1964
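# --- illustrative sketch (not part of commands.py) ----------------------
# How the -0/--print0 terminator chosen above keeps filenames containing
# whitespace intact for "xargs -0"; the filenames are hypothetical.
_names = ['plain.txt', 'name with spaces.txt']
_print0 = True
_end = _print0 and '\0' or '\n'
print(repr(''.join([n + _end for n in _names])))
# -> 'plain.txt\x00name with spaces.txt\x00'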
1966 def log(ui, repo, *pats, **opts):
1965 def log(ui, repo, *pats, **opts):
1967 """show revision history of entire repository or files
1966 """show revision history of entire repository or files
1968
1967
1969 Print the revision history of the specified files or the entire
1968 Print the revision history of the specified files or the entire
1970 project.
1969 project.
1971
1970
1972 File history is shown without following rename or copy history of
1971 File history is shown without following rename or copy history of
1973 files. Use -f/--follow with a filename to follow history across
1972 files. Use -f/--follow with a filename to follow history across
1974 renames and copies. --follow without a filename will only show
1973 renames and copies. --follow without a filename will only show
1975 ancestors or descendants of the starting revision. --follow-first
1974 ancestors or descendants of the starting revision. --follow-first
1976 only follows the first parent of merge revisions.
1975 only follows the first parent of merge revisions.
1977
1976
1978 If no revision range is specified, the default is tip:0 unless
1977 If no revision range is specified, the default is tip:0 unless
1979 --follow is set, in which case the working directory parent is
1978 --follow is set, in which case the working directory parent is
1980 used as the starting revision.
1979 used as the starting revision.
1981
1980
1982 See 'hg help dates' for a list of formats valid for -d/--date.
1981 See 'hg help dates' for a list of formats valid for -d/--date.
1983
1982
1984 By default this command prints revision number and changeset id,
1983 By default this command prints revision number and changeset id,
1985 tags, non-trivial parents, user, date and time, and a summary for
1984 tags, non-trivial parents, user, date and time, and a summary for
1986 each commit. When the -v/--verbose switch is used, the list of
1985 each commit. When the -v/--verbose switch is used, the list of
1987 changed files and full commit message are shown.
1986 changed files and full commit message are shown.
1988
1987
1989 NOTE: log -p/--patch may generate unexpected diff output for merge
1988 NOTE: log -p/--patch may generate unexpected diff output for merge
1990 changesets, as it will only compare the merge changeset against
1989 changesets, as it will only compare the merge changeset against
1991 its first parent. Also, only files different from BOTH parents
1990 its first parent. Also, only files different from BOTH parents
1992 will appear in files:.
1991 will appear in files:.
1993 """
1992 """
1994
1993
1995 matchfn = cmdutil.match(repo, pats, opts)
1994 matchfn = cmdutil.match(repo, pats, opts)
1996 limit = cmdutil.loglimit(opts)
1995 limit = cmdutil.loglimit(opts)
1997 count = 0
1996 count = 0
1998
1997
1999 if opts.get('copies') and opts.get('rev'):
1998 if opts.get('copies') and opts.get('rev'):
2000 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1999 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2001 else:
2000 else:
2002 endrev = len(repo)
2001 endrev = len(repo)
2003 rcache = {}
2002 rcache = {}
2004 ncache = {}
2003 ncache = {}
2005 def getrenamed(fn, rev):
2004 def getrenamed(fn, rev):
2006 '''looks up all renames for a file (up to endrev) the first
2005 '''looks up all renames for a file (up to endrev) the first
2007 time the file is given. It indexes on the changerev and only
2006 time the file is given. It indexes on the changerev and only
2008 parses the manifest if linkrev != changerev.
2007 parses the manifest if linkrev != changerev.
2009 Returns rename info for fn at changerev rev.'''
2008 Returns rename info for fn at changerev rev.'''
2010 if fn not in rcache:
2009 if fn not in rcache:
2011 rcache[fn] = {}
2010 rcache[fn] = {}
2012 ncache[fn] = {}
2011 ncache[fn] = {}
2013 fl = repo.file(fn)
2012 fl = repo.file(fn)
2014 for i in fl:
2013 for i in fl:
2015 node = fl.node(i)
2014 node = fl.node(i)
2016 lr = fl.linkrev(i)
2015 lr = fl.linkrev(i)
2017 renamed = fl.renamed(node)
2016 renamed = fl.renamed(node)
2018 rcache[fn][lr] = renamed
2017 rcache[fn][lr] = renamed
2019 if renamed:
2018 if renamed:
2020 ncache[fn][node] = renamed
2019 ncache[fn][node] = renamed
2021 if lr >= endrev:
2020 if lr >= endrev:
2022 break
2021 break
2023 if rev in rcache[fn]:
2022 if rev in rcache[fn]:
2024 return rcache[fn][rev]
2023 return rcache[fn][rev]
2025
2024
2026 # If linkrev != rev (i.e. rev not found in rcache) fallback to
2025 # If linkrev != rev (i.e. rev not found in rcache) fallback to
2027 # filectx logic.
2026 # filectx logic.
2028
2027
2029 try:
2028 try:
2030 return repo[rev][fn].renamed()
2029 return repo[rev][fn].renamed()
2031 except error.LookupError:
2030 except error.LookupError:
2032 pass
2031 pass
2033 return None
2032 return None
2034
2033
2035 df = False
2034 df = False
2036 if opts["date"]:
2035 if opts["date"]:
2037 df = util.matchdate(opts["date"])
2036 df = util.matchdate(opts["date"])
2038
2037
2039 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2038 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2040 def prep(ctx, fns):
2039 def prep(ctx, fns):
2041 rev = ctx.rev()
2040 rev = ctx.rev()
2042 parents = [p for p in repo.changelog.parentrevs(rev)
2041 parents = [p for p in repo.changelog.parentrevs(rev)
2043 if p != nullrev]
2042 if p != nullrev]
2044 if opts.get('no_merges') and len(parents) == 2:
2043 if opts.get('no_merges') and len(parents) == 2:
2045 return
2044 return
2046 if opts.get('only_merges') and len(parents) != 2:
2045 if opts.get('only_merges') and len(parents) != 2:
2047 return
2046 return
2048 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2047 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2049 return
2048 return
2050 if df and not df(ctx.date()[0]):
2049 if df and not df(ctx.date()[0]):
2051 return
2050 return
2052 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2051 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2053 return
2052 return
2054 if opts.get('keyword'):
2053 if opts.get('keyword'):
2055 for k in [kw.lower() for kw in opts['keyword']]:
2054 for k in [kw.lower() for kw in opts['keyword']]:
2056 if (k in ctx.user().lower() or
2055 if (k in ctx.user().lower() or
2057 k in ctx.description().lower() or
2056 k in ctx.description().lower() or
2058 k in " ".join(ctx.files()).lower()):
2057 k in " ".join(ctx.files()).lower()):
2059 break
2058 break
2060 else:
2059 else:
2061 return
2060 return
2062
2061
2063 copies = []
2062 copies = []
2064 if opts.get('copies') and rev:
2063 if opts.get('copies') and rev:
2065 for fn in ctx.files():
2064 for fn in ctx.files():
2066 rename = getrenamed(fn, rev)
2065 rename = getrenamed(fn, rev)
2067 if rename:
2066 if rename:
2068 copies.append((fn, rename[0]))
2067 copies.append((fn, rename[0]))
2069
2068
2070 displayer.show(ctx, copies=copies)
2069 displayer.show(ctx, copies=copies)
2071
2070
2072 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2071 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2073 if count != limit:
2072 if count != limit:
2074 if displayer.flush(ctx.rev()):
2073 if displayer.flush(ctx.rev()):
2075 count += 1
2074 count += 1
2076
2075
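# --- illustrative sketch (not part of commands.py) ----------------------
# The -k/--keyword filter in prep() above keeps a changeset when at least
# one keyword appears in its user, description or file list; shown here
# against plain strings instead of a real changectx.
def _matches_keywords_sketch(keywords, user, description, files):
    haystacks = [user.lower(), description.lower(), " ".join(files).lower()]
    for k in [kw.lower() for kw in keywords]:
        for h in haystacks:
            if k in h:
                return True       # first hit is enough, like the break above
    return False                  # no keyword matched: changeset is skipped

print(_matches_keywords_sketch(['log', 'bogus'], 'mpm', 'fix log limit', []))
# -> True ('log' matches the description, so the changeset is shown)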
2077 def manifest(ui, repo, node=None, rev=None):
2076 def manifest(ui, repo, node=None, rev=None):
2078 """output the current or given revision of the project manifest
2077 """output the current or given revision of the project manifest
2079
2078
2080 Print a list of version controlled files for the given revision.
2079 Print a list of version controlled files for the given revision.
2081 If no revision is given, the first parent of the working directory
2080 If no revision is given, the first parent of the working directory
2082 is used, or the null revision if no revision is checked out.
2081 is used, or the null revision if no revision is checked out.
2083
2082
2084 With -v, print file permissions, symlink and executable bits.
2083 With -v, print file permissions, symlink and executable bits.
2085 With --debug, print file revision hashes.
2084 With --debug, print file revision hashes.
2086 """
2085 """
2087
2086
2088 if rev and node:
2087 if rev and node:
2089 raise util.Abort(_("please specify just one revision"))
2088 raise util.Abort(_("please specify just one revision"))
2090
2089
2091 if not node:
2090 if not node:
2092 node = rev
2091 node = rev
2093
2092
2094 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2093 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2095 ctx = repo[node]
2094 ctx = repo[node]
2096 for f in ctx:
2095 for f in ctx:
2097 if ui.debugflag:
2096 if ui.debugflag:
2098 ui.write("%40s " % hex(ctx.manifest()[f]))
2097 ui.write("%40s " % hex(ctx.manifest()[f]))
2099 if ui.verbose:
2098 if ui.verbose:
2100 ui.write(decor[ctx.flags(f)])
2099 ui.write(decor[ctx.flags(f)])
2101 ui.write("%s\n" % f)
2100 ui.write("%s\n" % f)
2102
2101
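# --- illustrative sketch (not part of commands.py) ----------------------
# The -v decoration table used above, applied to hypothetical
# (filename, flag) pairs: 'x' marks executables, 'l' marks symlinks.
_decor = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
for _name, _flag in [('README', ''), ('setup.py', 'x'), ('link', 'l')]:
    print(_decor[_flag] + _name)
# 644 README
# 755 * setup.py
# 644 @ link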
2103 def merge(ui, repo, node=None, **opts):
2102 def merge(ui, repo, node=None, **opts):
2104 """merge working directory with another revision
2103 """merge working directory with another revision
2105
2104
2106 The current working directory is updated with all changes made in
2105 The current working directory is updated with all changes made in
2107 the requested revision since the last common predecessor revision.
2106 the requested revision since the last common predecessor revision.
2108
2107
2109 Files that changed between either parent are marked as changed for
2108 Files that changed between either parent are marked as changed for
2110 the next commit and a commit must be performed before any further
2109 the next commit and a commit must be performed before any further
2111 updates to the repository are allowed. The next commit will have
2110 updates to the repository are allowed. The next commit will have
2112 two parents.
2111 two parents.
2113
2112
2114 If no revision is specified, the working directory's parent is a
2113 If no revision is specified, the working directory's parent is a
2115 head revision, and the current branch contains exactly one other
2114 head revision, and the current branch contains exactly one other
2116 head, the other head is merged with by default. Otherwise, an
2115 head, the other head is merged with by default. Otherwise, an
2117 explicit revision with which to merge must be provided.
2116 explicit revision with which to merge must be provided.
2118 """
2117 """
2119
2118
2120 if opts.get('rev') and node:
2119 if opts.get('rev') and node:
2121 raise util.Abort(_("please specify just one revision"))
2120 raise util.Abort(_("please specify just one revision"))
2122 if not node:
2121 if not node:
2123 node = opts.get('rev')
2122 node = opts.get('rev')
2124
2123
2125 if not node:
2124 if not node:
2126 branch = repo.changectx(None).branch()
2125 branch = repo.changectx(None).branch()
2127 bheads = repo.branchheads(branch)
2126 bheads = repo.branchheads(branch)
2128 if len(bheads) > 2:
2127 if len(bheads) > 2:
2129 raise util.Abort(_("branch '%s' has %d heads - "
2128 raise util.Abort(_("branch '%s' has %d heads - "
2130 "please merge with an explicit rev") %
2129 "please merge with an explicit rev") %
2131 (branch, len(bheads)))
2130 (branch, len(bheads)))
2132
2131
2133 parent = repo.dirstate.parents()[0]
2132 parent = repo.dirstate.parents()[0]
2134 if len(bheads) == 1:
2133 if len(bheads) == 1:
2135 if len(repo.heads()) > 1:
2134 if len(repo.heads()) > 1:
2136 raise util.Abort(_("branch '%s' has one head - "
2135 raise util.Abort(_("branch '%s' has one head - "
2137 "please merge with an explicit rev") %
2136 "please merge with an explicit rev") %
2138 branch)
2137 branch)
2139 msg = _('there is nothing to merge')
2138 msg = _('there is nothing to merge')
2140 if parent != repo.lookup(repo[None].branch()):
2139 if parent != repo.lookup(repo[None].branch()):
2141 msg = _('%s - use "hg update" instead') % msg
2140 msg = _('%s - use "hg update" instead') % msg
2142 raise util.Abort(msg)
2141 raise util.Abort(msg)
2143
2142
2144 if parent not in bheads:
2143 if parent not in bheads:
2145 raise util.Abort(_('working dir not at a head rev - '
2144 raise util.Abort(_('working dir not at a head rev - '
2146 'use "hg update" or merge with an explicit rev'))
2145 'use "hg update" or merge with an explicit rev'))
2147 node = parent == bheads[0] and bheads[-1] or bheads[0]
2146 node = parent == bheads[0] and bheads[-1] or bheads[0]
2148
2147
2149 if opts.get('preview'):
2148 if opts.get('preview'):
2150 p1 = repo['.']
2149 p1 = repo['.']
2151 p2 = repo[node]
2150 p2 = repo[node]
2152 common = p1.ancestor(p2)
2151 common = p1.ancestor(p2)
2153 roots, heads = [common.node()], [p2.node()]
2152 roots, heads = [common.node()], [p2.node()]
2154 displayer = cmdutil.show_changeset(ui, repo, opts)
2153 displayer = cmdutil.show_changeset(ui, repo, opts)
2155 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2154 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2156 if node not in roots:
2155 if node not in roots:
2157 displayer.show(repo[node])
2156 displayer.show(repo[node])
2158 return 0
2157 return 0
2159
2158
2160 return hg.merge(repo, node, force=opts.get('force'))
2159 return hg.merge(repo, node, force=opts.get('force'))
2161
2160
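# --- illustrative sketch (not part of commands.py) ----------------------
# The implicit merge-target selection above, restated with hypothetical
# head ids and simplified to the two-head case: with exactly two branch
# heads and the working directory on one of them, merge picks the other.
def _pick_other_head_sketch(parent, bheads):
    if len(bheads) != 2 or parent not in bheads:
        raise RuntimeError('an explicit revision is needed')
    return parent == bheads[0] and bheads[-1] or bheads[0]

print(_pick_other_head_sketch('head-a', ['head-a', 'head-b']))   # -> head-b
print(_pick_other_head_sketch('head-b', ['head-a', 'head-b']))   # -> head-a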
2162 def outgoing(ui, repo, dest=None, **opts):
2161 def outgoing(ui, repo, dest=None, **opts):
2163 """show changesets not found in destination
2162 """show changesets not found in destination
2164
2163
2165 Show changesets not found in the specified destination repository
2164 Show changesets not found in the specified destination repository
2166 or the default push location. These are the changesets that would
2165 or the default push location. These are the changesets that would
2167 be pushed if a push was requested.
2166 be pushed if a push was requested.
2168
2167
2169 See pull for valid destination format details.
2168 See pull for valid destination format details.
2170 """
2169 """
2171 limit = cmdutil.loglimit(opts)
2170 limit = cmdutil.loglimit(opts)
2172 dest, revs, checkout = hg.parseurl(
2171 dest, revs, checkout = hg.parseurl(
2173 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2172 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2174 if revs:
2173 if revs:
2175 revs = [repo.lookup(rev) for rev in revs]
2174 revs = [repo.lookup(rev) for rev in revs]
2176
2175
2177 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2176 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2178 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2177 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2179 o = repo.findoutgoing(other, force=opts.get('force'))
2178 o = repo.findoutgoing(other, force=opts.get('force'))
2180 if not o:
2179 if not o:
2181 ui.status(_("no changes found\n"))
2180 ui.status(_("no changes found\n"))
2182 return 1
2181 return 1
2183 o = repo.changelog.nodesbetween(o, revs)[0]
2182 o = repo.changelog.nodesbetween(o, revs)[0]
2184 if opts.get('newest_first'):
2183 if opts.get('newest_first'):
2185 o.reverse()
2184 o.reverse()
2186 displayer = cmdutil.show_changeset(ui, repo, opts)
2185 displayer = cmdutil.show_changeset(ui, repo, opts)
2187 count = 0
2186 count = 0
2188 for n in o:
2187 for n in o:
2189 if count >= limit:
2188 if count >= limit:
2190 break
2189 break
2191 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2190 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2192 if opts.get('no_merges') and len(parents) == 2:
2191 if opts.get('no_merges') and len(parents) == 2:
2193 continue
2192 continue
2194 count += 1
2193 count += 1
2195 displayer.show(repo[n])
2194 displayer.show(repo[n])
2196
2195
2197 def parents(ui, repo, file_=None, **opts):
2196 def parents(ui, repo, file_=None, **opts):
2198 """show the parents of the working directory or revision
2197 """show the parents of the working directory or revision
2199
2198
2200 Print the working directory's parent revisions. If a revision is
2199 Print the working directory's parent revisions. If a revision is
2201 given via -r/--rev, the parent of that revision will be printed.
2200 given via -r/--rev, the parent of that revision will be printed.
2202 If a file argument is given, the revision in which the file was
2201 If a file argument is given, the revision in which the file was
2203 last changed (before the working directory revision or the
2202 last changed (before the working directory revision or the
2204 argument to --rev if given) is printed.
2203 argument to --rev if given) is printed.
2205 """
2204 """
2206 rev = opts.get('rev')
2205 rev = opts.get('rev')
2207 if rev:
2206 if rev:
2208 ctx = repo[rev]
2207 ctx = repo[rev]
2209 else:
2208 else:
2210 ctx = repo[None]
2209 ctx = repo[None]
2211
2210
2212 if file_:
2211 if file_:
2213 m = cmdutil.match(repo, (file_,), opts)
2212 m = cmdutil.match(repo, (file_,), opts)
2214 if m.anypats() or len(m.files()) != 1:
2213 if m.anypats() or len(m.files()) != 1:
2215 raise util.Abort(_('can only specify an explicit filename'))
2214 raise util.Abort(_('can only specify an explicit filename'))
2216 file_ = m.files()[0]
2215 file_ = m.files()[0]
2217 filenodes = []
2216 filenodes = []
2218 for cp in ctx.parents():
2217 for cp in ctx.parents():
2219 if not cp:
2218 if not cp:
2220 continue
2219 continue
2221 try:
2220 try:
2222 filenodes.append(cp.filenode(file_))
2221 filenodes.append(cp.filenode(file_))
2223 except error.LookupError:
2222 except error.LookupError:
2224 pass
2223 pass
2225 if not filenodes:
2224 if not filenodes:
2226 raise util.Abort(_("'%s' not found in manifest!") % file_)
2225 raise util.Abort(_("'%s' not found in manifest!") % file_)
2227 fl = repo.file(file_)
2226 fl = repo.file(file_)
2228 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2227 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2229 else:
2228 else:
2230 p = [cp.node() for cp in ctx.parents()]
2229 p = [cp.node() for cp in ctx.parents()]
2231
2230
2232 displayer = cmdutil.show_changeset(ui, repo, opts)
2231 displayer = cmdutil.show_changeset(ui, repo, opts)
2233 for n in p:
2232 for n in p:
2234 if n != nullid:
2233 if n != nullid:
2235 displayer.show(repo[n])
2234 displayer.show(repo[n])
2236
2235
2237 def paths(ui, repo, search=None):
2236 def paths(ui, repo, search=None):
2238 """show aliases for remote repositories
2237 """show aliases for remote repositories
2239
2238
2240 Show definition of symbolic path name NAME. If no name is given,
2239 Show definition of symbolic path name NAME. If no name is given,
2241 show definition of all available names.
2240 show definition of all available names.
2242
2241
2243 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2242 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2244 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2243 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2245
2244
2246 See 'hg help urls' for more information.
2245 See 'hg help urls' for more information.
2247 """
2246 """
2248 if search:
2247 if search:
2249 for name, path in ui.configitems("paths"):
2248 for name, path in ui.configitems("paths"):
2250 if name == search:
2249 if name == search:
2251 ui.write("%s\n" % url.hidepassword(path))
2250 ui.write("%s\n" % url.hidepassword(path))
2252 return
2251 return
2253 ui.warn(_("not found!\n"))
2252 ui.warn(_("not found!\n"))
2254 return 1
2253 return 1
2255 else:
2254 else:
2256 for name, path in ui.configitems("paths"):
2255 for name, path in ui.configitems("paths"):
2257 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2256 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2258
2257
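# --- illustrative sketch (not part of commands.py) ----------------------
# What the lookup above amounts to for a hypothetical parsed [paths]
# section of an hgrc file; the aliases and URLs are invented.
_paths = {
    'default': 'http://hg.example.com/project',
    'backup':  'ssh://hg@backup.example.com/project',
}
_search = 'backup'
print(_paths.get(_search, 'not found!'))
# -> ssh://hg@backup.example.com/project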
2259 def postincoming(ui, repo, modheads, optupdate, checkout):
2258 def postincoming(ui, repo, modheads, optupdate, checkout):
2260 if modheads == 0:
2259 if modheads == 0:
2261 return
2260 return
2262 if optupdate:
2261 if optupdate:
2263 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2262 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2264 return hg.update(repo, checkout)
2263 return hg.update(repo, checkout)
2265 else:
2264 else:
2266 ui.status(_("not updating, since new heads added\n"))
2265 ui.status(_("not updating, since new heads added\n"))
2267 if modheads > 1:
2266 if modheads > 1:
2268 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2267 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2269 else:
2268 else:
2270 ui.status(_("(run 'hg update' to get a working copy)\n"))
2269 ui.status(_("(run 'hg update' to get a working copy)\n"))
2271
2270
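# --- illustrative sketch (not part of commands.py) ----------------------
# The post-pull update decision above, assuming -u/--update was passed and
# using plain counts instead of a live repository: update when at most one
# head changed, when the current branch still has a single head, or when a
# checkout target was supplied.
def _should_update_sketch(modheads, branchheads, checkout):
    if modheads == 0:
        return False                     # nothing was pulled
    return modheads <= 1 or branchheads == 1 or bool(checkout)

print(_should_update_sketch(2, 1, None))   # -> True
print(_should_update_sketch(2, 2, None))   # -> False (new heads, no update)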
2272 def pull(ui, repo, source="default", **opts):
2271 def pull(ui, repo, source="default", **opts):
2273 """pull changes from the specified source
2272 """pull changes from the specified source
2274
2273
2275 Pull changes from a remote repository to a local one.
2274 Pull changes from a remote repository to a local one.
2276
2275
2277 This finds all changes from the repository at the specified path
2276 This finds all changes from the repository at the specified path
2278 or URL and adds them to a local repository (the current one unless
2277 or URL and adds them to a local repository (the current one unless
2279 -R is specified). By default, this does not update the copy of the
2278 -R is specified). By default, this does not update the copy of the
2280 project in the working directory.
2279 project in the working directory.
2281
2280
2282 Use hg incoming if you want to see what would have been added by a
2281 Use hg incoming if you want to see what would have been added by a
2283 pull at the time you issued this command. If you then decide to
2282 pull at the time you issued this command. If you then decide to
2284 add those changes to the repository, you should use pull -r X
2283 add those changes to the repository, you should use pull -r X
2285 where X is the last changeset listed by hg incoming.
2284 where X is the last changeset listed by hg incoming.
2286
2285
2287 If SOURCE is omitted, the 'default' path will be used.
2286 If SOURCE is omitted, the 'default' path will be used.
2288 See 'hg help urls' for more information.
2287 See 'hg help urls' for more information.
2289 """
2288 """
2290 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2289 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2291 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2290 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2292 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2291 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2293 if revs:
2292 if revs:
2294 try:
2293 try:
2295 revs = [other.lookup(rev) for rev in revs]
2294 revs = [other.lookup(rev) for rev in revs]
2296 except error.CapabilityError:
2295 except error.CapabilityError:
2297 err = _("Other repository doesn't support revision lookup, "
2296 err = _("Other repository doesn't support revision lookup, "
2298 "so a rev cannot be specified.")
2297 "so a rev cannot be specified.")
2299 raise util.Abort(err)
2298 raise util.Abort(err)
2300
2299
2301 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2300 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2302 if checkout:
2301 if checkout:
2303 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2302 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2304 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2303 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2305
2304
2306 def push(ui, repo, dest=None, **opts):
2305 def push(ui, repo, dest=None, **opts):
2307 """push changes to the specified destination
2306 """push changes to the specified destination
2308
2307
2309 Push changes from the local repository to the given destination.
2308 Push changes from the local repository to the given destination.
2310
2309
2311 This is the symmetrical operation for pull. It moves changes from
2310 This is the symmetrical operation for pull. It moves changes from
2312 the current repository to a different one. If the destination is
2311 the current repository to a different one. If the destination is
2313 local this is identical to a pull in that directory from the
2312 local this is identical to a pull in that directory from the
2314 current one.
2313 current one.
2315
2314
2316 By default, push will refuse to run if it detects the result would
2315 By default, push will refuse to run if it detects the result would
2317 increase the number of remote heads. This generally indicates the
2316 increase the number of remote heads. This generally indicates the
2318 user forgot to pull and merge before pushing.
2317 user forgot to pull and merge before pushing.
2319
2318
2320 If -r/--rev is used, the named revision and all its ancestors will
2319 If -r/--rev is used, the named revision and all its ancestors will
2321 be pushed to the remote repository.
2320 be pushed to the remote repository.
2322
2321
2323 Please see 'hg help urls' for important details about ssh://
2322 Please see 'hg help urls' for important details about ssh://
2324 URLs. If DESTINATION is omitted, a default path will be used.
2323 URLs. If DESTINATION is omitted, a default path will be used.
2325 """
2324 """
2326 dest, revs, checkout = hg.parseurl(
2325 dest, revs, checkout = hg.parseurl(
2327 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2326 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2328 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2327 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2329 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2328 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2330 if revs:
2329 if revs:
2331 revs = [repo.lookup(rev) for rev in revs]
2330 revs = [repo.lookup(rev) for rev in revs]
2332
2331
2333 # push subrepos depth-first for coherent ordering
2332 # push subrepos depth-first for coherent ordering
2334 c = repo['']
2333 c = repo['']
2335 subs = c.substate # only repos that are committed
2334 subs = c.substate # only repos that are committed
2336 for s in sorted(subs):
2335 for s in sorted(subs):
2337 c.sub(s).push(opts.get('force'))
2336 c.sub(s).push(opts.get('force'))
2338
2337
2339 r = repo.push(other, opts.get('force'), revs=revs)
2338 r = repo.push(other, opts.get('force'), revs=revs)
2340 return r == 0
2339 return r == 0
2341
2340
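# --- illustrative sketch (not part of commands.py; the real check lives
# inside repo.push) ------------------------------------------------------
# The default refusal described in the docstring above: a push that would
# raise the number of remote heads is rejected unless -f/--force is given.
def _push_allowed_sketch(heads_before, heads_after, force=False):
    return force or heads_after <= heads_before

print(_push_allowed_sketch(1, 2))              # -> False (would add a head)
print(_push_allowed_sketch(1, 2, force=True))  # -> True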
2342 def recover(ui, repo):
2341 def recover(ui, repo):
2343 """roll back an interrupted transaction
2342 """roll back an interrupted transaction
2344
2343
2345 Recover from an interrupted commit or pull.
2344 Recover from an interrupted commit or pull.
2346
2345
2347 This command tries to fix the repository status after an
2346 This command tries to fix the repository status after an
2348 interrupted operation. It should only be necessary when Mercurial
2347 interrupted operation. It should only be necessary when Mercurial
2349 suggests it.
2348 suggests it.
2350 """
2349 """
2351 if repo.recover():
2350 if repo.recover():
2352 return hg.verify(repo)
2351 return hg.verify(repo)
2353 return 1
2352 return 1
2354
2353
2355 def remove(ui, repo, *pats, **opts):
2354 def remove(ui, repo, *pats, **opts):
2356 """remove the specified files on the next commit
2355 """remove the specified files on the next commit
2357
2356
2358 Schedule the indicated files for removal from the repository.
2357 Schedule the indicated files for removal from the repository.
2359
2358
2360 This only removes files from the current branch, not from the
2359 This only removes files from the current branch, not from the
2361 entire project history. -A/--after can be used to remove only
2360 entire project history. -A/--after can be used to remove only
2362 files that have already been deleted, -f/--force can be used to
2361 files that have already been deleted, -f/--force can be used to
2363 force deletion, and -Af can be used to remove files from the next
2362 force deletion, and -Af can be used to remove files from the next
2364 revision without deleting them from the working directory.
2363 revision without deleting them from the working directory.
2365
2364
2366 The following table details the behavior of remove for different
2365 The following table details the behavior of remove for different
2367 file states (columns) and option combinations (rows). The file
2366 file states (columns) and option combinations (rows). The file
2368 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2367 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2369 reported by hg status). The actions are Warn, Remove (from branch)
2368 reported by hg status). The actions are Warn, Remove (from branch)
2370 and Delete (from disk)::
2369 and Delete (from disk)::
2371
2370
2372 A C M !
2371 A C M !
2373 none W RD W R
2372 none W RD W R
2374 -f R RD RD R
2373 -f R RD RD R
2375 -A W W W R
2374 -A W W W R
2376 -Af R R R R
2375 -Af R R R R
2377
2376
2378 This command schedules the files to be removed at the next commit.
2377 This command schedules the files to be removed at the next commit.
2379 To undo a remove before that, see hg revert.
2378 To undo a remove before that, see hg revert.
2380 """
2379 """
2381
2380
2382 after, force = opts.get('after'), opts.get('force')
2381 after, force = opts.get('after'), opts.get('force')
2383 if not pats and not after:
2382 if not pats and not after:
2384 raise util.Abort(_('no files specified'))
2383 raise util.Abort(_('no files specified'))
2385
2384
2386 m = cmdutil.match(repo, pats, opts)
2385 m = cmdutil.match(repo, pats, opts)
2387 s = repo.status(match=m, clean=True)
2386 s = repo.status(match=m, clean=True)
2388 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2387 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2389
2388
2390 for f in m.files():
2389 for f in m.files():
2391 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2390 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2392 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2391 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2393
2392
2394 def warn(files, reason):
2393 def warn(files, reason):
2395 for f in files:
2394 for f in files:
2396 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2395 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2397 % (m.rel(f), reason))
2396 % (m.rel(f), reason))
2398
2397
2399 if force:
2398 if force:
2400 remove, forget = modified + deleted + clean, added
2399 remove, forget = modified + deleted + clean, added
2401 elif after:
2400 elif after:
2402 remove, forget = deleted, []
2401 remove, forget = deleted, []
2403 warn(modified + added + clean, _('still exists'))
2402 warn(modified + added + clean, _('still exists'))
2404 else:
2403 else:
2405 remove, forget = deleted + clean, []
2404 remove, forget = deleted + clean, []
2406 warn(modified, _('is modified'))
2405 warn(modified, _('is modified'))
2407 warn(added, _('has been marked for add'))
2406 warn(added, _('has been marked for add'))
2408
2407
2409 for f in sorted(remove + forget):
2408 for f in sorted(remove + forget):
2410 if ui.verbose or not m.exact(f):
2409 if ui.verbose or not m.exact(f):
2411 ui.status(_('removing %s\n') % m.rel(f))
2410 ui.status(_('removing %s\n') % m.rel(f))
2412
2411
2413 repo.forget(forget)
2412 repo.forget(forget)
2414 repo.remove(remove, unlink=not after)
2413 repo.remove(remove, unlink=not after)
2415
2414
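# --- illustrative sketch (not part of commands.py) ----------------------
# The behaviour table from the docstring above, encoded as a lookup keyed
# by (option row, file state column); W=warn, R=remove from branch,
# D=delete from disk.
_remove_actions = {
    ('none', 'A'): 'W',  ('none', 'C'): 'RD', ('none', 'M'): 'W',  ('none', '!'): 'R',
    ('-f',   'A'): 'R',  ('-f',   'C'): 'RD', ('-f',   'M'): 'RD', ('-f',   '!'): 'R',
    ('-A',   'A'): 'W',  ('-A',   'C'): 'W',  ('-A',   'M'): 'W',  ('-A',   '!'): 'R',
    ('-Af',  'A'): 'R',  ('-Af',  'C'): 'R',  ('-Af',  'M'): 'R',  ('-Af',  '!'): 'R',
}
print(_remove_actions[('-f', 'M')])   # -> RD: forcing removal of a modified file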
2416 def rename(ui, repo, *pats, **opts):
2415 def rename(ui, repo, *pats, **opts):
2417 """rename files; equivalent of copy + remove
2416 """rename files; equivalent of copy + remove
2418
2417
2419 Mark dest as copies of sources; mark sources for deletion. If dest
2418 Mark dest as copies of sources; mark sources for deletion. If dest
2420 is a directory, copies are put in that directory. If dest is a
2419 is a directory, copies are put in that directory. If dest is a
2421 file, there can only be one source.
2420 file, there can only be one source.
2422
2421
2423 By default, this command copies the contents of files as they
2422 By default, this command copies the contents of files as they
2424 exist in the working directory. If invoked with -A/--after, the
2423 exist in the working directory. If invoked with -A/--after, the
2425 operation is recorded, but no copying is performed.
2424 operation is recorded, but no copying is performed.
2426
2425
2427 This command takes effect at the next commit. To undo a rename
2426 This command takes effect at the next commit. To undo a rename
2428 before that, see hg revert.
2427 before that, see hg revert.
2429 """
2428 """
2430 wlock = repo.wlock(False)
2429 wlock = repo.wlock(False)
2431 try:
2430 try:
2432 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2431 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2433 finally:
2432 finally:
2434 wlock.release()
2433 wlock.release()
2435
2434
2436 def resolve(ui, repo, *pats, **opts):
2435 def resolve(ui, repo, *pats, **opts):
2437 """retry file merges from a merge or update
2436 """retry file merges from a merge or update
2438
2437
2439 This command can cleanly retry unresolved file merges using file
2438 This command can cleanly retry unresolved file merges using file
2440 revisions preserved from the last update or merge. To attempt to
2439 revisions preserved from the last update or merge. To attempt to
2441 resolve all unresolved files, use the -a/--all switch.
2440 resolve all unresolved files, use the -a/--all switch.
2442
2441
2443 If a conflict is resolved manually, please note that the changes
2442 If a conflict is resolved manually, please note that the changes
2444 will be overwritten if the merge is retried with resolve. The
2443 will be overwritten if the merge is retried with resolve. The
2445 -m/--mark switch should be used to mark the file as resolved.
2444 -m/--mark switch should be used to mark the file as resolved.
2446
2445
2447 This command also allows listing resolved files and manually
2446 This command also allows listing resolved files and manually
2448 indicating whether or not files are resolved. All files must be
2447 indicating whether or not files are resolved. All files must be
2449 marked as resolved before a commit is permitted.
2448 marked as resolved before a commit is permitted.
2450
2449
2451 The codes used to show the status of files are::
2450 The codes used to show the status of files are::
2452
2451
2453 U = unresolved
2452 U = unresolved
2454 R = resolved
2453 R = resolved
2455 """
2454 """
2456
2455
2457 all, mark, unmark, show, nostatus = \
2456 all, mark, unmark, show, nostatus = \
2458 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2457 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2459
2458
2460 if (show and (mark or unmark)) or (mark and unmark):
2459 if (show and (mark or unmark)) or (mark and unmark):
2461 raise util.Abort(_("too many options specified"))
2460 raise util.Abort(_("too many options specified"))
2462 if pats and all:
2461 if pats and all:
2463 raise util.Abort(_("can't specify --all and patterns"))
2462 raise util.Abort(_("can't specify --all and patterns"))
2464 if not (all or pats or show or mark or unmark):
2463 if not (all or pats or show or mark or unmark):
2465 raise util.Abort(_('no files or directories specified; '
2464 raise util.Abort(_('no files or directories specified; '
2466 'use --all to remerge all files'))
2465 'use --all to remerge all files'))
2467
2466
2468 ms = merge_.mergestate(repo)
2467 ms = merge_.mergestate(repo)
2469 m = cmdutil.match(repo, pats, opts)
2468 m = cmdutil.match(repo, pats, opts)
2470
2469
2471 for f in ms:
2470 for f in ms:
2472 if m(f):
2471 if m(f):
2473 if show:
2472 if show:
2474 if nostatus:
2473 if nostatus:
2475 ui.write("%s\n" % f)
2474 ui.write("%s\n" % f)
2476 else:
2475 else:
2477 ui.write("%s %s\n" % (ms[f].upper(), f))
2476 ui.write("%s %s\n" % (ms[f].upper(), f))
2478 elif mark:
2477 elif mark:
2479 ms.mark(f, "r")
2478 ms.mark(f, "r")
2480 elif unmark:
2479 elif unmark:
2481 ms.mark(f, "u")
2480 ms.mark(f, "u")
2482 else:
2481 else:
2483 wctx = repo[None]
2482 wctx = repo[None]
2484 mctx = wctx.parents()[-1]
2483 mctx = wctx.parents()[-1]
2485
2484
2486 # backup pre-resolve (merge uses .orig for its own purposes)
2485 # backup pre-resolve (merge uses .orig for its own purposes)
2487 a = repo.wjoin(f)
2486 a = repo.wjoin(f)
2488 util.copyfile(a, a + ".resolve")
2487 util.copyfile(a, a + ".resolve")
2489
2488
2490 # resolve file
2489 # resolve file
2491 ms.resolve(f, wctx, mctx)
2490 ms.resolve(f, wctx, mctx)
2492
2491
2493 # replace filemerge's .orig file with our resolve file
2492 # replace filemerge's .orig file with our resolve file
2494 util.rename(a + ".resolve", a + ".orig")
2493 util.rename(a + ".resolve", a + ".orig")
2495
2494
2496 def revert(ui, repo, *pats, **opts):
2495 def revert(ui, repo, *pats, **opts):
2497 """restore individual files or directories to an earlier state
2496 """restore individual files or directories to an earlier state
2498
2497
2499 (Use update -r to check out earlier revisions; revert does not
2498 (Use update -r to check out earlier revisions; revert does not
2500 change the working directory parents.)
2499 change the working directory parents.)
2501
2500
2502 With no revision specified, revert the named files or directories
2501 With no revision specified, revert the named files or directories
2503 to the contents they had in the parent of the working directory.
2502 to the contents they had in the parent of the working directory.
2504 This restores the contents of the affected files to an unmodified
2503 This restores the contents of the affected files to an unmodified
2505 state and unschedules adds, removes, copies, and renames. If the
2504 state and unschedules adds, removes, copies, and renames. If the
2506 working directory has two parents, you must explicitly specify the
2505 working directory has two parents, you must explicitly specify the
2507 revision to revert to.
2506 revision to revert to.
2508
2507
2509 Using the -r/--rev option, revert the given files or directories
2508 Using the -r/--rev option, revert the given files or directories
2510 to their contents as of a specific revision. This can be helpful
2509 to their contents as of a specific revision. This can be helpful
2511 to "roll back" some or all of an earlier change. See 'hg help
2510 to "roll back" some or all of an earlier change. See 'hg help
2512 dates' for a list of formats valid for -d/--date.
2511 dates' for a list of formats valid for -d/--date.
2513
2512
2514 Revert modifies the working directory. It does not commit any
2513 Revert modifies the working directory. It does not commit any
2515 changes, or change the parent of the working directory. If you
2514 changes, or change the parent of the working directory. If you
2516 revert to a revision other than the parent of the working
2515 revert to a revision other than the parent of the working
2517 directory, the reverted files will thus appear modified
2516 directory, the reverted files will thus appear modified
2518 afterwards.
2517 afterwards.
2519
2518
2520 If a file has been deleted, it is restored. If the executable mode
2519 If a file has been deleted, it is restored. If the executable mode
2521 of a file was changed, it is reset.
2520 of a file was changed, it is reset.
2522
2521
2523 If names are given, all files matching the names are reverted.
2522 If names are given, all files matching the names are reverted.
2524 If no arguments are given, no files are reverted.
2523 If no arguments are given, no files are reverted.
2525
2524
2526 Modified files are saved with a .orig suffix before reverting.
2525 Modified files are saved with a .orig suffix before reverting.
2527 To disable these backups, use --no-backup.
2526 To disable these backups, use --no-backup.
2528 """
2527 """
2529
2528
2530 if opts["date"]:
2529 if opts["date"]:
2531 if opts["rev"]:
2530 if opts["rev"]:
2532 raise util.Abort(_("you can't specify a revision and a date"))
2531 raise util.Abort(_("you can't specify a revision and a date"))
2533 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2532 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2534
2533
2535 if not pats and not opts.get('all'):
2534 if not pats and not opts.get('all'):
2536 raise util.Abort(_('no files or directories specified; '
2535 raise util.Abort(_('no files or directories specified; '
2537 'use --all to revert the whole repo'))
2536 'use --all to revert the whole repo'))
2538
2537
2539 parent, p2 = repo.dirstate.parents()
2538 parent, p2 = repo.dirstate.parents()
2540 if not opts.get('rev') and p2 != nullid:
2539 if not opts.get('rev') and p2 != nullid:
2541 raise util.Abort(_('uncommitted merge - please provide a '
2540 raise util.Abort(_('uncommitted merge - please provide a '
2542 'specific revision'))
2541 'specific revision'))
2543 ctx = repo[opts.get('rev')]
2542 ctx = repo[opts.get('rev')]
2544 node = ctx.node()
2543 node = ctx.node()
2545 mf = ctx.manifest()
2544 mf = ctx.manifest()
2546 if node == parent:
2545 if node == parent:
2547 pmf = mf
2546 pmf = mf
2548 else:
2547 else:
2549 pmf = None
2548 pmf = None
2550
2549
2551 # need all matching names in dirstate and manifest of target rev,
2550 # need all matching names in dirstate and manifest of target rev,
2552 # so have to walk both. do not print errors if files exist in one
2551 # so have to walk both. do not print errors if files exist in one
2553 # but not other.
2552 # but not other.
2554
2553
2555 names = {}
2554 names = {}
2556
2555
2557 wlock = repo.wlock()
2556 wlock = repo.wlock()
2558 try:
2557 try:
2559 # walk dirstate.
2558 # walk dirstate.
2560
2559
2561 m = cmdutil.match(repo, pats, opts)
2560 m = cmdutil.match(repo, pats, opts)
2562 m.bad = lambda x,y: False
2561 m.bad = lambda x,y: False
2563 for abs in repo.walk(m):
2562 for abs in repo.walk(m):
2564 names[abs] = m.rel(abs), m.exact(abs)
2563 names[abs] = m.rel(abs), m.exact(abs)
2565
2564
2566 # walk target manifest.
2565 # walk target manifest.
2567
2566
2568 def badfn(path, msg):
2567 def badfn(path, msg):
2569 if path in names:
2568 if path in names:
2570 return
2569 return
2571 path_ = path + '/'
2570 path_ = path + '/'
2572 for f in names:
2571 for f in names:
2573 if f.startswith(path_):
2572 if f.startswith(path_):
2574 return
2573 return
2575 ui.warn("%s: %s\n" % (m.rel(path), msg))
2574 ui.warn("%s: %s\n" % (m.rel(path), msg))
2576
2575
2577 m = cmdutil.match(repo, pats, opts)
2576 m = cmdutil.match(repo, pats, opts)
2578 m.bad = badfn
2577 m.bad = badfn
2579 for abs in repo[node].walk(m):
2578 for abs in repo[node].walk(m):
2580 if abs not in names:
2579 if abs not in names:
2581 names[abs] = m.rel(abs), m.exact(abs)
2580 names[abs] = m.rel(abs), m.exact(abs)
2582
2581
2583 m = cmdutil.matchfiles(repo, names)
2582 m = cmdutil.matchfiles(repo, names)
2584 changes = repo.status(match=m)[:4]
2583 changes = repo.status(match=m)[:4]
2585 modified, added, removed, deleted = map(set, changes)
2584 modified, added, removed, deleted = map(set, changes)
2586
2585
2587 # if f is a rename, also revert the source
2586 # if f is a rename, also revert the source
2588 cwd = repo.getcwd()
2587 cwd = repo.getcwd()
2589 for f in added:
2588 for f in added:
2590 src = repo.dirstate.copied(f)
2589 src = repo.dirstate.copied(f)
2591 if src and src not in names and repo.dirstate[src] == 'r':
2590 if src and src not in names and repo.dirstate[src] == 'r':
2592 removed.add(src)
2591 removed.add(src)
2593 names[src] = (repo.pathto(src, cwd), True)
2592 names[src] = (repo.pathto(src, cwd), True)
2594
2593
2595 def removeforget(abs):
2594 def removeforget(abs):
2596 if repo.dirstate[abs] == 'a':
2595 if repo.dirstate[abs] == 'a':
2597 return _('forgetting %s\n')
2596 return _('forgetting %s\n')
2598 return _('removing %s\n')
2597 return _('removing %s\n')
2599
2598
2600 revert = ([], _('reverting %s\n'))
2599 revert = ([], _('reverting %s\n'))
2601 add = ([], _('adding %s\n'))
2600 add = ([], _('adding %s\n'))
2602 remove = ([], removeforget)
2601 remove = ([], removeforget)
2603 undelete = ([], _('undeleting %s\n'))
2602 undelete = ([], _('undeleting %s\n'))
2604
2603
2605 disptable = (
2604 disptable = (
2606 # dispatch table:
2605 # dispatch table:
2607 # file state
2606 # file state
2608 # action if in target manifest
2607 # action if in target manifest
2609 # action if not in target manifest
2608 # action if not in target manifest
2610 # make backup if in target manifest
2609 # make backup if in target manifest
2611 # make backup if not in target manifest
2610 # make backup if not in target manifest
2612 (modified, revert, remove, True, True),
2611 (modified, revert, remove, True, True),
2613 (added, revert, remove, True, False),
2612 (added, revert, remove, True, False),
2614 (removed, undelete, None, False, False),
2613 (removed, undelete, None, False, False),
2615 (deleted, revert, remove, False, False),
2614 (deleted, revert, remove, False, False),
2616 )
2615 )
2617
2616
2618 for abs, (rel, exact) in sorted(names.items()):
2617 for abs, (rel, exact) in sorted(names.items()):
2619 mfentry = mf.get(abs)
2618 mfentry = mf.get(abs)
2620 target = repo.wjoin(abs)
2619 target = repo.wjoin(abs)
2621 def handle(xlist, dobackup):
2620 def handle(xlist, dobackup):
2622 xlist[0].append(abs)
2621 xlist[0].append(abs)
2623 if dobackup and not opts.get('no_backup') and util.lexists(target):
2622 if dobackup and not opts.get('no_backup') and util.lexists(target):
2624 bakname = "%s.orig" % rel
2623 bakname = "%s.orig" % rel
2625 ui.note(_('saving current version of %s as %s\n') %
2624 ui.note(_('saving current version of %s as %s\n') %
2626 (rel, bakname))
2625 (rel, bakname))
2627 if not opts.get('dry_run'):
2626 if not opts.get('dry_run'):
2628 util.copyfile(target, bakname)
2627 util.copyfile(target, bakname)
2629 if ui.verbose or not exact:
2628 if ui.verbose or not exact:
2630 msg = xlist[1]
2629 msg = xlist[1]
2631 if not isinstance(msg, basestring):
2630 if not isinstance(msg, basestring):
2632 msg = msg(abs)
2631 msg = msg(abs)
2633 ui.status(msg % rel)
2632 ui.status(msg % rel)
2634 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2633 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2635 if abs not in table: continue
2634 if abs not in table: continue
2636 # file has changed in dirstate
2635 # file has changed in dirstate
2637 if mfentry:
2636 if mfentry:
2638 handle(hitlist, backuphit)
2637 handle(hitlist, backuphit)
2639 elif misslist is not None:
2638 elif misslist is not None:
2640 handle(misslist, backupmiss)
2639 handle(misslist, backupmiss)
2641 break
2640 break
2642 else:
2641 else:
2643 if abs not in repo.dirstate:
2642 if abs not in repo.dirstate:
2644 if mfentry:
2643 if mfentry:
2645 handle(add, True)
2644 handle(add, True)
2646 elif exact:
2645 elif exact:
2647 ui.warn(_('file not managed: %s\n') % rel)
2646 ui.warn(_('file not managed: %s\n') % rel)
2648 continue
2647 continue
2649 # file has not changed in dirstate
2648 # file has not changed in dirstate
2650 if node == parent:
2649 if node == parent:
2651 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2650 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2652 continue
2651 continue
2653 if pmf is None:
2652 if pmf is None:
2654 # only need parent manifest in this unlikely case,
2653 # only need parent manifest in this unlikely case,
2655 # so do not read by default
2654 # so do not read by default
2656 pmf = repo[parent].manifest()
2655 pmf = repo[parent].manifest()
2657 if abs in pmf:
2656 if abs in pmf:
2658 if mfentry:
2657 if mfentry:
2659 # if version of file is same in parent and target
2658 # if version of file is same in parent and target
2660 # manifests, do nothing
2659 # manifests, do nothing
2661 if (pmf[abs] != mfentry or
2660 if (pmf[abs] != mfentry or
2662 pmf.flags(abs) != mf.flags(abs)):
2661 pmf.flags(abs) != mf.flags(abs)):
2663 handle(revert, False)
2662 handle(revert, False)
2664 else:
2663 else:
2665 handle(remove, False)
2664 handle(remove, False)
2666
2665
2667 if not opts.get('dry_run'):
2666 if not opts.get('dry_run'):
2668 def checkout(f):
2667 def checkout(f):
2669 fc = ctx[f]
2668 fc = ctx[f]
2670 repo.wwrite(f, fc.data(), fc.flags())
2669 repo.wwrite(f, fc.data(), fc.flags())
2671
2670
2672 audit_path = util.path_auditor(repo.root)
2671 audit_path = util.path_auditor(repo.root)
2673 for f in remove[0]:
2672 for f in remove[0]:
2674 if repo.dirstate[f] == 'a':
2673 if repo.dirstate[f] == 'a':
2675 repo.dirstate.forget(f)
2674 repo.dirstate.forget(f)
2676 continue
2675 continue
2677 audit_path(f)
2676 audit_path(f)
2678 try:
2677 try:
2679 util.unlink(repo.wjoin(f))
2678 util.unlink(repo.wjoin(f))
2680 except OSError:
2679 except OSError:
2681 pass
2680 pass
2682 repo.dirstate.remove(f)
2681 repo.dirstate.remove(f)
2683
2682
2684 normal = None
2683 normal = None
2685 if node == parent:
2684 if node == parent:
2686 # We're reverting to our parent. If possible, we'd like status
2685 # We're reverting to our parent. If possible, we'd like status
2687 # to report the file as clean. We have to use normallookup for
2686 # to report the file as clean. We have to use normallookup for
2688 # merges to avoid losing information about merged/dirty files.
2687 # merges to avoid losing information about merged/dirty files.
2689 if p2 != nullid:
2688 if p2 != nullid:
2690 normal = repo.dirstate.normallookup
2689 normal = repo.dirstate.normallookup
2691 else:
2690 else:
2692 normal = repo.dirstate.normal
2691 normal = repo.dirstate.normal
2693 for f in revert[0]:
2692 for f in revert[0]:
2694 checkout(f)
2693 checkout(f)
2695 if normal:
2694 if normal:
2696 normal(f)
2695 normal(f)
2697
2696
2698 for f in add[0]:
2697 for f in add[0]:
2699 checkout(f)
2698 checkout(f)
2700 repo.dirstate.add(f)
2699 repo.dirstate.add(f)
2701
2700
2702 normal = repo.dirstate.normallookup
2701 normal = repo.dirstate.normallookup
2703 if node == parent and p2 == nullid:
2702 if node == parent and p2 == nullid:
2704 normal = repo.dirstate.normal
2703 normal = repo.dirstate.normal
2705 for f in undelete[0]:
2704 for f in undelete[0]:
2706 checkout(f)
2705 checkout(f)
2707 normal(f)
2706 normal(f)
2708
2707
2709 finally:
2708 finally:
2710 wlock.release()
2709 wlock.release()
2711
2710
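# A standalone sketch of the dispatch-table pattern used by revert() above:
# each file is routed through a first-match-wins table of
# (fileset, action if present in the target manifest, action if absent).
# The file sets and names below are illustrative, not part of commands.py.

def classify(path, modified, added, removed, deleted, in_target):
    disptable = (
        (modified, 'revert',   'remove'),
        (added,    'revert',   'remove'),
        (removed,  'undelete', None),
        (deleted,  'revert',   'remove'),
    )
    for fileset, hit, miss in disptable:
        if path not in fileset:
            continue
        return hit if in_target else miss
    return None  # file untouched in the dirstate: nothing to do

if __name__ == '__main__':
    modified, added = set(['a.txt']), set(['b.txt'])
    removed, deleted = set(['c.txt']), set()
    print(classify('a.txt', modified, added, removed, deleted, True))   # revert
    print(classify('c.txt', modified, added, removed, deleted, False))  # None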
2712 def rollback(ui, repo):
2711 def rollback(ui, repo):
2713 """roll back the last transaction
2712 """roll back the last transaction
2714
2713
2715 This command should be used with care. There is only one level of
2714 This command should be used with care. There is only one level of
2716 rollback, and there is no way to undo a rollback. It will also
2715 rollback, and there is no way to undo a rollback. It will also
2717 restore the dirstate at the time of the last transaction, losing
2716 restore the dirstate at the time of the last transaction, losing
2718 any dirstate changes since that time. This command does not alter
2717 any dirstate changes since that time. This command does not alter
2719 the working directory.
2718 the working directory.
2720
2719
2721 Transactions are used to encapsulate the effects of all commands
2720 Transactions are used to encapsulate the effects of all commands
2722 that create new changesets or propagate existing changesets into a
2721 that create new changesets or propagate existing changesets into a
2723 repository. For example, the following commands are transactional,
2722 repository. For example, the following commands are transactional,
2724 and their effects can be rolled back::
2723 and their effects can be rolled back::
2725
2724
2726 commit
2725 commit
2727 import
2726 import
2728 pull
2727 pull
2729 push (with this repository as destination)
2728 push (with this repository as destination)
2730 unbundle
2729 unbundle
2731
2730
2732 This command is not intended for use on public repositories. Once
2731 This command is not intended for use on public repositories. Once
2733 changes are visible for pull by other users, rolling a transaction
2732 changes are visible for pull by other users, rolling a transaction
2734 back locally is ineffective (someone else may already have pulled
2733 back locally is ineffective (someone else may already have pulled
2735 the changes). Furthermore, a race is possible with readers of the
2734 the changes). Furthermore, a race is possible with readers of the
2736 repository; for example, an in-progress pull from the repository
2735 repository; for example, an in-progress pull from the repository
2737 may fail if a rollback is performed.
2736 may fail if a rollback is performed.
2738 """
2737 """
2739 repo.rollback()
2738 repo.rollback()
2740
2739
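# A standalone sketch of the single-level rollback described above: only the
# state captured at the start of the most recent transaction is kept, so a
# second rollback has nothing left to restore. Toy code, not Mercurial's
# transaction machinery.

class OneLevelJournal(object):
    def __init__(self, state):
        self.state = state
        self._saved = None

    def transaction(self, new_state):
        self._saved = dict(self.state)  # snapshot taken when the transaction starts
        self.state = new_state

    def rollback(self):
        if self._saved is None:
            raise RuntimeError('no rollback information available')
        self.state, self._saved = self._saved, None

if __name__ == '__main__':
    j = OneLevelJournal({'tip': 0})
    j.transaction({'tip': 1})
    j.rollback()
    print(j.state)  # {'tip': 0}; a second j.rollback() would raise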
2741 def root(ui, repo):
2740 def root(ui, repo):
2742 """print the root (top) of the current working directory
2741 """print the root (top) of the current working directory
2743
2742
2744 Print the root directory of the current repository.
2743 Print the root directory of the current repository.
2745 """
2744 """
2746 ui.write(repo.root + "\n")
2745 ui.write(repo.root + "\n")
2747
2746
2748 def serve(ui, repo, **opts):
2747 def serve(ui, repo, **opts):
2749 """export the repository via HTTP
2748 """export the repository via HTTP
2750
2749
2751 Start a local HTTP repository browser and pull server.
2750 Start a local HTTP repository browser and pull server.
2752
2751
2753 By default, the server logs accesses to stdout and errors to
2752 By default, the server logs accesses to stdout and errors to
2754 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2753 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2755 files.
2754 files.
2756 """
2755 """
2757
2756
2758 if opts["stdio"]:
2757 if opts["stdio"]:
2759 if repo is None:
2758 if repo is None:
2760 raise error.RepoError(_("There is no Mercurial repository here"
2759 raise error.RepoError(_("There is no Mercurial repository here"
2761 " (.hg not found)"))
2760 " (.hg not found)"))
2762 s = sshserver.sshserver(ui, repo)
2761 s = sshserver.sshserver(ui, repo)
2763 s.serve_forever()
2762 s.serve_forever()
2764
2763
2765 baseui = repo and repo.baseui or ui
2764 baseui = repo and repo.baseui or ui
2766 optlist = ("name templates style address port prefix ipv6"
2765 optlist = ("name templates style address port prefix ipv6"
2767 " accesslog errorlog webdir_conf certificate encoding")
2766 " accesslog errorlog webdir_conf certificate encoding")
2768 for o in optlist.split():
2767 for o in optlist.split():
2769 if opts.get(o, None):
2768 if opts.get(o, None):
2770 baseui.setconfig("web", o, str(opts[o]))
2769 baseui.setconfig("web", o, str(opts[o]))
2771 if (repo is not None) and (repo.ui != baseui):
2770 if (repo is not None) and (repo.ui != baseui):
2772 repo.ui.setconfig("web", o, str(opts[o]))
2771 repo.ui.setconfig("web", o, str(opts[o]))
2773
2772
2774 if repo is None and not ui.config("web", "webdir_conf"):
2773 if repo is None and not ui.config("web", "webdir_conf"):
2775 raise error.RepoError(_("There is no Mercurial repository here"
2774 raise error.RepoError(_("There is no Mercurial repository here"
2776 " (.hg not found)"))
2775 " (.hg not found)"))
2777
2776
2778 class service(object):
2777 class service(object):
2779 def init(self):
2778 def init(self):
2780 util.set_signal_handler()
2779 util.set_signal_handler()
2781 self.httpd = server.create_server(baseui, repo)
2780 self.httpd = server.create_server(baseui, repo)
2782
2781
2783 if not ui.verbose: return
2782 if not ui.verbose: return
2784
2783
2785 if self.httpd.prefix:
2784 if self.httpd.prefix:
2786 prefix = self.httpd.prefix.strip('/') + '/'
2785 prefix = self.httpd.prefix.strip('/') + '/'
2787 else:
2786 else:
2788 prefix = ''
2787 prefix = ''
2789
2788
2790 port = ':%d' % self.httpd.port
2789 port = ':%d' % self.httpd.port
2791 if port == ':80':
2790 if port == ':80':
2792 port = ''
2791 port = ''
2793
2792
2794 bindaddr = self.httpd.addr
2793 bindaddr = self.httpd.addr
2795 if bindaddr == '0.0.0.0':
2794 if bindaddr == '0.0.0.0':
2796 bindaddr = '*'
2795 bindaddr = '*'
2797 elif ':' in bindaddr: # IPv6
2796 elif ':' in bindaddr: # IPv6
2798 bindaddr = '[%s]' % bindaddr
2797 bindaddr = '[%s]' % bindaddr
2799
2798
2800 fqaddr = self.httpd.fqaddr
2799 fqaddr = self.httpd.fqaddr
2801 if ':' in fqaddr:
2800 if ':' in fqaddr:
2802 fqaddr = '[%s]' % fqaddr
2801 fqaddr = '[%s]' % fqaddr
2803 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2802 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2804 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2803 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2805
2804
2806 def run(self):
2805 def run(self):
2807 self.httpd.serve_forever()
2806 self.httpd.serve_forever()
2808
2807
2809 service = service()
2808 service = service()
2810
2809
2811 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2810 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2812
2811
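# A standalone sketch of the listen-address formatting done in service.init()
# above: a port of 80 is omitted from the URL, a 0.0.0.0 bind address is shown
# as '*', and IPv6 literals are bracketed. Helper name and sample values are
# illustrative only.

def listen_url(fqaddr, port, prefix, bindaddr):
    prefix = prefix.strip('/') + '/' if prefix else ''
    portstr = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:  # IPv6 literal
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    return ('listening at http://%s%s/%s (bound to %s:%d)'
            % (fqaddr, portstr, prefix, bindaddr, port))

if __name__ == '__main__':
    print(listen_url('example.com', 8000, '', '0.0.0.0'))
    # listening at http://example.com:8000/ (bound to *:8000)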
2813 def status(ui, repo, *pats, **opts):
2812 def status(ui, repo, *pats, **opts):
2814 """show changed files in the working directory
2813 """show changed files in the working directory
2815
2814
2816 Show status of files in the repository. If names are given, only
2815 Show status of files in the repository. If names are given, only
2817 files that match are shown. Files that are clean or ignored or
2816 files that match are shown. Files that are clean or ignored or
2818 the source of a copy/move operation are not listed unless
2817 the source of a copy/move operation are not listed unless
2819 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2818 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2820 Unless options described with "show only ..." are given, the
2819 Unless options described with "show only ..." are given, the
2821 options -mardu are used.
2820 options -mardu are used.
2822
2821
2823 Option -q/--quiet hides untracked (unknown and ignored) files
2822 Option -q/--quiet hides untracked (unknown and ignored) files
2824 unless explicitly requested with -u/--unknown or -i/--ignored.
2823 unless explicitly requested with -u/--unknown or -i/--ignored.
2825
2824
2826 NOTE: status may appear to disagree with diff if permissions have
2825 NOTE: status may appear to disagree with diff if permissions have
2827 changed or a merge has occurred. The standard diff format does not
2826 changed or a merge has occurred. The standard diff format does not
2828 report permission changes and diff only reports changes relative
2827 report permission changes and diff only reports changes relative
2829 to one merge parent.
2828 to one merge parent.
2830
2829
2831 If one revision is given, it is used as the base revision.
2830 If one revision is given, it is used as the base revision.
2832 If two revisions are given, the differences between them are
2831 If two revisions are given, the differences between them are
2833 shown.
2832 shown.
2834
2833
2835 The codes used to show the status of files are::
2834 The codes used to show the status of files are::
2836
2835
2837 M = modified
2836 M = modified
2838 A = added
2837 A = added
2839 R = removed
2838 R = removed
2840 C = clean
2839 C = clean
2841 ! = missing (deleted by non-hg command, but still tracked)
2840 ! = missing (deleted by non-hg command, but still tracked)
2842 ? = not tracked
2841 ? = not tracked
2843 I = ignored
2842 I = ignored
2844 = origin of the previous file listed as A (added)
2843 = origin of the previous file listed as A (added)
2845 """
2844 """
2846
2845
2847 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2846 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2848 cwd = (pats and repo.getcwd()) or ''
2847 cwd = (pats and repo.getcwd()) or ''
2849 end = opts.get('print0') and '\0' or '\n'
2848 end = opts.get('print0') and '\0' or '\n'
2850 copy = {}
2849 copy = {}
2851 states = 'modified added removed deleted unknown ignored clean'.split()
2850 states = 'modified added removed deleted unknown ignored clean'.split()
2852 show = [k for k in states if opts.get(k)]
2851 show = [k for k in states if opts.get(k)]
2853 if opts.get('all'):
2852 if opts.get('all'):
2854 show += ui.quiet and (states[:4] + ['clean']) or states
2853 show += ui.quiet and (states[:4] + ['clean']) or states
2855 if not show:
2854 if not show:
2856 show = ui.quiet and states[:4] or states[:5]
2855 show = ui.quiet and states[:4] or states[:5]
2857
2856
2858 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2857 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2859 'ignored' in show, 'clean' in show, 'unknown' in show)
2858 'ignored' in show, 'clean' in show, 'unknown' in show)
2860 changestates = zip(states, 'MAR!?IC', stat)
2859 changestates = zip(states, 'MAR!?IC', stat)
2861
2860
2862 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2861 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2863 ctxn = repo[nullid]
2862 ctxn = repo[nullid]
2864 ctx1 = repo[node1]
2863 ctx1 = repo[node1]
2865 ctx2 = repo[node2]
2864 ctx2 = repo[node2]
2866 added = stat[1]
2865 added = stat[1]
2867 if node2 is None:
2866 if node2 is None:
2868 added = stat[0] + stat[1] # merged?
2867 added = stat[0] + stat[1] # merged?
2869
2868
2870 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2869 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2871 if k in added:
2870 if k in added:
2872 copy[k] = v
2871 copy[k] = v
2873 elif v in added:
2872 elif v in added:
2874 copy[v] = k
2873 copy[v] = k
2875
2874
2876 for state, char, files in changestates:
2875 for state, char, files in changestates:
2877 if state in show:
2876 if state in show:
2878 format = "%s %%s%s" % (char, end)
2877 format = "%s %%s%s" % (char, end)
2879 if opts.get('no_status'):
2878 if opts.get('no_status'):
2880 format = "%%s%s" % end
2879 format = "%%s%s" % end
2881
2880
2882 for f in files:
2881 for f in files:
2883 ui.write(format % repo.pathto(f, cwd))
2882 ui.write(format % repo.pathto(f, cwd))
2884 if f in copy:
2883 if f in copy:
2885 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2884 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2886
2885
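# A standalone sketch of how status() above pairs its seven result lists with
# one-letter codes via zip(states, 'MAR!?IC', stat). The sample data is made
# up; the real lists come from repo.status().

states = 'modified added removed deleted unknown ignored clean'.split()
stat = (['commands.py'], ['newfile.py'], [], [], ['scratch.txt'], [], [])

for state, char, files in zip(states, 'MAR!?IC', stat):
    for f in files:
        print('%s %s' % (char, f))
# M commands.py
# A newfile.py
# ? scratch.txt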
2887 def summary(ui, repo, **opts):
2886 def summary(ui, repo, **opts):
2888 """summarize working directory state
2887 """summarize working directory state
2889
2888
2890 This generates a brief summary of the working directory state,
2889 This generates a brief summary of the working directory state,
2891 including parents, branch, commit status, and available updates.
2890 including parents, branch, commit status, and available updates.
2892
2891
2893 With the --remote option, this will check the default paths for
2892 With the --remote option, this will check the default paths for
2894 incoming and outgoing changes. This can be time-consuming.
2893 incoming and outgoing changes. This can be time-consuming.
2895 """
2894 """
2896
2895
2897 ctx = repo[None]
2896 ctx = repo[None]
2898 parents = ctx.parents()
2897 parents = ctx.parents()
2899 pnode = parents[0].node()
2898 pnode = parents[0].node()
2900 tags = repo.tags()
2899 tags = repo.tags()
2901
2900
2902 for p in parents:
2901 for p in parents:
2903 t = ' '.join([t for t in tags if tags[t] == p.node()])
2902 t = ' '.join([t for t in tags if tags[t] == p.node()])
2904 if p.rev() == -1:
2903 if p.rev() == -1:
2905 if not len(repo):
2904 if not len(repo):
2906 t += _(' (empty repository)')
2905 t += _(' (empty repository)')
2907 else:
2906 else:
2908 t += _(' (no revision checked out)')
2907 t += _(' (no revision checked out)')
2909 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
2908 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
2910 if p.description():
2909 if p.description():
2911 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
2910 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
2912
2911
2913 branch = ctx.branch()
2912 branch = ctx.branch()
2914 bheads = repo.branchheads(branch)
2913 bheads = repo.branchheads(branch)
2915 ui.status(_('branch: %s\n') % branch)
2914 ui.status(_('branch: %s\n') % branch)
2916
2915
2917 st = list(repo.status(unknown=True))[:7]
2916 st = list(repo.status(unknown=True))[:7]
2918 ms = merge_.mergestate(repo)
2917 ms = merge_.mergestate(repo)
2919 st.append([f for f in ms if ms[f] == 'u'])
2918 st.append([f for f in ms if ms[f] == 'u'])
2920 labels = [_('%d modified'), _('%d added'), _('%d removed'),
2919 labels = [_('%d modified'), _('%d added'), _('%d removed'),
2921 _('%d deleted'), _('%d unknown'), _('%d ignored'),
2920 _('%d deleted'), _('%d unknown'), _('%d ignored'),
2922 _('%d unresolved')]
2921 _('%d unresolved')]
2923 t = []
2922 t = []
2924 for s,l in zip(st, labels):
2923 for s,l in zip(st, labels):
2925 if s:
2924 if s:
2926 t.append(l % len(s))
2925 t.append(l % len(s))
2927
2926
2928 t = ', '.join(t)
2927 t = ', '.join(t)
2929
2928
2930 if len(parents) > 1:
2929 if len(parents) > 1:
2931 t += _(' (merge)')
2930 t += _(' (merge)')
2932 elif branch != parents[0].branch():
2931 elif branch != parents[0].branch():
2933 t += _(' (new branch)')
2932 t += _(' (new branch)')
2934 elif (not st[0] and not st[1] and not st[2]):
2933 elif (not st[0] and not st[1] and not st[2]):
2935 t += _(' (clean)')
2934 t += _(' (clean)')
2936 elif pnode not in bheads:
2935 elif pnode not in bheads:
2937 t += _(' (new branch head)')
2936 t += _(' (new branch head)')
2938
2937
2939 if 'clean' in t:
2938 if 'clean' in t:
2940 ui.status(_('commit: %s\n') % t.strip())
2939 ui.status(_('commit: %s\n') % t.strip())
2941 else:
2940 else:
2942 ui.write(_('commit: %s\n') % t.strip())
2941 ui.write(_('commit: %s\n') % t.strip())
2943
2942
2944 # all ancestors of branch heads - all ancestors of parent = new csets
2943 # all ancestors of branch heads - all ancestors of parent = new csets
2945 new = [0] * len(repo)
2944 new = [0] * len(repo)
2946 cl = repo.changelog
2945 cl = repo.changelog
2947 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
2946 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
2948 new[a] = 1
2947 new[a] = 1
2949 for a in cl.ancestors(*[p.rev() for p in parents]):
2948 for a in cl.ancestors(*[p.rev() for p in parents]):
2950 new[a] = 0
2949 new[a] = 0
2951 new = sum(new)
2950 new = sum(new)
2952
2951
2953 if new == 0:
2952 if new == 0:
2954 ui.status(_('update: (current)\n'))
2953 ui.status(_('update: (current)\n'))
2955 elif pnode not in bheads:
2954 elif pnode not in bheads:
2956 ui.write(_('update: %d new changesets (update)\n') % new)
2955 ui.write(_('update: %d new changesets (update)\n') % new)
2957 else:
2956 else:
2958 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
2957 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
2959 (new, len(bheads)))
2958 (new, len(bheads)))
2960
2959
2961 if opts.get('remote'):
2960 if opts.get('remote'):
2962 t = []
2961 t = []
2963 source, revs, checkout = hg.parseurl(ui.expandpath('default'),
2962 source, revs, checkout = hg.parseurl(ui.expandpath('default'),
2964 opts.get('rev'))
2963 opts.get('rev'))
2965 other = hg.repository(cmdutil.remoteui(repo, {}), source)
2964 other = hg.repository(cmdutil.remoteui(repo, {}), source)
2966 ui.debug('comparing with %s\n' % url.hidepassword(source))
2965 ui.debug('comparing with %s\n' % url.hidepassword(source))
2967 repo.ui.pushbuffer()
2966 repo.ui.pushbuffer()
2968 common, incoming, rheads = repo.findcommonincoming(other)
2967 common, incoming, rheads = repo.findcommonincoming(other)
2969 repo.ui.popbuffer()
2968 repo.ui.popbuffer()
2970 if incoming:
2969 if incoming:
2971 t.append(_('1 or more incoming'))
2970 t.append(_('1 or more incoming'))
2972
2971
2973 dest, revs, checkout = hg.parseurl(
2972 dest, revs, checkout = hg.parseurl(
2974 ui.expandpath('default-push', 'default'))
2973 ui.expandpath('default-push', 'default'))
2975 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
2974 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
2976 ui.debug('comparing with %s\n' % url.hidepassword(dest))
2975 ui.debug('comparing with %s\n' % url.hidepassword(dest))
2977 repo.ui.pushbuffer()
2976 repo.ui.pushbuffer()
2978 o = repo.findoutgoing(other)
2977 o = repo.findoutgoing(other)
2979 repo.ui.popbuffer()
2978 repo.ui.popbuffer()
2980 o = repo.changelog.nodesbetween(o, revs)[0]
2979 o = repo.changelog.nodesbetween(o, revs)[0]
2981 if o:
2980 if o:
2982 t.append(_('%d outgoing') % len(o))
2981 t.append(_('%d outgoing') % len(o))
2983
2982
2984 if t:
2983 if t:
2985 ui.write(_('remote: %s\n') % (', '.join(t)))
2984 ui.write(_('remote: %s\n') % (', '.join(t)))
2986 else:
2985 else:
2987 ui.status(_('remote: (synced)\n'))
2986 ui.status(_('remote: (synced)\n'))
2988
2987
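# A standalone sketch of the "update:" computation above: new changesets are
# (ancestors of the branch heads) minus (ancestors of the working directory
# parents). The toy graph below treats ancestry as reflexive and is purely
# illustrative.

parents_of = {0: [], 1: [0], 2: [1], 3: [1], 4: [3]}  # rev -> parent revs

def ancestors(revs):
    seen, stack = set(), list(revs)
    while stack:
        r = stack.pop()
        if r not in seen:
            seen.add(r)
            stack.extend(parents_of[r])
    return seen

branch_heads = [2, 4]
working_parents = [2]
new = ancestors(branch_heads) - ancestors(working_parents)
print(len(new))  # 2 -- revs 3 and 4 would arrive on update/merge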
2989 def tag(ui, repo, name1, *names, **opts):
2988 def tag(ui, repo, name1, *names, **opts):
2990 """add one or more tags for the current or given revision
2989 """add one or more tags for the current or given revision
2991
2990
2992 Name a particular revision using <name>.
2991 Name a particular revision using <name>.
2993
2992
2994 Tags are used to name particular revisions of the repository and are
2993 Tags are used to name particular revisions of the repository and are
2995 very useful for comparing different revisions, going back to significant
2994 very useful for comparing different revisions, going back to significant
2996 earlier versions, or marking branch points as releases.
2995 earlier versions, or marking branch points as releases.
2997
2996
2998 If no revision is given, the parent of the working directory is
2997 If no revision is given, the parent of the working directory is
2999 used, or tip if no revision is checked out.
2998 used, or tip if no revision is checked out.
3000
2999
3001 To facilitate version control, distribution, and merging of tags,
3000 To facilitate version control, distribution, and merging of tags,
3002 they are stored as a file named ".hgtags" which is managed
3001 they are stored as a file named ".hgtags" which is managed
3003 similarly to other project files and can be hand-edited if
3002 similarly to other project files and can be hand-edited if
3004 necessary. The file '.hg/localtags' is used for local tags (not
3003 necessary. The file '.hg/localtags' is used for local tags (not
3005 shared among repositories).
3004 shared among repositories).
3006
3005
3007 See 'hg help dates' for a list of formats valid for -d/--date.
3006 See 'hg help dates' for a list of formats valid for -d/--date.
3008 """
3007 """
3009
3008
3010 rev_ = "."
3009 rev_ = "."
3011 names = (name1,) + names
3010 names = (name1,) + names
3012 if len(names) != len(set(names)):
3011 if len(names) != len(set(names)):
3013 raise util.Abort(_('tag names must be unique'))
3012 raise util.Abort(_('tag names must be unique'))
3014 for n in names:
3013 for n in names:
3015 if n in ['tip', '.', 'null']:
3014 if n in ['tip', '.', 'null']:
3016 raise util.Abort(_('the name \'%s\' is reserved') % n)
3015 raise util.Abort(_('the name \'%s\' is reserved') % n)
3017 if opts.get('rev') and opts.get('remove'):
3016 if opts.get('rev') and opts.get('remove'):
3018 raise util.Abort(_("--rev and --remove are incompatible"))
3017 raise util.Abort(_("--rev and --remove are incompatible"))
3019 if opts.get('rev'):
3018 if opts.get('rev'):
3020 rev_ = opts['rev']
3019 rev_ = opts['rev']
3021 message = opts.get('message')
3020 message = opts.get('message')
3022 if opts.get('remove'):
3021 if opts.get('remove'):
3023 expectedtype = opts.get('local') and 'local' or 'global'
3022 expectedtype = opts.get('local') and 'local' or 'global'
3024 for n in names:
3023 for n in names:
3025 if not repo.tagtype(n):
3024 if not repo.tagtype(n):
3026 raise util.Abort(_('tag \'%s\' does not exist') % n)
3025 raise util.Abort(_('tag \'%s\' does not exist') % n)
3027 if repo.tagtype(n) != expectedtype:
3026 if repo.tagtype(n) != expectedtype:
3028 if expectedtype == 'global':
3027 if expectedtype == 'global':
3029 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3028 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3030 else:
3029 else:
3031 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3030 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3032 rev_ = nullid
3031 rev_ = nullid
3033 if not message:
3032 if not message:
3034 # we don't translate commit messages
3033 # we don't translate commit messages
3035 message = 'Removed tag %s' % ', '.join(names)
3034 message = 'Removed tag %s' % ', '.join(names)
3036 elif not opts.get('force'):
3035 elif not opts.get('force'):
3037 for n in names:
3036 for n in names:
3038 if n in repo.tags():
3037 if n in repo.tags():
3039 raise util.Abort(_('tag \'%s\' already exists '
3038 raise util.Abort(_('tag \'%s\' already exists '
3040 '(use -f to force)') % n)
3039 '(use -f to force)') % n)
3041 if not rev_ and repo.dirstate.parents()[1] != nullid:
3040 if not rev_ and repo.dirstate.parents()[1] != nullid:
3042 raise util.Abort(_('uncommitted merge - please provide a '
3041 raise util.Abort(_('uncommitted merge - please provide a '
3043 'specific revision'))
3042 'specific revision'))
3044 r = repo[rev_].node()
3043 r = repo[rev_].node()
3045
3044
3046 if not message:
3045 if not message:
3047 # we don't translate commit messages
3046 # we don't translate commit messages
3048 message = ('Added tag %s for changeset %s' %
3047 message = ('Added tag %s for changeset %s' %
3049 (', '.join(names), short(r)))
3048 (', '.join(names), short(r)))
3050
3049
3051 date = opts.get('date')
3050 date = opts.get('date')
3052 if date:
3051 if date:
3053 date = util.parsedate(date)
3052 date = util.parsedate(date)
3054
3053
3055 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3054 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3056
3055
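# A standalone sketch of the up-front name checks in tag() above: names must
# be unique and must not collide with the reserved names 'tip', '.' or
# 'null'. The helper below is illustrative; the real code raises util.Abort.

def check_tag_names(names):
    if len(names) != len(set(names)):
        raise ValueError('tag names must be unique')
    for n in names:
        if n in ('tip', '.', 'null'):
            raise ValueError("the name '%s' is reserved" % n)

if __name__ == '__main__':
    check_tag_names(['v1.0', 'stable'])  # passes silently
    try:
        check_tag_names(['v1.0', 'tip'])
    except ValueError as err:
        print(err)  # the name 'tip' is reserved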
3057 def tags(ui, repo):
3056 def tags(ui, repo):
3058 """list repository tags
3057 """list repository tags
3059
3058
3060 This lists both regular and local tags. When the -v/--verbose
3059 This lists both regular and local tags. When the -v/--verbose
3061 switch is used, a third column "local" is printed for local tags.
3060 switch is used, a third column "local" is printed for local tags.
3062 """
3061 """
3063
3062
3064 hexfunc = ui.debugflag and hex or short
3063 hexfunc = ui.debugflag and hex or short
3065 tagtype = ""
3064 tagtype = ""
3066
3065
3067 for t, n in reversed(repo.tagslist()):
3066 for t, n in reversed(repo.tagslist()):
3068 if ui.quiet:
3067 if ui.quiet:
3069 ui.write("%s\n" % t)
3068 ui.write("%s\n" % t)
3070 continue
3069 continue
3071
3070
3072 try:
3071 try:
3073 hn = hexfunc(n)
3072 hn = hexfunc(n)
3074 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3073 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3075 except error.LookupError:
3074 except error.LookupError:
3076 r = " ?:%s" % hn
3075 r = " ?:%s" % hn
3077 else:
3076 else:
3078 spaces = " " * (30 - encoding.colwidth(t))
3077 spaces = " " * (30 - encoding.colwidth(t))
3079 if ui.verbose:
3078 if ui.verbose:
3080 if repo.tagtype(t) == 'local':
3079 if repo.tagtype(t) == 'local':
3081 tagtype = " local"
3080 tagtype = " local"
3082 else:
3081 else:
3083 tagtype = ""
3082 tagtype = ""
3084 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3083 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3085
3084
3086 def tip(ui, repo, **opts):
3085 def tip(ui, repo, **opts):
3087 """show the tip revision
3086 """show the tip revision
3088
3087
3089 The tip revision (usually just called the tip) is the changeset
3088 The tip revision (usually just called the tip) is the changeset
3090 most recently added to the repository (and therefore the most
3089 most recently added to the repository (and therefore the most
3091 recently changed head).
3090 recently changed head).
3092
3091
3093 If you have just made a commit, that commit will be the tip. If
3092 If you have just made a commit, that commit will be the tip. If
3094 you have just pulled changes from another repository, the tip of
3093 you have just pulled changes from another repository, the tip of
3095 that repository becomes the current tip. The "tip" tag is special
3094 that repository becomes the current tip. The "tip" tag is special
3096 and cannot be renamed or assigned to a different changeset.
3095 and cannot be renamed or assigned to a different changeset.
3097 """
3096 """
3098 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
3097 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
3099
3098
3100 def unbundle(ui, repo, fname1, *fnames, **opts):
3099 def unbundle(ui, repo, fname1, *fnames, **opts):
3101 """apply one or more changegroup files
3100 """apply one or more changegroup files
3102
3101
3103 Apply one or more compressed changegroup files generated by the
3102 Apply one or more compressed changegroup files generated by the
3104 bundle command.
3103 bundle command.
3105 """
3104 """
3106 fnames = (fname1,) + fnames
3105 fnames = (fname1,) + fnames
3107
3106
3108 lock = repo.lock()
3107 lock = repo.lock()
3109 try:
3108 try:
3110 for fname in fnames:
3109 for fname in fnames:
3111 f = url.open(ui, fname)
3110 f = url.open(ui, fname)
3112 gen = changegroup.readbundle(f, fname)
3111 gen = changegroup.readbundle(f, fname)
3113 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3112 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3114 finally:
3113 finally:
3115 lock.release()
3114 lock.release()
3116
3115
3117 return postincoming(ui, repo, modheads, opts.get('update'), None)
3116 return postincoming(ui, repo, modheads, opts.get('update'), None)
3118
3117
3119 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3118 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3120 """update working directory
3119 """update working directory
3121
3120
3122 Update the repository's working directory to the specified
3121 Update the repository's working directory to the specified
3123 revision, or the tip of the current branch if none is specified.
3122 revision, or the tip of the current branch if none is specified.
3124 Use null as the revision to remove the working copy (like 'hg
3123 Use null as the revision to remove the working copy (like 'hg
3125 clone -U').
3124 clone -U').
3126
3125
3127 When the working directory contains no uncommitted changes, it
3126 When the working directory contains no uncommitted changes, it
3128 will be replaced by the state of the requested revision from the
3127 will be replaced by the state of the requested revision from the
3129 repository. When the requested revision is on a different branch,
3128 repository. When the requested revision is on a different branch,
3130 the working directory will additionally be switched to that
3129 the working directory will additionally be switched to that
3131 branch.
3130 branch.
3132
3131
3133 When there are uncommitted changes, use option -C/--clean to
3132 When there are uncommitted changes, use option -C/--clean to
3134 discard them, forcibly replacing the state of the working
3133 discard them, forcibly replacing the state of the working
3135 directory with the requested revision. Alternatively, use -c/--check
3134 directory with the requested revision. Alternatively, use -c/--check
3136 to abort.
3135 to abort.
3137
3136
3138 When there are uncommitted changes and option -C/--clean is not
3137 When there are uncommitted changes and option -C/--clean is not
3139 used, and the parent revision and requested revision are on the
3138 used, and the parent revision and requested revision are on the
3140 same branch, and one of them is an ancestor of the other, then the
3139 same branch, and one of them is an ancestor of the other, then the
3141 new working directory will contain the requested revision merged
3140 new working directory will contain the requested revision merged
3142 with the uncommitted changes. Otherwise, the update will fail with
3141 with the uncommitted changes. Otherwise, the update will fail with
3143 a suggestion to use 'merge' or 'update -C' instead.
3142 a suggestion to use 'merge' or 'update -C' instead.
3144
3143
3145 If you want to update just one file to an older revision, use
3144 If you want to update just one file to an older revision, use
3146 revert.
3145 revert.
3147
3146
3148 See 'hg help dates' for a list of formats valid for -d/--date.
3147 See 'hg help dates' for a list of formats valid for -d/--date.
3149 """
3148 """
3150 if rev and node:
3149 if rev and node:
3151 raise util.Abort(_("please specify just one revision"))
3150 raise util.Abort(_("please specify just one revision"))
3152
3151
3153 if not rev:
3152 if not rev:
3154 rev = node
3153 rev = node
3155
3154
3156 if check and clean:
3155 if check and clean:
3157 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3156 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3158
3157
3159 if check:
3158 if check:
3160 # we could use dirty() but we can ignore merge and branch trivia
3159 # we could use dirty() but we can ignore merge and branch trivia
3161 c = repo[None]
3160 c = repo[None]
3162 if c.modified() or c.added() or c.removed():
3161 if c.modified() or c.added() or c.removed():
3163 raise util.Abort(_("uncommitted local changes"))
3162 raise util.Abort(_("uncommitted local changes"))
3164
3163
3165 if date:
3164 if date:
3166 if rev:
3165 if rev:
3167 raise util.Abort(_("you can't specify a revision and a date"))
3166 raise util.Abort(_("you can't specify a revision and a date"))
3168 rev = cmdutil.finddate(ui, repo, date)
3167 rev = cmdutil.finddate(ui, repo, date)
3169
3168
3170 if clean or check:
3169 if clean or check:
3171 return hg.clean(repo, rev)
3170 return hg.clean(repo, rev)
3172 else:
3171 else:
3173 return hg.update(repo, rev)
3172 return hg.update(repo, rev)
3174
3173
3175 def verify(ui, repo):
3174 def verify(ui, repo):
3176 """verify the integrity of the repository
3175 """verify the integrity of the repository
3177
3176
3178 Verify the integrity of the current repository.
3177 Verify the integrity of the current repository.
3179
3178
3180 This will perform an extensive check of the repository's
3179 This will perform an extensive check of the repository's
3181 integrity, validating the hashes and checksums of each entry in
3180 integrity, validating the hashes and checksums of each entry in
3182 the changelog, manifest, and tracked files, as well as the
3181 the changelog, manifest, and tracked files, as well as the
3183 integrity of their crosslinks and indices.
3182 integrity of their crosslinks and indices.
3184 """
3183 """
3185 return hg.verify(repo)
3184 return hg.verify(repo)
3186
3185
3187 def version_(ui):
3186 def version_(ui):
3188 """output version and copyright information"""
3187 """output version and copyright information"""
3189 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3188 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3190 % util.version())
3189 % util.version())
3191 ui.status(_(
3190 ui.status(_(
3192 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
3191 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
3193 "This is free software; see the source for copying conditions. "
3192 "This is free software; see the source for copying conditions. "
3194 "There is NO\nwarranty; "
3193 "There is NO\nwarranty; "
3195 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3194 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3196 ))
3195 ))
3197
3196
3198 # Command options and aliases are listed here, alphabetically
3197 # Command options and aliases are listed here, alphabetically
3199
3198
3200 globalopts = [
3199 globalopts = [
3201 ('R', 'repository', '',
3200 ('R', 'repository', '',
3202 _('repository root directory or name of overlay bundle file')),
3201 _('repository root directory or name of overlay bundle file')),
3203 ('', 'cwd', '', _('change working directory')),
3202 ('', 'cwd', '', _('change working directory')),
3204 ('y', 'noninteractive', None,
3203 ('y', 'noninteractive', None,
3205 _('do not prompt, assume \'yes\' for any required answers')),
3204 _('do not prompt, assume \'yes\' for any required answers')),
3206 ('q', 'quiet', None, _('suppress output')),
3205 ('q', 'quiet', None, _('suppress output')),
3207 ('v', 'verbose', None, _('enable additional output')),
3206 ('v', 'verbose', None, _('enable additional output')),
3208 ('', 'config', [], _('set/override config option')),
3207 ('', 'config', [], _('set/override config option')),
3209 ('', 'debug', None, _('enable debugging output')),
3208 ('', 'debug', None, _('enable debugging output')),
3210 ('', 'debugger', None, _('start debugger')),
3209 ('', 'debugger', None, _('start debugger')),
3211 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3210 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3212 ('', 'encodingmode', encoding.encodingmode,
3211 ('', 'encodingmode', encoding.encodingmode,
3213 _('set the charset encoding mode')),
3212 _('set the charset encoding mode')),
3214 ('', 'traceback', None, _('print traceback on exception')),
3213 ('', 'traceback', None, _('print traceback on exception')),
3215 ('', 'time', None, _('time how long the command takes')),
3214 ('', 'time', None, _('time how long the command takes')),
3216 ('', 'profile', None, _('print command execution profile')),
3215 ('', 'profile', None, _('print command execution profile')),
3217 ('', 'version', None, _('output version information and exit')),
3216 ('', 'version', None, _('output version information and exit')),
3218 ('h', 'help', None, _('display help and exit')),
3217 ('h', 'help', None, _('display help and exit')),
3219 ]
3218 ]
3220
3219
3221 dryrunopts = [('n', 'dry-run', None,
3220 dryrunopts = [('n', 'dry-run', None,
3222 _('do not perform actions, just print output'))]
3221 _('do not perform actions, just print output'))]
3223
3222
3224 remoteopts = [
3223 remoteopts = [
3225 ('e', 'ssh', '', _('specify ssh command to use')),
3224 ('e', 'ssh', '', _('specify ssh command to use')),
3226 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3225 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3227 ]
3226 ]
3228
3227
3229 walkopts = [
3228 walkopts = [
3230 ('I', 'include', [], _('include names matching the given patterns')),
3229 ('I', 'include', [], _('include names matching the given patterns')),
3231 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3230 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3232 ]
3231 ]
3233
3232
3234 commitopts = [
3233 commitopts = [
3235 ('m', 'message', '', _('use <text> as commit message')),
3234 ('m', 'message', '', _('use <text> as commit message')),
3236 ('l', 'logfile', '', _('read commit message from <file>')),
3235 ('l', 'logfile', '', _('read commit message from <file>')),
3237 ]
3236 ]
3238
3237
3239 commitopts2 = [
3238 commitopts2 = [
3240 ('d', 'date', '', _('record datecode as commit date')),
3239 ('d', 'date', '', _('record datecode as commit date')),
3241 ('u', 'user', '', _('record the specified user as committer')),
3240 ('u', 'user', '', _('record the specified user as committer')),
3242 ]
3241 ]
3243
3242
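# A standalone sketch of the option-tuple convention used by the lists above:
# each entry is (short name, long name, default value, help text), and long
# names written with '-' are read back from the opts dict with '_' (compare
# dryrunopts's 'dry-run' with the opts.get('dry_run') calls earlier). The
# sample options below are copied from this file; the helper is illustrative.

def defaults_from(optlist):
    return dict((longname.replace('-', '_'), default)
                for shortname, longname, default, helptext in optlist)

sample_opts = [
    ('m', 'message', '', 'use <text> as commit message'),
    ('n', 'dry-run', None, 'do not perform actions, just print output'),
]

if __name__ == '__main__':
    print(defaults_from(sample_opts))  # {'message': '', 'dry_run': None}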
3244 templateopts = [
3243 templateopts = [
3245 ('', 'style', '', _('display using template map file')),
3244 ('', 'style', '', _('display using template map file')),
3246 ('', 'template', '', _('display with template')),
3245 ('', 'template', '', _('display with template')),
3247 ]
3246 ]
3248
3247
3249 logopts = [
3248 logopts = [
3250 ('p', 'patch', None, _('show patch')),
3249 ('p', 'patch', None, _('show patch')),
3251 ('g', 'git', None, _('use git extended diff format')),
3250 ('g', 'git', None, _('use git extended diff format')),
3252 ('l', 'limit', '', _('limit number of changes displayed')),
3251 ('l', 'limit', '', _('limit number of changes displayed')),
3253 ('M', 'no-merges', None, _('do not show merges')),
3252 ('M', 'no-merges', None, _('do not show merges')),
3254 ] + templateopts
3253 ] + templateopts
3255
3254
3256 diffopts = [
3255 diffopts = [
3257 ('a', 'text', None, _('treat all files as text')),
3256 ('a', 'text', None, _('treat all files as text')),
3258 ('g', 'git', None, _('use git extended diff format')),
3257 ('g', 'git', None, _('use git extended diff format')),
3259 ('', 'nodates', None, _("don't include dates in diff headers"))
3258 ('', 'nodates', None, _("don't include dates in diff headers"))
3260 ]
3259 ]
3261
3260
3262 diffopts2 = [
3261 diffopts2 = [
3263 ('p', 'show-function', None, _('show which function each change is in')),
3262 ('p', 'show-function', None, _('show which function each change is in')),
3264 ('w', 'ignore-all-space', None,
3263 ('w', 'ignore-all-space', None,
3265 _('ignore white space when comparing lines')),
3264 _('ignore white space when comparing lines')),
3266 ('b', 'ignore-space-change', None,
3265 ('b', 'ignore-space-change', None,
3267 _('ignore changes in the amount of white space')),
3266 _('ignore changes in the amount of white space')),
3268 ('B', 'ignore-blank-lines', None,
3267 ('B', 'ignore-blank-lines', None,
3269 _('ignore changes whose lines are all blank')),
3268 _('ignore changes whose lines are all blank')),
3270 ('U', 'unified', '', _('number of lines of context to show')),
3269 ('U', 'unified', '', _('number of lines of context to show')),
3271 ('', 'stat', None, _('output diffstat-style summary of changes')),
3270 ('', 'stat', None, _('output diffstat-style summary of changes')),
3272 ]
3271 ]
3273
3272
3274 similarityopts = [
3273 similarityopts = [
3275 ('s', 'similarity', '',
3274 ('s', 'similarity', '',
3276 _('guess renamed files by similarity (0<=s<=100)'))
3275 _('guess renamed files by similarity (0<=s<=100)'))
3277 ]
3276 ]
3278
3277
3279 table = {
3278 table = {
3280 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3279 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3281 "addremove":
3280 "addremove":
3282 (addremove, similarityopts + walkopts + dryrunopts,
3281 (addremove, similarityopts + walkopts + dryrunopts,
3283 _('[OPTION]... [FILE]...')),
3282 _('[OPTION]... [FILE]...')),
3284 "^annotate|blame":
3283 "^annotate|blame":
3285 (annotate,
3284 (annotate,
3286 [('r', 'rev', '', _('annotate the specified revision')),
3285 [('r', 'rev', '', _('annotate the specified revision')),
3287 ('f', 'follow', None, _('follow file copies and renames')),
3286 ('f', 'follow', None, _('follow file copies and renames')),
3288 ('a', 'text', None, _('treat all files as text')),
3287 ('a', 'text', None, _('treat all files as text')),
3289 ('u', 'user', None, _('list the author (long with -v)')),
3288 ('u', 'user', None, _('list the author (long with -v)')),
3290 ('d', 'date', None, _('list the date (short with -q)')),
3289 ('d', 'date', None, _('list the date (short with -q)')),
3291 ('n', 'number', None, _('list the revision number (default)')),
3290 ('n', 'number', None, _('list the revision number (default)')),
3292 ('c', 'changeset', None, _('list the changeset')),
3291 ('c', 'changeset', None, _('list the changeset')),
3293 ('l', 'line-number', None,
3292 ('l', 'line-number', None,
3294 _('show line number at the first appearance'))
3293 _('show line number at the first appearance'))
3295 ] + walkopts,
3294 ] + walkopts,
3296 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3295 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3297 "archive":
3296 "archive":
3298 (archive,
3297 (archive,
3299 [('', 'no-decode', None, _('do not pass files through decoders')),
3298 [('', 'no-decode', None, _('do not pass files through decoders')),
3300 ('p', 'prefix', '', _('directory prefix for files in archive')),
3299 ('p', 'prefix', '', _('directory prefix for files in archive')),
3301 ('r', 'rev', '', _('revision to distribute')),
3300 ('r', 'rev', '', _('revision to distribute')),
3302 ('t', 'type', '', _('type of distribution to create')),
3301 ('t', 'type', '', _('type of distribution to create')),
3303 ] + walkopts,
3302 ] + walkopts,
3304 _('[OPTION]... DEST')),
3303 _('[OPTION]... DEST')),
3305 "backout":
3304 "backout":
3306 (backout,
3305 (backout,
3307 [('', 'merge', None,
3306 [('', 'merge', None,
3308 _('merge with old dirstate parent after backout')),
3307 _('merge with old dirstate parent after backout')),
3309 ('', 'parent', '', _('parent to choose when backing out merge')),
3308 ('', 'parent', '', _('parent to choose when backing out merge')),
3310 ('r', 'rev', '', _('revision to backout')),
3309 ('r', 'rev', '', _('revision to backout')),
3311 ] + walkopts + commitopts + commitopts2,
3310 ] + walkopts + commitopts + commitopts2,
3312 _('[OPTION]... [-r] REV')),
3311 _('[OPTION]... [-r] REV')),
3313 "bisect":
3312 "bisect":
3314 (bisect,
3313 (bisect,
3315 [('r', 'reset', False, _('reset bisect state')),
3314 [('r', 'reset', False, _('reset bisect state')),
3316 ('g', 'good', False, _('mark changeset good')),
3315 ('g', 'good', False, _('mark changeset good')),
3317 ('b', 'bad', False, _('mark changeset bad')),
3316 ('b', 'bad', False, _('mark changeset bad')),
3318 ('s', 'skip', False, _('skip testing changeset')),
3317 ('s', 'skip', False, _('skip testing changeset')),
3319 ('c', 'command', '', _('use command to check changeset state')),
3318 ('c', 'command', '', _('use command to check changeset state')),
3320 ('U', 'noupdate', False, _('do not update to target'))],
3319 ('U', 'noupdate', False, _('do not update to target'))],
3321 _("[-gbsr] [-c CMD] [REV]")),
3320 _("[-gbsr] [-c CMD] [REV]")),
3322 "branch":
3321 "branch":
3323 (branch,
3322 (branch,
3324 [('f', 'force', None,
3323 [('f', 'force', None,
3325 _('set branch name even if it shadows an existing branch')),
3324 _('set branch name even if it shadows an existing branch')),
3326 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3325 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3327 _('[-fC] [NAME]')),
3326 _('[-fC] [NAME]')),
3328 "branches":
3327 "branches":
3329 (branches,
3328 (branches,
3330 [('a', 'active', False,
3329 [('a', 'active', False,
3331 _('show only branches that have unmerged heads')),
3330 _('show only branches that have unmerged heads')),
3332 ('c', 'closed', False,
3331 ('c', 'closed', False,
3333 _('show normal and closed branches'))],
3332 _('show normal and closed branches'))],
3334 _('[-a]')),
3333 _('[-a]')),
3335 "bundle":
3334 "bundle":
3336 (bundle,
3335 (bundle,
3337 [('f', 'force', None,
3336 [('f', 'force', None,
3338 _('run even when remote repository is unrelated')),
3337 _('run even when remote repository is unrelated')),
3339 ('r', 'rev', [],
3338 ('r', 'rev', [],
3340 _('a changeset up to which you would like to bundle')),
3339 _('a changeset up to which you would like to bundle')),
3341 ('', 'base', [],
3340 ('', 'base', [],
3342 _('a base changeset to specify instead of a destination')),
3341 _('a base changeset to specify instead of a destination')),
3343 ('a', 'all', None, _('bundle all changesets in the repository')),
3342 ('a', 'all', None, _('bundle all changesets in the repository')),
3344 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3343 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3345 ] + remoteopts,
3344 ] + remoteopts,
3346 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3345 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3347 "cat":
3346 "cat":
3348 (cat,
3347 (cat,
3349 [('o', 'output', '', _('print output to file with formatted name')),
3348 [('o', 'output', '', _('print output to file with formatted name')),
3350 ('r', 'rev', '', _('print the given revision')),
3349 ('r', 'rev', '', _('print the given revision')),
3351 ('', 'decode', None, _('apply any matching decode filter')),
3350 ('', 'decode', None, _('apply any matching decode filter')),
3352 ] + walkopts,
3351 ] + walkopts,
3353 _('[OPTION]... FILE...')),
3352 _('[OPTION]... FILE...')),
3354 "^clone":
3353 "^clone":
3355 (clone,
3354 (clone,
3356 [('U', 'noupdate', None,
3355 [('U', 'noupdate', None,
3357 _('the clone will only contain a repository (no working copy)')),
3356 _('the clone will only contain a repository (no working copy)')),
3358 ('r', 'rev', [],
3357 ('r', 'rev', [],
3359 _('a changeset you would like to have after cloning')),
3358 _('a changeset you would like to have after cloning')),
3360 ('', 'pull', None, _('use pull protocol to copy metadata')),
3359 ('', 'pull', None, _('use pull protocol to copy metadata')),
3361 ('', 'uncompressed', None,
3360 ('', 'uncompressed', None,
3362 _('use uncompressed transfer (fast over LAN)')),
3361 _('use uncompressed transfer (fast over LAN)')),
3363 ] + remoteopts,
3362 ] + remoteopts,
3364 _('[OPTION]... SOURCE [DEST]')),
3363 _('[OPTION]... SOURCE [DEST]')),
3365 "^commit|ci":
3364 "^commit|ci":
3366 (commit,
3365 (commit,
3367 [('A', 'addremove', None,
3366 [('A', 'addremove', None,
3368 _('mark new/missing files as added/removed before committing')),
3367 _('mark new/missing files as added/removed before committing')),
3369 ('', 'close-branch', None,
3368 ('', 'close-branch', None,
3370 _('mark a branch as closed, hiding it from the branch list')),
3369 _('mark a branch as closed, hiding it from the branch list')),
3371 ] + walkopts + commitopts + commitopts2,
3370 ] + walkopts + commitopts + commitopts2,
3372 _('[OPTION]... [FILE]...')),
3371 _('[OPTION]... [FILE]...')),
3373 "copy|cp":
3372 "copy|cp":
3374 (copy,
3373 (copy,
3375 [('A', 'after', None, _('record a copy that has already occurred')),
3374 [('A', 'after', None, _('record a copy that has already occurred')),
3376 ('f', 'force', None,
3375 ('f', 'force', None,
3377 _('forcibly copy over an existing managed file')),
3376 _('forcibly copy over an existing managed file')),
3378 ] + walkopts + dryrunopts,
3377 ] + walkopts + dryrunopts,
3379 _('[OPTION]... [SOURCE]... DEST')),
3378 _('[OPTION]... [SOURCE]... DEST')),
3380 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3379 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3381 "debugcheckstate": (debugcheckstate, [], ''),
3380 "debugcheckstate": (debugcheckstate, [], ''),
3382 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3381 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3383 "debugcomplete":
3382 "debugcomplete":
3384 (debugcomplete,
3383 (debugcomplete,
3385 [('o', 'options', None, _('show the command options'))],
3384 [('o', 'options', None, _('show the command options'))],
3386 _('[-o] CMD')),
3385 _('[-o] CMD')),
3387 "debugdate":
3386 "debugdate":
3388 (debugdate,
3387 (debugdate,
3389 [('e', 'extended', None, _('try extended date formats'))],
3388 [('e', 'extended', None, _('try extended date formats'))],
3390 _('[-e] DATE [RANGE]')),
3389 _('[-e] DATE [RANGE]')),
3391 "debugdata": (debugdata, [], _('FILE REV')),
3390 "debugdata": (debugdata, [], _('FILE REV')),
3392 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3391 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3393 "debugindex": (debugindex, [], _('FILE')),
3392 "debugindex": (debugindex, [], _('FILE')),
3394 "debugindexdot": (debugindexdot, [], _('FILE')),
3393 "debugindexdot": (debugindexdot, [], _('FILE')),
3395 "debuginstall": (debuginstall, [], ''),
3394 "debuginstall": (debuginstall, [], ''),
3396 "debugrebuildstate":
3395 "debugrebuildstate":
3397 (debugrebuildstate,
3396 (debugrebuildstate,
3398 [('r', 'rev', '', _('revision to rebuild to'))],
3397 [('r', 'rev', '', _('revision to rebuild to'))],
3399 _('[-r REV] [REV]')),
3398 _('[-r REV] [REV]')),
3400 "debugrename":
3399 "debugrename":
3401 (debugrename,
3400 (debugrename,
3402 [('r', 'rev', '', _('revision to debug'))],
3401 [('r', 'rev', '', _('revision to debug'))],
3403 _('[-r REV] FILE')),
3402 _('[-r REV] FILE')),
3404 "debugsetparents":
3403 "debugsetparents":
3405 (debugsetparents, [], _('REV1 [REV2]')),
3404 (debugsetparents, [], _('REV1 [REV2]')),
3406 "debugstate":
3405 "debugstate":
3407 (debugstate,
3406 (debugstate,
3408 [('', 'nodates', None, _('do not display the saved mtime'))],
3407 [('', 'nodates', None, _('do not display the saved mtime'))],
3409 _('[OPTION]...')),
3408 _('[OPTION]...')),
3410 "debugsub":
3409 "debugsub":
3411 (debugsub,
3410 (debugsub,
3412 [('r', 'rev', '', _('revision to check'))],
3411 [('r', 'rev', '', _('revision to check'))],
3413 _('[-r REV] [REV]')),
3412 _('[-r REV] [REV]')),
3414 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3413 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3415 "^diff":
3414 "^diff":
3416 (diff,
3415 (diff,
3417 [('r', 'rev', [], _('revision')),
3416 [('r', 'rev', [], _('revision')),
3418 ('c', 'change', '', _('change made by revision'))
3417 ('c', 'change', '', _('change made by revision'))
3419 ] + diffopts + diffopts2 + walkopts,
3418 ] + diffopts + diffopts2 + walkopts,
3420 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3419 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3421 "^export":
3420 "^export":
3422 (export,
3421 (export,
3423 [('o', 'output', '', _('print output to file with formatted name')),
3422 [('o', 'output', '', _('print output to file with formatted name')),
3424 ('', 'switch-parent', None, _('diff against the second parent'))
3423 ('', 'switch-parent', None, _('diff against the second parent'))
3425 ] + diffopts,
3424 ] + diffopts,
3426 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3425 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3427 "^forget":
3426 "^forget":
3428 (forget,
3427 (forget,
3429 [] + walkopts,
3428 [] + walkopts,
3430 _('[OPTION]... FILE...')),
3429 _('[OPTION]... FILE...')),
3431 "grep":
3430 "grep":
3432 (grep,
3431 (grep,
3433 [('0', 'print0', None, _('end fields with NUL')),
3432 [('0', 'print0', None, _('end fields with NUL')),
3434 ('', 'all', None, _('print all revisions that match')),
3433 ('', 'all', None, _('print all revisions that match')),
3435 ('f', 'follow', None,
3434 ('f', 'follow', None,
3436 _('follow changeset history, or file history across copies and renames')),
3435 _('follow changeset history, or file history across copies and renames')),
3437 ('i', 'ignore-case', None, _('ignore case when matching')),
3436 ('i', 'ignore-case', None, _('ignore case when matching')),
3438 ('l', 'files-with-matches', None,
3437 ('l', 'files-with-matches', None,
3439 _('print only filenames and revisions that match')),
3438 _('print only filenames and revisions that match')),
3440 ('n', 'line-number', None, _('print matching line numbers')),
3439 ('n', 'line-number', None, _('print matching line numbers')),
3441 ('r', 'rev', [], _('search in given revision range')),
3440 ('r', 'rev', [], _('search in given revision range')),
3442 ('u', 'user', None, _('list the author (long with -v)')),
3441 ('u', 'user', None, _('list the author (long with -v)')),
3443 ('d', 'date', None, _('list the date (short with -q)')),
3442 ('d', 'date', None, _('list the date (short with -q)')),
3444 ] + walkopts,
3443 ] + walkopts,
3445 _('[OPTION]... PATTERN [FILE]...')),
3444 _('[OPTION]... PATTERN [FILE]...')),
3446 "heads":
3445 "heads":
3447 (heads,
3446 (heads,
3448 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3447 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3449 ('a', 'active', False,
3448 ('a', 'active', False,
3450 _('show only the active branch heads from open branches')),
3449 _('show only the active branch heads from open branches')),
3451 ('c', 'closed', False,
3450 ('c', 'closed', False,
3452 _('show normal and closed branch heads')),
3451 _('show normal and closed branch heads')),
3453 ] + templateopts,
3452 ] + templateopts,
3454 _('[-r STARTREV] [REV]...')),
3453 _('[-r STARTREV] [REV]...')),
3455 "help": (help_, [], _('[TOPIC]')),
3454 "help": (help_, [], _('[TOPIC]')),
3456 "identify|id":
3455 "identify|id":
3457 (identify,
3456 (identify,
3458 [('r', 'rev', '', _('identify the specified revision')),
3457 [('r', 'rev', '', _('identify the specified revision')),
3459 ('n', 'num', None, _('show local revision number')),
3458 ('n', 'num', None, _('show local revision number')),
3460 ('i', 'id', None, _('show global revision id')),
3459 ('i', 'id', None, _('show global revision id')),
3461 ('b', 'branch', None, _('show branch')),
3460 ('b', 'branch', None, _('show branch')),
3462 ('t', 'tags', None, _('show tags'))],
3461 ('t', 'tags', None, _('show tags'))],
3463 _('[-nibt] [-r REV] [SOURCE]')),
3462 _('[-nibt] [-r REV] [SOURCE]')),
3464 "import|patch":
3463 "import|patch":
3465 (import_,
3464 (import_,
3466 [('p', 'strip', 1,
3465 [('p', 'strip', 1,
3467 _('directory strip option for patch. This has the same '
3466 _('directory strip option for patch. This has the same '
3468 'meaning as the corresponding patch option')),
3467 'meaning as the corresponding patch option')),
3469 ('b', 'base', '', _('base path')),
3468 ('b', 'base', '', _('base path')),
3470 ('f', 'force', None,
3469 ('f', 'force', None,
3471 _('skip check for outstanding uncommitted changes')),
3470 _('skip check for outstanding uncommitted changes')),
3472 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3471 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3473 ('', 'exact', None,
3472 ('', 'exact', None,
3474 _('apply patch to the nodes from which it was generated')),
3473 _('apply patch to the nodes from which it was generated')),
3475 ('', 'import-branch', None,
3474 ('', 'import-branch', None,
3476 _('use any branch information in patch (implied by --exact)'))] +
3475 _('use any branch information in patch (implied by --exact)'))] +
3477 commitopts + commitopts2 + similarityopts,
3476 commitopts + commitopts2 + similarityopts,
3478 _('[OPTION]... PATCH...')),
3477 _('[OPTION]... PATCH...')),
3479 "incoming|in":
3478 "incoming|in":
3480 (incoming,
3479 (incoming,
3481 [('f', 'force', None,
3480 [('f', 'force', None,
3482 _('run even when remote repository is unrelated')),
3481 _('run even when remote repository is unrelated')),
3483 ('n', 'newest-first', None, _('show newest record first')),
3482 ('n', 'newest-first', None, _('show newest record first')),
3484 ('', 'bundle', '', _('file to store the bundles into')),
3483 ('', 'bundle', '', _('file to store the bundles into')),
3485 ('r', 'rev', [],
3484 ('r', 'rev', [],
3486 _('a specific remote revision up to which you would like to pull')),
3485 _('a specific remote revision up to which you would like to pull')),
3487 ] + logopts + remoteopts,
3486 ] + logopts + remoteopts,
3488 _('[-p] [-n] [-M] [-f] [-r REV]...'
3487 _('[-p] [-n] [-M] [-f] [-r REV]...'
3489 ' [--bundle FILENAME] [SOURCE]')),
3488 ' [--bundle FILENAME] [SOURCE]')),
3490 "^init":
3489 "^init":
3491 (init,
3490 (init,
3492 remoteopts,
3491 remoteopts,
3493 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3492 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3494 "locate":
3493 "locate":
3495 (locate,
3494 (locate,
3496 [('r', 'rev', '', _('search the repository as it stood at REV')),
3495 [('r', 'rev', '', _('search the repository as it stood at REV')),
3497 ('0', 'print0', None,
3496 ('0', 'print0', None,
3498 _('end filenames with NUL, for use with xargs')),
3497 _('end filenames with NUL, for use with xargs')),
3499 ('f', 'fullpath', None,
3498 ('f', 'fullpath', None,
3500 _('print complete paths from the filesystem root')),
3499 _('print complete paths from the filesystem root')),
3501 ] + walkopts,
3500 ] + walkopts,
3502 _('[OPTION]... [PATTERN]...')),
3501 _('[OPTION]... [PATTERN]...')),
3503 "^log|history":
3502 "^log|history":
3504 (log,
3503 (log,
3505 [('f', 'follow', None,
3504 [('f', 'follow', None,
3506 _('follow changeset history, or file history across copies and renames')),
3505 _('follow changeset history, or file history across copies and renames')),
3507 ('', 'follow-first', None,
3506 ('', 'follow-first', None,
3508 _('only follow the first parent of merge changesets')),
3507 _('only follow the first parent of merge changesets')),
3509 ('d', 'date', '', _('show revisions matching date spec')),
3508 ('d', 'date', '', _('show revisions matching date spec')),
3510 ('C', 'copies', None, _('show copied files')),
3509 ('C', 'copies', None, _('show copied files')),
3511 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3510 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3512 ('r', 'rev', [], _('show the specified revision or range')),
3511 ('r', 'rev', [], _('show the specified revision or range')),
3513 ('', 'removed', None, _('include revisions where files were removed')),
3512 ('', 'removed', None, _('include revisions where files were removed')),
3514 ('m', 'only-merges', None, _('show only merges')),
3513 ('m', 'only-merges', None, _('show only merges')),
3515 ('u', 'user', [], _('revisions committed by user')),
3514 ('u', 'user', [], _('revisions committed by user')),
3516 ('b', 'only-branch', [],
3515 ('b', 'only-branch', [],
3517 _('show only changesets within the given named branch')),
3516 _('show only changesets within the given named branch')),
3518 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3517 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3519 ] + logopts + walkopts,
3518 ] + logopts + walkopts,
3520 _('[OPTION]... [FILE]')),
3519 _('[OPTION]... [FILE]')),
3521 "manifest":
3520 "manifest":
3522 (manifest,
3521 (manifest,
3523 [('r', 'rev', '', _('revision to display'))],
3522 [('r', 'rev', '', _('revision to display'))],
3524 _('[-r REV]')),
3523 _('[-r REV]')),
3525 "^merge":
3524 "^merge":
3526 (merge,
3525 (merge,
3527 [('f', 'force', None, _('force a merge with outstanding changes')),
3526 [('f', 'force', None, _('force a merge with outstanding changes')),
3528 ('r', 'rev', '', _('revision to merge')),
3527 ('r', 'rev', '', _('revision to merge')),
3529 ('P', 'preview', None,
3528 ('P', 'preview', None,
3530 _('review revisions to merge (no merge is performed)'))],
3529 _('review revisions to merge (no merge is performed)'))],
3531 _('[-f] [[-r] REV]')),
3530 _('[-f] [[-r] REV]')),
3532 "outgoing|out":
3531 "outgoing|out":
3533 (outgoing,
3532 (outgoing,
3534 [('f', 'force', None,
3533 [('f', 'force', None,
3535 _('run even when remote repository is unrelated')),
3534 _('run even when remote repository is unrelated')),
3536 ('r', 'rev', [],
3535 ('r', 'rev', [],
3537 _('a specific revision up to which you would like to push')),
3536 _('a specific revision up to which you would like to push')),
3538 ('n', 'newest-first', None, _('show newest record first')),
3537 ('n', 'newest-first', None, _('show newest record first')),
3539 ] + logopts + remoteopts,
3538 ] + logopts + remoteopts,
3540 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3539 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3541 "parents":
3540 "parents":
3542 (parents,
3541 (parents,
3543 [('r', 'rev', '', _('show parents from the specified revision')),
3542 [('r', 'rev', '', _('show parents from the specified revision')),
3544 ] + templateopts,
3543 ] + templateopts,
3545 _('[-r REV] [FILE]')),
3544 _('[-r REV] [FILE]')),
3546 "paths": (paths, [], _('[NAME]')),
3545 "paths": (paths, [], _('[NAME]')),
3547 "^pull":
3546 "^pull":
3548 (pull,
3547 (pull,
3549 [('u', 'update', None,
3548 [('u', 'update', None,
3550 _('update to new tip if changesets were pulled')),
3549 _('update to new tip if changesets were pulled')),
3551 ('f', 'force', None,
3550 ('f', 'force', None,
3552 _('run even when remote repository is unrelated')),
3551 _('run even when remote repository is unrelated')),
3553 ('r', 'rev', [],
3552 ('r', 'rev', [],
3554 _('a specific remote revision up to which you would like to pull')),
3553 _('a specific remote revision up to which you would like to pull')),
3555 ] + remoteopts,
3554 ] + remoteopts,
3556 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3555 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3557 "^push":
3556 "^push":
3558 (push,
3557 (push,
3559 [('f', 'force', None, _('force push')),
3558 [('f', 'force', None, _('force push')),
3560 ('r', 'rev', [],
3559 ('r', 'rev', [],
3561 _('a specific revision up to which you would like to push')),
3560 _('a specific revision up to which you would like to push')),
3562 ] + remoteopts,
3561 ] + remoteopts,
3563 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3562 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3564 "recover": (recover, []),
3563 "recover": (recover, []),
3565 "^remove|rm":
3564 "^remove|rm":
3566 (remove,
3565 (remove,
3567 [('A', 'after', None, _('record delete for missing files')),
3566 [('A', 'after', None, _('record delete for missing files')),
3568 ('f', 'force', None,
3567 ('f', 'force', None,
3569 _('remove (and delete) file even if added or modified')),
3568 _('remove (and delete) file even if added or modified')),
3570 ] + walkopts,
3569 ] + walkopts,
3571 _('[OPTION]... FILE...')),
3570 _('[OPTION]... FILE...')),
3572 "rename|mv":
3571 "rename|mv":
3573 (rename,
3572 (rename,
3574 [('A', 'after', None, _('record a rename that has already occurred')),
3573 [('A', 'after', None, _('record a rename that has already occurred')),
3575 ('f', 'force', None,
3574 ('f', 'force', None,
3576 _('forcibly copy over an existing managed file')),
3575 _('forcibly copy over an existing managed file')),
3577 ] + walkopts + dryrunopts,
3576 ] + walkopts + dryrunopts,
3578 _('[OPTION]... SOURCE... DEST')),
3577 _('[OPTION]... SOURCE... DEST')),
3579 "resolve":
3578 "resolve":
3580 (resolve,
3579 (resolve,
3581 [('a', 'all', None, _('remerge all unresolved files')),
3580 [('a', 'all', None, _('remerge all unresolved files')),
3582 ('l', 'list', None, _('list state of files needing merge')),
3581 ('l', 'list', None, _('list state of files needing merge')),
3583 ('m', 'mark', None, _('mark files as resolved')),
3582 ('m', 'mark', None, _('mark files as resolved')),
3584 ('u', 'unmark', None, _('unmark files as resolved')),
3583 ('u', 'unmark', None, _('unmark files as resolved')),
3585 ('n', 'no-status', None, _('hide status prefix'))]
3584 ('n', 'no-status', None, _('hide status prefix'))]
3586 + walkopts,
3585 + walkopts,
3587 _('[OPTION]... [FILE]...')),
3586 _('[OPTION]... [FILE]...')),
3588 "revert":
3587 "revert":
3589 (revert,
3588 (revert,
3590 [('a', 'all', None, _('revert all changes when no arguments given')),
3589 [('a', 'all', None, _('revert all changes when no arguments given')),
3591 ('d', 'date', '', _('tipmost revision matching date')),
3590 ('d', 'date', '', _('tipmost revision matching date')),
3592 ('r', 'rev', '', _('revision to revert to')),
3591 ('r', 'rev', '', _('revision to revert to')),
3593 ('', 'no-backup', None, _('do not save backup copies of files')),
3592 ('', 'no-backup', None, _('do not save backup copies of files')),
3594 ] + walkopts + dryrunopts,
3593 ] + walkopts + dryrunopts,
3595 _('[OPTION]... [-r REV] [NAME]...')),
3594 _('[OPTION]... [-r REV] [NAME]...')),
3596 "rollback": (rollback, []),
3595 "rollback": (rollback, []),
3597 "root": (root, []),
3596 "root": (root, []),
3598 "^serve":
3597 "^serve":
3599 (serve,
3598 (serve,
3600 [('A', 'accesslog', '', _('name of access log file to write to')),
3599 [('A', 'accesslog', '', _('name of access log file to write to')),
3601 ('d', 'daemon', None, _('run server in background')),
3600 ('d', 'daemon', None, _('run server in background')),
3602 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3601 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3603 ('E', 'errorlog', '', _('name of error log file to write to')),
3602 ('E', 'errorlog', '', _('name of error log file to write to')),
3604 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3603 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3605 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3604 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3606 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3605 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3607 ('n', 'name', '',
3606 ('n', 'name', '',
3608 _('name to show in web pages (default: working directory)')),
3607 _('name to show in web pages (default: working directory)')),
3609 ('', 'webdir-conf', '', _('name of the webdir config file'
3608 ('', 'webdir-conf', '', _('name of the webdir config file'
3610 ' (serve more than one repository)')),
3609 ' (serve more than one repository)')),
3611 ('', 'pid-file', '', _('name of file to write process ID to')),
3610 ('', 'pid-file', '', _('name of file to write process ID to')),
3612 ('', 'stdio', None, _('for remote clients')),
3611 ('', 'stdio', None, _('for remote clients')),
3613 ('t', 'templates', '', _('web templates to use')),
3612 ('t', 'templates', '', _('web templates to use')),
3614 ('', 'style', '', _('template style to use')),
3613 ('', 'style', '', _('template style to use')),
3615 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3614 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3616 ('', 'certificate', '', _('SSL certificate file'))],
3615 ('', 'certificate', '', _('SSL certificate file'))],
3617 _('[OPTION]...')),
3616 _('[OPTION]...')),
3618 "showconfig|debugconfig":
3617 "showconfig|debugconfig":
3619 (showconfig,
3618 (showconfig,
3620 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3619 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3621 _('[-u] [NAME]...')),
3620 _('[-u] [NAME]...')),
3622 "^summary|sum":
3621 "^summary|sum":
3623 (summary,
3622 (summary,
3624 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3623 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3625 "^status|st":
3624 "^status|st":
3626 (status,
3625 (status,
3627 [('A', 'all', None, _('show status of all files')),
3626 [('A', 'all', None, _('show status of all files')),
3628 ('m', 'modified', None, _('show only modified files')),
3627 ('m', 'modified', None, _('show only modified files')),
3629 ('a', 'added', None, _('show only added files')),
3628 ('a', 'added', None, _('show only added files')),
3630 ('r', 'removed', None, _('show only removed files')),
3629 ('r', 'removed', None, _('show only removed files')),
3631 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3630 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3632 ('c', 'clean', None, _('show only files without changes')),
3631 ('c', 'clean', None, _('show only files without changes')),
3633 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3632 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3634 ('i', 'ignored', None, _('show only ignored files')),
3633 ('i', 'ignored', None, _('show only ignored files')),
3635 ('n', 'no-status', None, _('hide status prefix')),
3634 ('n', 'no-status', None, _('hide status prefix')),
3636 ('C', 'copies', None, _('show source of copied files')),
3635 ('C', 'copies', None, _('show source of copied files')),
3637 ('0', 'print0', None,
3636 ('0', 'print0', None,
3638 _('end filenames with NUL, for use with xargs')),
3637 _('end filenames with NUL, for use with xargs')),
3639 ('', 'rev', [], _('show difference from revision')),
3638 ('', 'rev', [], _('show difference from revision')),
3640 ] + walkopts,
3639 ] + walkopts,
3641 _('[OPTION]... [FILE]...')),
3640 _('[OPTION]... [FILE]...')),
3642 "tag":
3641 "tag":
3643 (tag,
3642 (tag,
3644 [('f', 'force', None, _('replace existing tag')),
3643 [('f', 'force', None, _('replace existing tag')),
3645 ('l', 'local', None, _('make the tag local')),
3644 ('l', 'local', None, _('make the tag local')),
3646 ('r', 'rev', '', _('revision to tag')),
3645 ('r', 'rev', '', _('revision to tag')),
3647 ('', 'remove', None, _('remove a tag')),
3646 ('', 'remove', None, _('remove a tag')),
3648 # -l/--local is already there, commitopts cannot be used
3647 # -l/--local is already there, commitopts cannot be used
3649 ('m', 'message', '', _('use <text> as commit message')),
3648 ('m', 'message', '', _('use <text> as commit message')),
3650 ] + commitopts2,
3649 ] + commitopts2,
3651 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3650 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3652 "tags": (tags, [], ''),
3651 "tags": (tags, [], ''),
3653 "tip":
3652 "tip":
3654 (tip,
3653 (tip,
3655 [('p', 'patch', None, _('show patch')),
3654 [('p', 'patch', None, _('show patch')),
3656 ('g', 'git', None, _('use git extended diff format')),
3655 ('g', 'git', None, _('use git extended diff format')),
3657 ] + templateopts,
3656 ] + templateopts,
3658 _('[-p]')),
3657 _('[-p]')),
3659 "unbundle":
3658 "unbundle":
3660 (unbundle,
3659 (unbundle,
3661 [('u', 'update', None,
3660 [('u', 'update', None,
3662 _('update to new tip if changesets were unbundled'))],
3661 _('update to new tip if changesets were unbundled'))],
3663 _('[-u] FILE...')),
3662 _('[-u] FILE...')),
3664 "^update|up|checkout|co":
3663 "^update|up|checkout|co":
3665 (update,
3664 (update,
3666 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3665 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3667 ('c', 'check', None, _('check for uncommitted changes')),
3666 ('c', 'check', None, _('check for uncommitted changes')),
3668 ('d', 'date', '', _('tipmost revision matching date')),
3667 ('d', 'date', '', _('tipmost revision matching date')),
3669 ('r', 'rev', '', _('revision'))],
3668 ('r', 'rev', '', _('revision'))],
3670 _('[-C] [-d DATE] [[-r] REV]')),
3669 _('[-C] [-d DATE] [[-r] REV]')),
3671 "verify": (verify, []),
3670 "verify": (verify, []),
3672 "version": (version_, []),
3671 "version": (version_, []),
3673 }
3672 }
3674
3673
3675 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3674 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3676 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3675 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3677 optionalrepo = ("identify paths serve showconfig debugancestor")
3676 optionalrepo = ("identify paths serve showconfig debugancestor")
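For context on the structure closed off above: each entry in this table maps a command name (a "^" prefix marks it for the short help list, "|" separates aliases) to a tuple of (function, option list, synopsis), and each option is a (short flag, long name, default, help) tuple. A minimal sketch of a hypothetical entry in the same shape — the "hello" command, its function and its option are invented purely for illustration, and real entries wrap user-visible strings in _() for translation:

# Hypothetical command-table entry, same shape as the table above.
def hello(ui, repo, **opts):
    """print the repository root (illustration only)"""
    msg = "hello from %s\n" % repo.root
    if opts.get('shout'):
        msg = msg.upper()
    ui.write(msg)

demo_table = {
    "hello|hi":                      # "|" separates the name from an alias
        (hello,
         [('s', 'shout', None, 'print the greeting in upper case')],
         '[-s]'),
}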
@@ -1,501 +1,499 b''
1 # dispatch.py - command dispatching for mercurial
1 # dispatch.py - command dispatching for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time
9 import os, sys, atexit, signal, pdb, socket, errno, shlex, time
10 import util, commands, hg, fancyopts, extensions, hook, error
10 import util, commands, hg, fancyopts, extensions, hook, error
11 import cmdutil, encoding
11 import cmdutil, encoding
12 import ui as _ui
12 import ui as _ui
13
13
14 def run():
14 def run():
15 "run the command in sys.argv"
15 "run the command in sys.argv"
16 sys.exit(dispatch(sys.argv[1:]))
16 sys.exit(dispatch(sys.argv[1:]))
17
17
18 def dispatch(args):
18 def dispatch(args):
19 "run the command specified in args"
19 "run the command specified in args"
20 try:
20 try:
21 u = _ui.ui()
21 u = _ui.ui()
22 if '--traceback' in args:
22 if '--traceback' in args:
23 u.setconfig('ui', 'traceback', 'on')
23 u.setconfig('ui', 'traceback', 'on')
24 except util.Abort, inst:
24 except util.Abort, inst:
25 sys.stderr.write(_("abort: %s\n") % inst)
25 sys.stderr.write(_("abort: %s\n") % inst)
26 return -1
26 return -1
27 except error.ConfigError, inst:
27 except error.ConfigError, inst:
28 sys.stderr.write(_("hg: %s\n") % inst)
28 sys.stderr.write(_("hg: %s\n") % inst)
29 return -1
29 return -1
30 return _runcatch(u, args)
30 return _runcatch(u, args)
31
31
32 def _runcatch(ui, args):
32 def _runcatch(ui, args):
33 def catchterm(*args):
33 def catchterm(*args):
34 raise error.SignalInterrupt
34 raise error.SignalInterrupt
35
35
36 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
36 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
37 num = getattr(signal, name, None)
37 num = getattr(signal, name, None)
38 if num: signal.signal(num, catchterm)
38 if num: signal.signal(num, catchterm)
39
39
40 try:
40 try:
41 try:
41 try:
42 # enter the debugger before command execution
42 # enter the debugger before command execution
43 if '--debugger' in args:
43 if '--debugger' in args:
44 pdb.set_trace()
44 pdb.set_trace()
45 try:
45 try:
46 return _dispatch(ui, args)
46 return _dispatch(ui, args)
47 finally:
47 finally:
48 ui.flush()
48 ui.flush()
49 except:
49 except:
50 # enter the debugger when we hit an exception
50 # enter the debugger when we hit an exception
51 if '--debugger' in args:
51 if '--debugger' in args:
52 pdb.post_mortem(sys.exc_info()[2])
52 pdb.post_mortem(sys.exc_info()[2])
53 ui.traceback()
53 ui.traceback()
54 raise
54 raise
55
55
56 # Global exception handling, alphabetically
56 # Global exception handling, alphabetically
57 # Mercurial-specific first, followed by built-in and library exceptions
57 # Mercurial-specific first, followed by built-in and library exceptions
58 except error.AmbiguousCommand, inst:
58 except error.AmbiguousCommand, inst:
59 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
59 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
60 (inst.args[0], " ".join(inst.args[1])))
60 (inst.args[0], " ".join(inst.args[1])))
61 except error.ConfigError, inst:
61 except error.ConfigError, inst:
62 ui.warn(_("hg: %s\n") % inst.args[0])
62 ui.warn(_("hg: %s\n") % inst.args[0])
63 except error.LockHeld, inst:
63 except error.LockHeld, inst:
64 if inst.errno == errno.ETIMEDOUT:
64 if inst.errno == errno.ETIMEDOUT:
65 reason = _('timed out waiting for lock held by %s') % inst.locker
65 reason = _('timed out waiting for lock held by %s') % inst.locker
66 else:
66 else:
67 reason = _('lock held by %s') % inst.locker
67 reason = _('lock held by %s') % inst.locker
68 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
69 except error.LockUnavailable, inst:
69 except error.LockUnavailable, inst:
70 ui.warn(_("abort: could not lock %s: %s\n") %
70 ui.warn(_("abort: could not lock %s: %s\n") %
71 (inst.desc or inst.filename, inst.strerror))
71 (inst.desc or inst.filename, inst.strerror))
72 except error.ParseError, inst:
72 except error.ParseError, inst:
73 if inst.args[0]:
73 if inst.args[0]:
74 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
74 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
75 commands.help_(ui, inst.args[0])
75 commands.help_(ui, inst.args[0])
76 else:
76 else:
77 ui.warn(_("hg: %s\n") % inst.args[1])
77 ui.warn(_("hg: %s\n") % inst.args[1])
78 commands.help_(ui, 'shortlist')
78 commands.help_(ui, 'shortlist')
79 except error.RepoError, inst:
79 except error.RepoError, inst:
80 ui.warn(_("abort: %s!\n") % inst)
80 ui.warn(_("abort: %s!\n") % inst)
81 except error.ResponseError, inst:
81 except error.ResponseError, inst:
82 ui.warn(_("abort: %s") % inst.args[0])
82 ui.warn(_("abort: %s") % inst.args[0])
83 if not isinstance(inst.args[1], basestring):
83 if not isinstance(inst.args[1], basestring):
84 ui.warn(" %r\n" % (inst.args[1],))
84 ui.warn(" %r\n" % (inst.args[1],))
85 elif not inst.args[1]:
85 elif not inst.args[1]:
86 ui.warn(_(" empty string\n"))
86 ui.warn(_(" empty string\n"))
87 else:
87 else:
88 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
88 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
89 except error.RevlogError, inst:
89 except error.RevlogError, inst:
90 ui.warn(_("abort: %s!\n") % inst)
90 ui.warn(_("abort: %s!\n") % inst)
91 except error.SignalInterrupt:
91 except error.SignalInterrupt:
92 ui.warn(_("killed!\n"))
92 ui.warn(_("killed!\n"))
93 except error.UnknownCommand, inst:
93 except error.UnknownCommand, inst:
94 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
94 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
95 commands.help_(ui, 'shortlist')
95 commands.help_(ui, 'shortlist')
96 except util.Abort, inst:
96 except util.Abort, inst:
97 ui.warn(_("abort: %s\n") % inst)
97 ui.warn(_("abort: %s\n") % inst)
98 except ImportError, inst:
98 except ImportError, inst:
99 m = str(inst).split()[-1]
99 m = str(inst).split()[-1]
100 ui.warn(_("abort: could not import module %s!\n") % m)
100 ui.warn(_("abort: could not import module %s!\n") % m)
101 if m in "mpatch bdiff".split():
101 if m in "mpatch bdiff".split():
102 ui.warn(_("(did you forget to compile extensions?)\n"))
102 ui.warn(_("(did you forget to compile extensions?)\n"))
103 elif m in "zlib".split():
103 elif m in "zlib".split():
104 ui.warn(_("(is your Python install correct?)\n"))
104 ui.warn(_("(is your Python install correct?)\n"))
105 except IOError, inst:
105 except IOError, inst:
106 if hasattr(inst, "code"):
106 if hasattr(inst, "code"):
107 ui.warn(_("abort: %s\n") % inst)
107 ui.warn(_("abort: %s\n") % inst)
108 elif hasattr(inst, "reason"):
108 elif hasattr(inst, "reason"):
109 try: # usually it is in the form (errno, strerror)
109 try: # usually it is in the form (errno, strerror)
110 reason = inst.reason.args[1]
110 reason = inst.reason.args[1]
111 except: # it might be anything, for example a string
111 except: # it might be anything, for example a string
112 reason = inst.reason
112 reason = inst.reason
113 ui.warn(_("abort: error: %s\n") % reason)
113 ui.warn(_("abort: error: %s\n") % reason)
114 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
114 elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
115 if ui.debugflag:
115 if ui.debugflag:
116 ui.warn(_("broken pipe\n"))
116 ui.warn(_("broken pipe\n"))
117 elif getattr(inst, "strerror", None):
117 elif getattr(inst, "strerror", None):
118 if getattr(inst, "filename", None):
118 if getattr(inst, "filename", None):
119 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
119 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
120 else:
120 else:
121 ui.warn(_("abort: %s\n") % inst.strerror)
121 ui.warn(_("abort: %s\n") % inst.strerror)
122 else:
122 else:
123 raise
123 raise
124 except OSError, inst:
124 except OSError, inst:
125 if getattr(inst, "filename", None):
125 if getattr(inst, "filename", None):
126 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
126 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
127 else:
127 else:
128 ui.warn(_("abort: %s\n") % inst.strerror)
128 ui.warn(_("abort: %s\n") % inst.strerror)
129 except KeyboardInterrupt:
129 except KeyboardInterrupt:
130 try:
130 try:
131 ui.warn(_("interrupted!\n"))
131 ui.warn(_("interrupted!\n"))
132 except IOError, inst:
132 except IOError, inst:
133 if inst.errno == errno.EPIPE:
133 if inst.errno == errno.EPIPE:
134 if ui.debugflag:
134 if ui.debugflag:
135 ui.warn(_("\nbroken pipe\n"))
135 ui.warn(_("\nbroken pipe\n"))
136 else:
136 else:
137 raise
137 raise
138 except MemoryError:
138 except MemoryError:
139 ui.warn(_("abort: out of memory\n"))
139 ui.warn(_("abort: out of memory\n"))
140 except SystemExit, inst:
140 except SystemExit, inst:
141 # Commands shouldn't sys.exit directly, but give a return code.
141 # Commands shouldn't sys.exit directly, but give a return code.
142 # Just in case catch this and pass exit code to caller.
142 # Just in case catch this and pass exit code to caller.
143 return inst.code
143 return inst.code
144 except socket.error, inst:
144 except socket.error, inst:
145 ui.warn(_("abort: %s\n") % inst.args[-1])
145 ui.warn(_("abort: %s\n") % inst.args[-1])
146 except:
146 except:
147 ui.warn(_("** unknown exception encountered, details follow\n"))
147 ui.warn(_("** unknown exception encountered, details follow\n"))
148 ui.warn(_("** report bug details to "
148 ui.warn(_("** report bug details to "
149 "http://mercurial.selenic.com/bts/\n"))
149 "http://mercurial.selenic.com/bts/\n"))
150 ui.warn(_("** or mercurial@selenic.com\n"))
150 ui.warn(_("** or mercurial@selenic.com\n"))
151 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
151 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
152 % util.version())
152 % util.version())
153 ui.warn(_("** Extensions loaded: %s\n")
153 ui.warn(_("** Extensions loaded: %s\n")
154 % ", ".join([x[0] for x in extensions.extensions()]))
154 % ", ".join([x[0] for x in extensions.extensions()]))
155 raise
155 raise
156
156
157 return -1
157 return -1
158
158
159 def _findrepo(p):
159 def _findrepo(p):
160 while not os.path.isdir(os.path.join(p, ".hg")):
160 while not os.path.isdir(os.path.join(p, ".hg")):
161 oldp, p = p, os.path.dirname(p)
161 oldp, p = p, os.path.dirname(p)
162 if p == oldp:
162 if p == oldp:
163 return None
163 return None
164
164
165 return p
165 return p
166
166
167 def aliasargs(fn):
167 def aliasargs(fn):
168 if hasattr(fn, 'args'):
168 if hasattr(fn, 'args'):
169 return fn.args
169 return fn.args
170 return []
170 return []
171
171
172 class cmdalias(object):
172 class cmdalias(object):
173 def __init__(self, name, definition, cmdtable):
173 def __init__(self, name, definition, cmdtable):
174 self.name = name
174 self.name = name
175 self.definition = definition
175 self.definition = definition
176 self.args = []
176 self.args = []
177 self.opts = []
177 self.opts = []
178 self.help = ''
178 self.help = ''
179 self.norepo = True
179 self.norepo = True
180
180
181 try:
181 try:
182 cmdutil.findcmd(self.name, cmdtable, True)
182 cmdutil.findcmd(self.name, cmdtable, True)
183 self.shadows = True
183 self.shadows = True
184 except error.UnknownCommand:
184 except error.UnknownCommand:
185 self.shadows = False
185 self.shadows = False
186
186
187 if not self.definition:
187 if not self.definition:
188 def fn(ui, *args):
188 def fn(ui, *args):
189 ui.warn(_("no definition for alias '%s'\n") % self.name)
189 ui.warn(_("no definition for alias '%s'\n") % self.name)
190 return 1
190 return 1
191 self.fn = fn
191 self.fn = fn
192
192
193 return
193 return
194
194
195 args = shlex.split(self.definition)
195 args = shlex.split(self.definition)
196 cmd = args.pop(0)
196 cmd = args.pop(0)
197 opts = []
198 help = ''
199
197
200 try:
198 try:
201 self.fn, self.opts, self.help = cmdutil.findcmd(cmd, cmdtable, False)[1]
199 self.fn, self.opts, self.help = cmdutil.findcmd(cmd, cmdtable, False)[1]
202 self.args = aliasargs(self.fn) + args
200 self.args = aliasargs(self.fn) + args
203 if cmd not in commands.norepo.split(' '):
201 if cmd not in commands.norepo.split(' '):
204 self.norepo = False
202 self.norepo = False
205 except error.UnknownCommand:
203 except error.UnknownCommand:
206 def fn(ui, *args):
204 def fn(ui, *args):
207 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
205 ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
208 % (self.name, cmd))
206 % (self.name, cmd))
209 return 1
207 return 1
210 self.fn = fn
208 self.fn = fn
211 except error.AmbiguousCommand:
209 except error.AmbiguousCommand:
212 def fn(ui, *args):
210 def fn(ui, *args):
213 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
211 ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
214 % (self.name, cmd))
212 % (self.name, cmd))
215 return 1
213 return 1
216 self.fn = fn
214 self.fn = fn
217
215
218 def __call__(self, ui, *args, **opts):
216 def __call__(self, ui, *args, **opts):
219 if self.shadows:
217 if self.shadows:
220 ui.debug("alias '%s' shadows command\n" % self.name)
218 ui.debug("alias '%s' shadows command\n" % self.name)
221
219
222 return self.fn(ui, *args, **opts)
220 return self.fn(ui, *args, **opts)
223
221
224 def addaliases(ui, cmdtable):
222 def addaliases(ui, cmdtable):
225 # aliases are processed after extensions have been loaded, so they
223 # aliases are processed after extensions have been loaded, so they
226 # may use extension commands. Aliases can also use other alias definitions,
224 # may use extension commands. Aliases can also use other alias definitions,
227 # but only if they have been defined prior to the current definition.
225 # but only if they have been defined prior to the current definition.
228 for alias, definition in ui.configitems('alias'):
226 for alias, definition in ui.configitems('alias'):
229 aliasdef = cmdalias(alias, definition, cmdtable)
227 aliasdef = cmdalias(alias, definition, cmdtable)
230 cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help)
228 cmdtable[alias] = (aliasdef, aliasdef.opts, aliasdef.help)
231 if aliasdef.norepo:
229 if aliasdef.norepo:
232 commands.norepo += ' %s' % alias
230 commands.norepo += ' %s' % alias
233
231
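To make the loop above concrete: if a user's configuration contained an [alias] entry such as latest = log -l 5 (an invented alias used only for illustration), ui.configitems('alias') would yield ('latest', 'log -l 5') and addaliases would register it roughly as in this sketch:

# Sketch: what addaliases() does for one invented alias "latest = log -l 5".
aliasdef = cmdalias('latest', 'log -l 5', commands.table)
commands.table['latest'] = (aliasdef, aliasdef.opts, aliasdef.help)
# cmdalias resolves "log" through cmdutil.findcmd and keeps the extra
# arguments, so aliasdef.args == ['-l', '5']; _parse() later prepends
# them, making "hg latest" behave like "hg log -l 5".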
234 def _parse(ui, args):
232 def _parse(ui, args):
235 options = {}
233 options = {}
236 cmdoptions = {}
234 cmdoptions = {}
237
235
238 try:
236 try:
239 args = fancyopts.fancyopts(args, commands.globalopts, options)
237 args = fancyopts.fancyopts(args, commands.globalopts, options)
240 except fancyopts.getopt.GetoptError, inst:
238 except fancyopts.getopt.GetoptError, inst:
241 raise error.ParseError(None, inst)
239 raise error.ParseError(None, inst)
242
240
243 if args:
241 if args:
244 cmd, args = args[0], args[1:]
242 cmd, args = args[0], args[1:]
245 aliases, i = cmdutil.findcmd(cmd, commands.table,
243 aliases, i = cmdutil.findcmd(cmd, commands.table,
246 ui.config("ui", "strict"))
244 ui.config("ui", "strict"))
247 cmd = aliases[0]
245 cmd = aliases[0]
248 args = aliasargs(i[0]) + args
246 args = aliasargs(i[0]) + args
249 defaults = ui.config("defaults", cmd)
247 defaults = ui.config("defaults", cmd)
250 if defaults:
248 if defaults:
251 args = map(util.expandpath, shlex.split(defaults)) + args
249 args = map(util.expandpath, shlex.split(defaults)) + args
252 c = list(i[1])
250 c = list(i[1])
253 else:
251 else:
254 cmd = None
252 cmd = None
255 c = []
253 c = []
256
254
257 # combine global options into local
255 # combine global options into local
258 for o in commands.globalopts:
256 for o in commands.globalopts:
259 c.append((o[0], o[1], options[o[1]], o[3]))
257 c.append((o[0], o[1], options[o[1]], o[3]))
260
258
261 try:
259 try:
262 args = fancyopts.fancyopts(args, c, cmdoptions, True)
260 args = fancyopts.fancyopts(args, c, cmdoptions, True)
263 except fancyopts.getopt.GetoptError, inst:
261 except fancyopts.getopt.GetoptError, inst:
264 raise error.ParseError(cmd, inst)
262 raise error.ParseError(cmd, inst)
265
263
266 # separate global options back out
264 # separate global options back out
267 for o in commands.globalopts:
265 for o in commands.globalopts:
268 n = o[1]
266 n = o[1]
269 options[n] = cmdoptions[n]
267 options[n] = cmdoptions[n]
270 del cmdoptions[n]
268 del cmdoptions[n]
271
269
272 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
270 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
273
271
274 def _parseconfig(ui, config):
272 def _parseconfig(ui, config):
275 """parse the --config options from the command line"""
273 """parse the --config options from the command line"""
276 for cfg in config:
274 for cfg in config:
277 try:
275 try:
278 name, value = cfg.split('=', 1)
276 name, value = cfg.split('=', 1)
279 section, name = name.split('.', 1)
277 section, name = name.split('.', 1)
280 if not section or not name:
278 if not section or not name:
281 raise IndexError
279 raise IndexError
282 ui.setconfig(section, name, value)
280 ui.setconfig(section, name, value)
283 except (IndexError, ValueError):
281 except (IndexError, ValueError):
284 raise util.Abort(_('malformed --config option: %s') % cfg)
282 raise util.Abort(_('malformed --config option: %s') % cfg)
285
283
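A worked example of the splitting above: the command line option --config ui.username=alice (the value is invented) reaches _parseconfig as the string 'ui.username=alice' and is taken apart as follows:

# Sketch of the parsing performed for one --config value.
cfg = 'ui.username=alice'             # invented example value
name, value = cfg.split('=', 1)       # 'ui.username', 'alice'
section, name = name.split('.', 1)    # 'ui', 'username'
# ui.setconfig('ui', 'username', 'alice') is then applied; any value
# that fails to split this way aborts with "malformed --config option".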
286 def _earlygetopt(aliases, args):
284 def _earlygetopt(aliases, args):
287 """Return list of values for an option (or aliases).
285 """Return list of values for an option (or aliases).
288
286
289 The values are listed in the order they appear in args.
287 The values are listed in the order they appear in args.
290 The options and values are removed from args.
288 The options and values are removed from args.
291 """
289 """
292 try:
290 try:
293 argcount = args.index("--")
291 argcount = args.index("--")
294 except ValueError:
292 except ValueError:
295 argcount = len(args)
293 argcount = len(args)
296 shortopts = [opt for opt in aliases if len(opt) == 2]
294 shortopts = [opt for opt in aliases if len(opt) == 2]
297 values = []
295 values = []
298 pos = 0
296 pos = 0
299 while pos < argcount:
297 while pos < argcount:
300 if args[pos] in aliases:
298 if args[pos] in aliases:
301 if pos + 1 >= argcount:
299 if pos + 1 >= argcount:
302 # ignore and let getopt report an error if there is no value
300 # ignore and let getopt report an error if there is no value
303 break
301 break
304 del args[pos]
302 del args[pos]
305 values.append(args.pop(pos))
303 values.append(args.pop(pos))
306 argcount -= 2
304 argcount -= 2
307 elif args[pos][:2] in shortopts:
305 elif args[pos][:2] in shortopts:
308 # a short option may be written with no space before its value, e.g. hg log -Rfoo
306 # a short option may be written with no space before its value, e.g. hg log -Rfoo
309 values.append(args.pop(pos)[2:])
307 values.append(args.pop(pos)[2:])
310 argcount -= 1
308 argcount -= 1
311 else:
309 else:
312 pos += 1
310 pos += 1
313 return values
311 return values
314
312
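A quick worked example of the scan above, using the same repository aliases that _dispatch passes in (the argument values are invented):

# Sketch: _earlygetopt pulls -R/--repository values out before full parsing.
args = ['-Rfoo', 'log', '--repository', 'bar', 'tip']
values = _earlygetopt(['-R', '--repository', '--repo'], args)
# values == ['foo', 'bar']   (in the order they appeared)
# args   == ['log', 'tip']   (the options and their values were removed)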
315 def runcommand(lui, repo, cmd, fullargs, ui, options, d):
313 def runcommand(lui, repo, cmd, fullargs, ui, options, d):
316 # run pre-hook, and abort if it fails
314 # run pre-hook, and abort if it fails
317 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
315 ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
318 if ret:
316 if ret:
319 return ret
317 return ret
320 ret = _runcommand(ui, options, cmd, d)
318 ret = _runcommand(ui, options, cmd, d)
321 # run post-hook, passing command result
319 # run post-hook, passing command result
322 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
320 hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
323 result = ret)
321 result = ret)
324 return ret
322 return ret
325
323
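The pre-/post- hooks consulted above are looked up in the [hooks] configuration section under the names pre-<command> and post-<command>. A minimal sketch of wiring one up programmatically — 'myhooks.checkmsg' is an invented Python hook and could equally be set in an hgrc file:

# Sketch: a pre-commit hook that runcommand() fires before "hg commit";
# a hook returning a true value makes runcommand() abort with that value.
ui.setconfig('hooks', 'pre-commit', 'python:myhooks.checkmsg')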
326 _loaded = set()
324 _loaded = set()
327 def _dispatch(ui, args):
325 def _dispatch(ui, args):
328 # read --config before doing anything else
326 # read --config before doing anything else
329 # (e.g. to change trust settings for reading .hg/hgrc)
327 # (e.g. to change trust settings for reading .hg/hgrc)
330 _parseconfig(ui, _earlygetopt(['--config'], args))
328 _parseconfig(ui, _earlygetopt(['--config'], args))
331
329
332 # check for cwd
330 # check for cwd
333 cwd = _earlygetopt(['--cwd'], args)
331 cwd = _earlygetopt(['--cwd'], args)
334 if cwd:
332 if cwd:
335 os.chdir(cwd[-1])
333 os.chdir(cwd[-1])
336
334
337 # read the local repository .hgrc into a local ui object
335 # read the local repository .hgrc into a local ui object
338 path = _findrepo(os.getcwd()) or ""
336 path = _findrepo(os.getcwd()) or ""
339 if not path:
337 if not path:
340 lui = ui
338 lui = ui
341 else:
339 else:
342 try:
340 try:
343 lui = ui.copy()
341 lui = ui.copy()
344 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
342 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
345 except IOError:
343 except IOError:
346 pass
344 pass
347
345
348 # now we can expand paths, even ones in .hg/hgrc
346 # now we can expand paths, even ones in .hg/hgrc
349 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
347 rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
350 if rpath:
348 if rpath:
351 path = lui.expandpath(rpath[-1])
349 path = lui.expandpath(rpath[-1])
352 lui = ui.copy()
350 lui = ui.copy()
353 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
351 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
354
352
355 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
353 # Configure extensions in phases: uisetup, extsetup, cmdtable, and
356 # reposetup. Programs like TortoiseHg will call _dispatch several
354 # reposetup. Programs like TortoiseHg will call _dispatch several
357 # times so we keep track of configured extensions in _loaded.
355 # times so we keep track of configured extensions in _loaded.
358 extensions.loadall(lui)
356 extensions.loadall(lui)
359 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
357 exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
360
358
361 # (uisetup and extsetup are handled in extensions.loadall)
359 # (uisetup and extsetup are handled in extensions.loadall)
362
360
363 for name, module in exts:
361 for name, module in exts:
364 cmdtable = getattr(module, 'cmdtable', {})
362 cmdtable = getattr(module, 'cmdtable', {})
365 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
363 overrides = [cmd for cmd in cmdtable if cmd in commands.table]
366 if overrides:
364 if overrides:
367 ui.warn(_("extension '%s' overrides commands: %s\n")
365 ui.warn(_("extension '%s' overrides commands: %s\n")
368 % (name, " ".join(overrides)))
366 % (name, " ".join(overrides)))
369 commands.table.update(cmdtable)
367 commands.table.update(cmdtable)
370 _loaded.add(name)
368 _loaded.add(name)
371
369
372 # (reposetup is handled in hg.repository)
370 # (reposetup is handled in hg.repository)
373
371
374 addaliases(lui, commands.table)
372 addaliases(lui, commands.table)
375
373
376 # check for fallback encoding
374 # check for fallback encoding
377 fallback = lui.config('ui', 'fallbackencoding')
375 fallback = lui.config('ui', 'fallbackencoding')
378 if fallback:
376 if fallback:
379 encoding.fallbackencoding = fallback
377 encoding.fallbackencoding = fallback
380
378
381 fullargs = args
379 fullargs = args
382 cmd, func, args, options, cmdoptions = _parse(lui, args)
380 cmd, func, args, options, cmdoptions = _parse(lui, args)
383
381
384 if options["config"]:
382 if options["config"]:
385 raise util.Abort(_("Option --config may not be abbreviated!"))
383 raise util.Abort(_("Option --config may not be abbreviated!"))
386 if options["cwd"]:
384 if options["cwd"]:
387 raise util.Abort(_("Option --cwd may not be abbreviated!"))
385 raise util.Abort(_("Option --cwd may not be abbreviated!"))
388 if options["repository"]:
386 if options["repository"]:
389 raise util.Abort(_(
387 raise util.Abort(_(
390 "Option -R has to be separated from other options (e.g. not -qR) "
388 "Option -R has to be separated from other options (e.g. not -qR) "
391 "and --repository may only be abbreviated as --repo!"))
389 "and --repository may only be abbreviated as --repo!"))
392
390
393 if options["encoding"]:
391 if options["encoding"]:
394 encoding.encoding = options["encoding"]
392 encoding.encoding = options["encoding"]
395 if options["encodingmode"]:
393 if options["encodingmode"]:
396 encoding.encodingmode = options["encodingmode"]
394 encoding.encodingmode = options["encodingmode"]
397 if options["time"]:
395 if options["time"]:
398 def get_times():
396 def get_times():
399 t = os.times()
397 t = os.times()
400 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
398 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
401 t = (t[0], t[1], t[2], t[3], time.clock())
399 t = (t[0], t[1], t[2], t[3], time.clock())
402 return t
400 return t
403 s = get_times()
401 s = get_times()
404 def print_time():
402 def print_time():
405 t = get_times()
403 t = get_times()
406 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
404 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
407 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
405 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
408 atexit.register(print_time)
406 atexit.register(print_time)
409
407
410 if options['verbose'] or options['debug'] or options['quiet']:
408 if options['verbose'] or options['debug'] or options['quiet']:
411 ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
409 ui.setconfig('ui', 'verbose', str(bool(options['verbose'])))
412 ui.setconfig('ui', 'debug', str(bool(options['debug'])))
410 ui.setconfig('ui', 'debug', str(bool(options['debug'])))
413 ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
411 ui.setconfig('ui', 'quiet', str(bool(options['quiet'])))
414 if options['traceback']:
412 if options['traceback']:
415 ui.setconfig('ui', 'traceback', 'on')
413 ui.setconfig('ui', 'traceback', 'on')
416 if options['noninteractive']:
414 if options['noninteractive']:
417 ui.setconfig('ui', 'interactive', 'off')
415 ui.setconfig('ui', 'interactive', 'off')
418
416
419 if options['help']:
417 if options['help']:
420 return commands.help_(ui, cmd, options['version'])
418 return commands.help_(ui, cmd, options['version'])
421 elif options['version']:
419 elif options['version']:
422 return commands.version_(ui)
420 return commands.version_(ui)
423 elif not cmd:
421 elif not cmd:
424 return commands.help_(ui, 'shortlist')
422 return commands.help_(ui, 'shortlist')
425
423
426 repo = None
424 repo = None
427 if cmd not in commands.norepo.split():
425 if cmd not in commands.norepo.split():
428 try:
426 try:
429 repo = hg.repository(ui, path=path)
427 repo = hg.repository(ui, path=path)
430 ui = repo.ui
428 ui = repo.ui
431 if not repo.local():
429 if not repo.local():
432 raise util.Abort(_("repository '%s' is not local") % path)
430 raise util.Abort(_("repository '%s' is not local") % path)
433 ui.setconfig("bundle", "mainreporoot", repo.root)
431 ui.setconfig("bundle", "mainreporoot", repo.root)
434 except error.RepoError:
432 except error.RepoError:
435 if cmd not in commands.optionalrepo.split():
433 if cmd not in commands.optionalrepo.split():
436 if args and not path: # try to infer -R from command args
434 if args and not path: # try to infer -R from command args
437 repos = map(_findrepo, args)
435 repos = map(_findrepo, args)
438 guess = repos[0]
436 guess = repos[0]
439 if guess and repos.count(guess) == len(repos):
437 if guess and repos.count(guess) == len(repos):
440 return _dispatch(ui, ['--repository', guess] + fullargs)
438 return _dispatch(ui, ['--repository', guess] + fullargs)
441 if not path:
439 if not path:
442 raise error.RepoError(_("There is no Mercurial repository"
440 raise error.RepoError(_("There is no Mercurial repository"
443 " here (.hg not found)"))
441 " here (.hg not found)"))
444 raise
442 raise
445 args.insert(0, repo)
443 args.insert(0, repo)
446 elif rpath:
444 elif rpath:
447 ui.warn("warning: --repository ignored\n")
445 ui.warn("warning: --repository ignored\n")
448
446
449 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
447 d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
450 return runcommand(lui, repo, cmd, fullargs, ui, options, d)
448 return runcommand(lui, repo, cmd, fullargs, ui, options, d)
451
449
452 def _runcommand(ui, options, cmd, cmdfunc):
450 def _runcommand(ui, options, cmd, cmdfunc):
453 def checkargs():
451 def checkargs():
454 try:
452 try:
455 return cmdfunc()
453 return cmdfunc()
456 except error.SignatureError:
454 except error.SignatureError:
457 raise error.ParseError(cmd, _("invalid arguments"))
455 raise error.ParseError(cmd, _("invalid arguments"))
458
456
459 if options['profile']:
457 if options['profile']:
460 format = ui.config('profiling', 'format', default='text')
458 format = ui.config('profiling', 'format', default='text')
461
459
462 if not format in ['text', 'kcachegrind']:
460 if not format in ['text', 'kcachegrind']:
463 ui.warn(_("unrecognized profiling format '%s'"
461 ui.warn(_("unrecognized profiling format '%s'"
464 " - Ignored\n") % format)
462 " - Ignored\n") % format)
465 format = 'text'
463 format = 'text'
466
464
467 output = ui.config('profiling', 'output')
465 output = ui.config('profiling', 'output')
468
466
469 if output:
467 if output:
470 path = ui.expandpath(output)
468 path = ui.expandpath(output)
471 ostream = open(path, 'wb')
469 ostream = open(path, 'wb')
472 else:
470 else:
473 ostream = sys.stderr
471 ostream = sys.stderr
474
472
475 try:
473 try:
476 from mercurial import lsprof
474 from mercurial import lsprof
477 except ImportError:
475 except ImportError:
478 raise util.Abort(_(
476 raise util.Abort(_(
479 'lsprof not available - install from '
477 'lsprof not available - install from '
480 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
478 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
481 p = lsprof.Profiler()
479 p = lsprof.Profiler()
482 p.enable(subcalls=True)
480 p.enable(subcalls=True)
483 try:
481 try:
484 return checkargs()
482 return checkargs()
485 finally:
483 finally:
486 p.disable()
484 p.disable()
487
485
488 if format == 'kcachegrind':
486 if format == 'kcachegrind':
489 import lsprofcalltree
487 import lsprofcalltree
490 calltree = lsprofcalltree.KCacheGrind(p)
488 calltree = lsprofcalltree.KCacheGrind(p)
491 calltree.output(ostream)
489 calltree.output(ostream)
492 else:
490 else:
493 # format == 'text'
491 # format == 'text'
494 stats = lsprof.Stats(p.getstats())
492 stats = lsprof.Stats(p.getstats())
495 stats.sort()
493 stats.sort()
496 stats.pprint(top=10, file=ostream, climit=5)
494 stats.pprint(top=10, file=ostream, climit=5)
497
495
498 if output:
496 if output:
499 ostream.close()
497 ostream.close()
500 else:
498 else:
501 return checkargs()
499 return checkargs()
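
The profiling branch above is driven by the global --profile flag and by the [profiling] section it reads through ui.config(). As a hedged illustration (the output path is made up), a configuration fragment selecting kcachegrind output could look like:

    [profiling]
    format = kcachegrind
    output = /tmp/hg.cachegrind

With that in place, an invocation such as `hg --profile status` writes a calltree file that kcachegrind can open; leaving output unset keeps the report on stderr, and an unrecognized format falls back to 'text' with a warning, exactly as the code above shows.
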
@@ -1,191 +1,190 b''
1 # extensions.py - extension handling for mercurial
1 # extensions.py - extension handling for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import imp, os
8 import imp, os
9 import util, cmdutil, help
9 import util, cmdutil, help
10 from i18n import _, gettext
10 from i18n import _, gettext
11
11
12 _extensions = {}
12 _extensions = {}
13 _order = []
13 _order = []
14
14
15 def extensions():
15 def extensions():
16 for name in _order:
16 for name in _order:
17 module = _extensions[name]
17 module = _extensions[name]
18 if module:
18 if module:
19 yield name, module
19 yield name, module
20
20
21 def find(name):
21 def find(name):
22 '''return module with given extension name'''
22 '''return module with given extension name'''
23 try:
23 try:
24 return _extensions[name]
24 return _extensions[name]
25 except KeyError:
25 except KeyError:
26 for k, v in _extensions.iteritems():
26 for k, v in _extensions.iteritems():
27 if k.endswith('.' + name) or k.endswith('/' + name):
27 if k.endswith('.' + name) or k.endswith('/' + name):
28 return v
28 return v
29 raise KeyError(name)
29 raise KeyError(name)
30
30
31 def loadpath(path, module_name):
31 def loadpath(path, module_name):
32 module_name = module_name.replace('.', '_')
32 module_name = module_name.replace('.', '_')
33 path = util.expandpath(path)
33 path = util.expandpath(path)
34 if os.path.isdir(path):
34 if os.path.isdir(path):
35 # module/__init__.py style
35 # module/__init__.py style
36 d, f = os.path.split(path.rstrip('/'))
36 d, f = os.path.split(path.rstrip('/'))
37 fd, fpath, desc = imp.find_module(f, [d])
37 fd, fpath, desc = imp.find_module(f, [d])
38 return imp.load_module(module_name, fd, fpath, desc)
38 return imp.load_module(module_name, fd, fpath, desc)
39 else:
39 else:
40 return imp.load_source(module_name, path)
40 return imp.load_source(module_name, path)
41
41
42 def load(ui, name, path):
42 def load(ui, name, path):
43 # unused ui argument kept for backwards compatibility
43 # unused ui argument kept for backwards compatibility
44 if name.startswith('hgext.') or name.startswith('hgext/'):
44 if name.startswith('hgext.') or name.startswith('hgext/'):
45 shortname = name[6:]
45 shortname = name[6:]
46 else:
46 else:
47 shortname = name
47 shortname = name
48 if shortname in _extensions:
48 if shortname in _extensions:
49 return
49 return
50 _extensions[shortname] = None
50 _extensions[shortname] = None
51 if path:
51 if path:
52 # the module will be loaded in sys.modules
52 # the module will be loaded in sys.modules
53 # choose a unique name so that it doesn't
53 # choose a unique name so that it doesn't
54 # conflict with other modules
54 # conflict with other modules
55 mod = loadpath(path, 'hgext.%s' % name)
55 mod = loadpath(path, 'hgext.%s' % name)
56 else:
56 else:
57 def importh(name):
57 def importh(name):
58 mod = __import__(name)
58 mod = __import__(name)
59 components = name.split('.')
59 components = name.split('.')
60 for comp in components[1:]:
60 for comp in components[1:]:
61 mod = getattr(mod, comp)
61 mod = getattr(mod, comp)
62 return mod
62 return mod
63 try:
63 try:
64 mod = importh("hgext.%s" % name)
64 mod = importh("hgext.%s" % name)
65 except ImportError:
65 except ImportError:
66 mod = importh(name)
66 mod = importh(name)
67 _extensions[shortname] = mod
67 _extensions[shortname] = mod
68 _order.append(shortname)
68 _order.append(shortname)
69
69
70 def loadall(ui):
70 def loadall(ui):
71 result = ui.configitems("extensions")
71 result = ui.configitems("extensions")
72 newindex = len(_order)
72 newindex = len(_order)
73 for (name, path) in result:
73 for (name, path) in result:
74 if path:
74 if path:
75 if path[0] == '!':
75 if path[0] == '!':
76 continue
76 continue
77 try:
77 try:
78 load(ui, name, path)
78 load(ui, name, path)
79 except KeyboardInterrupt:
79 except KeyboardInterrupt:
80 raise
80 raise
81 except Exception, inst:
81 except Exception, inst:
82 if path:
82 if path:
83 ui.warn(_("*** failed to import extension %s from %s: %s\n")
83 ui.warn(_("*** failed to import extension %s from %s: %s\n")
84 % (name, path, inst))
84 % (name, path, inst))
85 else:
85 else:
86 ui.warn(_("*** failed to import extension %s: %s\n")
86 ui.warn(_("*** failed to import extension %s: %s\n")
87 % (name, inst))
87 % (name, inst))
88 if ui.traceback():
88 if ui.traceback():
89 return 1
89 return 1
90
90
91 for name in _order[newindex:]:
91 for name in _order[newindex:]:
92 uisetup = getattr(_extensions[name], 'uisetup', None)
92 uisetup = getattr(_extensions[name], 'uisetup', None)
93 if uisetup:
93 if uisetup:
94 uisetup(ui)
94 uisetup(ui)
95
95
96 for name in _order[newindex:]:
96 for name in _order[newindex:]:
97 extsetup = getattr(_extensions[name], 'extsetup', None)
97 extsetup = getattr(_extensions[name], 'extsetup', None)
98 if extsetup:
98 if extsetup:
99 try:
99 try:
100 extsetup(ui)
100 extsetup(ui)
101 except TypeError:
101 except TypeError:
102 if extsetup.func_code.co_argcount != 0:
102 if extsetup.func_code.co_argcount != 0:
103 raise
103 raise
104 extsetup() # old extsetup with no ui argument
104 extsetup() # old extsetup with no ui argument
105
105
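
loadall() above walks the [extensions] section of the configuration (a value starting with '!' skips the entry, any other non-empty value is treated as a path), imports each module, and then calls its uisetup(ui) and extsetup(ui) hooks in two passes. A minimal extension skeleton that this machinery would pick up, with illustrative names only:

    # myext.py - enabled with "myext = /path/to/myext.py" under [extensions]
    def uisetup(ui):
        # first pass: looked up via getattr(module, 'uisetup', None)
        ui.status("myext: uisetup\n")

    def extsetup(ui):
        # second pass: runs after every extension's uisetup has completed
        ui.status("myext: extsetup\n")
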
106 def wrapcommand(table, command, wrapper):
106 def wrapcommand(table, command, wrapper):
107 aliases, entry = cmdutil.findcmd(command, table)
107 aliases, entry = cmdutil.findcmd(command, table)
108 for alias, e in table.iteritems():
108 for alias, e in table.iteritems():
109 if e is entry:
109 if e is entry:
110 key = alias
110 key = alias
111 break
111 break
112
112
113 origfn = entry[0]
113 origfn = entry[0]
114 def wrap(*args, **kwargs):
114 def wrap(*args, **kwargs):
115 return util.checksignature(wrapper)(
115 return util.checksignature(wrapper)(
116 util.checksignature(origfn), *args, **kwargs)
116 util.checksignature(origfn), *args, **kwargs)
117
117
118 wrap.__doc__ = getattr(origfn, '__doc__')
118 wrap.__doc__ = getattr(origfn, '__doc__')
119 wrap.__module__ = getattr(origfn, '__module__')
119 wrap.__module__ = getattr(origfn, '__module__')
120
120
121 newentry = list(entry)
121 newentry = list(entry)
122 newentry[0] = wrap
122 newentry[0] = wrap
123 table[key] = tuple(newentry)
123 table[key] = tuple(newentry)
124 return entry
124 return entry
125
125
126 def wrapfunction(container, funcname, wrapper):
126 def wrapfunction(container, funcname, wrapper):
127 def wrap(*args, **kwargs):
127 def wrap(*args, **kwargs):
128 return wrapper(origfn, *args, **kwargs)
128 return wrapper(origfn, *args, **kwargs)
129
129
130 origfn = getattr(container, funcname)
130 origfn = getattr(container, funcname)
131 setattr(container, funcname, wrap)
131 setattr(container, funcname, wrap)
132 return origfn
132 return origfn
133
133
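
wrapcommand() and wrapfunction() above are the hooks extensions use to intercept existing behaviour: the wrapper always receives the original callable as its first argument and decides whether to delegate. A hedged sketch of wrapfunction() in use; the choice of util.sha1 as the target is only an example:

    from mercurial import extensions, util

    def countedsha1(orig, *args):
        # delegate to the original implementation, merely counting calls
        countedsha1.calls += 1
        return orig(*args)
    countedsha1.calls = 0

    extensions.wrapfunction(util, 'sha1', countedsha1)

After the call every use of util.sha1 goes through countedsha1, and the original function is still reachable as the return value of wrapfunction().
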
134 def disabled():
134 def disabled():
135 '''find disabled extensions from hgext
135 '''find disabled extensions from hgext
136 returns a dict of {name: desc}, and the max name length'''
136 returns a dict of {name: desc}, and the max name length'''
137
137
138 import hgext
138 import hgext
139 extpath = os.path.dirname(os.path.abspath(hgext.__file__))
139 extpath = os.path.dirname(os.path.abspath(hgext.__file__))
140
140
141 try: # might not be a filesystem path
141 try: # might not be a filesystem path
142 files = os.listdir(extpath)
142 files = os.listdir(extpath)
143 except OSError:
143 except OSError:
144 return None, 0
144 return None, 0
145
145
146 exts = {}
146 exts = {}
147 maxlength = 0
147 maxlength = 0
148 for e in files:
148 for e in files:
149
149
150 if e.endswith('.py'):
150 if e.endswith('.py'):
151 name = e.rsplit('.', 1)[0]
151 name = e.rsplit('.', 1)[0]
152 path = os.path.join(extpath, e)
152 path = os.path.join(extpath, e)
153 else:
153 else:
154 name = e
154 name = e
155 path = os.path.join(extpath, e, '__init__.py')
155 path = os.path.join(extpath, e, '__init__.py')
156 if not os.path.exists(path):
156 if not os.path.exists(path):
157 continue
157 continue
158
158
159 if name in exts or name in _order or name == '__init__':
159 if name in exts or name in _order or name == '__init__':
160 continue
160 continue
161
161
162 try:
162 try:
163 file = open(path)
163 file = open(path)
164 except IOError:
164 except IOError:
165 continue
165 continue
166 else:
166 else:
167 doc = help.moduledoc(file)
167 doc = help.moduledoc(file)
168 file.close()
168 file.close()
169
169
170 if doc: # extracting localized synopsis
170 if doc: # extracting localized synopsis
171 exts[name] = gettext(doc).splitlines()[0]
171 exts[name] = gettext(doc).splitlines()[0]
172 else:
172 else:
173 exts[name] = _('(no help text available)')
173 exts[name] = _('(no help text available)')
174
174
175 if len(name) > maxlength:
175 if len(name) > maxlength:
176 maxlength = len(name)
176 maxlength = len(name)
177
177
178 return exts, maxlength
178 return exts, maxlength
179
179
180 def enabled():
180 def enabled():
181 '''return a dict of {name: desc} of extensions, and the max name length'''
181 '''return a dict of {name: desc} of extensions, and the max name length'''
182 exts = {}
182 exts = {}
183 maxlength = 0
183 maxlength = 0
184 exthelps = []
185 for ename, ext in extensions():
184 for ename, ext in extensions():
186 doc = (gettext(ext.__doc__) or _('(no help text available)'))
185 doc = (gettext(ext.__doc__) or _('(no help text available)'))
187 ename = ename.split('.')[-1]
186 ename = ename.split('.')[-1]
188 maxlength = max(len(ename), maxlength)
187 maxlength = max(len(ename), maxlength)
189 exts[ename] = doc.splitlines()[0].strip()
188 exts[ename] = doc.splitlines()[0].strip()
190
189
191 return exts, maxlength
190 return exts, maxlength
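
Both disabled() and enabled() return a {name: synopsis} mapping plus the longest name, which callers use to align a listing. A small sketch of rendering that result (it assumes extensions have already been loaded with loadall(ui)):

    from mercurial import extensions

    exts, maxlength = extensions.enabled()
    for name in sorted(exts):
        # pad names to the reported maximum width
        print '%-*s  %s' % (maxlength, name, exts[name])
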
@@ -1,1378 +1,1378 b''
1 # revlog.py - storage back-end for mercurial
1 # revlog.py - storage back-end for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 """Storage back-end for Mercurial.
8 """Storage back-end for Mercurial.
9
9
10 This provides efficient delta storage with O(1) retrieve and append
10 This provides efficient delta storage with O(1) retrieve and append
11 and O(changes) merge between branches.
11 and O(changes) merge between branches.
12 """
12 """
13
13
14 # import stuff from node for others to import from revlog
14 # import stuff from node for others to import from revlog
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
15 from node import bin, hex, nullid, nullrev, short #@UnusedImport
16 from i18n import _
16 from i18n import _
17 import changegroup, ancestor, mdiff, parsers, error, util
17 import changegroup, ancestor, mdiff, parsers, error, util
18 import struct, zlib, errno
18 import struct, zlib, errno
19
19
20 _pack = struct.pack
20 _pack = struct.pack
21 _unpack = struct.unpack
21 _unpack = struct.unpack
22 _compress = zlib.compress
22 _compress = zlib.compress
23 _decompress = zlib.decompress
23 _decompress = zlib.decompress
24 _sha = util.sha1
24 _sha = util.sha1
25
25
26 # revlog flags
26 # revlog flags
27 REVLOGV0 = 0
27 REVLOGV0 = 0
28 REVLOGNG = 1
28 REVLOGNG = 1
29 REVLOGNGINLINEDATA = (1 << 16)
29 REVLOGNGINLINEDATA = (1 << 16)
30 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
30 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
31 REVLOG_DEFAULT_FORMAT = REVLOGNG
31 REVLOG_DEFAULT_FORMAT = REVLOGNG
32 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
32 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
33
33
34 _prereadsize = 1048576
34 _prereadsize = 1048576
35
35
36 RevlogError = error.RevlogError
36 RevlogError = error.RevlogError
37 LookupError = error.LookupError
37 LookupError = error.LookupError
38
38
39 def getoffset(q):
39 def getoffset(q):
40 return int(q >> 16)
40 return int(q >> 16)
41
41
42 def gettype(q):
42 def gettype(q):
43 return int(q & 0xFFFF)
43 return int(q & 0xFFFF)
44
44
45 def offset_type(offset, type):
45 def offset_type(offset, type):
46 return long(long(offset) << 16 | type)
46 return long(long(offset) << 16 | type)
47
47
48 nullhash = _sha(nullid)
48 nullhash = _sha(nullid)
49
49
50 def hash(text, p1, p2):
50 def hash(text, p1, p2):
51 """generate a hash from the given text and its parent hashes
51 """generate a hash from the given text and its parent hashes
52
52
53 This hash combines both the current file contents and its history
53 This hash combines both the current file contents and its history
54 in a manner that makes it easy to distinguish nodes with the same
54 in a manner that makes it easy to distinguish nodes with the same
55 content in the revision graph.
55 content in the revision graph.
56 """
56 """
57 # As of now, if one of the parent nodes is null, p2 is null
57 # As of now, if one of the parent nodes is null, p2 is null
58 if p2 == nullid:
58 if p2 == nullid:
59 # deep copy of a hash is faster than creating one
59 # deep copy of a hash is faster than creating one
60 s = nullhash.copy()
60 s = nullhash.copy()
61 s.update(p1)
61 s.update(p1)
62 else:
62 else:
63 # none of the parent nodes are nullid
63 # none of the parent nodes are nullid
64 l = [p1, p2]
64 l = [p1, p2]
65 l.sort()
65 l.sort()
66 s = _sha(l[0])
66 s = _sha(l[0])
67 s.update(l[1])
67 s.update(l[1])
68 s.update(text)
68 s.update(text)
69 return s.digest()
69 return s.digest()
70
70
71 def compress(text):
71 def compress(text):
72 """ generate a possibly-compressed representation of text """
72 """ generate a possibly-compressed representation of text """
73 if not text:
73 if not text:
74 return ("", text)
74 return ("", text)
75 l = len(text)
75 l = len(text)
76 bin = None
76 bin = None
77 if l < 44:
77 if l < 44:
78 pass
78 pass
79 elif l > 1000000:
79 elif l > 1000000:
80 # zlib makes an internal copy, thus doubling memory usage for
80 # zlib makes an internal copy, thus doubling memory usage for
81 # large files, so let's do this in pieces
81 # large files, so let's do this in pieces
82 z = zlib.compressobj()
82 z = zlib.compressobj()
83 p = []
83 p = []
84 pos = 0
84 pos = 0
85 while pos < l:
85 while pos < l:
86 pos2 = pos + 2**20
86 pos2 = pos + 2**20
87 p.append(z.compress(text[pos:pos2]))
87 p.append(z.compress(text[pos:pos2]))
88 pos = pos2
88 pos = pos2
89 p.append(z.flush())
89 p.append(z.flush())
90 if sum(map(len, p)) < l:
90 if sum(map(len, p)) < l:
91 bin = "".join(p)
91 bin = "".join(p)
92 else:
92 else:
93 bin = _compress(text)
93 bin = _compress(text)
94 if bin is None or len(bin) > l:
94 if bin is None or len(bin) > l:
95 if text[0] == '\0':
95 if text[0] == '\0':
96 return ("", text)
96 return ("", text)
97 return ('u', text)
97 return ('u', text)
98 return ("", bin)
98 return ("", bin)
99
99
100 def decompress(bin):
100 def decompress(bin):
101 """ decompress the given input """
101 """ decompress the given input """
102 if not bin:
102 if not bin:
103 return bin
103 return bin
104 t = bin[0]
104 t = bin[0]
105 if t == '\0':
105 if t == '\0':
106 return bin
106 return bin
107 if t == 'x':
107 if t == 'x':
108 return _decompress(bin)
108 return _decompress(bin)
109 if t == 'u':
109 if t == 'u':
110 return bin[1:]
110 return bin[1:]
111 raise RevlogError(_("unknown compression type %r") % t)
111 raise RevlogError(_("unknown compression type %r") % t)
112
112
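
compress() and decompress() above form a pair: the first byte of a stored chunk selects the decoding ('\0' means the text is stored as-is and happens to start with a NUL, 'x' is zlib data, 'u' is text kept uncompressed because compression did not pay off). A quick round-trip check using only this module:

    from mercurial import revlog

    for text in ("", "short text", "x" * 5000, "\0binary-ish payload"):
        hdr, data = revlog.compress(text)
        # the stored chunk is header + data; decompress() dispatches on byte 0
        assert revlog.decompress(hdr + data) == text
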
113 class lazyparser(object):
113 class lazyparser(object):
114 """
114 """
115 this class avoids the need to parse the entirety of large indices
115 this class avoids the need to parse the entirety of large indices
116 """
116 """
117
117
118 # lazyparser is not safe to use on windows if win32 extensions are not
118 # lazyparser is not safe to use on windows if win32 extensions are not
119 # available. It keeps the file handle open, which makes it impossible
119 # available. It keeps the file handle open, which makes it impossible
120 # to break hardlinks on local cloned repos.
120 # to break hardlinks on local cloned repos.
121
121
122 def __init__(self, dataf):
122 def __init__(self, dataf):
123 try:
123 try:
124 size = util.fstat(dataf).st_size
124 size = util.fstat(dataf).st_size
125 except AttributeError:
125 except AttributeError:
126 size = 0
126 size = 0
127 self.dataf = dataf
127 self.dataf = dataf
128 self.s = struct.calcsize(indexformatng)
128 self.s = struct.calcsize(indexformatng)
129 self.datasize = size
129 self.datasize = size
130 self.l = size/self.s
130 self.l = size/self.s
131 self.index = [None] * self.l
131 self.index = [None] * self.l
132 self.map = {nullid: nullrev}
132 self.map = {nullid: nullrev}
133 self.allmap = 0
133 self.allmap = 0
134 self.all = 0
134 self.all = 0
135 self.mapfind_count = 0
135 self.mapfind_count = 0
136
136
137 def loadmap(self):
137 def loadmap(self):
138 """
138 """
139 during a commit, we need to make sure the rev being added is
139 during a commit, we need to make sure the rev being added is
140 not a duplicate. This requires loading the entire index,
140 not a duplicate. This requires loading the entire index,
141 which is fairly slow. loadmap can load up just the node map,
141 which is fairly slow. loadmap can load up just the node map,
142 which takes much less time.
142 which takes much less time.
143 """
143 """
144 if self.allmap:
144 if self.allmap:
145 return
145 return
146 end = self.datasize
146 end = self.datasize
147 self.allmap = 1
147 self.allmap = 1
148 cur = 0
148 cur = 0
149 count = 0
149 count = 0
150 blocksize = self.s * 256
150 blocksize = self.s * 256
151 self.dataf.seek(0)
151 self.dataf.seek(0)
152 while cur < end:
152 while cur < end:
153 data = self.dataf.read(blocksize)
153 data = self.dataf.read(blocksize)
154 off = 0
154 off = 0
155 for x in xrange(256):
155 for x in xrange(256):
156 n = data[off + ngshaoffset:off + ngshaoffset + 20]
156 n = data[off + ngshaoffset:off + ngshaoffset + 20]
157 self.map[n] = count
157 self.map[n] = count
158 count += 1
158 count += 1
159 if count >= self.l:
159 if count >= self.l:
160 break
160 break
161 off += self.s
161 off += self.s
162 cur += blocksize
162 cur += blocksize
163
163
164 def loadblock(self, blockstart, blocksize, data=None):
164 def loadblock(self, blockstart, blocksize, data=None):
165 if self.all:
165 if self.all:
166 return
166 return
167 if data is None:
167 if data is None:
168 self.dataf.seek(blockstart)
168 self.dataf.seek(blockstart)
169 if blockstart + blocksize > self.datasize:
169 if blockstart + blocksize > self.datasize:
170 # the revlog may have grown since we've started running,
170 # the revlog may have grown since we've started running,
171 # but we don't have space in self.index for more entries.
171 # but we don't have space in self.index for more entries.
172 # limit blocksize so that we don't get too much data.
172 # limit blocksize so that we don't get too much data.
173 blocksize = max(self.datasize - blockstart, 0)
173 blocksize = max(self.datasize - blockstart, 0)
174 data = self.dataf.read(blocksize)
174 data = self.dataf.read(blocksize)
175 lend = len(data) / self.s
175 lend = len(data) / self.s
176 i = blockstart / self.s
176 i = blockstart / self.s
177 off = 0
177 off = 0
178 # lazyindex supports __delitem__
178 # lazyindex supports __delitem__
179 if lend > len(self.index) - i:
179 if lend > len(self.index) - i:
180 lend = len(self.index) - i
180 lend = len(self.index) - i
181 for x in xrange(lend):
181 for x in xrange(lend):
182 if self.index[i + x] is None:
182 if self.index[i + x] is None:
183 b = data[off : off + self.s]
183 b = data[off : off + self.s]
184 self.index[i + x] = b
184 self.index[i + x] = b
185 n = b[ngshaoffset:ngshaoffset + 20]
185 n = b[ngshaoffset:ngshaoffset + 20]
186 self.map[n] = i + x
186 self.map[n] = i + x
187 off += self.s
187 off += self.s
188
188
189 def findnode(self, node):
189 def findnode(self, node):
190 """search backwards through the index file for a specific node"""
190 """search backwards through the index file for a specific node"""
191 if self.allmap:
191 if self.allmap:
192 return None
192 return None
193
193
194 # hg log will cause many many searches for the manifest
194 # hg log will cause many many searches for the manifest
195 # nodes. After we get called a few times, just load the whole
195 # nodes. After we get called a few times, just load the whole
196 # thing.
196 # thing.
197 if self.mapfind_count > 8:
197 if self.mapfind_count > 8:
198 self.loadmap()
198 self.loadmap()
199 if node in self.map:
199 if node in self.map:
200 return node
200 return node
201 return None
201 return None
202 self.mapfind_count += 1
202 self.mapfind_count += 1
203 last = self.l - 1
203 last = self.l - 1
204 while self.index[last] != None:
204 while self.index[last] != None:
205 if last == 0:
205 if last == 0:
206 self.all = 1
206 self.all = 1
207 self.allmap = 1
207 self.allmap = 1
208 return None
208 return None
209 last -= 1
209 last -= 1
210 end = (last + 1) * self.s
210 end = (last + 1) * self.s
211 blocksize = self.s * 256
211 blocksize = self.s * 256
212 while end >= 0:
212 while end >= 0:
213 start = max(end - blocksize, 0)
213 start = max(end - blocksize, 0)
214 self.dataf.seek(start)
214 self.dataf.seek(start)
215 data = self.dataf.read(end - start)
215 data = self.dataf.read(end - start)
216 findend = end - start
216 findend = end - start
217 while True:
217 while True:
218 # we're searching backwards, so we have to make sure
218 # we're searching backwards, so we have to make sure
219 # we don't find a changeset where this node is a parent
219 # we don't find a changeset where this node is a parent
220 off = data.find(node, 0, findend)
220 off = data.find(node, 0, findend)
221 findend = off
221 findend = off
222 if off >= 0:
222 if off >= 0:
223 i = off / self.s
223 i = off / self.s
224 off = i * self.s
224 off = i * self.s
225 n = data[off + ngshaoffset:off + ngshaoffset + 20]
225 n = data[off + ngshaoffset:off + ngshaoffset + 20]
226 if n == node:
226 if n == node:
227 self.map[n] = i + start / self.s
227 self.map[n] = i + start / self.s
228 return node
228 return node
229 else:
229 else:
230 break
230 break
231 end -= blocksize
231 end -= blocksize
232 return None
232 return None
233
233
234 def loadindex(self, i=None, end=None):
234 def loadindex(self, i=None, end=None):
235 if self.all:
235 if self.all:
236 return
236 return
237 all = False
237 all = False
238 if i is None:
238 if i is None:
239 blockstart = 0
239 blockstart = 0
240 blocksize = (65536 / self.s) * self.s
240 blocksize = (65536 / self.s) * self.s
241 end = self.datasize
241 end = self.datasize
242 all = True
242 all = True
243 else:
243 else:
244 if end:
244 if end:
245 blockstart = i * self.s
245 blockstart = i * self.s
246 end = end * self.s
246 end = end * self.s
247 blocksize = end - blockstart
247 blocksize = end - blockstart
248 else:
248 else:
249 blockstart = (i & ~1023) * self.s
249 blockstart = (i & ~1023) * self.s
250 blocksize = self.s * 1024
250 blocksize = self.s * 1024
251 end = blockstart + blocksize
251 end = blockstart + blocksize
252 while blockstart < end:
252 while blockstart < end:
253 self.loadblock(blockstart, blocksize)
253 self.loadblock(blockstart, blocksize)
254 blockstart += blocksize
254 blockstart += blocksize
255 if all:
255 if all:
256 self.all = True
256 self.all = True
257
257
258 class lazyindex(object):
258 class lazyindex(object):
259 """a lazy version of the index array"""
259 """a lazy version of the index array"""
260 def __init__(self, parser):
260 def __init__(self, parser):
261 self.p = parser
261 self.p = parser
262 def __len__(self):
262 def __len__(self):
263 return len(self.p.index)
263 return len(self.p.index)
264 def load(self, pos):
264 def load(self, pos):
265 if pos < 0:
265 if pos < 0:
266 pos += len(self.p.index)
266 pos += len(self.p.index)
267 self.p.loadindex(pos)
267 self.p.loadindex(pos)
268 return self.p.index[pos]
268 return self.p.index[pos]
269 def __getitem__(self, pos):
269 def __getitem__(self, pos):
270 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
270 return _unpack(indexformatng, self.p.index[pos] or self.load(pos))
271 def __setitem__(self, pos, item):
271 def __setitem__(self, pos, item):
272 self.p.index[pos] = _pack(indexformatng, *item)
272 self.p.index[pos] = _pack(indexformatng, *item)
273 def __delitem__(self, pos):
273 def __delitem__(self, pos):
274 del self.p.index[pos]
274 del self.p.index[pos]
275 def insert(self, pos, e):
275 def insert(self, pos, e):
276 self.p.index.insert(pos, _pack(indexformatng, *e))
276 self.p.index.insert(pos, _pack(indexformatng, *e))
277 def append(self, e):
277 def append(self, e):
278 self.p.index.append(_pack(indexformatng, *e))
278 self.p.index.append(_pack(indexformatng, *e))
279
279
280 class lazymap(object):
280 class lazymap(object):
281 """a lazy version of the node map"""
281 """a lazy version of the node map"""
282 def __init__(self, parser):
282 def __init__(self, parser):
283 self.p = parser
283 self.p = parser
284 def load(self, key):
284 def load(self, key):
285 n = self.p.findnode(key)
285 n = self.p.findnode(key)
286 if n is None:
286 if n is None:
287 raise KeyError(key)
287 raise KeyError(key)
288 def __contains__(self, key):
288 def __contains__(self, key):
289 if key in self.p.map:
289 if key in self.p.map:
290 return True
290 return True
291 self.p.loadmap()
291 self.p.loadmap()
292 return key in self.p.map
292 return key in self.p.map
293 def __iter__(self):
293 def __iter__(self):
294 yield nullid
294 yield nullid
295 for i in xrange(self.p.l):
295 for i in xrange(self.p.l):
296 ret = self.p.index[i]
296 ret = self.p.index[i]
297 if not ret:
297 if not ret:
298 self.p.loadindex(i)
298 self.p.loadindex(i)
299 ret = self.p.index[i]
299 ret = self.p.index[i]
300 if isinstance(ret, str):
300 if isinstance(ret, str):
301 ret = _unpack(indexformatng, ret)
301 ret = _unpack(indexformatng, ret)
302 yield ret[7]
302 yield ret[7]
303 def __getitem__(self, key):
303 def __getitem__(self, key):
304 try:
304 try:
305 return self.p.map[key]
305 return self.p.map[key]
306 except KeyError:
306 except KeyError:
307 try:
307 try:
308 self.load(key)
308 self.load(key)
309 return self.p.map[key]
309 return self.p.map[key]
310 except KeyError:
310 except KeyError:
311 raise KeyError("node " + hex(key))
311 raise KeyError("node " + hex(key))
312 def __setitem__(self, key, val):
312 def __setitem__(self, key, val):
313 self.p.map[key] = val
313 self.p.map[key] = val
314 def __delitem__(self, key):
314 def __delitem__(self, key):
315 del self.p.map[key]
315 del self.p.map[key]
316
316
317 indexformatv0 = ">4l20s20s20s"
317 indexformatv0 = ">4l20s20s20s"
318 v0shaoffset = 56
318 v0shaoffset = 56
319
319
320 class revlogoldio(object):
320 class revlogoldio(object):
321 def __init__(self):
321 def __init__(self):
322 self.size = struct.calcsize(indexformatv0)
322 self.size = struct.calcsize(indexformatv0)
323
323
324 def parseindex(self, fp, data, inline):
324 def parseindex(self, fp, data, inline):
325 s = self.size
325 s = self.size
326 index = []
326 index = []
327 nodemap = {nullid: nullrev}
327 nodemap = {nullid: nullrev}
328 n = off = 0
328 n = off = 0
329 if len(data) == _prereadsize:
329 if len(data) == _prereadsize:
330 data += fp.read() # read the rest
330 data += fp.read() # read the rest
331 l = len(data)
331 l = len(data)
332 while off + s <= l:
332 while off + s <= l:
333 cur = data[off:off + s]
333 cur = data[off:off + s]
334 off += s
334 off += s
335 e = _unpack(indexformatv0, cur)
335 e = _unpack(indexformatv0, cur)
336 # transform to revlogv1 format
336 # transform to revlogv1 format
337 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
337 e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
338 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
338 nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
339 index.append(e2)
339 index.append(e2)
340 nodemap[e[6]] = n
340 nodemap[e[6]] = n
341 n += 1
341 n += 1
342
342
343 return index, nodemap, None
343 return index, nodemap, None
344
344
345 def packentry(self, entry, node, version, rev):
345 def packentry(self, entry, node, version, rev):
346 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
346 e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
347 node(entry[5]), node(entry[6]), entry[7])
347 node(entry[5]), node(entry[6]), entry[7])
348 return _pack(indexformatv0, *e2)
348 return _pack(indexformatv0, *e2)
349
349
350 # index ng:
350 # index ng:
351 # 6 bytes offset
351 # 6 bytes offset
352 # 2 bytes flags
352 # 2 bytes flags
353 # 4 bytes compressed length
353 # 4 bytes compressed length
354 # 4 bytes uncompressed length
354 # 4 bytes uncompressed length
355 # 4 bytes: base rev
355 # 4 bytes: base rev
356 # 4 bytes link rev
356 # 4 bytes link rev
357 # 4 bytes parent 1 rev
357 # 4 bytes parent 1 rev
358 # 4 bytes parent 2 rev
358 # 4 bytes parent 2 rev
359 # 32 bytes: nodeid
359 # 32 bytes: nodeid
360 indexformatng = ">Qiiiiii20s12x"
360 indexformatng = ">Qiiiiii20s12x"
361 ngshaoffset = 32
361 ngshaoffset = 32
362 versionformat = ">I"
362 versionformat = ">I"
363
363
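
The comment above documents one RevlogNG index record: 64 packed bytes per revision, with the first quadword multiplexing a 48-bit offset and 16-bit flags (split apart by getoffset() and gettype()). A hedged sketch of decoding a single raw record with the same struct format (for entries after rev 0; the first record has the version stamped over its first four bytes, as packentry() below shows):

    import struct

    indexformatng = ">Qiiiiii20s12x"

    def unpackentry(record):
        # one 64-byte RevlogNG index record
        (offset_flags, compressed, uncompressed, base,
         link, p1, p2, node) = struct.unpack(indexformatng, record)
        return {
            'offset': int(offset_flags >> 16),    # getoffset()
            'flags': int(offset_flags & 0xFFFF),  # gettype()
            'compressed length': compressed,
            'uncompressed length': uncompressed,
            'base rev': base,
            'link rev': link,
            'parent revs': (p1, p2),
            'node': node,                         # 20-byte binary nodeid
        }
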
364 class revlogio(object):
364 class revlogio(object):
365 def __init__(self):
365 def __init__(self):
366 self.size = struct.calcsize(indexformatng)
366 self.size = struct.calcsize(indexformatng)
367
367
368 def parseindex(self, fp, data, inline):
368 def parseindex(self, fp, data, inline):
369 if len(data) == _prereadsize:
369 if len(data) == _prereadsize:
370 if util.openhardlinks() and not inline:
370 if util.openhardlinks() and not inline:
371 # big index, let's parse it on demand
371 # big index, let's parse it on demand
372 parser = lazyparser(fp)
372 parser = lazyparser(fp)
373 index = lazyindex(parser)
373 index = lazyindex(parser)
374 nodemap = lazymap(parser)
374 nodemap = lazymap(parser)
375 e = list(index[0])
375 e = list(index[0])
376 type = gettype(e[0])
376 type = gettype(e[0])
377 e[0] = offset_type(0, type)
377 e[0] = offset_type(0, type)
378 index[0] = e
378 index[0] = e
379 return index, nodemap, None
379 return index, nodemap, None
380 else:
380 else:
381 data += fp.read()
381 data += fp.read()
382
382
383 # call the C implementation to parse the index data
383 # call the C implementation to parse the index data
384 index, nodemap, cache = parsers.parse_index(data, inline)
384 index, nodemap, cache = parsers.parse_index(data, inline)
385 return index, nodemap, cache
385 return index, nodemap, cache
386
386
387 def packentry(self, entry, node, version, rev):
387 def packentry(self, entry, node, version, rev):
388 p = _pack(indexformatng, *entry)
388 p = _pack(indexformatng, *entry)
389 if rev == 0:
389 if rev == 0:
390 p = _pack(versionformat, version) + p[4:]
390 p = _pack(versionformat, version) + p[4:]
391 return p
391 return p
392
392
393 class revlog(object):
393 class revlog(object):
394 """
394 """
395 the underlying revision storage object
395 the underlying revision storage object
396
396
397 A revlog consists of two parts, an index and the revision data.
397 A revlog consists of two parts, an index and the revision data.
398
398
399 The index is a file with a fixed record size containing
399 The index is a file with a fixed record size containing
400 information on each revision, including its nodeid (hash), the
400 information on each revision, including its nodeid (hash), the
401 nodeids of its parents, the position and offset of its data within
401 nodeids of its parents, the position and offset of its data within
402 the data file, and the revision it's based on. Finally, each entry
402 the data file, and the revision it's based on. Finally, each entry
403 contains a linkrev entry that can serve as a pointer to external
403 contains a linkrev entry that can serve as a pointer to external
404 data.
404 data.
405
405
406 The revision data itself is a linear collection of data chunks.
406 The revision data itself is a linear collection of data chunks.
407 Each chunk represents a revision and is usually represented as a
407 Each chunk represents a revision and is usually represented as a
408 delta against the previous chunk. To bound lookup time, runs of
408 delta against the previous chunk. To bound lookup time, runs of
409 deltas are limited to about 2 times the length of the original
409 deltas are limited to about 2 times the length of the original
410 version data. This makes retrieval of a version proportional to
410 version data. This makes retrieval of a version proportional to
411 its size, or O(1) relative to the number of revisions.
411 its size, or O(1) relative to the number of revisions.
412
412
413 Both pieces of the revlog are written to in an append-only
413 Both pieces of the revlog are written to in an append-only
414 fashion, which means we never need to rewrite a file to insert or
414 fashion, which means we never need to rewrite a file to insert or
415 remove data, and can use some simple techniques to avoid the need
415 remove data, and can use some simple techniques to avoid the need
416 for locking while reading.
416 for locking while reading.
417 """
417 """
418 def __init__(self, opener, indexfile):
418 def __init__(self, opener, indexfile):
419 """
419 """
420 create a revlog object
420 create a revlog object
421
421
422 opener is a function that abstracts the file opening operation
422 opener is a function that abstracts the file opening operation
423 and can be used to implement COW semantics or the like.
423 and can be used to implement COW semantics or the like.
424 """
424 """
425 self.indexfile = indexfile
425 self.indexfile = indexfile
426 self.datafile = indexfile[:-2] + ".d"
426 self.datafile = indexfile[:-2] + ".d"
427 self.opener = opener
427 self.opener = opener
428 self._cache = None
428 self._cache = None
429 self._chunkcache = (0, '')
429 self._chunkcache = (0, '')
430 self.nodemap = {nullid: nullrev}
430 self.nodemap = {nullid: nullrev}
431 self.index = []
431 self.index = []
432
432
433 v = REVLOG_DEFAULT_VERSION
433 v = REVLOG_DEFAULT_VERSION
434 if hasattr(opener, "defversion"):
434 if hasattr(opener, "defversion"):
435 v = opener.defversion
435 v = opener.defversion
436 if v & REVLOGNG:
436 if v & REVLOGNG:
437 v |= REVLOGNGINLINEDATA
437 v |= REVLOGNGINLINEDATA
438
438
439 i = ''
439 i = ''
440 try:
440 try:
441 f = self.opener(self.indexfile)
441 f = self.opener(self.indexfile)
442 i = f.read(_prereadsize)
442 i = f.read(_prereadsize)
443 if len(i) > 0:
443 if len(i) > 0:
444 v = struct.unpack(versionformat, i[:4])[0]
444 v = struct.unpack(versionformat, i[:4])[0]
445 except IOError, inst:
445 except IOError, inst:
446 if inst.errno != errno.ENOENT:
446 if inst.errno != errno.ENOENT:
447 raise
447 raise
448
448
449 self.version = v
449 self.version = v
450 self._inline = v & REVLOGNGINLINEDATA
450 self._inline = v & REVLOGNGINLINEDATA
451 flags = v & ~0xFFFF
451 flags = v & ~0xFFFF
452 fmt = v & 0xFFFF
452 fmt = v & 0xFFFF
453 if fmt == REVLOGV0 and flags:
453 if fmt == REVLOGV0 and flags:
454 raise RevlogError(_("index %s unknown flags %#04x for format v0")
454 raise RevlogError(_("index %s unknown flags %#04x for format v0")
455 % (self.indexfile, flags >> 16))
455 % (self.indexfile, flags >> 16))
456 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
456 elif fmt == REVLOGNG and flags & ~REVLOGNGINLINEDATA:
457 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
457 raise RevlogError(_("index %s unknown flags %#04x for revlogng")
458 % (self.indexfile, flags >> 16))
458 % (self.indexfile, flags >> 16))
459 elif fmt > REVLOGNG:
459 elif fmt > REVLOGNG:
460 raise RevlogError(_("index %s unknown format %d")
460 raise RevlogError(_("index %s unknown format %d")
461 % (self.indexfile, fmt))
461 % (self.indexfile, fmt))
462
462
463 self._io = revlogio()
463 self._io = revlogio()
464 if self.version == REVLOGV0:
464 if self.version == REVLOGV0:
465 self._io = revlogoldio()
465 self._io = revlogoldio()
466 if i:
466 if i:
467 try:
467 try:
468 d = self._io.parseindex(f, i, self._inline)
468 d = self._io.parseindex(f, i, self._inline)
469 except (ValueError, IndexError), e:
469 except (ValueError, IndexError):
470 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
470 raise RevlogError(_("index %s is corrupted") % (self.indexfile))
471 self.index, self.nodemap, self._chunkcache = d
471 self.index, self.nodemap, self._chunkcache = d
472 if not self._chunkcache:
472 if not self._chunkcache:
473 self._chunkclear()
473 self._chunkclear()
474
474
475 # add the magic null revision at -1 (if it hasn't been done already)
475 # add the magic null revision at -1 (if it hasn't been done already)
476 if (self.index == [] or isinstance(self.index, lazyindex) or
476 if (self.index == [] or isinstance(self.index, lazyindex) or
477 self.index[-1][7] != nullid) :
477 self.index[-1][7] != nullid) :
478 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
478 self.index.append((0, 0, 0, -1, -1, -1, -1, nullid))
479
479
480 def _loadindex(self, start, end):
480 def _loadindex(self, start, end):
481 """load a block of indexes all at once from the lazy parser"""
481 """load a block of indexes all at once from the lazy parser"""
482 if isinstance(self.index, lazyindex):
482 if isinstance(self.index, lazyindex):
483 self.index.p.loadindex(start, end)
483 self.index.p.loadindex(start, end)
484
484
485 def _loadindexmap(self):
485 def _loadindexmap(self):
486 """loads both the map and the index from the lazy parser"""
486 """loads both the map and the index from the lazy parser"""
487 if isinstance(self.index, lazyindex):
487 if isinstance(self.index, lazyindex):
488 p = self.index.p
488 p = self.index.p
489 p.loadindex()
489 p.loadindex()
490 self.nodemap = p.map
490 self.nodemap = p.map
491
491
492 def _loadmap(self):
492 def _loadmap(self):
493 """loads the map from the lazy parser"""
493 """loads the map from the lazy parser"""
494 if isinstance(self.nodemap, lazymap):
494 if isinstance(self.nodemap, lazymap):
495 self.nodemap.p.loadmap()
495 self.nodemap.p.loadmap()
496 self.nodemap = self.nodemap.p.map
496 self.nodemap = self.nodemap.p.map
497
497
498 def tip(self):
498 def tip(self):
499 return self.node(len(self.index) - 2)
499 return self.node(len(self.index) - 2)
500 def __len__(self):
500 def __len__(self):
501 return len(self.index) - 1
501 return len(self.index) - 1
502 def __iter__(self):
502 def __iter__(self):
503 for i in xrange(len(self)):
503 for i in xrange(len(self)):
504 yield i
504 yield i
505 def rev(self, node):
505 def rev(self, node):
506 try:
506 try:
507 return self.nodemap[node]
507 return self.nodemap[node]
508 except KeyError:
508 except KeyError:
509 raise LookupError(node, self.indexfile, _('no node'))
509 raise LookupError(node, self.indexfile, _('no node'))
510 def node(self, rev):
510 def node(self, rev):
511 return self.index[rev][7]
511 return self.index[rev][7]
512 def linkrev(self, rev):
512 def linkrev(self, rev):
513 return self.index[rev][4]
513 return self.index[rev][4]
514 def parents(self, node):
514 def parents(self, node):
515 i = self.index
515 i = self.index
516 d = i[self.rev(node)]
516 d = i[self.rev(node)]
517 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
517 return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
518 def parentrevs(self, rev):
518 def parentrevs(self, rev):
519 return self.index[rev][5:7]
519 return self.index[rev][5:7]
520 def start(self, rev):
520 def start(self, rev):
521 return int(self.index[rev][0] >> 16)
521 return int(self.index[rev][0] >> 16)
522 def end(self, rev):
522 def end(self, rev):
523 return self.start(rev) + self.length(rev)
523 return self.start(rev) + self.length(rev)
524 def length(self, rev):
524 def length(self, rev):
525 return self.index[rev][1]
525 return self.index[rev][1]
526 def base(self, rev):
526 def base(self, rev):
527 return self.index[rev][3]
527 return self.index[rev][3]
528
528
529 def size(self, rev):
529 def size(self, rev):
530 """return the length of the uncompressed text for a given revision"""
530 """return the length of the uncompressed text for a given revision"""
531 l = self.index[rev][2]
531 l = self.index[rev][2]
532 if l >= 0:
532 if l >= 0:
533 return l
533 return l
534
534
535 t = self.revision(self.node(rev))
535 t = self.revision(self.node(rev))
536 return len(t)
536 return len(t)
537
537
538 # Alternate implementation. The advantage to this code is it
538 # Alternate implementation. The advantage to this code is it
539 # will be faster for a single revision. However, the results
539 # will be faster for a single revision. However, the results
540 # are not cached, so finding the size of every revision will
540 # are not cached, so finding the size of every revision will
541 # be slower.
541 # be slower.
542 #
542 #
543 # if self.cache and self.cache[1] == rev:
543 # if self.cache and self.cache[1] == rev:
544 # return len(self.cache[2])
544 # return len(self.cache[2])
545 #
545 #
546 # base = self.base(rev)
546 # base = self.base(rev)
547 # if self.cache and self.cache[1] >= base and self.cache[1] < rev:
547 # if self.cache and self.cache[1] >= base and self.cache[1] < rev:
548 # base = self.cache[1]
548 # base = self.cache[1]
549 # text = self.cache[2]
549 # text = self.cache[2]
550 # else:
550 # else:
551 # text = self.revision(self.node(base))
551 # text = self.revision(self.node(base))
552 #
552 #
553 # l = len(text)
553 # l = len(text)
554 # for x in xrange(base + 1, rev + 1):
554 # for x in xrange(base + 1, rev + 1):
555 # l = mdiff.patchedsize(l, self._chunk(x))
555 # l = mdiff.patchedsize(l, self._chunk(x))
556 # return l
556 # return l
557
557
558 def reachable(self, node, stop=None):
558 def reachable(self, node, stop=None):
559 """return the set of all nodes ancestral to a given node, including
559 """return the set of all nodes ancestral to a given node, including
560 the node itself, stopping when stop is matched"""
560 the node itself, stopping when stop is matched"""
561 reachable = set((node,))
561 reachable = set((node,))
562 visit = [node]
562 visit = [node]
563 if stop:
563 if stop:
564 stopn = self.rev(stop)
564 stopn = self.rev(stop)
565 else:
565 else:
566 stopn = 0
566 stopn = 0
567 while visit:
567 while visit:
568 n = visit.pop(0)
568 n = visit.pop(0)
569 if n == stop:
569 if n == stop:
570 continue
570 continue
571 if n == nullid:
571 if n == nullid:
572 continue
572 continue
573 for p in self.parents(n):
573 for p in self.parents(n):
574 if self.rev(p) < stopn:
574 if self.rev(p) < stopn:
575 continue
575 continue
576 if p not in reachable:
576 if p not in reachable:
577 reachable.add(p)
577 reachable.add(p)
578 visit.append(p)
578 visit.append(p)
579 return reachable
579 return reachable
580
580
581 def ancestors(self, *revs):
581 def ancestors(self, *revs):
582 'Generate the ancestors of revs using a breadth-first visit'
582 'Generate the ancestors of revs using a breadth-first visit'
583 visit = list(revs)
583 visit = list(revs)
584 seen = set([nullrev])
584 seen = set([nullrev])
585 while visit:
585 while visit:
586 for parent in self.parentrevs(visit.pop(0)):
586 for parent in self.parentrevs(visit.pop(0)):
587 if parent not in seen:
587 if parent not in seen:
588 visit.append(parent)
588 visit.append(parent)
589 seen.add(parent)
589 seen.add(parent)
590 yield parent
590 yield parent
591
591
592 def descendants(self, *revs):
592 def descendants(self, *revs):
593 'Generate the descendants of revs in topological order'
593 'Generate the descendants of revs in topological order'
594 seen = set(revs)
594 seen = set(revs)
595 for i in xrange(min(revs) + 1, len(self)):
595 for i in xrange(min(revs) + 1, len(self)):
596 for x in self.parentrevs(i):
596 for x in self.parentrevs(i):
597 if x != nullrev and x in seen:
597 if x != nullrev and x in seen:
598 seen.add(i)
598 seen.add(i)
599 yield i
599 yield i
600 break
600 break
601
601
602 def findmissing(self, common=None, heads=None):
602 def findmissing(self, common=None, heads=None):
603 '''
603 '''
604 returns the topologically sorted list of nodes from the set:
604 returns the topologically sorted list of nodes from the set:
605 missing = (ancestors(heads) \ ancestors(common))
605 missing = (ancestors(heads) \ ancestors(common))
606
606
607 where ancestors() is the set of ancestors from heads, heads included
607 where ancestors() is the set of ancestors from heads, heads included
608
608
609 if heads is None, the heads of the revlog are used
609 if heads is None, the heads of the revlog are used
610 if common is None, nullid is assumed to be a common node
610 if common is None, nullid is assumed to be a common node
611 '''
611 '''
612 if common is None:
612 if common is None:
613 common = [nullid]
613 common = [nullid]
614 if heads is None:
614 if heads is None:
615 heads = self.heads()
615 heads = self.heads()
616
616
617 common = [self.rev(n) for n in common]
617 common = [self.rev(n) for n in common]
618 heads = [self.rev(n) for n in heads]
618 heads = [self.rev(n) for n in heads]
619
619
620 # we want the ancestors, but inclusive
620 # we want the ancestors, but inclusive
621 has = set(self.ancestors(*common))
621 has = set(self.ancestors(*common))
622 has.add(nullrev)
622 has.add(nullrev)
623 has.update(common)
623 has.update(common)
624
624
625 # take all ancestors from heads that aren't in has
625 # take all ancestors from heads that aren't in has
626 missing = set()
626 missing = set()
627 visit = [r for r in heads if r not in has]
627 visit = [r for r in heads if r not in has]
628 while visit:
628 while visit:
629 r = visit.pop(0)
629 r = visit.pop(0)
630 if r in missing:
630 if r in missing:
631 continue
631 continue
632 else:
632 else:
633 missing.add(r)
633 missing.add(r)
634 for p in self.parentrevs(r):
634 for p in self.parentrevs(r):
635 if p not in has:
635 if p not in has:
636 visit.append(p)
636 visit.append(p)
637 missing = list(missing)
637 missing = list(missing)
638 missing.sort()
638 missing.sort()
639 return [self.node(r) for r in missing]
639 return [self.node(r) for r in missing]
640
640
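
findmissing() above computes ancestors(heads) \ ancestors(common), with both sides taken inclusively, and returns the difference in revision order. A toy illustration of the same set relation over a plain {rev: (p1, p2)} parent map rather than a real revlog; -1 plays the role of nullrev:

    def toyancestors(parents, revs):
        # inclusive ancestors of revs; -1 is the null revision
        seen = set()
        visit = list(revs)
        while visit:
            r = visit.pop()
            if r == -1 or r in seen:
                continue
            seen.add(r)
            visit.extend(parents[r])
        return seen

    # linear history 0-1-2 plus a branch 1-3
    parents = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
    missing = sorted(toyancestors(parents, [2, 3]) - toyancestors(parents, [1]))
    assert missing == [2, 3]
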
641 def nodesbetween(self, roots=None, heads=None):
641 def nodesbetween(self, roots=None, heads=None):
642 """Return a tuple containing three elements. Elements 1 and 2 contain
642 """Return a tuple containing three elements. Elements 1 and 2 contain
643 the final lists of bases and heads after all the unreachable ones have been
643 the final lists of bases and heads after all the unreachable ones have been
644 pruned. Element 0 contains a topologically sorted list of all
644 pruned. Element 0 contains a topologically sorted list of all
645
645
646 nodes that satisfy these constraints:
646 nodes that satisfy these constraints:
647 1. All nodes must be descended from a node in roots (the nodes on
647 1. All nodes must be descended from a node in roots (the nodes on
648 roots are considered descended from themselves).
648 roots are considered descended from themselves).
649 2. All nodes must also be ancestors of a node in heads (the nodes in
649 2. All nodes must also be ancestors of a node in heads (the nodes in
650 heads are considered to be their own ancestors).
650 heads are considered to be their own ancestors).
651
651
652 If roots is unspecified, nullid is assumed as the only root.
652 If roots is unspecified, nullid is assumed as the only root.
653 If heads is unspecified, it is taken to be the output of the
653 If heads is unspecified, it is taken to be the output of the
654 heads method (i.e. a list of all nodes in the repository that
654 heads method (i.e. a list of all nodes in the repository that
655 have no children)."""
655 have no children)."""
656 nonodes = ([], [], [])
656 nonodes = ([], [], [])
657 if roots is not None:
657 if roots is not None:
658 roots = list(roots)
658 roots = list(roots)
659 if not roots:
659 if not roots:
660 return nonodes
660 return nonodes
661 lowestrev = min([self.rev(n) for n in roots])
661 lowestrev = min([self.rev(n) for n in roots])
662 else:
662 else:
663 roots = [nullid] # Everybody's a descendent of nullid
663 roots = [nullid] # Everybody's a descendent of nullid
664 lowestrev = nullrev
664 lowestrev = nullrev
665 if (lowestrev == nullrev) and (heads is None):
665 if (lowestrev == nullrev) and (heads is None):
666 # We want _all_ the nodes!
666 # We want _all_ the nodes!
667 return ([self.node(r) for r in self], [nullid], list(self.heads()))
667 return ([self.node(r) for r in self], [nullid], list(self.heads()))
668 if heads is None:
668 if heads is None:
669 # All nodes are ancestors, so the latest ancestor is the last
669 # All nodes are ancestors, so the latest ancestor is the last
670 # node.
670 # node.
671 highestrev = len(self) - 1
671 highestrev = len(self) - 1
672 # Set ancestors to None to signal that every node is an ancestor.
672 # Set ancestors to None to signal that every node is an ancestor.
673 ancestors = None
673 ancestors = None
674 # Set heads to an empty dictionary for later discovery of heads
674 # Set heads to an empty dictionary for later discovery of heads
675 heads = {}
675 heads = {}
676 else:
676 else:
677 heads = list(heads)
677 heads = list(heads)
678 if not heads:
678 if not heads:
679 return nonodes
679 return nonodes
680 ancestors = set()
680 ancestors = set()
681 # Turn heads into a dictionary so we can remove 'fake' heads.
681 # Turn heads into a dictionary so we can remove 'fake' heads.
682 # Also, later we will be using it to filter out the heads we can't
682 # Also, later we will be using it to filter out the heads we can't
683 # find from roots.
683 # find from roots.
684 heads = dict.fromkeys(heads, 0)
684 heads = dict.fromkeys(heads, 0)
685 # Start at the top and keep marking parents until we're done.
685 # Start at the top and keep marking parents until we're done.
686 nodestotag = set(heads)
686 nodestotag = set(heads)
687 # Remember where the top was so we can use it as a limit later.
687 # Remember where the top was so we can use it as a limit later.
688 highestrev = max([self.rev(n) for n in nodestotag])
688 highestrev = max([self.rev(n) for n in nodestotag])
689 while nodestotag:
689 while nodestotag:
690 # grab a node to tag
690 # grab a node to tag
691 n = nodestotag.pop()
691 n = nodestotag.pop()
692 # Never tag nullid
692 # Never tag nullid
693 if n == nullid:
693 if n == nullid:
694 continue
694 continue
695 # A node's revision number represents its place in a
695 # A node's revision number represents its place in a
696 # topologically sorted list of nodes.
696 # topologically sorted list of nodes.
697 r = self.rev(n)
697 r = self.rev(n)
698 if r >= lowestrev:
698 if r >= lowestrev:
699 if n not in ancestors:
699 if n not in ancestors:
700 # If we are possibly a descendent of one of the roots
700 # If we are possibly a descendent of one of the roots
701 # and we haven't already been marked as an ancestor
701 # and we haven't already been marked as an ancestor
702 ancestors.add(n) # Mark as ancestor
702 ancestors.add(n) # Mark as ancestor
703 # Add non-nullid parents to list of nodes to tag.
703 # Add non-nullid parents to list of nodes to tag.
704 nodestotag.update([p for p in self.parents(n) if
704 nodestotag.update([p for p in self.parents(n) if
705 p != nullid])
705 p != nullid])
706 elif n in heads: # We've seen it before, is it a fake head?
706 elif n in heads: # We've seen it before, is it a fake head?
707 # So it is, real heads should not be the ancestors of
707 # So it is, real heads should not be the ancestors of
708 # any other heads.
708 # any other heads.
709 heads.pop(n)
709 heads.pop(n)
710 if not ancestors:
710 if not ancestors:
711 return nonodes
711 return nonodes
712 # Now that we have our set of ancestors, we want to remove any
712 # Now that we have our set of ancestors, we want to remove any
713 # roots that are not ancestors.
713 # roots that are not ancestors.
714
714
715 # If one of the roots was nullid, everything is included anyway.
715 # If one of the roots was nullid, everything is included anyway.
716 if lowestrev > nullrev:
716 if lowestrev > nullrev:
717 # But, since we weren't, let's recompute the lowest rev to not
717 # But, since we weren't, let's recompute the lowest rev to not
718 # include roots that aren't ancestors.
718 # include roots that aren't ancestors.
719
719
720 # Filter out roots that aren't ancestors of heads
720 # Filter out roots that aren't ancestors of heads
721 roots = [n for n in roots if n in ancestors]
721 roots = [n for n in roots if n in ancestors]
722 # Recompute the lowest revision
722 # Recompute the lowest revision
723 if roots:
723 if roots:
724 lowestrev = min([self.rev(n) for n in roots])
724 lowestrev = min([self.rev(n) for n in roots])
725 else:
725 else:
726 # No more roots? Return empty list
726 # No more roots? Return empty list
727 return nonodes
727 return nonodes
728 else:
728 else:
729 # We are descending from nullid, and don't need to care about
729 # We are descending from nullid, and don't need to care about
730 # any other roots.
730 # any other roots.
731 lowestrev = nullrev
731 lowestrev = nullrev
732 roots = [nullid]
732 roots = [nullid]
733 # Transform our roots list into a set.
733 # Transform our roots list into a set.
734 descendents = set(roots)
734 descendents = set(roots)
735 # Also, keep the original roots so we can filter out roots that aren't
735 # Also, keep the original roots so we can filter out roots that aren't
736 # 'real' roots (i.e. are descended from other roots).
736 # 'real' roots (i.e. are descended from other roots).
737 roots = descendents.copy()
737 roots = descendents.copy()
738 # Our topologically sorted list of output nodes.
738 # Our topologically sorted list of output nodes.
739 orderedout = []
739 orderedout = []
740 # Don't start at nullid since we don't want nullid in our output list,
740 # Don't start at nullid since we don't want nullid in our output list,
741 # and if nullid shows up in descendents, empty parents will look like
741 # and if nullid shows up in descendents, empty parents will look like
742 # they're descendents.
742 # they're descendents.
743 for r in xrange(max(lowestrev, 0), highestrev + 1):
743 for r in xrange(max(lowestrev, 0), highestrev + 1):
744 n = self.node(r)
744 n = self.node(r)
745 isdescendent = False
745 isdescendent = False
746 if lowestrev == nullrev: # Everybody is a descendent of nullid
746 if lowestrev == nullrev: # Everybody is a descendent of nullid
747 isdescendent = True
747 isdescendent = True
748 elif n in descendents:
748 elif n in descendents:
749 # n is already a descendent
749 # n is already a descendent
750 isdescendent = True
750 isdescendent = True
751 # This check only needs to be done here because all the roots
751 # This check only needs to be done here because all the roots
752 # will start being marked as descendents before the loop.
752 # will start being marked as descendents before the loop.
753 if n in roots:
753 if n in roots:
754 # If n was a root, check if it's a 'real' root.
754 # If n was a root, check if it's a 'real' root.
755 p = tuple(self.parents(n))
755 p = tuple(self.parents(n))
756 # If any of its parents are descendents, it's not a root.
756 # If any of its parents are descendents, it's not a root.
757 if (p[0] in descendents) or (p[1] in descendents):
757 if (p[0] in descendents) or (p[1] in descendents):
758 roots.remove(n)
758 roots.remove(n)
759 else:
759 else:
760 p = tuple(self.parents(n))
760 p = tuple(self.parents(n))
761 # A node is a descendent if either of its parents is a
761 # A node is a descendent if either of its parents is a
762 # descendent. (We seeded the descendents set with the roots
762 # descendent. (We seeded the descendents set with the roots
763 # up there, remember?)
763 # up there, remember?)
764 if (p[0] in descendents) or (p[1] in descendents):
764 if (p[0] in descendents) or (p[1] in descendents):
765 descendents.add(n)
765 descendents.add(n)
766 isdescendent = True
766 isdescendent = True
767 if isdescendent and ((ancestors is None) or (n in ancestors)):
767 if isdescendent and ((ancestors is None) or (n in ancestors)):
768 # Only include nodes that are both descendents and ancestors.
768 # Only include nodes that are both descendents and ancestors.
769 orderedout.append(n)
769 orderedout.append(n)
770 if (ancestors is not None) and (n in heads):
770 if (ancestors is not None) and (n in heads):
771 # We're trying to figure out which heads are reachable
771 # We're trying to figure out which heads are reachable
772 # from roots.
772 # from roots.
773 # Mark this head as having been reached
773 # Mark this head as having been reached
774 heads[n] = 1
774 heads[n] = 1
775 elif ancestors is None:
775 elif ancestors is None:
776 # Otherwise, we're trying to discover the heads.
776 # Otherwise, we're trying to discover the heads.
777 # Assume this is a head because if it isn't, the next step
777 # Assume this is a head because if it isn't, the next step
778 # will eventually remove it.
778 # will eventually remove it.
779 heads[n] = 1
779 heads[n] = 1
780 # But, obviously its parents aren't.
780 # But, obviously its parents aren't.
781 for p in self.parents(n):
781 for p in self.parents(n):
782 heads.pop(p, None)
782 heads.pop(p, None)
783 heads = [n for n in heads.iterkeys() if heads[n] != 0]
783 heads = [n for n in heads.iterkeys() if heads[n] != 0]
784 roots = list(roots)
784 roots = list(roots)
785 assert orderedout
785 assert orderedout
786 assert roots
786 assert roots
787 assert heads
787 assert heads
788 return (orderedout, roots, heads)
788 return (orderedout, roots, heads)
789
789
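# Illustrative note (not part of revlog.py): the walk above returns the triple
# (orderedout, roots, heads) -- orderedout is a topologically sorted list of
# the nodes that are both descendents of the supplied roots and ancestors of
# the supplied heads, roots is narrowed to the "real" roots (those not
# descended from another supplied root), and heads is narrowed to the heads
# actually reachable from those roots.  On a toy DAG 0 -> 1 -> 2 and 0 -> 3,
# roots=[1] and heads=[2, 3] would yield orderedout=[1, 2], roots=[1],
# heads=[2].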
790 def heads(self, start=None, stop=None):
790 def heads(self, start=None, stop=None):
791 """return the list of all nodes that have no children
791 """return the list of all nodes that have no children
792
792
793 If start is specified, only heads that are descendants of
793 If start is specified, only heads that are descendants of
794 start will be returned.
794 start will be returned.
795 If stop is specified, all the revs from stop are treated
795 If stop is specified, all the revs from stop are treated
796 as if they had no children.
796 as if they had no children.
797 """
797 """
798 if start is None and stop is None:
798 if start is None and stop is None:
799 count = len(self)
799 count = len(self)
800 if not count:
800 if not count:
801 return [nullid]
801 return [nullid]
802 ishead = [1] * (count + 1)
802 ishead = [1] * (count + 1)
803 index = self.index
803 index = self.index
804 for r in xrange(count):
804 for r in xrange(count):
805 e = index[r]
805 e = index[r]
806 ishead[e[5]] = ishead[e[6]] = 0
806 ishead[e[5]] = ishead[e[6]] = 0
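# e[5] and e[6] are r's parent revisions; nullrev is -1, so a missing parent
# clears the extra sentinel slot ishead[count] rather than a real revision's
# flag.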
807 return [self.node(r) for r in xrange(count) if ishead[r]]
807 return [self.node(r) for r in xrange(count) if ishead[r]]
808
808
809 if start is None:
809 if start is None:
810 start = nullid
810 start = nullid
811 if stop is None:
811 if stop is None:
812 stop = []
812 stop = []
813 stoprevs = set([self.rev(n) for n in stop])
813 stoprevs = set([self.rev(n) for n in stop])
814 startrev = self.rev(start)
814 startrev = self.rev(start)
815 reachable = set((startrev,))
815 reachable = set((startrev,))
816 heads = set((startrev,))
816 heads = set((startrev,))
817
817
818 parentrevs = self.parentrevs
818 parentrevs = self.parentrevs
819 for r in xrange(startrev + 1, len(self)):
819 for r in xrange(startrev + 1, len(self)):
820 for p in parentrevs(r):
820 for p in parentrevs(r):
821 if p in reachable:
821 if p in reachable:
822 if r not in stoprevs:
822 if r not in stoprevs:
823 reachable.add(r)
823 reachable.add(r)
824 heads.add(r)
824 heads.add(r)
825 if p in heads and p not in stoprevs:
825 if p in heads and p not in stoprevs:
826 heads.remove(p)
826 heads.remove(p)
827
827
828 return [self.node(r) for r in heads]
828 return [self.node(r) for r in heads]
829
829
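# Illustrative sketch (not part of revlog.py): the forward sweep above can be
# reproduced on a plain parent table.  Assuming a toy DAG given as
# {rev: (p1, p2)} with -1 for a missing parent, and ignoring stoprevs:
#
#     parents = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
#     startrev = 1
#     reachable, heads = set([startrev]), set([startrev])
#     for r in sorted(p for p in parents if p > startrev):
#         for p in parents[r]:
#             if p in reachable:
#                 reachable.add(r)
#                 heads.add(r)
#             if p in heads:
#                 heads.remove(p)
#     # heads is now set([2, 3]): the childless descendants of rev 1.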
830 def children(self, node):
830 def children(self, node):
831 """find the children of a given node"""
831 """find the children of a given node"""
832 c = []
832 c = []
833 p = self.rev(node)
833 p = self.rev(node)
834 for r in range(p + 1, len(self)):
834 for r in range(p + 1, len(self)):
835 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
835 prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
836 if prevs:
836 if prevs:
837 for pr in prevs:
837 for pr in prevs:
838 if pr == p:
838 if pr == p:
839 c.append(self.node(r))
839 c.append(self.node(r))
840 elif p == nullrev:
840 elif p == nullrev:
841 c.append(self.node(r))
841 c.append(self.node(r))
842 return c
842 return c
843
843
844 def _match(self, id):
844 def _match(self, id):
845 if isinstance(id, (long, int)):
845 if isinstance(id, (long, int)):
846 # rev
846 # rev
847 return self.node(id)
847 return self.node(id)
848 if len(id) == 20:
848 if len(id) == 20:
849 # possibly a binary node
849 # possibly a binary node
850 # odds of a binary node being all hex in ASCII are 1 in 10**25
850 # odds of a binary node being all hex in ASCII are 1 in 10**25
851 try:
851 try:
852 node = id
852 node = id
853 self.rev(node) # quick search the index
853 self.rev(node) # quick search the index
854 return node
854 return node
855 except LookupError:
855 except LookupError:
856 pass # may be partial hex id
856 pass # may be partial hex id
857 try:
857 try:
858 # str(rev)
858 # str(rev)
859 rev = int(id)
859 rev = int(id)
860 if str(rev) != id:
860 if str(rev) != id:
861 raise ValueError
861 raise ValueError
862 if rev < 0:
862 if rev < 0:
863 rev = len(self) + rev
863 rev = len(self) + rev
864 if rev < 0 or rev >= len(self):
864 if rev < 0 or rev >= len(self):
865 raise ValueError
865 raise ValueError
866 return self.node(rev)
866 return self.node(rev)
867 except (ValueError, OverflowError):
867 except (ValueError, OverflowError):
868 pass
868 pass
869 if len(id) == 40:
869 if len(id) == 40:
870 try:
870 try:
871 # a full hex nodeid?
871 # a full hex nodeid?
872 node = bin(id)
872 node = bin(id)
873 self.rev(node)
873 self.rev(node)
874 return node
874 return node
875 except (TypeError, LookupError):
875 except (TypeError, LookupError):
876 pass
876 pass
877
877
878 def _partialmatch(self, id):
878 def _partialmatch(self, id):
879 if len(id) < 40:
879 if len(id) < 40:
880 try:
880 try:
881 # hex(node)[:...]
881 # hex(node)[:...]
882 l = len(id) // 2 # grab an even number of digits
882 l = len(id) // 2 # grab an even number of digits
883 bin_id = bin(id[:l*2])
883 bin_id = bin(id[:l*2])
884 nl = [n for n in self.nodemap if n[:l] == bin_id]
884 nl = [n for n in self.nodemap if n[:l] == bin_id]
885 nl = [n for n in nl if hex(n).startswith(id)]
885 nl = [n for n in nl if hex(n).startswith(id)]
886 if len(nl) > 0:
886 if len(nl) > 0:
887 if len(nl) == 1:
887 if len(nl) == 1:
888 return nl[0]
888 return nl[0]
889 raise LookupError(id, self.indexfile,
889 raise LookupError(id, self.indexfile,
890 _('ambiguous identifier'))
890 _('ambiguous identifier'))
891 return None
891 return None
892 except TypeError:
892 except TypeError:
893 pass
893 pass
894
894
895 def lookup(self, id):
895 def lookup(self, id):
896 """locate a node based on:
896 """locate a node based on:
897 - revision number or str(revision number)
897 - revision number or str(revision number)
898 - nodeid or subset of hex nodeid
898 - nodeid or subset of hex nodeid
899 """
899 """
900 n = self._match(id)
900 n = self._match(id)
901 if n is not None:
901 if n is not None:
902 return n
902 return n
903 n = self._partialmatch(id)
903 n = self._partialmatch(id)
904 if n:
904 if n:
905 return n
905 return n
906
906
907 raise LookupError(id, self.indexfile, _('no match found'))
907 raise LookupError(id, self.indexfile, _('no match found'))
908
908
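# Usage sketch (hypothetical revlog instance rl, not part of revlog.py):
# lookup() accepts any identifier form handled by _match()/_partialmatch():
#
#     rl.lookup(0)                # integer revision number
#     rl.lookup('0')              # str(revision number)
#     rl.lookup('-1')             # negative revs count back from the tip
#     rl.lookup(node)             # 20-byte binary nodeid
#     rl.lookup(hex(node))        # full 40-character hex nodeid
#     rl.lookup(hex(node)[:8])    # unambiguous hex prefix
#
# It raises LookupError when nothing matches or when a prefix is ambiguous.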
909 def cmp(self, node, text):
909 def cmp(self, node, text):
910 """compare text with a given file revision"""
910 """compare text with a given file revision"""
911 p1, p2 = self.parents(node)
911 p1, p2 = self.parents(node)
912 return hash(text, p1, p2) != node
912 return hash(text, p1, p2) != node
913
913
914 def _addchunk(self, offset, data):
914 def _addchunk(self, offset, data):
915 o, d = self._chunkcache
915 o, d = self._chunkcache
916 # try to add to existing cache
916 # try to add to existing cache
917 if o + len(d) == offset and len(d) + len(data) < _prereadsize:
917 if o + len(d) == offset and len(d) + len(data) < _prereadsize:
918 self._chunkcache = o, d + data
918 self._chunkcache = o, d + data
919 else:
919 else:
920 self._chunkcache = offset, data
920 self._chunkcache = offset, data
921
921
922 def _loadchunk(self, offset, length):
922 def _loadchunk(self, offset, length):
923 if self._inline:
923 if self._inline:
924 df = self.opener(self.indexfile)
924 df = self.opener(self.indexfile)
925 else:
925 else:
926 df = self.opener(self.datafile)
926 df = self.opener(self.datafile)
927
927
928 readahead = max(65536, length)
928 readahead = max(65536, length)
929 df.seek(offset)
929 df.seek(offset)
930 d = df.read(readahead)
930 d = df.read(readahead)
931 self._addchunk(offset, d)
931 self._addchunk(offset, d)
932 if readahead > length:
932 if readahead > length:
933 return d[:length]
933 return d[:length]
934 return d
934 return d
935
935
936 def _getchunk(self, offset, length):
936 def _getchunk(self, offset, length):
937 o, d = self._chunkcache
937 o, d = self._chunkcache
938 l = len(d)
938 l = len(d)
939
939
940 # is it in the cache?
940 # is it in the cache?
941 cachestart = offset - o
941 cachestart = offset - o
942 cacheend = cachestart + length
942 cacheend = cachestart + length
943 if cachestart >= 0 and cacheend <= l:
943 if cachestart >= 0 and cacheend <= l:
944 if cachestart == 0 and cacheend == l:
944 if cachestart == 0 and cacheend == l:
945 return d # avoid a copy
945 return d # avoid a copy
946 return d[cachestart:cacheend]
946 return d[cachestart:cacheend]
947
947
948 return self._loadchunk(offset, length)
948 return self._loadchunk(offset, length)
949
949
950 def _chunkraw(self, startrev, endrev):
950 def _chunkraw(self, startrev, endrev):
951 start = self.start(startrev)
951 start = self.start(startrev)
952 length = self.end(endrev) - start
952 length = self.end(endrev) - start
953 if self._inline:
953 if self._inline:
954 start += (startrev + 1) * self._io.size
954 start += (startrev + 1) * self._io.size
955 return self._getchunk(start, length)
955 return self._getchunk(start, length)
956
956
957 def _chunk(self, rev):
957 def _chunk(self, rev):
958 return decompress(self._chunkraw(rev, rev))
958 return decompress(self._chunkraw(rev, rev))
959
959
960 def _chunkclear(self):
960 def _chunkclear(self):
961 self._chunkcache = (0, '')
961 self._chunkcache = (0, '')
962
962
963 def revdiff(self, rev1, rev2):
963 def revdiff(self, rev1, rev2):
964 """return or calculate a delta between two revisions"""
964 """return or calculate a delta between two revisions"""
965 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
965 if rev1 + 1 == rev2 and self.base(rev1) == self.base(rev2):
966 return self._chunk(rev2)
966 return self._chunk(rev2)
967
967
968 return mdiff.textdiff(self.revision(self.node(rev1)),
968 return mdiff.textdiff(self.revision(self.node(rev1)),
969 self.revision(self.node(rev2)))
969 self.revision(self.node(rev2)))
970
970
971 def revision(self, node):
971 def revision(self, node):
972 """return an uncompressed revision of a given node"""
972 """return an uncompressed revision of a given node"""
973 if node == nullid:
973 if node == nullid:
974 return ""
974 return ""
975 if self._cache and self._cache[0] == node:
975 if self._cache and self._cache[0] == node:
976 return self._cache[2]
976 return self._cache[2]
977
977
978 # look up what we need to read
978 # look up what we need to read
979 text = None
979 text = None
980 rev = self.rev(node)
980 rev = self.rev(node)
981 base = self.base(rev)
981 base = self.base(rev)
982
982
983 # check rev flags
983 # check rev flags
984 if self.index[rev][0] & 0xFFFF:
984 if self.index[rev][0] & 0xFFFF:
985 raise RevlogError(_('incompatible revision flag %x') %
985 raise RevlogError(_('incompatible revision flag %x') %
986 (self.index[rev][0] & 0xFFFF))
986 (self.index[rev][0] & 0xFFFF))
987
987
988 # do we have useful data cached?
988 # do we have useful data cached?
989 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
989 if self._cache and self._cache[1] >= base and self._cache[1] < rev:
990 base = self._cache[1]
990 base = self._cache[1]
991 text = self._cache[2]
991 text = self._cache[2]
992
992
993 self._loadindex(base, rev + 1)
993 self._loadindex(base, rev + 1)
994 self._chunkraw(base, rev)
994 self._chunkraw(base, rev)
995 if text is None:
995 if text is None:
996 text = self._chunk(base)
996 text = self._chunk(base)
997
997
998 bins = [self._chunk(r) for r in xrange(base + 1, rev + 1)]
998 bins = [self._chunk(r) for r in xrange(base + 1, rev + 1)]
999 text = mdiff.patches(text, bins)
999 text = mdiff.patches(text, bins)
1000 p1, p2 = self.parents(node)
1000 p1, p2 = self.parents(node)
1001 if node != hash(text, p1, p2):
1001 if node != hash(text, p1, p2):
1002 raise RevlogError(_("integrity check failed on %s:%d")
1002 raise RevlogError(_("integrity check failed on %s:%d")
1003 % (self.indexfile, rev))
1003 % (self.indexfile, rev))
1004
1004
1005 self._cache = (node, rev, text)
1005 self._cache = (node, rev, text)
1006 return text
1006 return text
1007
1007
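# Illustrative sketch (not part of revlog.py): revision() rebuilds a full text
# from a delta chain -- the snapshot stored at base(rev) patched with every
# delta up to rev.  Conceptually:
#
#     text = chunk(base)                         # full snapshot
#     deltas = [chunk(r) for r in range(base + 1, rev + 1)]
#     text = mdiff.patches(text, deltas)         # apply deltas in order
#
# The (node, rev, text) cache lets a later call resume from an already
# reconstructed revision in the same chain instead of the base snapshot.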
1008 def checkinlinesize(self, tr, fp=None):
1008 def checkinlinesize(self, tr, fp=None):
1009 if not self._inline or (self.start(-2) + self.length(-2)) < 131072:
1009 if not self._inline or (self.start(-2) + self.length(-2)) < 131072:
1010 return
1010 return
1011
1011
1012 trinfo = tr.find(self.indexfile)
1012 trinfo = tr.find(self.indexfile)
1013 if trinfo is None:
1013 if trinfo is None:
1014 raise RevlogError(_("%s not found in the transaction")
1014 raise RevlogError(_("%s not found in the transaction")
1015 % self.indexfile)
1015 % self.indexfile)
1016
1016
1017 trindex = trinfo[2]
1017 trindex = trinfo[2]
1018 dataoff = self.start(trindex)
1018 dataoff = self.start(trindex)
1019
1019
1020 tr.add(self.datafile, dataoff)
1020 tr.add(self.datafile, dataoff)
1021
1021
1022 if fp:
1022 if fp:
1023 fp.flush()
1023 fp.flush()
1024 fp.close()
1024 fp.close()
1025
1025
1026 df = self.opener(self.datafile, 'w')
1026 df = self.opener(self.datafile, 'w')
1027 try:
1027 try:
1028 for r in self:
1028 for r in self:
1029 df.write(self._chunkraw(r, r))
1029 df.write(self._chunkraw(r, r))
1030 finally:
1030 finally:
1031 df.close()
1031 df.close()
1032
1032
1033 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1033 fp = self.opener(self.indexfile, 'w', atomictemp=True)
1034 self.version &= ~(REVLOGNGINLINEDATA)
1034 self.version &= ~(REVLOGNGINLINEDATA)
1035 self._inline = False
1035 self._inline = False
1036 for i in self:
1036 for i in self:
1037 e = self._io.packentry(self.index[i], self.node, self.version, i)
1037 e = self._io.packentry(self.index[i], self.node, self.version, i)
1038 fp.write(e)
1038 fp.write(e)
1039
1039
1040 # if we don't call rename, the temp file will never replace the
1040 # if we don't call rename, the temp file will never replace the
1041 # real index
1041 # real index
1042 fp.rename()
1042 fp.rename()
1043
1043
1044 tr.replace(self.indexfile, trindex * self._io.size)
1044 tr.replace(self.indexfile, trindex * self._io.size)
1045 self._chunkclear()
1045 self._chunkclear()
1046
1046
1047 def addrevision(self, text, transaction, link, p1, p2, d=None):
1047 def addrevision(self, text, transaction, link, p1, p2, d=None):
1048 """add a revision to the log
1048 """add a revision to the log
1049
1049
1050 text - the revision data to add
1050 text - the revision data to add
1051 transaction - the transaction object used for rollback
1051 transaction - the transaction object used for rollback
1052 link - the linkrev data to add
1052 link - the linkrev data to add
1053 p1, p2 - the parent nodeids of the revision
1053 p1, p2 - the parent nodeids of the revision
1054 d - an optional precomputed delta
1054 d - an optional precomputed delta
1055 """
1055 """
1056 dfh = None
1056 dfh = None
1057 if not self._inline:
1057 if not self._inline:
1058 dfh = self.opener(self.datafile, "a")
1058 dfh = self.opener(self.datafile, "a")
1059 ifh = self.opener(self.indexfile, "a+")
1059 ifh = self.opener(self.indexfile, "a+")
1060 try:
1060 try:
1061 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1061 return self._addrevision(text, transaction, link, p1, p2, d, ifh, dfh)
1062 finally:
1062 finally:
1063 if dfh:
1063 if dfh:
1064 dfh.close()
1064 dfh.close()
1065 ifh.close()
1065 ifh.close()
1066
1066
1067 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1067 def _addrevision(self, text, transaction, link, p1, p2, d, ifh, dfh):
1068 node = hash(text, p1, p2)
1068 node = hash(text, p1, p2)
1069 if node in self.nodemap:
1069 if node in self.nodemap:
1070 return node
1070 return node
1071
1071
1072 curr = len(self)
1072 curr = len(self)
1073 prev = curr - 1
1073 prev = curr - 1
1074 base = self.base(prev)
1074 base = self.base(prev)
1075 offset = self.end(prev)
1075 offset = self.end(prev)
1076
1076
1077 if curr:
1077 if curr:
1078 if not d:
1078 if not d:
1079 ptext = self.revision(self.node(prev))
1079 ptext = self.revision(self.node(prev))
1080 d = mdiff.textdiff(ptext, text)
1080 d = mdiff.textdiff(ptext, text)
1081 data = compress(d)
1081 data = compress(d)
1082 l = len(data[1]) + len(data[0])
1082 l = len(data[1]) + len(data[0])
1083 dist = l + offset - self.start(base)
1083 dist = l + offset - self.start(base)
1084
1084
1085 # full versions are inserted when the needed deltas
1085 # full versions are inserted when the needed deltas
1086 # become comparable to the uncompressed text
1086 # become comparable to the uncompressed text
1087 if not curr or dist > len(text) * 2:
1087 if not curr or dist > len(text) * 2:
1088 data = compress(text)
1088 data = compress(text)
1089 l = len(data[1]) + len(data[0])
1089 l = len(data[1]) + len(data[0])
1090 base = curr
1090 base = curr
1091
1091
1092 e = (offset_type(offset, 0), l, len(text),
1092 e = (offset_type(offset, 0), l, len(text),
1093 base, link, self.rev(p1), self.rev(p2), node)
1093 base, link, self.rev(p1), self.rev(p2), node)
1094 self.index.insert(-1, e)
1094 self.index.insert(-1, e)
1095 self.nodemap[node] = curr
1095 self.nodemap[node] = curr
1096
1096
1097 entry = self._io.packentry(e, self.node, self.version, curr)
1097 entry = self._io.packentry(e, self.node, self.version, curr)
1098 if not self._inline:
1098 if not self._inline:
1099 transaction.add(self.datafile, offset)
1099 transaction.add(self.datafile, offset)
1100 transaction.add(self.indexfile, curr * len(entry))
1100 transaction.add(self.indexfile, curr * len(entry))
1101 if data[0]:
1101 if data[0]:
1102 dfh.write(data[0])
1102 dfh.write(data[0])
1103 dfh.write(data[1])
1103 dfh.write(data[1])
1104 dfh.flush()
1104 dfh.flush()
1105 ifh.write(entry)
1105 ifh.write(entry)
1106 else:
1106 else:
1107 offset += curr * self._io.size
1107 offset += curr * self._io.size
1108 transaction.add(self.indexfile, offset, curr)
1108 transaction.add(self.indexfile, offset, curr)
1109 ifh.write(entry)
1109 ifh.write(entry)
1110 ifh.write(data[0])
1110 ifh.write(data[0])
1111 ifh.write(data[1])
1111 ifh.write(data[1])
1112 self.checkinlinesize(transaction, ifh)
1112 self.checkinlinesize(transaction, ifh)
1113
1113
1114 if type(text) == str: # only accept immutable objects
1114 if type(text) == str: # only accept immutable objects
1115 self._cache = (node, curr, text)
1115 self._cache = (node, curr, text)
1116 return node
1116 return node
1117
1117
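# Worked example with hypothetical numbers (not part of revlog.py): the
# "dist > len(text) * 2" test above bounds delta-chain growth.  If the chain's
# base snapshot starts at offset 0, the previous revision ends at offset 9000,
# the compressed delta is 500 bytes and the new full text is 4000 bytes, then
# dist = 500 + 9000 - 0 = 9500 > 2 * 4000, so a fresh full version is stored
# and becomes the new base.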
1118 def ancestor(self, a, b):
1118 def ancestor(self, a, b):
1119 """calculate the least common ancestor of nodes a and b"""
1119 """calculate the least common ancestor of nodes a and b"""
1120
1120
1121 def parents(rev):
1121 def parents(rev):
1122 return [p for p in self.parentrevs(rev) if p != nullrev]
1122 return [p for p in self.parentrevs(rev) if p != nullrev]
1123
1123
1124 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1124 c = ancestor.ancestor(self.rev(a), self.rev(b), parents)
1125 if c is None:
1125 if c is None:
1126 return nullid
1126 return nullid
1127
1127
1128 return self.node(c)
1128 return self.node(c)
1129
1129
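# Usage sketch (hypothetical revlog instance rl, not part of revlog.py):
# ancestor() takes and returns nodes, e.g. rl.ancestor(h1, h2) for two head
# nodes h1 and h2 gives their least common ancestor node, or nullid when the
# two revisions share no ancestor (separate roots in the same revlog).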
1130 def group(self, nodelist, lookup, infocollect=None):
1130 def group(self, nodelist, lookup, infocollect=None):
1131 """Calculate a delta group, yielding a sequence of changegroup chunks
1131 """Calculate a delta group, yielding a sequence of changegroup chunks
1132 (strings).
1132 (strings).
1133
1133
1134 Given a list of changeset revs, return a set of deltas and
1134 Given a list of changeset revs, return a set of deltas and
1135 metadata corresponding to nodes. The first delta is
1135 metadata corresponding to nodes. The first delta is
1136 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1136 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1137 have this parent as it has all history before these
1137 have this parent as it has all history before these
1138 changesets. The parent used is parents(nodes[0])[0].
1138 changesets. The parent used is parents(nodes[0])[0].
1139 """
1139 """
1140
1140
1141 revs = [self.rev(n) for n in nodelist]
1141 revs = [self.rev(n) for n in nodelist]
1142
1142
1143 # if we don't have any revisions touched by these changesets, bail
1143 # if we don't have any revisions touched by these changesets, bail
1144 if not revs:
1144 if not revs:
1145 yield changegroup.closechunk()
1145 yield changegroup.closechunk()
1146 return
1146 return
1147
1147
1148 # add the parent of the first rev
1148 # add the parent of the first rev
1149 p = self.parentrevs(revs[0])[0]
1149 p = self.parentrevs(revs[0])[0]
1150 revs.insert(0, p)
1150 revs.insert(0, p)
1151
1151
1152 # build deltas
1152 # build deltas
1153 for d in xrange(len(revs) - 1):
1153 for d in xrange(len(revs) - 1):
1154 a, b = revs[d], revs[d + 1]
1154 a, b = revs[d], revs[d + 1]
1155 nb = self.node(b)
1155 nb = self.node(b)
1156
1156
1157 if infocollect is not None:
1157 if infocollect is not None:
1158 infocollect(nb)
1158 infocollect(nb)
1159
1159
1160 p = self.parents(nb)
1160 p = self.parents(nb)
1161 meta = nb + p[0] + p[1] + lookup(nb)
1161 meta = nb + p[0] + p[1] + lookup(nb)
1162 if a == -1:
1162 if a == -1:
1163 d = self.revision(nb)
1163 d = self.revision(nb)
1164 meta += mdiff.trivialdiffheader(len(d))
1164 meta += mdiff.trivialdiffheader(len(d))
1165 else:
1165 else:
1166 d = self.revdiff(a, b)
1166 d = self.revdiff(a, b)
1167 yield changegroup.chunkheader(len(meta) + len(d))
1167 yield changegroup.chunkheader(len(meta) + len(d))
1168 yield meta
1168 yield meta
1169 if len(d) > 2**20:
1169 if len(d) > 2**20:
1170 pos = 0
1170 pos = 0
1171 while pos < len(d):
1171 while pos < len(d):
1172 pos2 = pos + 2 ** 18
1172 pos2 = pos + 2 ** 18
1173 yield d[pos:pos2]
1173 yield d[pos:pos2]
1174 pos = pos2
1174 pos = pos2
1175 else:
1175 else:
1176 yield d
1176 yield d
1177
1177
1178 yield changegroup.closechunk()
1178 yield changegroup.closechunk()
1179
1179
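# Note (not part of revlog.py): each delta yielded above is framed as
# changegroup data -- a chunkheader carrying the payload length, an 80-byte
# header of four 20-byte nodes (node, p1, p2, and the changeset node from
# lookup()), then the delta body, with a zero-length chunk closing the group.
# Deltas over 1 MB are emitted in 256 kB slices only to bound the size of the
# strings yielded.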
1180 def addgroup(self, revs, linkmapper, transaction):
1180 def addgroup(self, revs, linkmapper, transaction):
1181 """
1181 """
1182 add a delta group
1182 add a delta group
1183
1183
1184 Given a set of deltas, add them to the revision log. The
1184 Given a set of deltas, add them to the revision log. The
1185 first delta is against its parent, which should be in our
1185 first delta is against its parent, which should be in our
1186 log; the rest are against the previous delta.
1186 log; the rest are against the previous delta.
1187 """
1187 """
1188
1188
1189 # track the base of the current delta log
1189 # track the base of the current delta log
1190 r = len(self)
1190 r = len(self)
1191 t = r - 1
1191 t = r - 1
1192 node = None
1192 node = None
1193
1193
1194 base = prev = nullrev
1194 base = prev = nullrev
1195 start = end = textlen = 0
1195 start = end = textlen = 0
1196 if r:
1196 if r:
1197 end = self.end(t)
1197 end = self.end(t)
1198
1198
1199 ifh = self.opener(self.indexfile, "a+")
1199 ifh = self.opener(self.indexfile, "a+")
1200 isize = r * self._io.size
1200 isize = r * self._io.size
1201 if self._inline:
1201 if self._inline:
1202 transaction.add(self.indexfile, end + isize, r)
1202 transaction.add(self.indexfile, end + isize, r)
1203 dfh = None
1203 dfh = None
1204 else:
1204 else:
1205 transaction.add(self.indexfile, isize, r)
1205 transaction.add(self.indexfile, isize, r)
1206 transaction.add(self.datafile, end)
1206 transaction.add(self.datafile, end)
1207 dfh = self.opener(self.datafile, "a")
1207 dfh = self.opener(self.datafile, "a")
1208
1208
1209 try:
1209 try:
1210 # loop through our set of deltas
1210 # loop through our set of deltas
1211 chain = None
1211 chain = None
1212 for chunk in revs:
1212 for chunk in revs:
1213 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1213 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1214 link = linkmapper(cs)
1214 link = linkmapper(cs)
1215 if node in self.nodemap:
1215 if node in self.nodemap:
1216 # this can happen if two branches make the same change
1216 # this can happen if two branches make the same change
1217 chain = node
1217 chain = node
1218 continue
1218 continue
1219 delta = buffer(chunk, 80)
1219 delta = buffer(chunk, 80)
1220 del chunk
1220 del chunk
1221
1221
1222 for p in (p1, p2):
1222 for p in (p1, p2):
1223 if not p in self.nodemap:
1223 if not p in self.nodemap:
1224 raise LookupError(p, self.indexfile, _('unknown parent'))
1224 raise LookupError(p, self.indexfile, _('unknown parent'))
1225
1225
1226 if not chain:
1226 if not chain:
1227 # retrieve the parent revision of the delta chain
1227 # retrieve the parent revision of the delta chain
1228 chain = p1
1228 chain = p1
1229 if not chain in self.nodemap:
1229 if not chain in self.nodemap:
1230 raise LookupError(chain, self.indexfile, _('unknown base'))
1230 raise LookupError(chain, self.indexfile, _('unknown base'))
1231
1231
1232 # full versions are inserted when the needed deltas become
1232 # full versions are inserted when the needed deltas become
1233 # comparable to the uncompressed text or when the previous
1233 # comparable to the uncompressed text or when the previous
1234 # version is not the one we have a delta against. We use
1234 # version is not the one we have a delta against. We use
1235 # the size of the previous full rev as a proxy for the
1235 # the size of the previous full rev as a proxy for the
1236 # current size.
1236 # current size.
1237
1237
1238 if chain == prev:
1238 if chain == prev:
1239 cdelta = compress(delta)
1239 cdelta = compress(delta)
1240 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1240 cdeltalen = len(cdelta[0]) + len(cdelta[1])
1241 textlen = mdiff.patchedsize(textlen, delta)
1241 textlen = mdiff.patchedsize(textlen, delta)
1242
1242
1243 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1243 if chain != prev or (end - start + cdeltalen) > textlen * 2:
1244 # flush our writes here so revision() can read them back
1244 # flush our writes here so revision() can read them back
1245 if dfh:
1245 if dfh:
1246 dfh.flush()
1246 dfh.flush()
1247 ifh.flush()
1247 ifh.flush()
1248 text = self.revision(chain)
1248 text = self.revision(chain)
1249 if len(text) == 0:
1249 if len(text) == 0:
1250 # skip over trivial delta header
1250 # skip over trivial delta header
1251 text = buffer(delta, 12)
1251 text = buffer(delta, 12)
1252 else:
1252 else:
1253 text = mdiff.patches(text, [delta])
1253 text = mdiff.patches(text, [delta])
1254 del delta
1254 del delta
1255 chk = self._addrevision(text, transaction, link, p1, p2, None,
1255 chk = self._addrevision(text, transaction, link, p1, p2, None,
1256 ifh, dfh)
1256 ifh, dfh)
1257 if not dfh and not self._inline:
1257 if not dfh and not self._inline:
1258 # addrevision switched from inline to conventional
1258 # addrevision switched from inline to conventional
1259 # reopen the index and data files
1259 # reopen the index and data files
1260 dfh = self.opener(self.datafile, "a")
1260 dfh = self.opener(self.datafile, "a")
1261 ifh = self.opener(self.indexfile, "a")
1261 ifh = self.opener(self.indexfile, "a")
1262 if chk != node:
1262 if chk != node:
1263 raise RevlogError(_("consistency error adding group"))
1263 raise RevlogError(_("consistency error adding group"))
1264 textlen = len(text)
1264 textlen = len(text)
1265 else:
1265 else:
1266 e = (offset_type(end, 0), cdeltalen, textlen, base,
1266 e = (offset_type(end, 0), cdeltalen, textlen, base,
1267 link, self.rev(p1), self.rev(p2), node)
1267 link, self.rev(p1), self.rev(p2), node)
1268 self.index.insert(-1, e)
1268 self.index.insert(-1, e)
1269 self.nodemap[node] = r
1269 self.nodemap[node] = r
1270 entry = self._io.packentry(e, self.node, self.version, r)
1270 entry = self._io.packentry(e, self.node, self.version, r)
1271 if self._inline:
1271 if self._inline:
1272 ifh.write(entry)
1272 ifh.write(entry)
1273 ifh.write(cdelta[0])
1273 ifh.write(cdelta[0])
1274 ifh.write(cdelta[1])
1274 ifh.write(cdelta[1])
1275 self.checkinlinesize(transaction, ifh)
1275 self.checkinlinesize(transaction, ifh)
1276 if not self._inline:
1276 if not self._inline:
1277 dfh = self.opener(self.datafile, "a")
1277 dfh = self.opener(self.datafile, "a")
1278 ifh = self.opener(self.indexfile, "a")
1278 ifh = self.opener(self.indexfile, "a")
1279 else:
1279 else:
1280 dfh.write(cdelta[0])
1280 dfh.write(cdelta[0])
1281 dfh.write(cdelta[1])
1281 dfh.write(cdelta[1])
1282 ifh.write(entry)
1282 ifh.write(entry)
1283
1283
1284 t, r, chain, prev = r, r + 1, node, node
1284 t, r, chain, prev = r, r + 1, node, node
1285 base = self.base(t)
1285 base = self.base(t)
1286 start = self.start(base)
1286 start = self.start(base)
1287 end = self.end(t)
1287 end = self.end(t)
1288 finally:
1288 finally:
1289 if dfh:
1289 if dfh:
1290 dfh.close()
1290 dfh.close()
1291 ifh.close()
1291 ifh.close()
1292
1292
1293 return node
1293 return node
1294
1294
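# Note (not part of revlog.py): addgroup() is the receiving side of group()
# above.  It unpacks the same 80-byte (node, p1, p2, cs) header from each
# chunk, maps cs to a local linkrev via linkmapper, and either appends the
# compressed delta to the current chain or, when the delta is not against the
# previous node or the chain has grown past twice the text size, rebuilds the
# full text and stores a fresh snapshot through _addrevision().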
1295 def strip(self, minlink, transaction):
1295 def strip(self, minlink, transaction):
1296 """truncate the revlog on the first revision with a linkrev >= minlink
1296 """truncate the revlog on the first revision with a linkrev >= minlink
1297
1297
1298 This function is called when we're stripping revision minlink and
1298 This function is called when we're stripping revision minlink and
1299 its descendants from the repository.
1299 its descendants from the repository.
1300
1300
1301 We have to remove all revisions with linkrev >= minlink, because
1301 We have to remove all revisions with linkrev >= minlink, because
1302 the equivalent changelog revisions will be renumbered after the
1302 the equivalent changelog revisions will be renumbered after the
1303 strip.
1303 strip.
1304
1304
1305 So we truncate the revlog on the first of these revisions, and
1305 So we truncate the revlog on the first of these revisions, and
1306 trust that the caller has saved the revisions that shouldn't be
1306 trust that the caller has saved the revisions that shouldn't be
1307 removed and that it'll readd them after this truncation.
1307 removed and that it'll readd them after this truncation.
1308 """
1308 """
1309 if len(self) == 0:
1309 if len(self) == 0:
1310 return
1310 return
1311
1311
1312 if isinstance(self.index, lazyindex):
1312 if isinstance(self.index, lazyindex):
1313 self._loadindexmap()
1313 self._loadindexmap()
1314
1314
1315 for rev in self:
1315 for rev in self:
1316 if self.index[rev][4] >= minlink:
1316 if self.index[rev][4] >= minlink:
1317 break
1317 break
1318 else:
1318 else:
1319 return
1319 return
1320
1320
1321 # first truncate the files on disk
1321 # first truncate the files on disk
1322 end = self.start(rev)
1322 end = self.start(rev)
1323 if not self._inline:
1323 if not self._inline:
1324 transaction.add(self.datafile, end)
1324 transaction.add(self.datafile, end)
1325 end = rev * self._io.size
1325 end = rev * self._io.size
1326 else:
1326 else:
1327 end += rev * self._io.size
1327 end += rev * self._io.size
1328
1328
1329 transaction.add(self.indexfile, end)
1329 transaction.add(self.indexfile, end)
1330
1330
1331 # then reset internal state in memory to forget those revisions
1331 # then reset internal state in memory to forget those revisions
1332 self._cache = None
1332 self._cache = None
1333 self._chunkclear()
1333 self._chunkclear()
1334 for x in xrange(rev, len(self)):
1334 for x in xrange(rev, len(self)):
1335 del self.nodemap[self.node(x)]
1335 del self.nodemap[self.node(x)]
1336
1336
1337 del self.index[rev:-1]
1337 del self.index[rev:-1]
1338
1338
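# Worked example with hypothetical numbers (not part of revlog.py): the
# truncation point is a file offset.  If the first revision with
# linkrev >= minlink is rev 10 in a non-inline revlog whose index entries are
# 64 bytes, the data file is cut at self.start(10), the index at
# 10 * 64 = 640 bytes, and the in-memory nodemap and index entries from rev 10
# onward are dropped to match.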
1339 def checksize(self):
1339 def checksize(self):
1340 expected = 0
1340 expected = 0
1341 if len(self):
1341 if len(self):
1342 expected = max(0, self.end(len(self) - 1))
1342 expected = max(0, self.end(len(self) - 1))
1343
1343
1344 try:
1344 try:
1345 f = self.opener(self.datafile)
1345 f = self.opener(self.datafile)
1346 f.seek(0, 2)
1346 f.seek(0, 2)
1347 actual = f.tell()
1347 actual = f.tell()
1348 dd = actual - expected
1348 dd = actual - expected
1349 except IOError, inst:
1349 except IOError, inst:
1350 if inst.errno != errno.ENOENT:
1350 if inst.errno != errno.ENOENT:
1351 raise
1351 raise
1352 dd = 0
1352 dd = 0
1353
1353
1354 try:
1354 try:
1355 f = self.opener(self.indexfile)
1355 f = self.opener(self.indexfile)
1356 f.seek(0, 2)
1356 f.seek(0, 2)
1357 actual = f.tell()
1357 actual = f.tell()
1358 s = self._io.size
1358 s = self._io.size
1359 i = max(0, actual // s)
1359 i = max(0, actual // s)
1360 di = actual - (i * s)
1360 di = actual - (i * s)
1361 if self._inline:
1361 if self._inline:
1362 databytes = 0
1362 databytes = 0
1363 for r in self:
1363 for r in self:
1364 databytes += max(0, self.length(r))
1364 databytes += max(0, self.length(r))
1365 dd = 0
1365 dd = 0
1366 di = actual - len(self) * s - databytes
1366 di = actual - len(self) * s - databytes
1367 except IOError, inst:
1367 except IOError, inst:
1368 if inst.errno != errno.ENOENT:
1368 if inst.errno != errno.ENOENT:
1369 raise
1369 raise
1370 di = 0
1370 di = 0
1371
1371
1372 return (dd, di)
1372 return (dd, di)
1373
1373
1374 def files(self):
1374 def files(self):
1375 res = [ self.indexfile ]
1375 res = [ self.indexfile ]
1376 if not self._inline:
1376 if not self._inline:
1377 res.append(self.datafile)
1377 res.append(self.datafile)
1378 return res
1378 return res