branch closing: referencing open and closed branches/heads...
John Mulligan
r7656:6a24fb99 default
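
This changeset teaches the branch machinery to tell heads that were closed with "hg commit --close-branch" (which records extra['close'] = 1; see the commit command in the diff below) apart from branches that are merely inactive. As a rough sketch only, not part of the patch, the snippet below mirrors the classification logic of the rewritten branches() command using the keyword arguments the change introduces (repo.heads(closed=False) and repo.branchheads(branch, closed=False)); the helper name branch_states and the standalone-script framing are assumptions.

# illustrative sketch, not part of this changeset
from mercurial import ui as uimod, hg

def branch_states(repo):
    """Classify each named branch as open, closed or inactive,
    mirroring the rewritten loop in commands.branches() below."""
    # branches that still have at least one head not marked as closed
    openbranches = set(repo[n].branch() for n in repo.heads(closed=False))
    states = {}
    for branch, tip in repo.branchtags().items():
        if branch in openbranches:
            states[branch] = 'open'
        elif tip not in repo.branchheads(branch, closed=False):
            # the tipmost head is no longer an open head of its branch
            states[branch] = 'closed'
        else:
            states[branch] = 'inactive'
    return states

repo = hg.repository(uimod.ui(), '.')  # assumes the cwd is a repository
for branch, state in sorted(branch_states(repo).items()):
    print '%s: %s' % (branch, state)

With the patch applied, hg branches prints a ' (closed)' marker for branches whose heads have been closed, instead of lumping them in with ordinary inactive branches.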
@@ -1,3434 +1,3443 @@
# commands.py - command processing for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from node import hex, nullid, nullrev, short
from i18n import _, gettext
import os, re, sys
import hg, util, revlog, bundlerepo, extensions, copies, context, error
import difflib, patch, time, help, mdiff, tempfile, url
import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
import merge as merge_

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    rejected = None
    exacts = {}
    names = []
    m = cmdutil.match(repo, pats, opts)
    m.bad = lambda x,y: True
    for abs in repo.walk(m):
        if m.exact(abs):
            if ui.verbose:
                ui.status(_('adding %s\n') % m.rel(abs))
            names.append(abs)
            exacts[abs] = 1
        elif abs not in repo.dirstate:
            ui.status(_('adding %s\n') % m.rel(abs))
            names.append(abs)
    if not opts.get('dry_run'):
        rejected = repo.add(names)
        rejected = [p for p in rejected if p in exacts]
    return rejected and 1 or 0

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
        and not opts.get('follow')):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts.get('rev')]

    m = cmdutil.match(repo, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                ml = max(map(len, l))
                pieces.append(["%*s" % (ml, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo[opts.get('rev')]
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    matchfn = cmdutil.match(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts.get('prefix')
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix)

def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See \'hg help dates\' for a list of formats valid for -d/--date.
    '''
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        hg.clean(repo, op1, show_stats=False)
        if opts.get('merge'):
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing (unless the --noupdate option is specified).
    Once you have performed tests, mark the working directory as bad
    or good and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command it will be used for automatic bisection. Its exit
    status will be used as flag to mark revision as bad or good. In case exit
    status is 0 the revision is marked as good, 125 - skipped, 127 (command not
    found) - bisection will be aborted and any other status bigger than 0 will
    mark revision as bad.
    """
    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        transition = (good and "good" or "bad")
        if len(nodes) == 1:
            # narrowed it down to a single revision
            ui.write(_("The first %s revision is:\n") % transition)
            displayer.show(repo[nodes[0]])
        else:
            # multiple possible revisions
            ui.write(_("Due to skipped revisions, the first "
                       "%s revision could be any of:\n") % transition)
            for n in nodes:
                displayer.show(repo[n])

    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    if reset:
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        commandpath = util.find_exe(command)
        changesets = 1
        try:
            while changesets:
                # update state
                status = os.spawnl(os.P_WAIT, commandpath)
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                node = repo.lookup(rev or '.')
                state[transition].append(node)
                ui.note(_('Changeset %s: %s\n') % (short(node), transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                # update to next check
                cmdutil.bail_if_changed(repo)
                hg.clean(repo, nodes[0], show_stats=False)
        finally:
            hbisect.save_state(repo, state)
        return print_result(nodes, not status)

    # update state
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)

def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use --clean to reset the working directory branch to that of the
    parent of the working directory, negating a previous branch change.

    Use the command 'hg update' to switch to an existing branch.
    """

    if opts.get('clean'):
        label = repo[None].parents()[0].branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
    elif label:
        if not opts.get('force') and label in repo.branchtags():
            if label not in [p.branch() for p in repo.parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   ' (use --force to override)'))
        repo.dirstate.setbranch(util.fromlocal(label))
        ui.status(_('marked working directory as branch %s\n') % label)
    else:
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))

def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    hexfunc = ui.debugflag and hex or short
    activebranches = [util.tolocal(repo[n].branch())
-                      for n in repo.heads()]
+                      for n in repo.heads(closed=False)]
    branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
                          for tag, node in repo.branchtags().items()])
    branches.reverse()

    for isactive, node, tag in branches:
        if (not active) or isactive:
            if ui.quiet:
                ui.write("%s\n" % tag)
            else:
+                hn = repo.lookup(node)
+                if isactive:
+                    notice = ''
+                elif hn not in repo.branchheads(tag, closed=False):
+                    notice = ' (closed)'
+                else:
+                    notice = ' (inactive)'
                rev = str(node).rjust(31 - util.locallen(tag))
-                isinactive = ((not isactive) and " (inactive)") or ''
-                data = tag, rev, hexfunc(repo.lookup(node)), isinactive
+                data = tag, rev, hexfunc(hn), notice
                ui.write("%s %s:%s%s\n" % data)

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts.get('force'))

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)

def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo[opts.get('rev')]
    err = 1
    m = cmdutil.match(repo, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
        data = ctx[abs].data()
        if opts.get('decode'):
            data = repo.wwritedata(abs, data)
        fp.write(data)
        err = 0
    return err

def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    hg.clone(ui, source, dest,
             pull=opts.get('pull'),
             stream=opts.get('uncompressed'),
             rev=opts.get('rev'),
             update=not opts.get('noupdate'))

600 def commit(ui, repo, *pats, **opts):
606 def commit(ui, repo, *pats, **opts):
601 """commit the specified files or all outstanding changes
607 """commit the specified files or all outstanding changes
602
608
603 Commit changes to the given files into the repository.
609 Commit changes to the given files into the repository.
604
610
605 If a list of files is omitted, all changes reported by "hg status"
611 If a list of files is omitted, all changes reported by "hg status"
606 will be committed.
612 will be committed.
607
613
608 If you are committing the result of a merge, do not provide any
614 If you are committing the result of a merge, do not provide any
609 file names or -I/-X filters.
615 file names or -I/-X filters.
610
616
611 If no commit message is specified, the configured editor is started to
617 If no commit message is specified, the configured editor is started to
612 enter a message.
618 enter a message.
613
619
614 See 'hg help dates' for a list of formats valid for -d/--date.
620 See 'hg help dates' for a list of formats valid for -d/--date.
615 """
621 """
616 extra = {}
622 extra = {}
617 if opts.get('close_branch'):
623 if opts.get('close_branch'):
618 extra['close'] = 1
624 extra['close'] = 1
619 def commitfunc(ui, repo, message, match, opts):
625 def commitfunc(ui, repo, message, match, opts):
620 return repo.commit(match.files(), message, opts.get('user'),
626 return repo.commit(match.files(), message, opts.get('user'),
621 opts.get('date'), match, force_editor=opts.get('force_editor'),
627 opts.get('date'), match, force_editor=opts.get('force_editor'),
622 extra=extra)
628 extra=extra)
623
629
624 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
630 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
625 if not node:
631 if not node:
626 return
632 return
627 cl = repo.changelog
633 cl = repo.changelog
628 rev = cl.rev(node)
634 rev = cl.rev(node)
629 parents = cl.parentrevs(rev)
635 parents = cl.parentrevs(rev)
630 if rev - 1 in parents:
636 if rev - 1 in parents:
631 # one of the parents was the old tip
637 # one of the parents was the old tip
632 pass
638 pass
633 elif (parents == (nullrev, nullrev) or
639 elif (parents == (nullrev, nullrev) or
634 len(cl.heads(cl.node(parents[0]))) > 1 and
640 len(cl.heads(cl.node(parents[0]))) > 1 and
635 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
641 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
636 ui.status(_('created new head\n'))
642 ui.status(_('created new head\n'))
637
643
638 if ui.debugflag:
644 if ui.debugflag:
639 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
645 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
640 elif ui.verbose:
646 elif ui.verbose:
641 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
647 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
642
648
643 def copy(ui, repo, *pats, **opts):
649 def copy(ui, repo, *pats, **opts):
644 """mark files as copied for the next commit
650 """mark files as copied for the next commit
645
651
646 Mark dest as having copies of source files. If dest is a
652 Mark dest as having copies of source files. If dest is a
647 directory, copies are put in that directory. If dest is a file,
653 directory, copies are put in that directory. If dest is a file,
648 there can only be one source.
654 there can only be one source.
649
655
650 By default, this command copies the contents of files as they
656 By default, this command copies the contents of files as they
651 stand in the working directory. If invoked with --after, the
657 stand in the working directory. If invoked with --after, the
652 operation is recorded, but no copying is performed.
658 operation is recorded, but no copying is performed.
653
659
654 This command takes effect in the next commit. To undo a copy
660 This command takes effect in the next commit. To undo a copy
655 before that, see hg revert.
661 before that, see hg revert.
656 """
662 """
657 wlock = repo.wlock(False)
663 wlock = repo.wlock(False)
658 try:
664 try:
659 return cmdutil.copy(ui, repo, pats, opts)
665 return cmdutil.copy(ui, repo, pats, opts)
660 finally:
666 finally:
661 del wlock
667 del wlock
662
668
663 def debugancestor(ui, repo, *args):
669 def debugancestor(ui, repo, *args):
664 """find the ancestor revision of two revisions in a given index"""
670 """find the ancestor revision of two revisions in a given index"""
665 if len(args) == 3:
671 if len(args) == 3:
666 index, rev1, rev2 = args
672 index, rev1, rev2 = args
667 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
673 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
668 lookup = r.lookup
674 lookup = r.lookup
669 elif len(args) == 2:
675 elif len(args) == 2:
670 if not repo:
676 if not repo:
671 raise util.Abort(_("There is no Mercurial repository here "
677 raise util.Abort(_("There is no Mercurial repository here "
672 "(.hg not found)"))
678 "(.hg not found)"))
673 rev1, rev2 = args
679 rev1, rev2 = args
674 r = repo.changelog
680 r = repo.changelog
675 lookup = repo.lookup
681 lookup = repo.lookup
676 else:
682 else:
677 raise util.Abort(_('either two or three arguments required'))
683 raise util.Abort(_('either two or three arguments required'))
678 a = r.ancestor(lookup(rev1), lookup(rev2))
684 a = r.ancestor(lookup(rev1), lookup(rev2))
679 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
685 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
680
686
681 def debugcomplete(ui, cmd='', **opts):
687 def debugcomplete(ui, cmd='', **opts):
682 """returns the completion list associated with the given command"""
688 """returns the completion list associated with the given command"""
683
689
684 if opts.get('options'):
690 if opts.get('options'):
685 options = []
691 options = []
686 otables = [globalopts]
692 otables = [globalopts]
687 if cmd:
693 if cmd:
688 aliases, entry = cmdutil.findcmd(cmd, table, False)
694 aliases, entry = cmdutil.findcmd(cmd, table, False)
689 otables.append(entry[1])
695 otables.append(entry[1])
690 for t in otables:
696 for t in otables:
691 for o in t:
697 for o in t:
692 if o[0]:
698 if o[0]:
693 options.append('-%s' % o[0])
699 options.append('-%s' % o[0])
694 options.append('--%s' % o[1])
700 options.append('--%s' % o[1])
695 ui.write("%s\n" % "\n".join(options))
701 ui.write("%s\n" % "\n".join(options))
696 return
702 return
697
703
698 cmdlist = cmdutil.findpossible(cmd, table)
704 cmdlist = cmdutil.findpossible(cmd, table)
699 if ui.verbose:
705 if ui.verbose:
700 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
706 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
701 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
707 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
702
708
703 def debugfsinfo(ui, path = "."):
709 def debugfsinfo(ui, path = "."):
704 file('.debugfsinfo', 'w').write('')
710 file('.debugfsinfo', 'w').write('')
705 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
711 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
706 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
712 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
707 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
713 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
708 and 'yes' or 'no'))
714 and 'yes' or 'no'))
709 os.unlink('.debugfsinfo')
715 os.unlink('.debugfsinfo')
710
716
711 def debugrebuildstate(ui, repo, rev="tip"):
717 def debugrebuildstate(ui, repo, rev="tip"):
712 """rebuild the dirstate as it would look like for the given revision"""
718 """rebuild the dirstate as it would look like for the given revision"""
713 ctx = repo[rev]
719 ctx = repo[rev]
714 wlock = repo.wlock()
720 wlock = repo.wlock()
715 try:
721 try:
716 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
722 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
717 finally:
723 finally:
718 del wlock
724 del wlock
719
725
720 def debugcheckstate(ui, repo):
726 def debugcheckstate(ui, repo):
721 """validate the correctness of the current dirstate"""
727 """validate the correctness of the current dirstate"""
722 parent1, parent2 = repo.dirstate.parents()
728 parent1, parent2 = repo.dirstate.parents()
723 m1 = repo[parent1].manifest()
729 m1 = repo[parent1].manifest()
724 m2 = repo[parent2].manifest()
730 m2 = repo[parent2].manifest()
725 errors = 0
731 errors = 0
726 for f in repo.dirstate:
732 for f in repo.dirstate:
727 state = repo.dirstate[f]
733 state = repo.dirstate[f]
728 if state in "nr" and f not in m1:
734 if state in "nr" and f not in m1:
729 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
735 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
730 errors += 1
736 errors += 1
731 if state in "a" and f in m1:
737 if state in "a" and f in m1:
732 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
738 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
733 errors += 1
739 errors += 1
734 if state in "m" and f not in m1 and f not in m2:
740 if state in "m" and f not in m1 and f not in m2:
735 ui.warn(_("%s in state %s, but not in either manifest\n") %
741 ui.warn(_("%s in state %s, but not in either manifest\n") %
736 (f, state))
742 (f, state))
737 errors += 1
743 errors += 1
738 for f in m1:
744 for f in m1:
739 state = repo.dirstate[f]
745 state = repo.dirstate[f]
740 if state not in "nrm":
746 if state not in "nrm":
741 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
747 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
742 errors += 1
748 errors += 1
743 if errors:
749 if errors:
744 error = _(".hg/dirstate inconsistent with current parent's manifest")
750 error = _(".hg/dirstate inconsistent with current parent's manifest")
745 raise util.Abort(error)
751 raise util.Abort(error)
746
752
747 def showconfig(ui, repo, *values, **opts):
753 def showconfig(ui, repo, *values, **opts):
748 """show combined config settings from all hgrc files
754 """show combined config settings from all hgrc files
749
755
750 With no args, print names and values of all config items.
756 With no args, print names and values of all config items.
751
757
752 With one arg of the form section.name, print just the value of
758 With one arg of the form section.name, print just the value of
753 that config item.
759 that config item.
754
760
755 With multiple args, print names and values of all config items
761 With multiple args, print names and values of all config items
756 with matching section names."""
762 with matching section names."""
757
763
758 untrusted = bool(opts.get('untrusted'))
764 untrusted = bool(opts.get('untrusted'))
759 if values:
765 if values:
760 if len([v for v in values if '.' in v]) > 1:
766 if len([v for v in values if '.' in v]) > 1:
761 raise util.Abort(_('only one config item permitted'))
767 raise util.Abort(_('only one config item permitted'))
762 for section, name, value in ui.walkconfig(untrusted=untrusted):
768 for section, name, value in ui.walkconfig(untrusted=untrusted):
763 sectname = section + '.' + name
769 sectname = section + '.' + name
764 if values:
770 if values:
765 for v in values:
771 for v in values:
766 if v == section:
772 if v == section:
767 ui.write('%s=%s\n' % (sectname, value))
773 ui.write('%s=%s\n' % (sectname, value))
768 elif v == sectname:
774 elif v == sectname:
769 ui.write(value, '\n')
775 ui.write(value, '\n')
770 else:
776 else:
771 ui.write('%s=%s\n' % (sectname, value))
777 ui.write('%s=%s\n' % (sectname, value))
772
778
773 def debugsetparents(ui, repo, rev1, rev2=None):
779 def debugsetparents(ui, repo, rev1, rev2=None):
774 """manually set the parents of the current working directory
780 """manually set the parents of the current working directory
775
781
776 This is useful for writing repository conversion tools, but should
782 This is useful for writing repository conversion tools, but should
777 be used with care.
783 be used with care.
778 """
784 """
779
785
780 if not rev2:
786 if not rev2:
781 rev2 = hex(nullid)
787 rev2 = hex(nullid)
782
788
783 wlock = repo.wlock()
789 wlock = repo.wlock()
784 try:
790 try:
785 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
791 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
786 finally:
792 finally:
787 del wlock
793 del wlock
788
794
789 def debugstate(ui, repo, nodates=None):
795 def debugstate(ui, repo, nodates=None):
790 """show the contents of the current dirstate"""
796 """show the contents of the current dirstate"""
791 timestr = ""
797 timestr = ""
792 showdate = not nodates
798 showdate = not nodates
793 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
799 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
794 if showdate:
800 if showdate:
795 if ent[3] == -1:
801 if ent[3] == -1:
796 # Pad or slice to locale representation
802 # Pad or slice to locale representation
797 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
803 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
798 timestr = 'unset'
804 timestr = 'unset'
799 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
805 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
800 else:
806 else:
801 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
807 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
802 if ent[1] & 020000:
808 if ent[1] & 020000:
803 mode = 'lnk'
809 mode = 'lnk'
804 else:
810 else:
805 mode = '%3o' % (ent[1] & 0777)
811 mode = '%3o' % (ent[1] & 0777)
806 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
812 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
807 for f in repo.dirstate.copies():
813 for f in repo.dirstate.copies():
808 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
814 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
809
815
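# A small illustration of how a single dirstate entry is rendered by the loop
# above, with made-up values; the 0o20000 bit is the symlink marker tested by
# `ent[1] & 020000`.
import time

def _format_dirstate_entry(state, mode, size, mtime, name):
    if mode & 0o20000:
        modestr = 'lnk'
    else:
        modestr = '%3o' % (mode & 0o777)
    timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
    return "%c %s %10d %s%s" % (state, modestr, size, timestr, name)

# _format_dirstate_entry('n', 0o100644, 12, 0, 'example.txt')
#   -> "n 644         12 1970-01-01 ... example.txt" (date depends on time zone)
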
810 def debugdata(ui, file_, rev):
816 def debugdata(ui, file_, rev):
811 """dump the contents of a data file revision"""
817 """dump the contents of a data file revision"""
812 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
818 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
813 try:
819 try:
814 ui.write(r.revision(r.lookup(rev)))
820 ui.write(r.revision(r.lookup(rev)))
815 except KeyError:
821 except KeyError:
816 raise util.Abort(_('invalid revision identifier %s') % rev)
822 raise util.Abort(_('invalid revision identifier %s') % rev)
817
823
818 def debugdate(ui, date, range=None, **opts):
824 def debugdate(ui, date, range=None, **opts):
819 """parse and display a date"""
825 """parse and display a date"""
820 if opts["extended"]:
826 if opts["extended"]:
821 d = util.parsedate(date, util.extendeddateformats)
827 d = util.parsedate(date, util.extendeddateformats)
822 else:
828 else:
823 d = util.parsedate(date)
829 d = util.parsedate(date)
824 ui.write("internal: %s %s\n" % d)
830 ui.write("internal: %s %s\n" % d)
825 ui.write("standard: %s\n" % util.datestr(d))
831 ui.write("standard: %s\n" % util.datestr(d))
826 if range:
832 if range:
827 m = util.matchdate(range)
833 m = util.matchdate(range)
828 ui.write("match: %s\n" % m(d[0]))
834 ui.write("match: %s\n" % m(d[0]))
829
835
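# A rough standalone sketch of the "internal"/"standard" output above, using
# only the standard library. util.parsedate accepts many more formats; this
# assumes one fixed input format and a UTC offset of 0.
import calendar, time

def _parse_date_sketch(datestr, fmt="%Y-%m-%d %H:%M:%S"):
    when = calendar.timegm(time.strptime(datestr, fmt))   # seconds since epoch
    offset = 0                                            # assumed UTC
    print("internal: %s %s" % (when, offset))
    print("standard: %s" % time.strftime("%a %b %d %H:%M:%S %Y", time.gmtime(when)))

# _parse_date_sketch("2008-10-12 10:00:00")
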
830 def debugindex(ui, file_):
836 def debugindex(ui, file_):
831 """dump the contents of an index file"""
837 """dump the contents of an index file"""
832 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
838 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
833 ui.write(" rev offset length base linkrev" +
839 ui.write(" rev offset length base linkrev" +
834 " nodeid p1 p2\n")
840 " nodeid p1 p2\n")
835 for i in r:
841 for i in r:
836 node = r.node(i)
842 node = r.node(i)
837 try:
843 try:
838 pp = r.parents(node)
844 pp = r.parents(node)
839 except:
845 except:
840 pp = [nullid, nullid]
846 pp = [nullid, nullid]
841 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
847 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
842 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
848 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
843 short(node), short(pp[0]), short(pp[1])))
849 short(node), short(pp[0]), short(pp[1])))
844
850
845 def debugindexdot(ui, file_):
851 def debugindexdot(ui, file_):
846 """dump an index DAG as a .dot file"""
852 """dump an index DAG as a .dot file"""
847 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
853 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
848 ui.write("digraph G {\n")
854 ui.write("digraph G {\n")
849 for i in r:
855 for i in r:
850 node = r.node(i)
856 node = r.node(i)
851 pp = r.parents(node)
857 pp = r.parents(node)
852 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
858 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
853 if pp[1] != nullid:
859 if pp[1] != nullid:
854 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
860 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
855 ui.write("}\n")
861 ui.write("}\n")
856
862
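# The same dot-file idea as debugindexdot, driven by a toy {rev: (p1, p2)}
# mapping so it runs without a revlog; -1 stands in for the null revision.
def _dag_to_dot(parents):
    lines = ["digraph G {"]
    for rev in sorted(parents):
        p1, p2 = parents[rev]
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# print(_dag_to_dot({0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, 2)}))
# rev 3 is a merge, so it gets two incoming edges.
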
857 def debuginstall(ui):
863 def debuginstall(ui):
858 '''test Mercurial installation'''
864 '''test Mercurial installation'''
859
865
860 def writetemp(contents):
866 def writetemp(contents):
861 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
867 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
862 f = os.fdopen(fd, "wb")
868 f = os.fdopen(fd, "wb")
863 f.write(contents)
869 f.write(contents)
864 f.close()
870 f.close()
865 return name
871 return name
866
872
867 problems = 0
873 problems = 0
868
874
869 # encoding
875 # encoding
870 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
876 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
871 try:
877 try:
872 util.fromlocal("test")
878 util.fromlocal("test")
873 except util.Abort, inst:
879 except util.Abort, inst:
874 ui.write(" %s\n" % inst)
880 ui.write(" %s\n" % inst)
875 ui.write(_(" (check that your locale is properly set)\n"))
881 ui.write(_(" (check that your locale is properly set)\n"))
876 problems += 1
882 problems += 1
877
883
878 # compiled modules
884 # compiled modules
879 ui.status(_("Checking extensions...\n"))
885 ui.status(_("Checking extensions...\n"))
880 try:
886 try:
881 import bdiff, mpatch, base85
887 import bdiff, mpatch, base85
882 except Exception, inst:
888 except Exception, inst:
883 ui.write(" %s\n" % inst)
889 ui.write(" %s\n" % inst)
884 ui.write(_(" One or more extensions could not be found"))
890 ui.write(_(" One or more extensions could not be found"))
885 ui.write(_(" (check that you compiled the extensions)\n"))
891 ui.write(_(" (check that you compiled the extensions)\n"))
886 problems += 1
892 problems += 1
887
893
888 # templates
894 # templates
889 ui.status(_("Checking templates...\n"))
895 ui.status(_("Checking templates...\n"))
890 try:
896 try:
891 import templater
897 import templater
892 t = templater.templater(templater.templatepath("map-cmdline.default"))
898 t = templater.templater(templater.templatepath("map-cmdline.default"))
893 except Exception, inst:
899 except Exception, inst:
894 ui.write(" %s\n" % inst)
900 ui.write(" %s\n" % inst)
895 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
901 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
896 problems += 1
902 problems += 1
897
903
898 # patch
904 # patch
899 ui.status(_("Checking patch...\n"))
905 ui.status(_("Checking patch...\n"))
900 patchproblems = 0
906 patchproblems = 0
901 a = "1\n2\n3\n4\n"
907 a = "1\n2\n3\n4\n"
902 b = "1\n2\n3\ninsert\n4\n"
908 b = "1\n2\n3\ninsert\n4\n"
903 fa = writetemp(a)
909 fa = writetemp(a)
904 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
910 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
905 os.path.basename(fa))
911 os.path.basename(fa))
906 fd = writetemp(d)
912 fd = writetemp(d)
907
913
908 files = {}
914 files = {}
909 try:
915 try:
910 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
916 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 except util.Abort, e:
917 except util.Abort, e:
912 ui.write(_(" patch call failed:\n"))
918 ui.write(_(" patch call failed:\n"))
913 ui.write(" " + str(e) + "\n")
919 ui.write(" " + str(e) + "\n")
914 patchproblems += 1
920 patchproblems += 1
915 else:
921 else:
916 if list(files) != [os.path.basename(fa)]:
922 if list(files) != [os.path.basename(fa)]:
917 ui.write(_(" unexpected patch output!\n"))
923 ui.write(_(" unexpected patch output!\n"))
918 patchproblems += 1
924 patchproblems += 1
919 a = file(fa).read()
925 a = file(fa).read()
920 if a != b:
926 if a != b:
921 ui.write(_(" patch test failed!\n"))
927 ui.write(_(" patch test failed!\n"))
922 patchproblems += 1
928 patchproblems += 1
923
929
924 if patchproblems:
930 if patchproblems:
925 if ui.config('ui', 'patch'):
931 if ui.config('ui', 'patch'):
926 ui.write(_(" (Current patch tool may be incompatible with patch,"
932 ui.write(_(" (Current patch tool may be incompatible with patch,"
927 " or misconfigured. Please check your .hgrc file)\n"))
933 " or misconfigured. Please check your .hgrc file)\n"))
928 else:
934 else:
929 ui.write(_(" Internal patcher failure, please report this error"
935 ui.write(_(" Internal patcher failure, please report this error"
930 " to http://www.selenic.com/mercurial/bts\n"))
936 " to http://www.selenic.com/mercurial/bts\n"))
931 problems += patchproblems
937 problems += patchproblems
932
938
933 os.unlink(fa)
939 os.unlink(fa)
934 os.unlink(fd)
940 os.unlink(fd)
935
941
936 # editor
942 # editor
937 ui.status(_("Checking commit editor...\n"))
943 ui.status(_("Checking commit editor...\n"))
938 editor = ui.geteditor()
944 editor = ui.geteditor()
939 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
945 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
940 if not cmdpath:
946 if not cmdpath:
941 if editor == 'vi':
947 if editor == 'vi':
942 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
948 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
943 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
949 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
944 else:
950 else:
945 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
951 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
946 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
952 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
947 problems += 1
953 problems += 1
948
954
949 # check username
955 # check username
950 ui.status(_("Checking username...\n"))
956 ui.status(_("Checking username...\n"))
951 user = os.environ.get("HGUSER")
957 user = os.environ.get("HGUSER")
952 if user is None:
958 if user is None:
953 user = ui.config("ui", "username")
959 user = ui.config("ui", "username")
954 if user is None:
960 if user is None:
955 user = os.environ.get("EMAIL")
961 user = os.environ.get("EMAIL")
956 if not user:
962 if not user:
957 ui.warn(" ")
963 ui.warn(" ")
958 ui.username()
964 ui.username()
959 ui.write(_(" (specify a username in your .hgrc file)\n"))
965 ui.write(_(" (specify a username in your .hgrc file)\n"))
960
966
961 if not problems:
967 if not problems:
962 ui.status(_("No problems detected\n"))
968 ui.status(_("No problems detected\n"))
963 else:
969 else:
964 ui.write(_("%s problems detected,"
970 ui.write(_("%s problems detected,"
965 " please check your install!\n") % problems)
971 " please check your install!\n") % problems)
966
972
967 return problems
973 return problems
968
974
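# A bare-bones version of the commit-editor check above, scanning PATH by hand
# instead of util.find_exe(). The environment-variable fallbacks shown in the
# usage comment are a simplification of what ui.geteditor() actually does.
import os

def _find_on_path(program):
    for d in os.environ.get("PATH", "").split(os.pathsep):
        candidate = os.path.join(d, program)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None

# editor = os.environ.get("HGEDITOR") or os.environ.get("EDITOR", "vi")
# if not _find_on_path(editor.split()[0]):
#     print("no usable commit editor found; set one in your .hgrc")
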
969 def debugrename(ui, repo, file1, *pats, **opts):
975 def debugrename(ui, repo, file1, *pats, **opts):
970 """dump rename information"""
976 """dump rename information"""
971
977
972 ctx = repo[opts.get('rev')]
978 ctx = repo[opts.get('rev')]
973 m = cmdutil.match(repo, (file1,) + pats, opts)
979 m = cmdutil.match(repo, (file1,) + pats, opts)
974 for abs in ctx.walk(m):
980 for abs in ctx.walk(m):
975 fctx = ctx[abs]
981 fctx = ctx[abs]
976 o = fctx.filelog().renamed(fctx.filenode())
982 o = fctx.filelog().renamed(fctx.filenode())
977 rel = m.rel(abs)
983 rel = m.rel(abs)
978 if o:
984 if o:
979 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
985 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
980 else:
986 else:
981 ui.write(_("%s not renamed\n") % rel)
987 ui.write(_("%s not renamed\n") % rel)
982
988
983 def debugwalk(ui, repo, *pats, **opts):
989 def debugwalk(ui, repo, *pats, **opts):
984 """show how files match on given patterns"""
990 """show how files match on given patterns"""
985 m = cmdutil.match(repo, pats, opts)
991 m = cmdutil.match(repo, pats, opts)
986 items = list(repo.walk(m))
992 items = list(repo.walk(m))
987 if not items:
993 if not items:
988 return
994 return
989 fmt = 'f %%-%ds %%-%ds %%s' % (
995 fmt = 'f %%-%ds %%-%ds %%s' % (
990 max([len(abs) for abs in items]),
996 max([len(abs) for abs in items]),
991 max([len(m.rel(abs)) for abs in items]))
997 max([len(m.rel(abs)) for abs in items]))
992 for abs in items:
998 for abs in items:
993 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
999 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
994 ui.write("%s\n" % line.rstrip())
1000 ui.write("%s\n" % line.rstrip())
995
1001
996 def diff(ui, repo, *pats, **opts):
1002 def diff(ui, repo, *pats, **opts):
997 """diff repository (or selected files)
1003 """diff repository (or selected files)
998
1004
999 Show differences between revisions for the specified files.
1005 Show differences between revisions for the specified files.
1000
1006
1001 Differences between files are shown using the unified diff format.
1007 Differences between files are shown using the unified diff format.
1002
1008
1003 NOTE: diff may generate unexpected results for merges, as it will
1009 NOTE: diff may generate unexpected results for merges, as it will
1004 default to comparing against the working directory's first parent
1010 default to comparing against the working directory's first parent
1005 changeset if no revisions are specified.
1011 changeset if no revisions are specified.
1006
1012
1007 When two revision arguments are given, then changes are shown
1013 When two revision arguments are given, then changes are shown
1008 between those revisions. If only one revision is specified then
1014 between those revisions. If only one revision is specified then
1009 that revision is compared to the working directory, and, when no
1015 that revision is compared to the working directory, and, when no
1010 revisions are specified, the working directory files are compared
1016 revisions are specified, the working directory files are compared
1011 to its parent.
1017 to its parent.
1012
1018
1013 Without the -a option, diff will avoid generating diffs of files
1019 Without the -a option, diff will avoid generating diffs of files
1014 it detects as binary. With -a, diff will generate a diff anyway,
1020 it detects as binary. With -a, diff will generate a diff anyway,
1015 probably with undesirable results.
1021 probably with undesirable results.
1016
1022
1017 Use the --git option to generate diffs in the git extended diff
1023 Use the --git option to generate diffs in the git extended diff
1018 format. Read the diffs help topic for more information.
1024 format. Read the diffs help topic for more information.
1019 """
1025 """
1020
1026
1021 revs = opts.get('rev')
1027 revs = opts.get('rev')
1022 change = opts.get('change')
1028 change = opts.get('change')
1023
1029
1024 if revs and change:
1030 if revs and change:
1025 msg = _('cannot specify --rev and --change at the same time')
1031 msg = _('cannot specify --rev and --change at the same time')
1026 raise util.Abort(msg)
1032 raise util.Abort(msg)
1027 elif change:
1033 elif change:
1028 node2 = repo.lookup(change)
1034 node2 = repo.lookup(change)
1029 node1 = repo[node2].parents()[0].node()
1035 node1 = repo[node2].parents()[0].node()
1030 else:
1036 else:
1031 node1, node2 = cmdutil.revpair(repo, revs)
1037 node1, node2 = cmdutil.revpair(repo, revs)
1032
1038
1033 m = cmdutil.match(repo, pats, opts)
1039 m = cmdutil.match(repo, pats, opts)
1034 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1040 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1035 for chunk in it:
1041 for chunk in it:
1036 repo.ui.write(chunk)
1042 repo.ui.write(chunk)
1037
1043
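# A quick standard-library illustration of the unified diff format the diff
# docstring refers to (reusing the sample texts from debuginstall); this is
# not how patch.diff itself is implemented.
import difflib

def _unified_diff_sketch():
    a = "1\n2\n3\n4\n".splitlines(True)
    b = "1\n2\n3\ninsert\n4\n".splitlines(True)
    return ''.join(difflib.unified_diff(a, b, fromfile='a', tofile='b'))

# print(_unified_diff_sketch())   # shows the familiar ---/+++/@@ hunk layout
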
1038 def export(ui, repo, *changesets, **opts):
1044 def export(ui, repo, *changesets, **opts):
1039 """dump the header and diffs for one or more changesets
1045 """dump the header and diffs for one or more changesets
1040
1046
1041 Print the changeset header and diffs for one or more revisions.
1047 Print the changeset header and diffs for one or more revisions.
1042
1048
1043 The information shown in the changeset header is: author,
1049 The information shown in the changeset header is: author,
1044 changeset hash, parent(s) and commit comment.
1050 changeset hash, parent(s) and commit comment.
1045
1051
1046 NOTE: export may generate unexpected diff output for merge changesets,
1052 NOTE: export may generate unexpected diff output for merge changesets,
1047 as it will compare the merge changeset against its first parent only.
1053 as it will compare the merge changeset against its first parent only.
1048
1054
1049 Output may be to a file, in which case the name of the file is
1055 Output may be to a file, in which case the name of the file is
1050 given using a format string. The formatting rules are as follows:
1056 given using a format string. The formatting rules are as follows:
1051
1057
1052 %% literal "%" character
1058 %% literal "%" character
1053 %H changeset hash (40 hexadecimal digits)
1059 %H changeset hash (40 hexadecimal digits)
1054 %N number of patches being generated
1060 %N number of patches being generated
1055 %R changeset revision number
1061 %R changeset revision number
1056 %b basename of the exporting repository
1062 %b basename of the exporting repository
1057 %h short-form changeset hash (12 hexadecimal digits)
1063 %h short-form changeset hash (12 hexadecimal digits)
1058 %n zero-padded sequence number, starting at 1
1064 %n zero-padded sequence number, starting at 1
1059 %r zero-padded changeset revision number
1065 %r zero-padded changeset revision number
1060
1066
1061 Without the -a option, export will avoid generating diffs of files
1067 Without the -a option, export will avoid generating diffs of files
1062 it detects as binary. With -a, export will generate a diff anyway,
1068 it detects as binary. With -a, export will generate a diff anyway,
1063 probably with undesirable results.
1069 probably with undesirable results.
1064
1070
1065 Use the --git option to generate diffs in the git extended diff
1071 Use the --git option to generate diffs in the git extended diff
1066 format. Read the diffs help topic for more information.
1072 format. Read the diffs help topic for more information.
1067
1073
1068 With the --switch-parent option, the diff will be against the second
1074 With the --switch-parent option, the diff will be against the second
1069 parent. This can be useful for reviewing a merge.
1075 parent. This can be useful for reviewing a merge.
1070 """
1076 """
1071 if not changesets:
1077 if not changesets:
1072 raise util.Abort(_("export requires at least one changeset"))
1078 raise util.Abort(_("export requires at least one changeset"))
1073 revs = cmdutil.revrange(repo, changesets)
1079 revs = cmdutil.revrange(repo, changesets)
1074 if len(revs) > 1:
1080 if len(revs) > 1:
1075 ui.note(_('exporting patches:\n'))
1081 ui.note(_('exporting patches:\n'))
1076 else:
1082 else:
1077 ui.note(_('exporting patch:\n'))
1083 ui.note(_('exporting patch:\n'))
1078 patch.export(repo, revs, template=opts.get('output'),
1084 patch.export(repo, revs, template=opts.get('output'),
1079 switch_parent=opts.get('switch_parent'),
1085 switch_parent=opts.get('switch_parent'),
1080 opts=patch.diffopts(ui, opts))
1086 opts=patch.diffopts(ui, opts))
1081
1087
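# A toy expansion of the output-filename format string documented above;
# patch.export does the real work, this only illustrates the keys. The
# zero-padding widths used for %n and %r here are simplifications.
def _expand_export_name(template, total, seqno, rev, node, reponame):
    expansions = {
        '%': '%',
        'H': node,                     # full 40-digit changeset hash
        'N': '%d' % total,             # number of patches being generated
        'R': '%d' % rev,               # changeset revision number
        'b': reponame,                 # basename of the exporting repository
        'h': node[:12],                # short-form changeset hash
        'n': '%02d' % seqno,           # zero-padded sequence number, from 1
        'r': '%07d' % rev,             # zero-padded revision number
    }
    out, i = [], 0
    while i < len(template):
        c = template[i]
        if c == '%' and i + 1 < len(template) and template[i + 1] in expansions:
            out.append(expansions[template[i + 1]])
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

# _expand_export_name('%b-%R-%h.patch', 3, 1, 42, '9f' * 20, 'myrepo')
#   -> 'myrepo-42-9f9f9f9f9f9f.patch'
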
1082 def grep(ui, repo, pattern, *pats, **opts):
1088 def grep(ui, repo, pattern, *pats, **opts):
1083 """search for a pattern in specified files and revisions
1089 """search for a pattern in specified files and revisions
1084
1090
1085 Search revisions of files for a regular expression.
1091 Search revisions of files for a regular expression.
1086
1092
1087 This command behaves differently from Unix grep. It only accepts
1093 This command behaves differently from Unix grep. It only accepts
1088 Python/Perl regexps. It searches repository history, not the
1094 Python/Perl regexps. It searches repository history, not the
1089 working directory. It always prints the revision number in which
1095 working directory. It always prints the revision number in which
1090 a match appears.
1096 a match appears.
1091
1097
1092 By default, grep only prints output for the first revision of a
1098 By default, grep only prints output for the first revision of a
1093 file in which it finds a match. To get it to print every revision
1099 file in which it finds a match. To get it to print every revision
1094 that contains a change in match status ("-" for a match that
1100 that contains a change in match status ("-" for a match that
1095 becomes a non-match, or "+" for a non-match that becomes a match),
1101 becomes a non-match, or "+" for a non-match that becomes a match),
1096 use the --all flag.
1102 use the --all flag.
1097 """
1103 """
1098 reflags = 0
1104 reflags = 0
1099 if opts.get('ignore_case'):
1105 if opts.get('ignore_case'):
1100 reflags |= re.I
1106 reflags |= re.I
1101 try:
1107 try:
1102 regexp = re.compile(pattern, reflags)
1108 regexp = re.compile(pattern, reflags)
1103 except Exception, inst:
1109 except Exception, inst:
1104 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1110 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1105 return None
1111 return None
1106 sep, eol = ':', '\n'
1112 sep, eol = ':', '\n'
1107 if opts.get('print0'):
1113 if opts.get('print0'):
1108 sep = eol = '\0'
1114 sep = eol = '\0'
1109
1115
1110 fcache = {}
1116 fcache = {}
1111 def getfile(fn):
1117 def getfile(fn):
1112 if fn not in fcache:
1118 if fn not in fcache:
1113 fcache[fn] = repo.file(fn)
1119 fcache[fn] = repo.file(fn)
1114 return fcache[fn]
1120 return fcache[fn]
1115
1121
1116 def matchlines(body):
1122 def matchlines(body):
1117 begin = 0
1123 begin = 0
1118 linenum = 0
1124 linenum = 0
1119 while True:
1125 while True:
1120 match = regexp.search(body, begin)
1126 match = regexp.search(body, begin)
1121 if not match:
1127 if not match:
1122 break
1128 break
1123 mstart, mend = match.span()
1129 mstart, mend = match.span()
1124 linenum += body.count('\n', begin, mstart) + 1
1130 linenum += body.count('\n', begin, mstart) + 1
1125 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1131 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1126 begin = body.find('\n', mend) + 1 or len(body)
1132 begin = body.find('\n', mend) + 1 or len(body)
1127 lend = begin - 1
1133 lend = begin - 1
1128 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1134 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1129
1135
1130 class linestate(object):
1136 class linestate(object):
1131 def __init__(self, line, linenum, colstart, colend):
1137 def __init__(self, line, linenum, colstart, colend):
1132 self.line = line
1138 self.line = line
1133 self.linenum = linenum
1139 self.linenum = linenum
1134 self.colstart = colstart
1140 self.colstart = colstart
1135 self.colend = colend
1141 self.colend = colend
1136
1142
1137 def __hash__(self):
1143 def __hash__(self):
1138 return hash((self.linenum, self.line))
1144 return hash((self.linenum, self.line))
1139
1145
1140 def __eq__(self, other):
1146 def __eq__(self, other):
1141 return self.line == other.line
1147 return self.line == other.line
1142
1148
1143 matches = {}
1149 matches = {}
1144 copies = {}
1150 copies = {}
1145 def grepbody(fn, rev, body):
1151 def grepbody(fn, rev, body):
1146 matches[rev].setdefault(fn, [])
1152 matches[rev].setdefault(fn, [])
1147 m = matches[rev][fn]
1153 m = matches[rev][fn]
1148 for lnum, cstart, cend, line in matchlines(body):
1154 for lnum, cstart, cend, line in matchlines(body):
1149 s = linestate(line, lnum, cstart, cend)
1155 s = linestate(line, lnum, cstart, cend)
1150 m.append(s)
1156 m.append(s)
1151
1157
1152 def difflinestates(a, b):
1158 def difflinestates(a, b):
1153 sm = difflib.SequenceMatcher(None, a, b)
1159 sm = difflib.SequenceMatcher(None, a, b)
1154 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1160 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1155 if tag == 'insert':
1161 if tag == 'insert':
1156 for i in xrange(blo, bhi):
1162 for i in xrange(blo, bhi):
1157 yield ('+', b[i])
1163 yield ('+', b[i])
1158 elif tag == 'delete':
1164 elif tag == 'delete':
1159 for i in xrange(alo, ahi):
1165 for i in xrange(alo, ahi):
1160 yield ('-', a[i])
1166 yield ('-', a[i])
1161 elif tag == 'replace':
1167 elif tag == 'replace':
1162 for i in xrange(alo, ahi):
1168 for i in xrange(alo, ahi):
1163 yield ('-', a[i])
1169 yield ('-', a[i])
1164 for i in xrange(blo, bhi):
1170 for i in xrange(blo, bhi):
1165 yield ('+', b[i])
1171 yield ('+', b[i])
1166
1172
1167 prev = {}
1173 prev = {}
1168 def display(fn, rev, states, prevstates):
1174 def display(fn, rev, states, prevstates):
1169 datefunc = ui.quiet and util.shortdate or util.datestr
1175 datefunc = ui.quiet and util.shortdate or util.datestr
1170 found = False
1176 found = False
1171 filerevmatches = {}
1177 filerevmatches = {}
1172 r = prev.get(fn, -1)
1178 r = prev.get(fn, -1)
1173 if opts.get('all'):
1179 if opts.get('all'):
1174 iter = difflinestates(states, prevstates)
1180 iter = difflinestates(states, prevstates)
1175 else:
1181 else:
1176 iter = [('', l) for l in prevstates]
1182 iter = [('', l) for l in prevstates]
1177 for change, l in iter:
1183 for change, l in iter:
1178 cols = [fn, str(r)]
1184 cols = [fn, str(r)]
1179 if opts.get('line_number'):
1185 if opts.get('line_number'):
1180 cols.append(str(l.linenum))
1186 cols.append(str(l.linenum))
1181 if opts.get('all'):
1187 if opts.get('all'):
1182 cols.append(change)
1188 cols.append(change)
1183 if opts.get('user'):
1189 if opts.get('user'):
1184 cols.append(ui.shortuser(get(r)[1]))
1190 cols.append(ui.shortuser(get(r)[1]))
1185 if opts.get('date'):
1191 if opts.get('date'):
1186 cols.append(datefunc(get(r)[2]))
1192 cols.append(datefunc(get(r)[2]))
1187 if opts.get('files_with_matches'):
1193 if opts.get('files_with_matches'):
1188 c = (fn, r)
1194 c = (fn, r)
1189 if c in filerevmatches:
1195 if c in filerevmatches:
1190 continue
1196 continue
1191 filerevmatches[c] = 1
1197 filerevmatches[c] = 1
1192 else:
1198 else:
1193 cols.append(l.line)
1199 cols.append(l.line)
1194 ui.write(sep.join(cols), eol)
1200 ui.write(sep.join(cols), eol)
1195 found = True
1201 found = True
1196 return found
1202 return found
1197
1203
1198 fstate = {}
1204 fstate = {}
1199 skip = {}
1205 skip = {}
1200 get = util.cachefunc(lambda r: repo[r].changeset())
1206 get = util.cachefunc(lambda r: repo[r].changeset())
1201 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1207 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1202 found = False
1208 found = False
1203 follow = opts.get('follow')
1209 follow = opts.get('follow')
1204 for st, rev, fns in changeiter:
1210 for st, rev, fns in changeiter:
1205 if st == 'window':
1211 if st == 'window':
1206 matches.clear()
1212 matches.clear()
1207 elif st == 'add':
1213 elif st == 'add':
1208 ctx = repo[rev]
1214 ctx = repo[rev]
1209 matches[rev] = {}
1215 matches[rev] = {}
1210 for fn in fns:
1216 for fn in fns:
1211 if fn in skip:
1217 if fn in skip:
1212 continue
1218 continue
1213 try:
1219 try:
1214 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1220 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1215 fstate.setdefault(fn, [])
1221 fstate.setdefault(fn, [])
1216 if follow:
1222 if follow:
1217 copied = getfile(fn).renamed(ctx.filenode(fn))
1223 copied = getfile(fn).renamed(ctx.filenode(fn))
1218 if copied:
1224 if copied:
1219 copies.setdefault(rev, {})[fn] = copied[0]
1225 copies.setdefault(rev, {})[fn] = copied[0]
1220 except error.LookupError:
1226 except error.LookupError:
1221 pass
1227 pass
1222 elif st == 'iter':
1228 elif st == 'iter':
1223 for fn, m in util.sort(matches[rev].items()):
1229 for fn, m in util.sort(matches[rev].items()):
1224 copy = copies.get(rev, {}).get(fn)
1230 copy = copies.get(rev, {}).get(fn)
1225 if fn in skip:
1231 if fn in skip:
1226 if copy:
1232 if copy:
1227 skip[copy] = True
1233 skip[copy] = True
1228 continue
1234 continue
1229 if fn in prev or fstate[fn]:
1235 if fn in prev or fstate[fn]:
1230 r = display(fn, rev, m, fstate[fn])
1236 r = display(fn, rev, m, fstate[fn])
1231 found = found or r
1237 found = found or r
1232 if r and not opts.get('all'):
1238 if r and not opts.get('all'):
1233 skip[fn] = True
1239 skip[fn] = True
1234 if copy:
1240 if copy:
1235 skip[copy] = True
1241 skip[copy] = True
1236 fstate[fn] = m
1242 fstate[fn] = m
1237 if copy:
1243 if copy:
1238 fstate[copy] = m
1244 fstate[copy] = m
1239 prev[fn] = rev
1245 prev[fn] = rev
1240
1246
1241 for fn, state in util.sort(fstate.items()):
1247 for fn, state in util.sort(fstate.items()):
1242 if fn in skip:
1248 if fn in skip:
1243 continue
1249 continue
1244 if fn not in copies.get(prev[fn], {}):
1250 if fn not in copies.get(prev[fn], {}):
1245 found = display(fn, rev, {}, state) or found
1251 found = display(fn, rev, {}, state) or found
1246 return (not found and 1) or 0
1252 return (not found and 1) or 0
1247
1253
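# The --all behaviour described in the grep docstring amounts to diffing the
# matching lines of consecutive revisions, as difflinestates does above; the
# same idea over plain strings, using only difflib.
import difflib

def _match_status_changes(old_matches, new_matches):
    sm = difflib.SequenceMatcher(None, old_matches, new_matches)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old_matches[alo:ahi]:
                yield ('-', line)              # a match that went away
        if tag in ('insert', 'replace'):
            for line in new_matches[blo:bhi]:
                yield ('+', line)              # a match that appeared
    # 'equal' opcodes are skipped: unchanged matches are not reported

# list(_match_status_changes(['foo()'], ['foo()', 'foo(bar)']))
#   -> [('+', 'foo(bar)')]
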
1248 def heads(ui, repo, *branchrevs, **opts):
1254 def heads(ui, repo, *branchrevs, **opts):
1249 """show current repository heads or show branch heads
1255 """show current repository heads or show branch heads
1250
1256
1251 With no arguments, show all repository head changesets.
1257 With no arguments, show all repository head changesets.
1252
1258
1253 If branch or revisions names are given this will show the heads of
1259 If branch or revisions names are given this will show the heads of
1254 the specified branches or the branches those revisions are tagged
1260 the specified branches or the branches those revisions are tagged
1255 with.
1261 with.
1256
1262
1257 Repository "heads" are changesets that don't have child
1263 Repository "heads" are changesets that don't have child
1258 changesets. They are where development generally takes place and
1264 changesets. They are where development generally takes place and
1259 are the usual targets for update and merge operations.
1265 are the usual targets for update and merge operations.
1260
1266
1261 Branch heads are changesets that have a given branch tag, but have
1267 Branch heads are changesets that have a given branch tag, but have
1262 no child changesets with that tag. They are usually where
1268 no child changesets with that tag. They are usually where
1263 development on the given branch takes place.
1269 development on the given branch takes place.
1264 """
1270 """
1265 if opts.get('rev'):
1271 if opts.get('rev'):
1266 start = repo.lookup(opts['rev'])
1272 start = repo.lookup(opts['rev'])
1267 else:
1273 else:
1268 start = None
1274 start = None
1275 closed = not opts.get('active')
1269 if not branchrevs:
1276 if not branchrevs:
1270 # Assume we're looking repo-wide heads if no revs were specified.
1277 # Assume we're looking repo-wide heads if no revs were specified.
1271 heads = repo.heads(start)
1278 heads = repo.heads(start, closed=closed)
1272 else:
1279 else:
1273 heads = []
1280 heads = []
1274 visitedset = util.set()
1281 visitedset = util.set()
1275 for branchrev in branchrevs:
1282 for branchrev in branchrevs:
1276 branch = repo[branchrev].branch()
1283 branch = repo[branchrev].branch()
1277 if branch in visitedset:
1284 if branch in visitedset:
1278 continue
1285 continue
1279 visitedset.add(branch)
1286 visitedset.add(branch)
1280 bheads = repo.branchheads(branch, start)
1287 bheads = repo.branchheads(branch, start, closed=closed)
1281 if not bheads:
1288 if not bheads:
1282 if branch != branchrev:
1289 if branch != branchrev:
1283 ui.warn(_("no changes on branch %s containing %s are "
1290 ui.warn(_("no changes on branch %s containing %s are "
1284 "reachable from %s\n")
1291 "reachable from %s\n")
1285 % (branch, branchrev, opts.get('rev')))
1292 % (branch, branchrev, opts.get('rev')))
1286 else:
1293 else:
1287 ui.warn(_("no changes on branch %s are reachable from %s\n")
1294 ui.warn(_("no changes on branch %s are reachable from %s\n")
1288 % (branch, opts.get('rev')))
1295 % (branch, opts.get('rev')))
1289 heads.extend(bheads)
1296 heads.extend(bheads)
1290 if not heads:
1297 if not heads:
1291 return 1
1298 return 1
1292 displayer = cmdutil.show_changeset(ui, repo, opts)
1299 displayer = cmdutil.show_changeset(ui, repo, opts)
1293 for n in heads:
1300 for n in heads:
1294 displayer.show(repo[n])
1301 displayer.show(repo[n])
1295
1302
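# The change above threads a `closed` flag down to repo.heads() and
# repo.branchheads(). A toy version of that filtering over plain dicts, where
# a head carrying a 'close' marker is dropped unless closed heads were
# requested (i.e. --active was not given); the dict layout is made up.
def _filter_heads(heads, closed=True):
    if closed:
        return list(heads)                     # default: open and closed heads
    return [h for h in heads if not h.get('close')]

# _filter_heads([{'rev': 5}, {'rev': 7, 'close': True}], closed=False)
#   -> [{'rev': 5}]
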
1296 def help_(ui, name=None, with_version=False):
1303 def help_(ui, name=None, with_version=False):
1297 """show help for a given topic or a help overview
1304 """show help for a given topic or a help overview
1298
1305
1299 With no arguments, print a list of commands and short help.
1306 With no arguments, print a list of commands and short help.
1300
1307
1301 Given a topic, extension, or command name, print help for that topic."""
1308 Given a topic, extension, or command name, print help for that topic."""
1302 option_lists = []
1309 option_lists = []
1303
1310
1304 def addglobalopts(aliases):
1311 def addglobalopts(aliases):
1305 if ui.verbose:
1312 if ui.verbose:
1306 option_lists.append((_("global options:"), globalopts))
1313 option_lists.append((_("global options:"), globalopts))
1307 if name == 'shortlist':
1314 if name == 'shortlist':
1308 option_lists.append((_('use "hg help" for the full list '
1315 option_lists.append((_('use "hg help" for the full list '
1309 'of commands'), ()))
1316 'of commands'), ()))
1310 else:
1317 else:
1311 if name == 'shortlist':
1318 if name == 'shortlist':
1312 msg = _('use "hg help" for the full list of commands '
1319 msg = _('use "hg help" for the full list of commands '
1313 'or "hg -v" for details')
1320 'or "hg -v" for details')
1314 elif aliases:
1321 elif aliases:
1315 msg = _('use "hg -v help%s" to show aliases and '
1322 msg = _('use "hg -v help%s" to show aliases and '
1316 'global options') % (name and " " + name or "")
1323 'global options') % (name and " " + name or "")
1317 else:
1324 else:
1318 msg = _('use "hg -v help %s" to show global options') % name
1325 msg = _('use "hg -v help %s" to show global options') % name
1319 option_lists.append((msg, ()))
1326 option_lists.append((msg, ()))
1320
1327
1321 def helpcmd(name):
1328 def helpcmd(name):
1322 if with_version:
1329 if with_version:
1323 version_(ui)
1330 version_(ui)
1324 ui.write('\n')
1331 ui.write('\n')
1325
1332
1326 try:
1333 try:
1327 aliases, i = cmdutil.findcmd(name, table, False)
1334 aliases, i = cmdutil.findcmd(name, table, False)
1328 except error.AmbiguousCommand, inst:
1335 except error.AmbiguousCommand, inst:
1329 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1336 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1330 helplist(_('list of commands:\n\n'), select)
1337 helplist(_('list of commands:\n\n'), select)
1331 return
1338 return
1332
1339
1333 # synopsis
1340 # synopsis
1334 if len(i) > 2:
1341 if len(i) > 2:
1335 if i[2].startswith('hg'):
1342 if i[2].startswith('hg'):
1336 ui.write("%s\n" % i[2])
1343 ui.write("%s\n" % i[2])
1337 else:
1344 else:
1338 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1345 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1339 else:
1346 else:
1340 ui.write('hg %s\n' % aliases[0])
1347 ui.write('hg %s\n' % aliases[0])
1341
1348
1342 # aliases
1349 # aliases
1343 if not ui.quiet and len(aliases) > 1:
1350 if not ui.quiet and len(aliases) > 1:
1344 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1351 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1345
1352
1346 # description
1353 # description
1347 doc = gettext(i[0].__doc__)
1354 doc = gettext(i[0].__doc__)
1348 if not doc:
1355 if not doc:
1349 doc = _("(no help text available)")
1356 doc = _("(no help text available)")
1350 if ui.quiet:
1357 if ui.quiet:
1351 doc = doc.splitlines(0)[0]
1358 doc = doc.splitlines(0)[0]
1352 ui.write("\n%s\n" % doc.rstrip())
1359 ui.write("\n%s\n" % doc.rstrip())
1353
1360
1354 if not ui.quiet:
1361 if not ui.quiet:
1355 # options
1362 # options
1356 if i[1]:
1363 if i[1]:
1357 option_lists.append((_("options:\n"), i[1]))
1364 option_lists.append((_("options:\n"), i[1]))
1358
1365
1359 addglobalopts(False)
1366 addglobalopts(False)
1360
1367
1361 def helplist(header, select=None):
1368 def helplist(header, select=None):
1362 h = {}
1369 h = {}
1363 cmds = {}
1370 cmds = {}
1364 for c, e in table.iteritems():
1371 for c, e in table.iteritems():
1365 f = c.split("|", 1)[0]
1372 f = c.split("|", 1)[0]
1366 if select and not select(f):
1373 if select and not select(f):
1367 continue
1374 continue
1368 if (not select and name != 'shortlist' and
1375 if (not select and name != 'shortlist' and
1369 e[0].__module__ != __name__):
1376 e[0].__module__ != __name__):
1370 continue
1377 continue
1371 if name == "shortlist" and not f.startswith("^"):
1378 if name == "shortlist" and not f.startswith("^"):
1372 continue
1379 continue
1373 f = f.lstrip("^")
1380 f = f.lstrip("^")
1374 if not ui.debugflag and f.startswith("debug"):
1381 if not ui.debugflag and f.startswith("debug"):
1375 continue
1382 continue
1376 doc = gettext(e[0].__doc__)
1383 doc = gettext(e[0].__doc__)
1377 if not doc:
1384 if not doc:
1378 doc = _("(no help text available)")
1385 doc = _("(no help text available)")
1379 h[f] = doc.splitlines(0)[0].rstrip()
1386 h[f] = doc.splitlines(0)[0].rstrip()
1380 cmds[f] = c.lstrip("^")
1387 cmds[f] = c.lstrip("^")
1381
1388
1382 if not h:
1389 if not h:
1383 ui.status(_('no commands defined\n'))
1390 ui.status(_('no commands defined\n'))
1384 return
1391 return
1385
1392
1386 ui.status(header)
1393 ui.status(header)
1387 fns = util.sort(h)
1394 fns = util.sort(h)
1388 m = max(map(len, fns))
1395 m = max(map(len, fns))
1389 for f in fns:
1396 for f in fns:
1390 if ui.verbose:
1397 if ui.verbose:
1391 commands = cmds[f].replace("|",", ")
1398 commands = cmds[f].replace("|",", ")
1392 ui.write(" %s:\n %s\n"%(commands, h[f]))
1399 ui.write(" %s:\n %s\n"%(commands, h[f]))
1393 else:
1400 else:
1394 ui.write(' %-*s %s\n' % (m, f, h[f]))
1401 ui.write(' %-*s %s\n' % (m, f, h[f]))
1395
1402
1396 exts = list(extensions.extensions())
1403 exts = list(extensions.extensions())
1397 if exts and name != 'shortlist':
1404 if exts and name != 'shortlist':
1398 ui.write(_('\nenabled extensions:\n\n'))
1405 ui.write(_('\nenabled extensions:\n\n'))
1399 maxlength = 0
1406 maxlength = 0
1400 exthelps = []
1407 exthelps = []
1401 for ename, ext in exts:
1408 for ename, ext in exts:
1402 doc = (ext.__doc__ or _('(no help text available)'))
1409 doc = (ext.__doc__ or _('(no help text available)'))
1403 ename = ename.split('.')[-1]
1410 ename = ename.split('.')[-1]
1404 maxlength = max(len(ename), maxlength)
1411 maxlength = max(len(ename), maxlength)
1405 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1412 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1406 for ename, text in exthelps:
1413 for ename, text in exthelps:
1407 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1414 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1408
1415
1409 if not ui.quiet:
1416 if not ui.quiet:
1410 addglobalopts(True)
1417 addglobalopts(True)
1411
1418
1412 def helptopic(name):
1419 def helptopic(name):
1413 for names, header, doc in help.helptable:
1420 for names, header, doc in help.helptable:
1414 if name in names:
1421 if name in names:
1415 break
1422 break
1416 else:
1423 else:
1417 raise error.UnknownCommand(name)
1424 raise error.UnknownCommand(name)
1418
1425
1419 # description
1426 # description
1420 if not doc:
1427 if not doc:
1421 doc = _("(no help text available)")
1428 doc = _("(no help text available)")
1422 if callable(doc):
1429 if callable(doc):
1423 doc = doc()
1430 doc = doc()
1424
1431
1425 ui.write("%s\n" % header)
1432 ui.write("%s\n" % header)
1426 ui.write("%s\n" % doc.rstrip())
1433 ui.write("%s\n" % doc.rstrip())
1427
1434
1428 def helpext(name):
1435 def helpext(name):
1429 try:
1436 try:
1430 mod = extensions.find(name)
1437 mod = extensions.find(name)
1431 except KeyError:
1438 except KeyError:
1432 raise error.UnknownCommand(name)
1439 raise error.UnknownCommand(name)
1433
1440
1434 doc = gettext(mod.__doc__) or _('no help text available')
1441 doc = gettext(mod.__doc__) or _('no help text available')
1435 doc = doc.splitlines(0)
1442 doc = doc.splitlines(0)
1436 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1443 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1437 for d in doc[1:]:
1444 for d in doc[1:]:
1438 ui.write(d, '\n')
1445 ui.write(d, '\n')
1439
1446
1440 ui.status('\n')
1447 ui.status('\n')
1441
1448
1442 try:
1449 try:
1443 ct = mod.cmdtable
1450 ct = mod.cmdtable
1444 except AttributeError:
1451 except AttributeError:
1445 ct = {}
1452 ct = {}
1446
1453
1447 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1454 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1448 helplist(_('list of commands:\n\n'), modcmds.has_key)
1455 helplist(_('list of commands:\n\n'), modcmds.has_key)
1449
1456
1450 if name and name != 'shortlist':
1457 if name and name != 'shortlist':
1451 i = None
1458 i = None
1452 for f in (helptopic, helpcmd, helpext):
1459 for f in (helptopic, helpcmd, helpext):
1453 try:
1460 try:
1454 f(name)
1461 f(name)
1455 i = None
1462 i = None
1456 break
1463 break
1457 except error.UnknownCommand, inst:
1464 except error.UnknownCommand, inst:
1458 i = inst
1465 i = inst
1459 if i:
1466 if i:
1460 raise i
1467 raise i
1461
1468
1462 else:
1469 else:
1463 # program name
1470 # program name
1464 if ui.verbose or with_version:
1471 if ui.verbose or with_version:
1465 version_(ui)
1472 version_(ui)
1466 else:
1473 else:
1467 ui.status(_("Mercurial Distributed SCM\n"))
1474 ui.status(_("Mercurial Distributed SCM\n"))
1468 ui.status('\n')
1475 ui.status('\n')
1469
1476
1470 # list of commands
1477 # list of commands
1471 if name == "shortlist":
1478 if name == "shortlist":
1472 header = _('basic commands:\n\n')
1479 header = _('basic commands:\n\n')
1473 else:
1480 else:
1474 header = _('list of commands:\n\n')
1481 header = _('list of commands:\n\n')
1475
1482
1476 helplist(header)
1483 helplist(header)
1477
1484
1478 # list all option lists
1485 # list all option lists
1479 opt_output = []
1486 opt_output = []
1480 for title, options in option_lists:
1487 for title, options in option_lists:
1481 opt_output.append(("\n%s" % title, None))
1488 opt_output.append(("\n%s" % title, None))
1482 for shortopt, longopt, default, desc in options:
1489 for shortopt, longopt, default, desc in options:
1483 if "DEPRECATED" in desc and not ui.verbose: continue
1490 if "DEPRECATED" in desc and not ui.verbose: continue
1484 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1491 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1485 longopt and " --%s" % longopt),
1492 longopt and " --%s" % longopt),
1486 "%s%s" % (desc,
1493 "%s%s" % (desc,
1487 default
1494 default
1488 and _(" (default: %s)") % default
1495 and _(" (default: %s)") % default
1489 or "")))
1496 or "")))
1490
1497
1491 if not name:
1498 if not name:
1492 ui.write(_("\nadditional help topics:\n\n"))
1499 ui.write(_("\nadditional help topics:\n\n"))
1493 topics = []
1500 topics = []
1494 for names, header, doc in help.helptable:
1501 for names, header, doc in help.helptable:
1495 names = [(-len(name), name) for name in names]
1502 names = [(-len(name), name) for name in names]
1496 names.sort()
1503 names.sort()
1497 topics.append((names[0][1], header))
1504 topics.append((names[0][1], header))
1498 topics_len = max([len(s[0]) for s in topics])
1505 topics_len = max([len(s[0]) for s in topics])
1499 for t, desc in topics:
1506 for t, desc in topics:
1500 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1507 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1501
1508
1502 if opt_output:
1509 if opt_output:
1503 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1510 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1504 for first, second in opt_output:
1511 for first, second in opt_output:
1505 if second:
1512 if second:
1506 ui.write(" %-*s %s\n" % (opts_len, first, second))
1513 ui.write(" %-*s %s\n" % (opts_len, first, second))
1507 else:
1514 else:
1508 ui.write("%s\n" % first)
1515 ui.write("%s\n" % first)
1509
1516
1510 def identify(ui, repo, source=None,
1517 def identify(ui, repo, source=None,
1511 rev=None, num=None, id=None, branch=None, tags=None):
1518 rev=None, num=None, id=None, branch=None, tags=None):
1512 """identify the working copy or specified revision
1519 """identify the working copy or specified revision
1513
1520
1514 With no revision, print a summary of the current state of the repo.
1521 With no revision, print a summary of the current state of the repo.
1515
1522
1516 With a path, do a lookup in another repository.
1523 With a path, do a lookup in another repository.
1517
1524
1518 This summary identifies the repository state using one or two parent
1525 This summary identifies the repository state using one or two parent
1519 hash identifiers, followed by a "+" if there are uncommitted changes
1526 hash identifiers, followed by a "+" if there are uncommitted changes
1520 in the working directory, a list of tags for this revision and a branch
1527 in the working directory, a list of tags for this revision and a branch
1521 name for non-default branches.
1528 name for non-default branches.
1522 """
1529 """
1523
1530
1524 if not repo and not source:
1531 if not repo and not source:
1525 raise util.Abort(_("There is no Mercurial repository here "
1532 raise util.Abort(_("There is no Mercurial repository here "
1526 "(.hg not found)"))
1533 "(.hg not found)"))
1527
1534
1528 hexfunc = ui.debugflag and hex or short
1535 hexfunc = ui.debugflag and hex or short
1529 default = not (num or id or branch or tags)
1536 default = not (num or id or branch or tags)
1530 output = []
1537 output = []
1531
1538
1532 if source:
1539 if source:
1533 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1540 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1534 srepo = hg.repository(ui, source)
1541 srepo = hg.repository(ui, source)
1535 if not rev and revs:
1542 if not rev and revs:
1536 rev = revs[0]
1543 rev = revs[0]
1537 if not rev:
1544 if not rev:
1538 rev = "tip"
1545 rev = "tip"
1539 if num or branch or tags:
1546 if num or branch or tags:
1540 raise util.Abort(
1547 raise util.Abort(
1541 "can't query remote revision number, branch, or tags")
1548 "can't query remote revision number, branch, or tags")
1542 output = [hexfunc(srepo.lookup(rev))]
1549 output = [hexfunc(srepo.lookup(rev))]
1543 elif not rev:
1550 elif not rev:
1544 ctx = repo[None]
1551 ctx = repo[None]
1545 parents = ctx.parents()
1552 parents = ctx.parents()
1546 changed = False
1553 changed = False
1547 if default or id or num:
1554 if default or id or num:
1548 changed = ctx.files() + ctx.deleted()
1555 changed = ctx.files() + ctx.deleted()
1549 if default or id:
1556 if default or id:
1550 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1557 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1551 (changed) and "+" or "")]
1558 (changed) and "+" or "")]
1552 if num:
1559 if num:
1553 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1560 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1554 (changed) and "+" or ""))
1561 (changed) and "+" or ""))
1555 else:
1562 else:
1556 ctx = repo[rev]
1563 ctx = repo[rev]
1557 if default or id:
1564 if default or id:
1558 output = [hexfunc(ctx.node())]
1565 output = [hexfunc(ctx.node())]
1559 if num:
1566 if num:
1560 output.append(str(ctx.rev()))
1567 output.append(str(ctx.rev()))
1561
1568
1562 if not source and default and not ui.quiet:
1569 if not source and default and not ui.quiet:
1563 b = util.tolocal(ctx.branch())
1570 b = util.tolocal(ctx.branch())
1564 if b != 'default':
1571 if b != 'default':
1565 output.append("(%s)" % b)
1572 output.append("(%s)" % b)
1566
1573
1567 # multiple tags for a single parent separated by '/'
1574 # multiple tags for a single parent separated by '/'
1568 t = "/".join(ctx.tags())
1575 t = "/".join(ctx.tags())
1569 if t:
1576 if t:
1570 output.append(t)
1577 output.append(t)
1571
1578
1572 if branch:
1579 if branch:
1573 output.append(util.tolocal(ctx.branch()))
1580 output.append(util.tolocal(ctx.branch()))
1574
1581
1575 if tags:
1582 if tags:
1576 output.extend(ctx.tags())
1583 output.extend(ctx.tags())
1577
1584
1578 ui.write("%s\n" % ' '.join(output))
1585 ui.write("%s\n" % ' '.join(output))
1579
1586
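# A toy reconstruction of the summary line the identify docstring describes:
# parent hashes joined by '+', a trailing '+' when the working directory has
# uncommitted changes, then the branch (if not default) and any tags.
def _identify_line(parent_hashes, dirty, branch, tags):
    parts = ['+'.join(h[:12] for h in parent_hashes) + ('+' if dirty else '')]
    if branch != 'default':
        parts.append('(%s)' % branch)
    if tags:
        parts.append('/'.join(tags))           # multiple tags separated by '/'
    return ' '.join(parts)

# _identify_line(['9f' * 20], True, 'stable', ['tip'])
#   -> '9f9f9f9f9f9f+ (stable) tip'
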
1580 def import_(ui, repo, patch1, *patches, **opts):
1587 def import_(ui, repo, patch1, *patches, **opts):
1581 """import an ordered set of patches
1588 """import an ordered set of patches
1582
1589
1583 Import a list of patches and commit them individually.
1590 Import a list of patches and commit them individually.
1584
1591
1585 If there are outstanding changes in the working directory, import
1592 If there are outstanding changes in the working directory, import
1586 will abort unless given the -f flag.
1593 will abort unless given the -f flag.
1587
1594
1588 You can import a patch straight from a mail message. Even patches
1595 You can import a patch straight from a mail message. Even patches
1589 as attachments work (the body part must be of type text/plain or
1596 as attachments work (the body part must be of type text/plain or
1590 text/x-patch to be used). The From and Subject headers of the email
1597 text/x-patch to be used). The From and Subject headers of the email
1591 message are used as the default committer and commit message. All
1598 message are used as the default committer and commit message. All
1592 text/plain body parts before the first diff are added to the commit
1599 text/plain body parts before the first diff are added to the commit
1593 message.
1600 message.
1594
1601
1595 If the imported patch was generated by hg export, user and description
1602 If the imported patch was generated by hg export, user and description
1596 from patch override values from message headers and body. Values
1603 from patch override values from message headers and body. Values
1597 given on command line with -m and -u override these.
1604 given on command line with -m and -u override these.
1598
1605
1599 If --exact is specified, import will set the working directory
1606 If --exact is specified, import will set the working directory
1600 to the parent of each patch before applying it, and will abort
1607 to the parent of each patch before applying it, and will abort
1601 if the resulting changeset has a different ID than the one
1608 if the resulting changeset has a different ID than the one
1602 recorded in the patch. This may happen due to character set
1609 recorded in the patch. This may happen due to character set
1603 problems or other deficiencies in the text patch format.
1610 problems or other deficiencies in the text patch format.
1604
1611
1605 With --similarity, hg will attempt to discover renames and copies
1612 With --similarity, hg will attempt to discover renames and copies
1606 in the patch in the same way as 'addremove'.
1613 in the patch in the same way as 'addremove'.
1607
1614
1608 To read a patch from standard input, use patch name "-".
1615 To read a patch from standard input, use patch name "-".
1609 See 'hg help dates' for a list of formats valid for -d/--date.
1616 See 'hg help dates' for a list of formats valid for -d/--date.
1610 """
1617 """
1611 patches = (patch1,) + patches
1618 patches = (patch1,) + patches
1612
1619
1613 date = opts.get('date')
1620 date = opts.get('date')
1614 if date:
1621 if date:
1615 opts['date'] = util.parsedate(date)
1622 opts['date'] = util.parsedate(date)
1616
1623
1617 try:
1624 try:
1618 sim = float(opts.get('similarity') or 0)
1625 sim = float(opts.get('similarity') or 0)
1619 except ValueError:
1626 except ValueError:
1620 raise util.Abort(_('similarity must be a number'))
1627 raise util.Abort(_('similarity must be a number'))
1621 if sim < 0 or sim > 100:
1628 if sim < 0 or sim > 100:
1622 raise util.Abort(_('similarity must be between 0 and 100'))
1629 raise util.Abort(_('similarity must be between 0 and 100'))
1623
1630
1624 if opts.get('exact') or not opts.get('force'):
1631 if opts.get('exact') or not opts.get('force'):
1625 cmdutil.bail_if_changed(repo)
1632 cmdutil.bail_if_changed(repo)
1626
1633
1627 d = opts["base"]
1634 d = opts["base"]
1628 strip = opts["strip"]
1635 strip = opts["strip"]
1629 wlock = lock = None
1636 wlock = lock = None
1630 try:
1637 try:
1631 wlock = repo.wlock()
1638 wlock = repo.wlock()
1632 lock = repo.lock()
1639 lock = repo.lock()
1633 for p in patches:
1640 for p in patches:
1634 pf = os.path.join(d, p)
1641 pf = os.path.join(d, p)
1635
1642
1636 if pf == '-':
1643 if pf == '-':
1637 ui.status(_("applying patch from stdin\n"))
1644 ui.status(_("applying patch from stdin\n"))
1638 pf = sys.stdin
1645 pf = sys.stdin
1639 else:
1646 else:
1640 ui.status(_("applying %s\n") % p)
1647 ui.status(_("applying %s\n") % p)
1641 pf = url.open(ui, pf)
1648 pf = url.open(ui, pf)
1642 data = patch.extract(ui, pf)
1649 data = patch.extract(ui, pf)
1643 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1650 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1644
1651
1645 if tmpname is None:
1652 if tmpname is None:
1646 raise util.Abort(_('no diffs found'))
1653 raise util.Abort(_('no diffs found'))
1647
1654
1648 try:
1655 try:
1649 cmdline_message = cmdutil.logmessage(opts)
1656 cmdline_message = cmdutil.logmessage(opts)
1650 if cmdline_message:
1657 if cmdline_message:
1651 # pickup the cmdline msg
1658 # pickup the cmdline msg
1652 message = cmdline_message
1659 message = cmdline_message
1653 elif message:
1660 elif message:
1654 # pickup the patch msg
1661 # pickup the patch msg
1655 message = message.strip()
1662 message = message.strip()
1656 else:
1663 else:
1657 # launch the editor
1664 # launch the editor
1658 message = None
1665 message = None
1659 ui.debug(_('message:\n%s\n') % message)
1666 ui.debug(_('message:\n%s\n') % message)
1660
1667
1661 wp = repo.parents()
1668 wp = repo.parents()
1662 if opts.get('exact'):
1669 if opts.get('exact'):
1663 if not nodeid or not p1:
1670 if not nodeid or not p1:
1664 raise util.Abort(_('not a mercurial patch'))
1671 raise util.Abort(_('not a mercurial patch'))
1665 p1 = repo.lookup(p1)
1672 p1 = repo.lookup(p1)
1666 p2 = repo.lookup(p2 or hex(nullid))
1673 p2 = repo.lookup(p2 or hex(nullid))
1667
1674
1668 if p1 != wp[0].node():
1675 if p1 != wp[0].node():
1669 hg.clean(repo, p1)
1676 hg.clean(repo, p1)
1670 repo.dirstate.setparents(p1, p2)
1677 repo.dirstate.setparents(p1, p2)
1671 elif p2:
1678 elif p2:
1672 try:
1679 try:
1673 p1 = repo.lookup(p1)
1680 p1 = repo.lookup(p1)
1674 p2 = repo.lookup(p2)
1681 p2 = repo.lookup(p2)
1675 if p1 == wp[0].node():
1682 if p1 == wp[0].node():
1676 repo.dirstate.setparents(p1, p2)
1683 repo.dirstate.setparents(p1, p2)
1677 except error.RepoError:
1684 except error.RepoError:
1678 pass
1685 pass
1679 if opts.get('exact') or opts.get('import_branch'):
1686 if opts.get('exact') or opts.get('import_branch'):
1680 repo.dirstate.setbranch(branch or 'default')
1687 repo.dirstate.setbranch(branch or 'default')
1681
1688
1682 files = {}
1689 files = {}
1683 try:
1690 try:
1684 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1691 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1685 files=files)
1692 files=files)
1686 finally:
1693 finally:
1687 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1694 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1688 if not opts.get('no_commit'):
1695 if not opts.get('no_commit'):
1689 n = repo.commit(files, message, opts.get('user') or user,
1696 n = repo.commit(files, message, opts.get('user') or user,
1690 opts.get('date') or date)
1697 opts.get('date') or date)
1691 if opts.get('exact'):
1698 if opts.get('exact'):
1692 if hex(n) != nodeid:
1699 if hex(n) != nodeid:
1693 repo.rollback()
1700 repo.rollback()
1694 raise util.Abort(_('patch is damaged'
1701 raise util.Abort(_('patch is damaged'
1695 ' or loses information'))
1702 ' or loses information'))
1696 # Force a dirstate write so that the next transaction
1703 # Force a dirstate write so that the next transaction
1697 # backs up an up-to-date file.
1704 # backs up an up-to-date file.
1698 repo.dirstate.write()
1705 repo.dirstate.write()
1699 finally:
1706 finally:
1700 os.unlink(tmpname)
1707 os.unlink(tmpname)
1701 finally:
1708 finally:
1702 del lock, wlock
1709 del lock, wlock
1703
1710
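# A rough illustration of what a --similarity threshold means: the real rename
# detection happens in patch.updatedir (as for addremove); this merely scores
# one removed file against one added file with difflib and compares to sim/100.
import difflib

def _looks_like_rename(old_text, new_text, sim):
    ratio = difflib.SequenceMatcher(None, old_text, new_text).ratio()
    return ratio >= sim / 100.0

# _looks_like_rename("a\nb\nc\n", "a\nb\nc\nd\n", 75) -> True (ratio ~0.86)
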
1704 def incoming(ui, repo, source="default", **opts):
1711 def incoming(ui, repo, source="default", **opts):
1705 """show new changesets found in source
1712 """show new changesets found in source
1706
1713
1707 Show new changesets found in the specified path/URL or the default
1714 Show new changesets found in the specified path/URL or the default
1708 pull location. These are the changesets that would be pulled if a pull
1715 pull location. These are the changesets that would be pulled if a pull
1709 was requested.
1716 was requested.
1710
1717
1711 For a remote repository, using --bundle avoids downloading the changesets
1718 For a remote repository, using --bundle avoids downloading the changesets
1712 twice if the incoming command is followed by a pull.
1719 twice if the incoming command is followed by a pull.
1713
1720
1714 See pull for valid source format details.
1721 See pull for valid source format details.
1715 """
1722 """
1716 limit = cmdutil.loglimit(opts)
1723 limit = cmdutil.loglimit(opts)
1717 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1724 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1718 cmdutil.setremoteconfig(ui, opts)
1725 cmdutil.setremoteconfig(ui, opts)
1719
1726
1720 other = hg.repository(ui, source)
1727 other = hg.repository(ui, source)
1721 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1728 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1722 if revs:
1729 if revs:
1723 revs = [other.lookup(rev) for rev in revs]
1730 revs = [other.lookup(rev) for rev in revs]
1724 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1731 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1725 force=opts["force"])
1732 force=opts["force"])
1726 if not incoming:
1733 if not incoming:
1727 try:
1734 try:
1728 os.unlink(opts["bundle"])
1735 os.unlink(opts["bundle"])
1729 except:
1736 except:
1730 pass
1737 pass
1731 ui.status(_("no changes found\n"))
1738 ui.status(_("no changes found\n"))
1732 return 1
1739 return 1
1733
1740
1734 cleanup = None
1741 cleanup = None
1735 try:
1742 try:
1736 fname = opts["bundle"]
1743 fname = opts["bundle"]
1737 if fname or not other.local():
1744 if fname or not other.local():
1738 # create a bundle (uncompressed if other repo is not local)
1745 # create a bundle (uncompressed if other repo is not local)
1739
1746
1740 if revs is None and other.capable('changegroupsubset'):
1747 if revs is None and other.capable('changegroupsubset'):
1741 revs = rheads
1748 revs = rheads
1742
1749
1743 if revs is None:
1750 if revs is None:
1744 cg = other.changegroup(incoming, "incoming")
1751 cg = other.changegroup(incoming, "incoming")
1745 else:
1752 else:
1746 cg = other.changegroupsubset(incoming, revs, 'incoming')
1753 cg = other.changegroupsubset(incoming, revs, 'incoming')
1747 bundletype = other.local() and "HG10BZ" or "HG10UN"
1754 bundletype = other.local() and "HG10BZ" or "HG10UN"
1748 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1755 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1749 # keep written bundle?
1756 # keep written bundle?
1750 if opts["bundle"]:
1757 if opts["bundle"]:
1751 cleanup = None
1758 cleanup = None
1752 if not other.local():
1759 if not other.local():
1753 # use the created uncompressed bundlerepo
1760 # use the created uncompressed bundlerepo
1754 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1761 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1755
1762
1756 o = other.changelog.nodesbetween(incoming, revs)[0]
1763 o = other.changelog.nodesbetween(incoming, revs)[0]
1757 if opts.get('newest_first'):
1764 if opts.get('newest_first'):
1758 o.reverse()
1765 o.reverse()
1759 displayer = cmdutil.show_changeset(ui, other, opts)
1766 displayer = cmdutil.show_changeset(ui, other, opts)
1760 count = 0
1767 count = 0
1761 for n in o:
1768 for n in o:
1762 if count >= limit:
1769 if count >= limit:
1763 break
1770 break
1764 parents = [p for p in other.changelog.parents(n) if p != nullid]
1771 parents = [p for p in other.changelog.parents(n) if p != nullid]
1765 if opts.get('no_merges') and len(parents) == 2:
1772 if opts.get('no_merges') and len(parents) == 2:
1766 continue
1773 continue
1767 count += 1
1774 count += 1
1768 displayer.show(other[n])
1775 displayer.show(other[n])
1769 finally:
1776 finally:
1770 if hasattr(other, 'close'):
1777 if hasattr(other, 'close'):
1771 other.close()
1778 other.close()
1772 if cleanup:
1779 if cleanup:
1773 os.unlink(cleanup)
1780 os.unlink(cleanup)
1774
1781
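# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It shows the keep-or-cleanup pattern incoming() uses above:
# remote changesets are spooled to a bundle file so they can be browsed (and
# later pulled) without a second download, and that file is deleted afterwards
# unless the user asked to keep it with --bundle. write_bundle is hypothetical.
import os
import tempfile

def spool_bundle(write_bundle, requested_path=None):
    """Write a bundle and return (path, cleanup_path).

    cleanup_path is what the caller must unlink when done, or None when the
    user asked to keep the file."""
    path = requested_path
    if not path:
        fd, path = tempfile.mkstemp(suffix='.hg')
        os.close(fd)
    write_bundle(path)
    if requested_path:
        cleanup = None
    else:
        cleanup = path
    return path, cleanup
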
1775 def init(ui, dest=".", **opts):
1782 def init(ui, dest=".", **opts):
1776 """create a new repository in the given directory
1783 """create a new repository in the given directory
1777
1784
1778 Initialize a new repository in the given directory. If the given
1785 Initialize a new repository in the given directory. If the given
1779 directory does not exist, it is created.
1786 directory does not exist, it is created.
1780
1787
1781 If no directory is given, the current directory is used.
1788 If no directory is given, the current directory is used.
1782
1789
1783 It is possible to specify an ssh:// URL as the destination.
1790 It is possible to specify an ssh:// URL as the destination.
1784 Look at the help text for the pull command for important details
1791 Look at the help text for the pull command for important details
1785 about ssh:// URLs.
1792 about ssh:// URLs.
1786 """
1793 """
1787 cmdutil.setremoteconfig(ui, opts)
1794 cmdutil.setremoteconfig(ui, opts)
1788 hg.repository(ui, dest, create=1)
1795 hg.repository(ui, dest, create=1)
1789
1796
1790 def locate(ui, repo, *pats, **opts):
1797 def locate(ui, repo, *pats, **opts):
1791 """locate files matching specific patterns
1798 """locate files matching specific patterns
1792
1799
1793 Print all files under Mercurial control whose names match the
1800 Print all files under Mercurial control whose names match the
1794 given patterns.
1801 given patterns.
1795
1802
1796 This command searches the entire repository by default. To search
1803 This command searches the entire repository by default. To search
1797 just the current directory and its subdirectories, use
1804 just the current directory and its subdirectories, use
1798 "--include .".
1805 "--include .".
1799
1806
1800 If no patterns are given to match, this command prints all file
1807 If no patterns are given to match, this command prints all file
1801 names.
1808 names.
1802
1809
1803 If you want to feed the output of this command into the "xargs"
1810 If you want to feed the output of this command into the "xargs"
1804 command, use the "-0" option to both this command and "xargs".
1811 command, use the "-0" option to both this command and "xargs".
1805 This will avoid the problem of "xargs" treating single filenames
1812 This will avoid the problem of "xargs" treating single filenames
1806 that contain white space as multiple filenames.
1813 that contain white space as multiple filenames.
1807 """
1814 """
1808 end = opts.get('print0') and '\0' or '\n'
1815 end = opts.get('print0') and '\0' or '\n'
1809 rev = opts.get('rev') or None
1816 rev = opts.get('rev') or None
1810
1817
1811 ret = 1
1818 ret = 1
1812 m = cmdutil.match(repo, pats, opts, default='relglob')
1819 m = cmdutil.match(repo, pats, opts, default='relglob')
1813 m.bad = lambda x,y: False
1820 m.bad = lambda x,y: False
1814 for abs in repo[rev].walk(m):
1821 for abs in repo[rev].walk(m):
1815 if not rev and abs not in repo.dirstate:
1822 if not rev and abs not in repo.dirstate:
1816 continue
1823 continue
1817 if opts.get('fullpath'):
1824 if opts.get('fullpath'):
1818 ui.write(repo.wjoin(abs), end)
1825 ui.write(repo.wjoin(abs), end)
1819 else:
1826 else:
1820 ui.write(((pats and m.rel(abs)) or abs), end)
1827 ui.write(((pats and m.rel(abs)) or abs), end)
1821 ret = 0
1828 ret = 0
1822
1829
1823 return ret
1830 return ret
1824
1831
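# Editor's note: illustrative sketch added for this review, not part of
# commands.py. locate() picks its record terminator the same way: NUL with
# --print0 (so the output pairs safely with "xargs -0"), newline otherwise.
import sys

def write_names(names, print0=False, out=sys.stdout):
    """Write file names terminated by NUL (for xargs -0) or by newline."""
    end = print0 and '\0' or '\n'
    for name in names:
        out.write(name + end)

# e.g. write_names(['a b.txt', 'c.txt'], print0=True) emits NUL-separated
# names that "xargs -0" will not split on the embedded space.
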
1825 def log(ui, repo, *pats, **opts):
1832 def log(ui, repo, *pats, **opts):
1826 """show revision history of entire repository or files
1833 """show revision history of entire repository or files
1827
1834
1828 Print the revision history of the specified files or the entire
1835 Print the revision history of the specified files or the entire
1829 project.
1836 project.
1830
1837
1831 File history is shown without following rename or copy history of
1838 File history is shown without following rename or copy history of
1832 files. Use -f/--follow with a file name to follow history across
1839 files. Use -f/--follow with a file name to follow history across
1833 renames and copies. --follow without a file name will only show
1840 renames and copies. --follow without a file name will only show
1834 ancestors or descendants of the starting revision. --follow-first
1841 ancestors or descendants of the starting revision. --follow-first
1835 only follows the first parent of merge revisions.
1842 only follows the first parent of merge revisions.
1836
1843
1837 If no revision range is specified, the default is tip:0 unless
1844 If no revision range is specified, the default is tip:0 unless
1838 --follow is set, in which case the working directory parent is
1845 --follow is set, in which case the working directory parent is
1839 used as the starting revision.
1846 used as the starting revision.
1840
1847
1841 See 'hg help dates' for a list of formats valid for -d/--date.
1848 See 'hg help dates' for a list of formats valid for -d/--date.
1842
1849
1843 By default this command outputs: changeset id and hash, tags,
1850 By default this command outputs: changeset id and hash, tags,
1844 non-trivial parents, user, date and time, and a summary for each
1851 non-trivial parents, user, date and time, and a summary for each
1845 commit. When the -v/--verbose switch is used, the list of changed
1852 commit. When the -v/--verbose switch is used, the list of changed
1846 files and full commit message is shown.
1853 files and full commit message is shown.
1847
1854
1848 NOTE: log -p may generate unexpected diff output for merge
1855 NOTE: log -p may generate unexpected diff output for merge
1849 changesets, as it will compare the merge changeset against its
1856 changesets, as it will compare the merge changeset against its
1850 first parent only. Also, the files: list will only reflect files
1857 first parent only. Also, the files: list will only reflect files
1851 that are different from BOTH parents.
1858 that are different from BOTH parents.
1852
1859
1853 """
1860 """
1854
1861
1855 get = util.cachefunc(lambda r: repo[r].changeset())
1862 get = util.cachefunc(lambda r: repo[r].changeset())
1856 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1863 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1857
1864
1858 limit = cmdutil.loglimit(opts)
1865 limit = cmdutil.loglimit(opts)
1859 count = 0
1866 count = 0
1860
1867
1861 if opts.get('copies') and opts.get('rev'):
1868 if opts.get('copies') and opts.get('rev'):
1862 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1869 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1863 else:
1870 else:
1864 endrev = len(repo)
1871 endrev = len(repo)
1865 rcache = {}
1872 rcache = {}
1866 ncache = {}
1873 ncache = {}
1867 def getrenamed(fn, rev):
1874 def getrenamed(fn, rev):
1868 '''looks up all renames for a file (up to endrev) the first
1875 '''looks up all renames for a file (up to endrev) the first
1869 time the file is given. It indexes on the changerev and only
1876 time the file is given. It indexes on the changerev and only
1870 parses the manifest if linkrev != changerev.
1877 parses the manifest if linkrev != changerev.
1871 Returns rename info for fn at changerev rev.'''
1878 Returns rename info for fn at changerev rev.'''
1872 if fn not in rcache:
1879 if fn not in rcache:
1873 rcache[fn] = {}
1880 rcache[fn] = {}
1874 ncache[fn] = {}
1881 ncache[fn] = {}
1875 fl = repo.file(fn)
1882 fl = repo.file(fn)
1876 for i in fl:
1883 for i in fl:
1877 node = fl.node(i)
1884 node = fl.node(i)
1878 lr = fl.linkrev(i)
1885 lr = fl.linkrev(i)
1879 renamed = fl.renamed(node)
1886 renamed = fl.renamed(node)
1880 rcache[fn][lr] = renamed
1887 rcache[fn][lr] = renamed
1881 if renamed:
1888 if renamed:
1882 ncache[fn][node] = renamed
1889 ncache[fn][node] = renamed
1883 if lr >= endrev:
1890 if lr >= endrev:
1884 break
1891 break
1885 if rev in rcache[fn]:
1892 if rev in rcache[fn]:
1886 return rcache[fn][rev]
1893 return rcache[fn][rev]
1887
1894
1888 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1895 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1889 # filectx logic.
1896 # filectx logic.
1890
1897
1891 try:
1898 try:
1892 return repo[rev][fn].renamed()
1899 return repo[rev][fn].renamed()
1893 except error.LookupError:
1900 except error.LookupError:
1894 pass
1901 pass
1895 return None
1902 return None
1896
1903
1897 df = False
1904 df = False
1898 if opts["date"]:
1905 if opts["date"]:
1899 df = util.matchdate(opts["date"])
1906 df = util.matchdate(opts["date"])
1900
1907
1901 only_branches = opts.get('only_branch')
1908 only_branches = opts.get('only_branch')
1902
1909
1903 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1910 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1904 for st, rev, fns in changeiter:
1911 for st, rev, fns in changeiter:
1905 if st == 'add':
1912 if st == 'add':
1906 parents = [p for p in repo.changelog.parentrevs(rev)
1913 parents = [p for p in repo.changelog.parentrevs(rev)
1907 if p != nullrev]
1914 if p != nullrev]
1908 if opts.get('no_merges') and len(parents) == 2:
1915 if opts.get('no_merges') and len(parents) == 2:
1909 continue
1916 continue
1910 if opts.get('only_merges') and len(parents) != 2:
1917 if opts.get('only_merges') and len(parents) != 2:
1911 continue
1918 continue
1912
1919
1913 if only_branches:
1920 if only_branches:
1914 revbranch = get(rev)[5]['branch']
1921 revbranch = get(rev)[5]['branch']
1915 if revbranch not in only_branches:
1922 if revbranch not in only_branches:
1916 continue
1923 continue
1917
1924
1918 if df:
1925 if df:
1919 changes = get(rev)
1926 changes = get(rev)
1920 if not df(changes[2][0]):
1927 if not df(changes[2][0]):
1921 continue
1928 continue
1922
1929
1923 if opts.get('keyword'):
1930 if opts.get('keyword'):
1924 changes = get(rev)
1931 changes = get(rev)
1925 miss = 0
1932 miss = 0
1926 for k in [kw.lower() for kw in opts['keyword']]:
1933 for k in [kw.lower() for kw in opts['keyword']]:
1927 if not (k in changes[1].lower() or
1934 if not (k in changes[1].lower() or
1928 k in changes[4].lower() or
1935 k in changes[4].lower() or
1929 k in " ".join(changes[3]).lower()):
1936 k in " ".join(changes[3]).lower()):
1930 miss = 1
1937 miss = 1
1931 break
1938 break
1932 if miss:
1939 if miss:
1933 continue
1940 continue
1934
1941
1935 if opts['user']:
1942 if opts['user']:
1936 changes = get(rev)
1943 changes = get(rev)
1937 miss = 0
1944 miss = 0
1938 for k in opts['user']:
1945 for k in opts['user']:
1939 if k != changes[1]:
1946 if k != changes[1]:
1940 miss = 1
1947 miss = 1
1941 break
1948 break
1942 if miss:
1949 if miss:
1943 continue
1950 continue
1944
1951
1945 copies = []
1952 copies = []
1946 if opts.get('copies') and rev:
1953 if opts.get('copies') and rev:
1947 for fn in get(rev)[3]:
1954 for fn in get(rev)[3]:
1948 rename = getrenamed(fn, rev)
1955 rename = getrenamed(fn, rev)
1949 if rename:
1956 if rename:
1950 copies.append((fn, rename[0]))
1957 copies.append((fn, rename[0]))
1951 displayer.show(context.changectx(repo, rev), copies=copies)
1958 displayer.show(context.changectx(repo, rev), copies=copies)
1952 elif st == 'iter':
1959 elif st == 'iter':
1953 if count == limit: break
1960 if count == limit: break
1954 if displayer.flush(rev):
1961 if displayer.flush(rev):
1955 count += 1
1962 count += 1
1956
1963
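# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It shows the caching shape getrenamed() builds above: the first
# query for a file scans its filelog once (stopping at endrev) and records the
# rename information per linkrev, so later log entries for the same file are
# answered from the dictionary. scan_filelog is a hypothetical callable
# yielding (linkrev, rename info) pairs.
def make_rename_lookup(scan_filelog, endrev):
    cache = {}  # filename -> {linkrev: rename info or None}

    def getrenamed(fn, rev):
        if fn not in cache:
            cache[fn] = {}
            for linkrev, renamed in scan_filelog(fn):
                cache[fn][linkrev] = renamed
                if linkrev >= endrev:
                    break
        return cache[fn].get(rev)

    return getrenamed
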
1957 def manifest(ui, repo, node=None, rev=None):
1964 def manifest(ui, repo, node=None, rev=None):
1958 """output the current or given revision of the project manifest
1965 """output the current or given revision of the project manifest
1959
1966
1960 Print a list of version controlled files for the given revision.
1967 Print a list of version controlled files for the given revision.
1961 If no revision is given, the parent of the working directory is used,
1968 If no revision is given, the parent of the working directory is used,
1962 or tip if no revision is checked out.
1969 or tip if no revision is checked out.
1963
1970
1964     The manifest is the list of files being version controlled.
1971     The manifest is the list of files being version controlled.
1966
1973
1967 With -v flag, print file permissions, symlink and executable bits. With
1974 With -v flag, print file permissions, symlink and executable bits. With
1968 --debug flag, print file revision hashes.
1975 --debug flag, print file revision hashes.
1969 """
1976 """
1970
1977
1971 if rev and node:
1978 if rev and node:
1972 raise util.Abort(_("please specify just one revision"))
1979 raise util.Abort(_("please specify just one revision"))
1973
1980
1974 if not node:
1981 if not node:
1975 node = rev
1982 node = rev
1976
1983
1977 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1984 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1978 ctx = repo[node]
1985 ctx = repo[node]
1979 for f in ctx:
1986 for f in ctx:
1980 if ui.debugflag:
1987 if ui.debugflag:
1981 ui.write("%40s " % hex(ctx.manifest()[f]))
1988 ui.write("%40s " % hex(ctx.manifest()[f]))
1982 if ui.verbose:
1989 if ui.verbose:
1983 ui.write(decor[ctx.flags(f)])
1990 ui.write(decor[ctx.flags(f)])
1984 ui.write("%s\n" % f)
1991 ui.write("%s\n" % f)
1985
1992
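# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It spells out the 'decor' table manifest() uses above: with -v
# a symlink is shown as "644 @", an executable as "755 *" and a plain file as
# "644", keyed by the same flag values ('l', 'x', '') that ctx.flags() yields.
def manifest_line(name, flag, verbose=False):
    decor = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
    if verbose:
        return decor[flag] + name
    return name

# e.g. manifest_line('bin/hg', 'x', verbose=True) -> '755 * bin/hg'
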
1986 def merge(ui, repo, node=None, force=None, rev=None):
1993 def merge(ui, repo, node=None, force=None, rev=None):
1987 """merge working directory with another revision
1994 """merge working directory with another revision
1988
1995
1989 Merge the contents of the current working directory and the
1996 Merge the contents of the current working directory and the
1990 requested revision. Files that changed between either parent are
1997 requested revision. Files that changed between either parent are
1991 marked as changed for the next commit and a commit must be
1998 marked as changed for the next commit and a commit must be
1992 performed before any further updates are allowed.
1999 performed before any further updates are allowed.
1993
2000
1994 If no revision is specified, the working directory's parent is a
2001 If no revision is specified, the working directory's parent is a
1995 head revision, and the current branch contains exactly one other head,
2002 head revision, and the current branch contains exactly one other head,
1996     then the other head is merged with by default. Otherwise, an explicit
2003     then the other head is merged with by default. Otherwise, an explicit
1997 revision to merge with must be provided.
2004 revision to merge with must be provided.
1998 """
2005 """
1999
2006
2000 if rev and node:
2007 if rev and node:
2001 raise util.Abort(_("please specify just one revision"))
2008 raise util.Abort(_("please specify just one revision"))
2002 if not node:
2009 if not node:
2003 node = rev
2010 node = rev
2004
2011
2005 if not node:
2012 if not node:
2006 branch = repo.changectx(None).branch()
2013 branch = repo.changectx(None).branch()
2007 bheads = repo.branchheads(branch)
2014 bheads = repo.branchheads(branch)
2008 if len(bheads) > 2:
2015 if len(bheads) > 2:
2009 raise util.Abort(_("branch '%s' has %d heads - "
2016 raise util.Abort(_("branch '%s' has %d heads - "
2010 "please merge with an explicit rev") %
2017 "please merge with an explicit rev") %
2011 (branch, len(bheads)))
2018 (branch, len(bheads)))
2012
2019
2013 parent = repo.dirstate.parents()[0]
2020 parent = repo.dirstate.parents()[0]
2014 if len(bheads) == 1:
2021 if len(bheads) == 1:
2015 if len(repo.heads()) > 1:
2022 if len(repo.heads()) > 1:
2016 raise util.Abort(_("branch '%s' has one head - "
2023 raise util.Abort(_("branch '%s' has one head - "
2017 "please merge with an explicit rev") %
2024 "please merge with an explicit rev") %
2018 branch)
2025 branch)
2019 msg = _('there is nothing to merge')
2026 msg = _('there is nothing to merge')
2020 if parent != repo.lookup(repo[None].branch()):
2027 if parent != repo.lookup(repo[None].branch()):
2021 msg = _('%s - use "hg update" instead') % msg
2028 msg = _('%s - use "hg update" instead') % msg
2022 raise util.Abort(msg)
2029 raise util.Abort(msg)
2023
2030
2024 if parent not in bheads:
2031 if parent not in bheads:
2025 raise util.Abort(_('working dir not at a head rev - '
2032 raise util.Abort(_('working dir not at a head rev - '
2026 'use "hg update" or merge with an explicit rev'))
2033 'use "hg update" or merge with an explicit rev'))
2027 node = parent == bheads[0] and bheads[-1] or bheads[0]
2034 node = parent == bheads[0] and bheads[-1] or bheads[0]
2028 return hg.merge(repo, node, force=force)
2035 return hg.merge(repo, node, force=force)
2029
2036
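# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It restates the implicit-target rule merge() applies above when
# no revision is given: the current branch must have exactly two heads, the
# working directory parent must be one of them, and the other head becomes the
# merge target.
def pick_merge_target(parent, branchheads):
    """Return the branch head to merge with, or raise if it is ambiguous."""
    if len(branchheads) != 2:
        raise ValueError('an implicit merge needs exactly two branch heads')
    if parent not in branchheads:
        raise ValueError('working dir not at a head rev')
    if parent == branchheads[0]:
        return branchheads[-1]
    return branchheads[0]
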
2030 def outgoing(ui, repo, dest=None, **opts):
2037 def outgoing(ui, repo, dest=None, **opts):
2031 """show changesets not found in destination
2038 """show changesets not found in destination
2032
2039
2033 Show changesets not found in the specified destination repository or
2040 Show changesets not found in the specified destination repository or
2034 the default push location. These are the changesets that would be pushed
2041 the default push location. These are the changesets that would be pushed
2035 if a push was requested.
2042 if a push was requested.
2036
2043
2037 See pull for valid destination format details.
2044 See pull for valid destination format details.
2038 """
2045 """
2039 limit = cmdutil.loglimit(opts)
2046 limit = cmdutil.loglimit(opts)
2040 dest, revs, checkout = hg.parseurl(
2047 dest, revs, checkout = hg.parseurl(
2041 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2048 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2042 cmdutil.setremoteconfig(ui, opts)
2049 cmdutil.setremoteconfig(ui, opts)
2043 if revs:
2050 if revs:
2044 revs = [repo.lookup(rev) for rev in revs]
2051 revs = [repo.lookup(rev) for rev in revs]
2045
2052
2046 other = hg.repository(ui, dest)
2053 other = hg.repository(ui, dest)
2047 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2054 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2048 o = repo.findoutgoing(other, force=opts.get('force'))
2055 o = repo.findoutgoing(other, force=opts.get('force'))
2049 if not o:
2056 if not o:
2050 ui.status(_("no changes found\n"))
2057 ui.status(_("no changes found\n"))
2051 return 1
2058 return 1
2052 o = repo.changelog.nodesbetween(o, revs)[0]
2059 o = repo.changelog.nodesbetween(o, revs)[0]
2053 if opts.get('newest_first'):
2060 if opts.get('newest_first'):
2054 o.reverse()
2061 o.reverse()
2055 displayer = cmdutil.show_changeset(ui, repo, opts)
2062 displayer = cmdutil.show_changeset(ui, repo, opts)
2056 count = 0
2063 count = 0
2057 for n in o:
2064 for n in o:
2058 if count >= limit:
2065 if count >= limit:
2059 break
2066 break
2060 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2067 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2061 if opts.get('no_merges') and len(parents) == 2:
2068 if opts.get('no_merges') and len(parents) == 2:
2062 continue
2069 continue
2063 count += 1
2070 count += 1
2064 displayer.show(repo[n])
2071 displayer.show(repo[n])
2065
2072
2066 def parents(ui, repo, file_=None, **opts):
2073 def parents(ui, repo, file_=None, **opts):
2067 """show the parents of the working dir or revision
2074 """show the parents of the working dir or revision
2068
2075
2069 Print the working directory's parent revisions. If a
2076 Print the working directory's parent revisions. If a
2070 revision is given via --rev, the parent of that revision
2077 revision is given via --rev, the parent of that revision
2071     will be printed. If a file argument is given, the revision in
2078     will be printed. If a file argument is given, the revision in
2072 which the file was last changed (before the working directory
2079 which the file was last changed (before the working directory
2073 revision or the argument to --rev if given) is printed.
2080 revision or the argument to --rev if given) is printed.
2074 """
2081 """
2075 rev = opts.get('rev')
2082 rev = opts.get('rev')
2076 if rev:
2083 if rev:
2077 ctx = repo[rev]
2084 ctx = repo[rev]
2078 else:
2085 else:
2079 ctx = repo[None]
2086 ctx = repo[None]
2080
2087
2081 if file_:
2088 if file_:
2082 m = cmdutil.match(repo, (file_,), opts)
2089 m = cmdutil.match(repo, (file_,), opts)
2083 if m.anypats() or len(m.files()) != 1:
2090 if m.anypats() or len(m.files()) != 1:
2084 raise util.Abort(_('can only specify an explicit file name'))
2091 raise util.Abort(_('can only specify an explicit file name'))
2085 file_ = m.files()[0]
2092 file_ = m.files()[0]
2086 filenodes = []
2093 filenodes = []
2087 for cp in ctx.parents():
2094 for cp in ctx.parents():
2088 if not cp:
2095 if not cp:
2089 continue
2096 continue
2090 try:
2097 try:
2091 filenodes.append(cp.filenode(file_))
2098 filenodes.append(cp.filenode(file_))
2092 except error.LookupError:
2099 except error.LookupError:
2093 pass
2100 pass
2094 if not filenodes:
2101 if not filenodes:
2095 raise util.Abort(_("'%s' not found in manifest!") % file_)
2102 raise util.Abort(_("'%s' not found in manifest!") % file_)
2096 fl = repo.file(file_)
2103 fl = repo.file(file_)
2097 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2104 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2098 else:
2105 else:
2099 p = [cp.node() for cp in ctx.parents()]
2106 p = [cp.node() for cp in ctx.parents()]
2100
2107
2101 displayer = cmdutil.show_changeset(ui, repo, opts)
2108 displayer = cmdutil.show_changeset(ui, repo, opts)
2102 for n in p:
2109 for n in p:
2103 if n != nullid:
2110 if n != nullid:
2104 displayer.show(repo[n])
2111 displayer.show(repo[n])
2105
2112
2106 def paths(ui, repo, search=None):
2113 def paths(ui, repo, search=None):
2107 """show definition of symbolic path names
2114 """show definition of symbolic path names
2108
2115
2109 Show definition of symbolic path name NAME. If no name is given, show
2116 Show definition of symbolic path name NAME. If no name is given, show
2110 definition of available names.
2117 definition of available names.
2111
2118
2112 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2119 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2113 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2120 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2114 """
2121 """
2115 if search:
2122 if search:
2116 for name, path in ui.configitems("paths"):
2123 for name, path in ui.configitems("paths"):
2117 if name == search:
2124 if name == search:
2118 ui.write("%s\n" % url.hidepassword(path))
2125 ui.write("%s\n" % url.hidepassword(path))
2119 return
2126 return
2120 ui.warn(_("not found!\n"))
2127 ui.warn(_("not found!\n"))
2121 return 1
2128 return 1
2122 else:
2129 else:
2123 for name, path in ui.configitems("paths"):
2130 for name, path in ui.configitems("paths"):
2124 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2131 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2125
2132
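# Editor's note: illustrative sketch added for this review, not part of
# commands.py and not the real url.hidepassword(); it only illustrates the
# idea used by paths(), pull() and push() above of masking the password in
# "scheme://user:pass@host/..." URLs before they are printed.
def mask_password(u):
    """Replace the password component of user:pass@host URLs with '***'."""
    if '://' not in u or '@' not in u:
        return u
    scheme, rest = u.split('://', 1)
    auth, hostpart = rest.rsplit('@', 1)
    if ':' not in auth:
        return u
    user = auth.split(':', 1)[0]
    return scheme + '://' + user + ':***@' + hostpart

# e.g. mask_password('http://bob:secret@example.com/repo')
#   -> 'http://bob:***@example.com/repo'
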
2126 def postincoming(ui, repo, modheads, optupdate, checkout):
2133 def postincoming(ui, repo, modheads, optupdate, checkout):
2127 if modheads == 0:
2134 if modheads == 0:
2128 return
2135 return
2129 if optupdate:
2136 if optupdate:
2130 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2137 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2131 return hg.update(repo, checkout)
2138 return hg.update(repo, checkout)
2132 else:
2139 else:
2133 ui.status(_("not updating, since new heads added\n"))
2140 ui.status(_("not updating, since new heads added\n"))
2134 if modheads > 1:
2141 if modheads > 1:
2135 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2142 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2136 else:
2143 else:
2137 ui.status(_("(run 'hg update' to get a working copy)\n"))
2144 ui.status(_("(run 'hg update' to get a working copy)\n"))
2138
2145
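# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It restates the decision postincoming() makes above: after
# "hg pull -u", the working directory is only updated automatically when the
# pull did not add extra heads to choose between, or when a specific checkout
# revision came with the URL fragment.
def should_auto_update(modheads, branchhead_count, checkout):
    if modheads == 0:
        return False
    return bool(checkout) or modheads <= 1 or branchhead_count == 1
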
2139 def pull(ui, repo, source="default", **opts):
2146 def pull(ui, repo, source="default", **opts):
2140 """pull changes from the specified source
2147 """pull changes from the specified source
2141
2148
2142 Pull changes from a remote repository to a local one.
2149 Pull changes from a remote repository to a local one.
2143
2150
2144 This finds all changes from the repository at the specified path
2151 This finds all changes from the repository at the specified path
2145 or URL and adds them to the local repository. By default, this
2152 or URL and adds them to the local repository. By default, this
2146 does not update the copy of the project in the working directory.
2153 does not update the copy of the project in the working directory.
2147
2154
2148 Valid URLs are of the form:
2155 Valid URLs are of the form:
2149
2156
2150 local/filesystem/path (or file://local/filesystem/path)
2157 local/filesystem/path (or file://local/filesystem/path)
2151 http://[user[:pass]@]host[:port]/[path]
2158 http://[user[:pass]@]host[:port]/[path]
2152 https://[user[:pass]@]host[:port]/[path]
2159 https://[user[:pass]@]host[:port]/[path]
2153 ssh://[user[:pass]@]host[:port]/[path]
2160 ssh://[user[:pass]@]host[:port]/[path]
2154
2161
2155 Paths in the local filesystem can either point to Mercurial
2162 Paths in the local filesystem can either point to Mercurial
2156 repositories or to bundle files (as created by 'hg bundle' or
2163 repositories or to bundle files (as created by 'hg bundle' or
2157 'hg incoming --bundle').
2164 'hg incoming --bundle').
2158
2165
2159 An optional identifier after # indicates a particular branch, tag,
2166 An optional identifier after # indicates a particular branch, tag,
2160 or changeset to pull.
2167 or changeset to pull.
2161
2168
2162 Some notes about using SSH with Mercurial:
2169 Some notes about using SSH with Mercurial:
2163 - SSH requires an accessible shell account on the destination machine
2170 - SSH requires an accessible shell account on the destination machine
2164       and a copy of hg in the remote path, or one specified with remotecmd.
2171       and a copy of hg in the remote path, or one specified with remotecmd.
2165 - path is relative to the remote user's home directory by default.
2172 - path is relative to the remote user's home directory by default.
2166 Use an extra slash at the start of a path to specify an absolute path:
2173 Use an extra slash at the start of a path to specify an absolute path:
2167 ssh://example.com//tmp/repository
2174 ssh://example.com//tmp/repository
2168 - Mercurial doesn't use its own compression via SSH; the right thing
2175 - Mercurial doesn't use its own compression via SSH; the right thing
2169 to do is to configure it in your ~/.ssh/config, e.g.:
2176 to do is to configure it in your ~/.ssh/config, e.g.:
2170 Host *.mylocalnetwork.example.com
2177 Host *.mylocalnetwork.example.com
2171 Compression no
2178 Compression no
2172 Host *
2179 Host *
2173 Compression yes
2180 Compression yes
2174 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2181 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2175 with the --ssh command line option.
2182 with the --ssh command line option.
2176 """
2183 """
2177 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2184 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2178 cmdutil.setremoteconfig(ui, opts)
2185 cmdutil.setremoteconfig(ui, opts)
2179
2186
2180 other = hg.repository(ui, source)
2187 other = hg.repository(ui, source)
2181 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2188 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2182 if revs:
2189 if revs:
2183 try:
2190 try:
2184 revs = [other.lookup(rev) for rev in revs]
2191 revs = [other.lookup(rev) for rev in revs]
2185 except error.CapabilityError:
2192 except error.CapabilityError:
2186 err = _("Other repository doesn't support revision lookup, "
2193 err = _("Other repository doesn't support revision lookup, "
2187 "so a rev cannot be specified.")
2194 "so a rev cannot be specified.")
2188 raise util.Abort(err)
2195 raise util.Abort(err)
2189
2196
2190 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2197 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2191 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2198 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2192
2199
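# Editor's note: illustrative sketch added for this review, not part of
# commands.py and not the real hg.parseurl(); it only illustrates the
# "#identifier" convention documented above, where an optional fragment after
# the URL names the branch, tag or changeset to pull or push.
def split_fragment(source):
    """Split 'URL#rev' into (URL, rev); rev is None when no fragment is given."""
    if '#' not in source:
        return source, None
    u, rev = source.rsplit('#', 1)
    return u, rev or None

# e.g. split_fragment('http://example.com/repo#stable')
#   -> ('http://example.com/repo', 'stable')
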
2193 def push(ui, repo, dest=None, **opts):
2200 def push(ui, repo, dest=None, **opts):
2194 """push changes to the specified destination
2201 """push changes to the specified destination
2195
2202
2196 Push changes from the local repository to the given destination.
2203 Push changes from the local repository to the given destination.
2197
2204
2198     This is the symmetrical operation for pull. It moves
2205     This is the symmetrical operation for pull. It moves
2199 changes from the current repository to a different one. If the
2206 changes from the current repository to a different one. If the
2200 destination is local this is identical to a pull in that directory
2207 destination is local this is identical to a pull in that directory
2201 from the current one.
2208 from the current one.
2202
2209
2203 By default, push will refuse to run if it detects the result would
2210 By default, push will refuse to run if it detects the result would
2204     increase the number of remote heads. This generally indicates that
2211     increase the number of remote heads. This generally indicates that
2205 the client has forgotten to pull and merge before pushing.
2212 the client has forgotten to pull and merge before pushing.
2206
2213
2207 Valid URLs are of the form:
2214 Valid URLs are of the form:
2208
2215
2209 local/filesystem/path (or file://local/filesystem/path)
2216 local/filesystem/path (or file://local/filesystem/path)
2210 ssh://[user[:pass]@]host[:port]/[path]
2217 ssh://[user[:pass]@]host[:port]/[path]
2211 http://[user[:pass]@]host[:port]/[path]
2218 http://[user[:pass]@]host[:port]/[path]
2212 https://[user[:pass]@]host[:port]/[path]
2219 https://[user[:pass]@]host[:port]/[path]
2213
2220
2214 An optional identifier after # indicates a particular branch, tag,
2221 An optional identifier after # indicates a particular branch, tag,
2215 or changeset to push. If -r is used, the named changeset and all its
2222 or changeset to push. If -r is used, the named changeset and all its
2216 ancestors will be pushed to the remote repository.
2223 ancestors will be pushed to the remote repository.
2217
2224
2218 Look at the help text for the pull command for important details
2225 Look at the help text for the pull command for important details
2219 about ssh:// URLs.
2226 about ssh:// URLs.
2220
2227
2221     Pushing to http:// and https:// URLs is only possible if this
2228     Pushing to http:// and https:// URLs is only possible if this
2222 feature is explicitly enabled on the remote Mercurial server.
2229 feature is explicitly enabled on the remote Mercurial server.
2223 """
2230 """
2224 dest, revs, checkout = hg.parseurl(
2231 dest, revs, checkout = hg.parseurl(
2225 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2232 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2226 cmdutil.setremoteconfig(ui, opts)
2233 cmdutil.setremoteconfig(ui, opts)
2227
2234
2228 other = hg.repository(ui, dest)
2235 other = hg.repository(ui, dest)
2229 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2236 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2230 if revs:
2237 if revs:
2231 revs = [repo.lookup(rev) for rev in revs]
2238 revs = [repo.lookup(rev) for rev in revs]
2232 r = repo.push(other, opts.get('force'), revs=revs)
2239 r = repo.push(other, opts.get('force'), revs=revs)
2233 return r == 0
2240 return r == 0
2234
2241
2235 def rawcommit(ui, repo, *pats, **opts):
2242 def rawcommit(ui, repo, *pats, **opts):
2236 """raw commit interface (DEPRECATED)
2243 """raw commit interface (DEPRECATED)
2237
2244
2238 (DEPRECATED)
2245 (DEPRECATED)
2239 Lowlevel commit, for use in helper scripts.
2246 Lowlevel commit, for use in helper scripts.
2240
2247
2241 This command is not intended to be used by normal users, as it is
2248 This command is not intended to be used by normal users, as it is
2242 primarily useful for importing from other SCMs.
2249 primarily useful for importing from other SCMs.
2243
2250
2244 This command is now deprecated and will be removed in a future
2251 This command is now deprecated and will be removed in a future
2245 release, please use debugsetparents and commit instead.
2252 release, please use debugsetparents and commit instead.
2246 """
2253 """
2247
2254
2248 ui.warn(_("(the rawcommit command is deprecated)\n"))
2255 ui.warn(_("(the rawcommit command is deprecated)\n"))
2249
2256
2250 message = cmdutil.logmessage(opts)
2257 message = cmdutil.logmessage(opts)
2251
2258
2252 files = cmdutil.match(repo, pats, opts).files()
2259 files = cmdutil.match(repo, pats, opts).files()
2253 if opts.get('files'):
2260 if opts.get('files'):
2254 files += open(opts['files']).read().splitlines()
2261 files += open(opts['files']).read().splitlines()
2255
2262
2256 parents = [repo.lookup(p) for p in opts['parent']]
2263 parents = [repo.lookup(p) for p in opts['parent']]
2257
2264
2258 try:
2265 try:
2259 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2266 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2260 except ValueError, inst:
2267 except ValueError, inst:
2261 raise util.Abort(str(inst))
2268 raise util.Abort(str(inst))
2262
2269
2263 def recover(ui, repo):
2270 def recover(ui, repo):
2264 """roll back an interrupted transaction
2271 """roll back an interrupted transaction
2265
2272
2266 Recover from an interrupted commit or pull.
2273 Recover from an interrupted commit or pull.
2267
2274
2268 This command tries to fix the repository status after an interrupted
2275 This command tries to fix the repository status after an interrupted
2269 operation. It should only be necessary when Mercurial suggests it.
2276 operation. It should only be necessary when Mercurial suggests it.
2270 """
2277 """
2271 if repo.recover():
2278 if repo.recover():
2272 return hg.verify(repo)
2279 return hg.verify(repo)
2273 return 1
2280 return 1
2274
2281
2275 def remove(ui, repo, *pats, **opts):
2282 def remove(ui, repo, *pats, **opts):
2276 """remove the specified files on the next commit
2283 """remove the specified files on the next commit
2277
2284
2278 Schedule the indicated files for removal from the repository.
2285 Schedule the indicated files for removal from the repository.
2279
2286
2280 This only removes files from the current branch, not from the entire
2287 This only removes files from the current branch, not from the entire
2281 project history. -A can be used to remove only files that have already
2288 project history. -A can be used to remove only files that have already
2282 been deleted, -f can be used to force deletion, and -Af can be used
2289 been deleted, -f can be used to force deletion, and -Af can be used
2283 to remove files from the next revision without deleting them.
2290 to remove files from the next revision without deleting them.
2284
2291
2285 The following table details the behavior of remove for different file
2292 The following table details the behavior of remove for different file
2286 states (columns) and option combinations (rows). The file states are
2293 states (columns) and option combinations (rows). The file states are
2287 Added, Clean, Modified and Missing (as reported by hg status). The
2294 Added, Clean, Modified and Missing (as reported by hg status). The
2288 actions are Warn, Remove (from branch) and Delete (from disk).
2295 actions are Warn, Remove (from branch) and Delete (from disk).
2289
2296
2290 A C M !
2297 A C M !
2291 none W RD W R
2298 none W RD W R
2292 -f R RD RD R
2299 -f R RD RD R
2293 -A W W W R
2300 -A W W W R
2294 -Af R R R R
2301 -Af R R R R
2295
2302
2296 This command schedules the files to be removed at the next commit.
2303 This command schedules the files to be removed at the next commit.
2297 To undo a remove before that, see hg revert.
2304 To undo a remove before that, see hg revert.
2298 """
2305 """
2299
2306
2300 after, force = opts.get('after'), opts.get('force')
2307 after, force = opts.get('after'), opts.get('force')
2301 if not pats and not after:
2308 if not pats and not after:
2302 raise util.Abort(_('no files specified'))
2309 raise util.Abort(_('no files specified'))
2303
2310
2304 m = cmdutil.match(repo, pats, opts)
2311 m = cmdutil.match(repo, pats, opts)
2305 s = repo.status(match=m, clean=True)
2312 s = repo.status(match=m, clean=True)
2306 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2313 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2307
2314
2308 def warn(files, reason):
2315 def warn(files, reason):
2309 for f in files:
2316 for f in files:
2310 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2317 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2311 % (m.rel(f), reason))
2318 % (m.rel(f), reason))
2312
2319
2313 if force:
2320 if force:
2314 remove, forget = modified + deleted + clean, added
2321 remove, forget = modified + deleted + clean, added
2315 elif after:
2322 elif after:
2316 remove, forget = deleted, []
2323 remove, forget = deleted, []
2317 warn(modified + added + clean, _('still exists'))
2324 warn(modified + added + clean, _('still exists'))
2318 else:
2325 else:
2319 remove, forget = deleted + clean, []
2326 remove, forget = deleted + clean, []
2320 warn(modified, _('is modified'))
2327 warn(modified, _('is modified'))
2321 warn(added, _('has been marked for add'))
2328 warn(added, _('has been marked for add'))
2322
2329
2323 for f in util.sort(remove + forget):
2330 for f in util.sort(remove + forget):
2324 if ui.verbose or not m.exact(f):
2331 if ui.verbose or not m.exact(f):
2325 ui.status(_('removing %s\n') % m.rel(f))
2332 ui.status(_('removing %s\n') % m.rel(f))
2326
2333
2327 repo.forget(forget)
2334 repo.forget(forget)
2328 repo.remove(remove, unlink=not after)
2335 repo.remove(remove, unlink=not after)
2329
2336
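# Editor's note: illustrative sketch added for this review, not part of
# commands.py. It encodes the behaviour table from the remove() docstring
# above - file states Added, Clean, Modified and Missing ('!') against the
# -f/-A/-Af option combinations - as a plain dict so the documented outcomes
# can be looked up directly.
REMOVE_ACTIONS = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_action(options, state):
    """Return W(arn), R(emove from branch) and/or D(elete from disk)."""
    return REMOVE_ACTIONS[options][state]

# e.g. remove_action('-f', 'M') -> 'RD'
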
2330 def rename(ui, repo, *pats, **opts):
2337 def rename(ui, repo, *pats, **opts):
2331 """rename files; equivalent of copy + remove
2338 """rename files; equivalent of copy + remove
2332
2339
2333 Mark dest as copies of sources; mark sources for deletion. If
2340 Mark dest as copies of sources; mark sources for deletion. If
2334 dest is a directory, copies are put in that directory. If dest is
2341 dest is a directory, copies are put in that directory. If dest is
2335 a file, there can only be one source.
2342 a file, there can only be one source.
2336
2343
2337 By default, this command copies the contents of files as they
2344 By default, this command copies the contents of files as they
2338 stand in the working directory. If invoked with --after, the
2345 stand in the working directory. If invoked with --after, the
2339 operation is recorded, but no copying is performed.
2346 operation is recorded, but no copying is performed.
2340
2347
2341 This command takes effect in the next commit. To undo a rename
2348 This command takes effect in the next commit. To undo a rename
2342 before that, see hg revert.
2349 before that, see hg revert.
2343 """
2350 """
2344 wlock = repo.wlock(False)
2351 wlock = repo.wlock(False)
2345 try:
2352 try:
2346 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2353 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2347 finally:
2354 finally:
2348 del wlock
2355 del wlock
2349
2356
2350 def resolve(ui, repo, *pats, **opts):
2357 def resolve(ui, repo, *pats, **opts):
2351 """retry file merges from a merge or update
2358 """retry file merges from a merge or update
2352
2359
2353 This command will cleanly retry unresolved file merges using file
2360 This command will cleanly retry unresolved file merges using file
2354 revisions preserved from the last update or merge. To attempt to
2361 revisions preserved from the last update or merge. To attempt to
2355 resolve all unresolved files, use the -a switch.
2362 resolve all unresolved files, use the -a switch.
2356
2363
2357 This command will also allow listing resolved files and manually
2364 This command will also allow listing resolved files and manually
2358 marking and unmarking files as resolved.
2365 marking and unmarking files as resolved.
2359
2366
2360 The codes used to show the status of files are:
2367 The codes used to show the status of files are:
2361 U = unresolved
2368 U = unresolved
2362 R = resolved
2369 R = resolved
2363 """
2370 """
2364
2371
2365 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2372 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2366
2373
2367 if (show and (mark or unmark)) or (mark and unmark):
2374 if (show and (mark or unmark)) or (mark and unmark):
2368 raise util.Abort(_("too many options specified"))
2375 raise util.Abort(_("too many options specified"))
2369 if pats and all:
2376 if pats and all:
2370 raise util.Abort(_("can't specify --all and patterns"))
2377 raise util.Abort(_("can't specify --all and patterns"))
2371 if not (all or pats or show or mark or unmark):
2378 if not (all or pats or show or mark or unmark):
2372 raise util.Abort(_('no files or directories specified; '
2379 raise util.Abort(_('no files or directories specified; '
2373 'use --all to remerge all files'))
2380 'use --all to remerge all files'))
2374
2381
2375 ms = merge_.mergestate(repo)
2382 ms = merge_.mergestate(repo)
2376 m = cmdutil.match(repo, pats, opts)
2383 m = cmdutil.match(repo, pats, opts)
2377
2384
2378 for f in ms:
2385 for f in ms:
2379 if m(f):
2386 if m(f):
2380 if show:
2387 if show:
2381 ui.write("%s %s\n" % (ms[f].upper(), f))
2388 ui.write("%s %s\n" % (ms[f].upper(), f))
2382 elif mark:
2389 elif mark:
2383 ms.mark(f, "r")
2390 ms.mark(f, "r")
2384 elif unmark:
2391 elif unmark:
2385 ms.mark(f, "u")
2392 ms.mark(f, "u")
2386 else:
2393 else:
2387 wctx = repo[None]
2394 wctx = repo[None]
2388 mctx = wctx.parents()[-1]
2395 mctx = wctx.parents()[-1]
2389 ms.resolve(f, wctx, mctx)
2396 ms.resolve(f, wctx, mctx)
2390
2397
2391 def revert(ui, repo, *pats, **opts):
2398 def revert(ui, repo, *pats, **opts):
2392 """restore individual files or dirs to an earlier state
2399 """restore individual files or dirs to an earlier state
2393
2400
2394 (use update -r to check out earlier revisions, revert does not
2401 (use update -r to check out earlier revisions, revert does not
2395 change the working dir parents)
2402 change the working dir parents)
2396
2403
2397 With no revision specified, revert the named files or directories
2404 With no revision specified, revert the named files or directories
2398 to the contents they had in the parent of the working directory.
2405 to the contents they had in the parent of the working directory.
2399 This restores the contents of the affected files to an unmodified
2406 This restores the contents of the affected files to an unmodified
2400 state and unschedules adds, removes, copies, and renames. If the
2407 state and unschedules adds, removes, copies, and renames. If the
2401 working directory has two parents, you must explicitly specify the
2408 working directory has two parents, you must explicitly specify the
2402 revision to revert to.
2409 revision to revert to.
2403
2410
2404 Using the -r option, revert the given files or directories to their
2411 Using the -r option, revert the given files or directories to their
2405 contents as of a specific revision. This can be helpful to "roll
2412 contents as of a specific revision. This can be helpful to "roll
2406 back" some or all of an earlier change.
2413 back" some or all of an earlier change.
2407 See 'hg help dates' for a list of formats valid for -d/--date.
2414 See 'hg help dates' for a list of formats valid for -d/--date.
2408
2415
2409 Revert modifies the working directory. It does not commit any
2416 Revert modifies the working directory. It does not commit any
2410 changes, or change the parent of the working directory. If you
2417 changes, or change the parent of the working directory. If you
2411 revert to a revision other than the parent of the working
2418 revert to a revision other than the parent of the working
2412 directory, the reverted files will thus appear modified
2419 directory, the reverted files will thus appear modified
2413 afterwards.
2420 afterwards.
2414
2421
2415 If a file has been deleted, it is restored. If the executable
2422 If a file has been deleted, it is restored. If the executable
2416 mode of a file was changed, it is reset.
2423 mode of a file was changed, it is reset.
2417
2424
2418 If names are given, all files matching the names are reverted.
2425 If names are given, all files matching the names are reverted.
2419 If no arguments are given, no files are reverted.
2426 If no arguments are given, no files are reverted.
2420
2427
2421 Modified files are saved with a .orig suffix before reverting.
2428 Modified files are saved with a .orig suffix before reverting.
2422 To disable these backups, use --no-backup.
2429 To disable these backups, use --no-backup.
2423 """
2430 """
2424
2431
2425 if opts["date"]:
2432 if opts["date"]:
2426 if opts["rev"]:
2433 if opts["rev"]:
2427 raise util.Abort(_("you can't specify a revision and a date"))
2434 raise util.Abort(_("you can't specify a revision and a date"))
2428 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2435 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2429
2436
2430 if not pats and not opts.get('all'):
2437 if not pats and not opts.get('all'):
2431 raise util.Abort(_('no files or directories specified; '
2438 raise util.Abort(_('no files or directories specified; '
2432 'use --all to revert the whole repo'))
2439 'use --all to revert the whole repo'))
2433
2440
2434 parent, p2 = repo.dirstate.parents()
2441 parent, p2 = repo.dirstate.parents()
2435 if not opts.get('rev') and p2 != nullid:
2442 if not opts.get('rev') and p2 != nullid:
2436 raise util.Abort(_('uncommitted merge - please provide a '
2443 raise util.Abort(_('uncommitted merge - please provide a '
2437 'specific revision'))
2444 'specific revision'))
2438 ctx = repo[opts.get('rev')]
2445 ctx = repo[opts.get('rev')]
2439 node = ctx.node()
2446 node = ctx.node()
2440 mf = ctx.manifest()
2447 mf = ctx.manifest()
2441 if node == parent:
2448 if node == parent:
2442 pmf = mf
2449 pmf = mf
2443 else:
2450 else:
2444 pmf = None
2451 pmf = None
2445
2452
2446 # need all matching names in dirstate and manifest of target rev,
2453 # need all matching names in dirstate and manifest of target rev,
2447 # so have to walk both. do not print errors if files exist in one
2454 # so have to walk both. do not print errors if files exist in one
2448 # but not other.
2455 # but not other.
2449
2456
2450 names = {}
2457 names = {}
2451
2458
2452 wlock = repo.wlock()
2459 wlock = repo.wlock()
2453 try:
2460 try:
2454 # walk dirstate.
2461 # walk dirstate.
2455 files = []
2462 files = []
2456
2463
2457 m = cmdutil.match(repo, pats, opts)
2464 m = cmdutil.match(repo, pats, opts)
2458 m.bad = lambda x,y: False
2465 m.bad = lambda x,y: False
2459 for abs in repo.walk(m):
2466 for abs in repo.walk(m):
2460 names[abs] = m.rel(abs), m.exact(abs)
2467 names[abs] = m.rel(abs), m.exact(abs)
2461
2468
2462 # walk target manifest.
2469 # walk target manifest.
2463
2470
2464 def badfn(path, msg):
2471 def badfn(path, msg):
2465 if path in names:
2472 if path in names:
2466 return False
2473 return False
2467 path_ = path + '/'
2474 path_ = path + '/'
2468 for f in names:
2475 for f in names:
2469 if f.startswith(path_):
2476 if f.startswith(path_):
2470 return False
2477 return False
2471 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2478 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2472 return False
2479 return False
2473
2480
2474 m = cmdutil.match(repo, pats, opts)
2481 m = cmdutil.match(repo, pats, opts)
2475 m.bad = badfn
2482 m.bad = badfn
2476 for abs in repo[node].walk(m):
2483 for abs in repo[node].walk(m):
2477 if abs not in names:
2484 if abs not in names:
2478 names[abs] = m.rel(abs), m.exact(abs)
2485 names[abs] = m.rel(abs), m.exact(abs)
2479
2486
2480 m = cmdutil.matchfiles(repo, names)
2487 m = cmdutil.matchfiles(repo, names)
2481 changes = repo.status(match=m)[:4]
2488 changes = repo.status(match=m)[:4]
2482 modified, added, removed, deleted = map(dict.fromkeys, changes)
2489 modified, added, removed, deleted = map(dict.fromkeys, changes)
2483
2490
2484 # if f is a rename, also revert the source
2491 # if f is a rename, also revert the source
2485 cwd = repo.getcwd()
2492 cwd = repo.getcwd()
2486 for f in added:
2493 for f in added:
2487 src = repo.dirstate.copied(f)
2494 src = repo.dirstate.copied(f)
2488 if src and src not in names and repo.dirstate[src] == 'r':
2495 if src and src not in names and repo.dirstate[src] == 'r':
2489 removed[src] = None
2496 removed[src] = None
2490 names[src] = (repo.pathto(src, cwd), True)
2497 names[src] = (repo.pathto(src, cwd), True)
2491
2498
2492 def removeforget(abs):
2499 def removeforget(abs):
2493 if repo.dirstate[abs] == 'a':
2500 if repo.dirstate[abs] == 'a':
2494 return _('forgetting %s\n')
2501 return _('forgetting %s\n')
2495 return _('removing %s\n')
2502 return _('removing %s\n')
2496
2503
2497 revert = ([], _('reverting %s\n'))
2504 revert = ([], _('reverting %s\n'))
2498 add = ([], _('adding %s\n'))
2505 add = ([], _('adding %s\n'))
2499 remove = ([], removeforget)
2506 remove = ([], removeforget)
2500 undelete = ([], _('undeleting %s\n'))
2507 undelete = ([], _('undeleting %s\n'))
2501
2508
2502 disptable = (
2509 disptable = (
2503 # dispatch table:
2510 # dispatch table:
2504 # file state
2511 # file state
2505 # action if in target manifest
2512 # action if in target manifest
2506 # action if not in target manifest
2513 # action if not in target manifest
2507 # make backup if in target manifest
2514 # make backup if in target manifest
2508 # make backup if not in target manifest
2515 # make backup if not in target manifest
2509 (modified, revert, remove, True, True),
2516 (modified, revert, remove, True, True),
2510 (added, revert, remove, True, False),
2517 (added, revert, remove, True, False),
2511 (removed, undelete, None, False, False),
2518 (removed, undelete, None, False, False),
2512 (deleted, revert, remove, False, False),
2519 (deleted, revert, remove, False, False),
2513 )
2520 )
2514
2521
2515 for abs, (rel, exact) in util.sort(names.items()):
2522 for abs, (rel, exact) in util.sort(names.items()):
2516 mfentry = mf.get(abs)
2523 mfentry = mf.get(abs)
2517 target = repo.wjoin(abs)
2524 target = repo.wjoin(abs)
2518 def handle(xlist, dobackup):
2525 def handle(xlist, dobackup):
2519 xlist[0].append(abs)
2526 xlist[0].append(abs)
2520 if dobackup and not opts.get('no_backup') and util.lexists(target):
2527 if dobackup and not opts.get('no_backup') and util.lexists(target):
2521 bakname = "%s.orig" % rel
2528 bakname = "%s.orig" % rel
2522 ui.note(_('saving current version of %s as %s\n') %
2529 ui.note(_('saving current version of %s as %s\n') %
2523 (rel, bakname))
2530 (rel, bakname))
2524 if not opts.get('dry_run'):
2531 if not opts.get('dry_run'):
2525 util.copyfile(target, bakname)
2532 util.copyfile(target, bakname)
2526 if ui.verbose or not exact:
2533 if ui.verbose or not exact:
2527 msg = xlist[1]
2534 msg = xlist[1]
2528 if not isinstance(msg, basestring):
2535 if not isinstance(msg, basestring):
2529 msg = msg(abs)
2536 msg = msg(abs)
2530 ui.status(msg % rel)
2537 ui.status(msg % rel)
2531 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2538 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2532 if abs not in table: continue
2539 if abs not in table: continue
2533 # file has changed in dirstate
2540 # file has changed in dirstate
2534 if mfentry:
2541 if mfentry:
2535 handle(hitlist, backuphit)
2542 handle(hitlist, backuphit)
2536 elif misslist is not None:
2543 elif misslist is not None:
2537 handle(misslist, backupmiss)
2544 handle(misslist, backupmiss)
2538 break
2545 break
2539 else:
2546 else:
2540 if abs not in repo.dirstate:
2547 if abs not in repo.dirstate:
2541 if mfentry:
2548 if mfentry:
2542 handle(add, True)
2549 handle(add, True)
2543 elif exact:
2550 elif exact:
2544 ui.warn(_('file not managed: %s\n') % rel)
2551 ui.warn(_('file not managed: %s\n') % rel)
2545 continue
2552 continue
2546 # file has not changed in dirstate
2553 # file has not changed in dirstate
2547 if node == parent:
2554 if node == parent:
2548 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2555 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2549 continue
2556 continue
2550 if pmf is None:
2557 if pmf is None:
2551 # only need parent manifest in this unlikely case,
2558 # only need parent manifest in this unlikely case,
2552 # so do not read by default
2559 # so do not read by default
2553 pmf = repo[parent].manifest()
2560 pmf = repo[parent].manifest()
2554 if abs in pmf:
2561 if abs in pmf:
2555 if mfentry:
2562 if mfentry:
2556 # if version of file is same in parent and target
2563 # if version of file is same in parent and target
2557 # manifests, do nothing
2564 # manifests, do nothing
2558 if (pmf[abs] != mfentry or
2565 if (pmf[abs] != mfentry or
2559 pmf.flags(abs) != mf.flags(abs)):
2566 pmf.flags(abs) != mf.flags(abs)):
2560 handle(revert, False)
2567 handle(revert, False)
2561 else:
2568 else:
2562 handle(remove, False)
2569 handle(remove, False)
2563
2570
2564 if not opts.get('dry_run'):
2571 if not opts.get('dry_run'):
2565 def checkout(f):
2572 def checkout(f):
2566 fc = ctx[f]
2573 fc = ctx[f]
2567 repo.wwrite(f, fc.data(), fc.flags())
2574 repo.wwrite(f, fc.data(), fc.flags())
2568
2575
2569 audit_path = util.path_auditor(repo.root)
2576 audit_path = util.path_auditor(repo.root)
2570 for f in remove[0]:
2577 for f in remove[0]:
2571 if repo.dirstate[f] == 'a':
2578 if repo.dirstate[f] == 'a':
2572 repo.dirstate.forget(f)
2579 repo.dirstate.forget(f)
2573 continue
2580 continue
2574 audit_path(f)
2581 audit_path(f)
2575 try:
2582 try:
2576 util.unlink(repo.wjoin(f))
2583 util.unlink(repo.wjoin(f))
2577 except OSError:
2584 except OSError:
2578 pass
2585 pass
2579 repo.dirstate.remove(f)
2586 repo.dirstate.remove(f)
2580
2587
2581 normal = None
2588 normal = None
2582 if node == parent:
2589 if node == parent:
2583 # We're reverting to our parent. If possible, we'd like status
2590 # We're reverting to our parent. If possible, we'd like status
2584 # to report the file as clean. We have to use normallookup for
2591 # to report the file as clean. We have to use normallookup for
2585 # merges to avoid losing information about merged/dirty files.
2592 # merges to avoid losing information about merged/dirty files.
2586 if p2 != nullid:
2593 if p2 != nullid:
2587 normal = repo.dirstate.normallookup
2594 normal = repo.dirstate.normallookup
2588 else:
2595 else:
2589 normal = repo.dirstate.normal
2596 normal = repo.dirstate.normal
2590 for f in revert[0]:
2597 for f in revert[0]:
2591 checkout(f)
2598 checkout(f)
2592 if normal:
2599 if normal:
2593 normal(f)
2600 normal(f)
2594
2601
2595 for f in add[0]:
2602 for f in add[0]:
2596 checkout(f)
2603 checkout(f)
2597 repo.dirstate.add(f)
2604 repo.dirstate.add(f)
2598
2605
2599 normal = repo.dirstate.normallookup
2606 normal = repo.dirstate.normallookup
2600 if node == parent and p2 == nullid:
2607 if node == parent and p2 == nullid:
2601 normal = repo.dirstate.normal
2608 normal = repo.dirstate.normal
2602 for f in undelete[0]:
2609 for f in undelete[0]:
2603 checkout(f)
2610 checkout(f)
2604 normal(f)
2611 normal(f)
2605
2612
2606 finally:
2613 finally:
2607 del wlock
2614 del wlock
2608
2615
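# The revert code above drives its decisions from a small dispatch table:
# each row pairs the set of files in one dirstate state with the action to
# take when the file is, or is not, present in the target manifest, plus two
# backup flags. A minimal self-contained sketch of that lookup follows; the
# helper name is illustrative and not part of Mercurial's API.
def _revert_action(abs, in_target, disptable):
    # rows: (files_in_state, hit_action, miss_action, backup_hit, backup_miss)
    for files, hit, miss, backuphit, backupmiss in disptable:
        if abs not in files:
            continue
        if in_target:
            return hit, backuphit
        return miss, backupmiss
    return None, False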
2609 def rollback(ui, repo):
2616 def rollback(ui, repo):
2610 """roll back the last transaction
2617 """roll back the last transaction
2611
2618
2612 This command should be used with care. There is only one level of
2619 This command should be used with care. There is only one level of
2613 rollback, and there is no way to undo a rollback. It will also
2620 rollback, and there is no way to undo a rollback. It will also
2614 restore the dirstate at the time of the last transaction, losing
2621 restore the dirstate at the time of the last transaction, losing
2615 any dirstate changes since that time.
2622 any dirstate changes since that time.
2616
2623
2617 Transactions are used to encapsulate the effects of all commands
2624 Transactions are used to encapsulate the effects of all commands
2618 that create new changesets or propagate existing changesets into a
2625 that create new changesets or propagate existing changesets into a
2619 repository. For example, the following commands are transactional,
2626 repository. For example, the following commands are transactional,
2620 and their effects can be rolled back:
2627 and their effects can be rolled back:
2621
2628
2622 commit
2629 commit
2623 import
2630 import
2624 pull
2631 pull
2625 push (with this repository as destination)
2632 push (with this repository as destination)
2626 unbundle
2633 unbundle
2627
2634
2628 This command is not intended for use on public repositories. Once
2635 This command is not intended for use on public repositories. Once
2629 changes are visible for pull by other users, rolling a transaction
2636 changes are visible for pull by other users, rolling a transaction
2630 back locally is ineffective (someone else may already have pulled
2637 back locally is ineffective (someone else may already have pulled
2631 the changes). Furthermore, a race is possible with readers of the
2638 the changes). Furthermore, a race is possible with readers of the
2632 repository; for example an in-progress pull from the repository
2639 repository; for example an in-progress pull from the repository
2633 may fail if a rollback is performed.
2640 may fail if a rollback is performed.
2634 """
2641 """
2635 repo.rollback()
2642 repo.rollback()
2636
2643
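# Hedged usage sketch for the docstring above (shell commands are shown as
# comments and are not executed by this module):
#   $ hg commit -m "commit made by mistake"
#   $ hg rollback        # undoes that commit and restores the dirstate
# There is only one level of undo: a second rollback finds no transaction
# left, and the rollback itself cannot be reverted.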
2637 def root(ui, repo):
2644 def root(ui, repo):
2638 """print the root (top) of the current working dir
2645 """print the root (top) of the current working dir
2639
2646
2640 Print the root directory of the current repository.
2647 Print the root directory of the current repository.
2641 """
2648 """
2642 ui.write(repo.root + "\n")
2649 ui.write(repo.root + "\n")
2643
2650
2644 def serve(ui, repo, **opts):
2651 def serve(ui, repo, **opts):
2645 """export the repository via HTTP
2652 """export the repository via HTTP
2646
2653
2647 Start a local HTTP repository browser and pull server.
2654 Start a local HTTP repository browser and pull server.
2648
2655
2649 By default, the server logs accesses to stdout and errors to
2656 By default, the server logs accesses to stdout and errors to
2650 stderr. Use the "-A" and "-E" options to log to files.
2657 stderr. Use the "-A" and "-E" options to log to files.
2651 """
2658 """
2652
2659
2653 if opts["stdio"]:
2660 if opts["stdio"]:
2654 if repo is None:
2661 if repo is None:
2655 raise error.RepoError(_("There is no Mercurial repository here"
2662 raise error.RepoError(_("There is no Mercurial repository here"
2656 " (.hg not found)"))
2663 " (.hg not found)"))
2657 s = sshserver.sshserver(ui, repo)
2664 s = sshserver.sshserver(ui, repo)
2658 s.serve_forever()
2665 s.serve_forever()
2659
2666
2660 parentui = ui.parentui or ui
2667 parentui = ui.parentui or ui
2661 optlist = ("name templates style address port prefix ipv6"
2668 optlist = ("name templates style address port prefix ipv6"
2662 " accesslog errorlog webdir_conf certificate")
2669 " accesslog errorlog webdir_conf certificate")
2663 for o in optlist.split():
2670 for o in optlist.split():
2664 if opts[o]:
2671 if opts[o]:
2665 parentui.setconfig("web", o, str(opts[o]))
2672 parentui.setconfig("web", o, str(opts[o]))
2666 if (repo is not None) and (repo.ui != parentui):
2673 if (repo is not None) and (repo.ui != parentui):
2667 repo.ui.setconfig("web", o, str(opts[o]))
2674 repo.ui.setconfig("web", o, str(opts[o]))
2668
2675
2669 if repo is None and not ui.config("web", "webdir_conf"):
2676 if repo is None and not ui.config("web", "webdir_conf"):
2670 raise error.RepoError(_("There is no Mercurial repository here"
2677 raise error.RepoError(_("There is no Mercurial repository here"
2671 " (.hg not found)"))
2678 " (.hg not found)"))
2672
2679
2673 class service:
2680 class service:
2674 def init(self):
2681 def init(self):
2675 util.set_signal_handler()
2682 util.set_signal_handler()
2676 self.httpd = hgweb.server.create_server(parentui, repo)
2683 self.httpd = hgweb.server.create_server(parentui, repo)
2677
2684
2678 if not ui.verbose: return
2685 if not ui.verbose: return
2679
2686
2680 if self.httpd.prefix:
2687 if self.httpd.prefix:
2681 prefix = self.httpd.prefix.strip('/') + '/'
2688 prefix = self.httpd.prefix.strip('/') + '/'
2682 else:
2689 else:
2683 prefix = ''
2690 prefix = ''
2684
2691
2685 port = ':%d' % self.httpd.port
2692 port = ':%d' % self.httpd.port
2686 if port == ':80':
2693 if port == ':80':
2687 port = ''
2694 port = ''
2688
2695
2689 bindaddr = self.httpd.addr
2696 bindaddr = self.httpd.addr
2690 if bindaddr == '0.0.0.0':
2697 if bindaddr == '0.0.0.0':
2691 bindaddr = '*'
2698 bindaddr = '*'
2692 elif ':' in bindaddr: # IPv6
2699 elif ':' in bindaddr: # IPv6
2693 bindaddr = '[%s]' % bindaddr
2700 bindaddr = '[%s]' % bindaddr
2694
2701
2695 fqaddr = self.httpd.fqaddr
2702 fqaddr = self.httpd.fqaddr
2696 if ':' in fqaddr:
2703 if ':' in fqaddr:
2697 fqaddr = '[%s]' % fqaddr
2704 fqaddr = '[%s]' % fqaddr
2698 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2705 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2699 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2706 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2700
2707
2701 def run(self):
2708 def run(self):
2702 self.httpd.serve_forever()
2709 self.httpd.serve_forever()
2703
2710
2704 service = service()
2711 service = service()
2705
2712
2706 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2713 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2707
2714
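# A small self-contained sketch (illustrative helper, not part of the hgweb
# API) of how the "listening at ..." URL above is assembled from the fully
# qualified address, the port and the optional path prefix:
def _listen_url(fqaddr, port, prefix):
    shownport = port != 80 and ':%d' % port or ''
    if ':' in fqaddr:                   # bare IPv6 addresses need brackets
        fqaddr = '[%s]' % fqaddr
    if prefix:
        prefix = prefix.strip('/') + '/'
    return 'http://%s%s/%s' % (fqaddr, shownport, prefix)
# e.g. _listen_url('example.com', 8000, '') yields 'http://example.com:8000/'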
2708 def status(ui, repo, *pats, **opts):
2715 def status(ui, repo, *pats, **opts):
2709 """show changed files in the working directory
2716 """show changed files in the working directory
2710
2717
2711 Show status of files in the repository. If names are given, only
2718 Show status of files in the repository. If names are given, only
2712 files that match are shown. Files that are clean or ignored or the
2719 files that match are shown. Files that are clean or ignored or the
2713 source of a copy/move operation are not listed unless -c (clean),
2720 source of a copy/move operation are not listed unless -c (clean),
2714 -i (ignored), -C (copies) or -A is given. Unless options described
2721 -i (ignored), -C (copies) or -A is given. Unless options described
2715 with "show only ..." are given, the options -mardu are used.
2722 with "show only ..." are given, the options -mardu are used.
2716
2723
2717 Option -q/--quiet hides untracked (unknown and ignored) files
2724 Option -q/--quiet hides untracked (unknown and ignored) files
2724 unless explicitly requested with -u/--unknown or -i/--ignored.
2725 unless explicitly requested with -u/--unknown or -i/--ignored.
2719
2726
2720 NOTE: status may appear to disagree with diff if permissions have
2727 NOTE: status may appear to disagree with diff if permissions have
2721 changed or a merge has occurred. The standard diff format does not
2728 changed or a merge has occurred. The standard diff format does not
2722 report permission changes and diff only reports changes relative
2729 report permission changes and diff only reports changes relative
2723 to one merge parent.
2730 to one merge parent.
2724
2731
2725 If one revision is given, it is used as the base revision.
2732 If one revision is given, it is used as the base revision.
2726 If two revisions are given, the difference between them is shown.
2733 If two revisions are given, the difference between them is shown.
2727
2734
2728 The codes used to show the status of files are:
2735 The codes used to show the status of files are:
2729 M = modified
2736 M = modified
2730 A = added
2737 A = added
2731 R = removed
2738 R = removed
2732 C = clean
2739 C = clean
2733 ! = deleted, but still tracked
2740 ! = deleted, but still tracked
2734 ? = not tracked
2741 ? = not tracked
2735 I = ignored
2742 I = ignored
2736 = the previous added file was copied from here
2743 = the previous added file was copied from here
2737 """
2744 """
2738
2745
2739 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2746 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2740 cwd = (pats and repo.getcwd()) or ''
2747 cwd = (pats and repo.getcwd()) or ''
2741 end = opts.get('print0') and '\0' or '\n'
2748 end = opts.get('print0') and '\0' or '\n'
2742 copy = {}
2749 copy = {}
2743 states = 'modified added removed deleted unknown ignored clean'.split()
2750 states = 'modified added removed deleted unknown ignored clean'.split()
2744 show = [k for k in states if opts[k]]
2751 show = [k for k in states if opts[k]]
2745 if opts.get('all'):
2752 if opts.get('all'):
2746 show += ui.quiet and (states[:4] + ['clean']) or states
2753 show += ui.quiet and (states[:4] + ['clean']) or states
2747 if not show:
2754 if not show:
2748 show = ui.quiet and states[:4] or states[:5]
2755 show = ui.quiet and states[:4] or states[:5]
2749
2756
2750 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2757 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2751 'ignored' in show, 'clean' in show, 'unknown' in show)
2758 'ignored' in show, 'clean' in show, 'unknown' in show)
2752 changestates = zip(states, 'MAR!?IC', stat)
2759 changestates = zip(states, 'MAR!?IC', stat)
2753
2760
2754 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2761 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2755 ctxn = repo[nullid]
2762 ctxn = repo[nullid]
2756 ctx1 = repo[node1]
2763 ctx1 = repo[node1]
2757 ctx2 = repo[node2]
2764 ctx2 = repo[node2]
2758 added = stat[1]
2765 added = stat[1]
2759 if node2 is None:
2766 if node2 is None:
2760 added = stat[0] + stat[1] # merged?
2767 added = stat[0] + stat[1] # merged?
2761
2768
2762 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2769 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2763 if k in added:
2770 if k in added:
2764 copy[k] = v
2771 copy[k] = v
2765 elif v in added:
2772 elif v in added:
2766 copy[v] = k
2773 copy[v] = k
2767
2774
2768 for state, char, files in changestates:
2775 for state, char, files in changestates:
2769 if state in show:
2776 if state in show:
2770 format = "%s %%s%s" % (char, end)
2777 format = "%s %%s%s" % (char, end)
2771 if opts.get('no_status'):
2778 if opts.get('no_status'):
2772 format = "%%s%s" % end
2779 format = "%%s%s" % end
2773
2780
2774 for f in files:
2781 for f in files:
2775 ui.write(format % repo.pathto(f, cwd))
2782 ui.write(format % repo.pathto(f, cwd))
2776 if f in copy:
2783 if f in copy:
2777 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2784 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2778
2785
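# Illustrative expansion of the per-state format string built above: for the
# 'modified' state and a newline terminator the format becomes "M %s\n" (or
# "M %s\0" with --print0), so each modified file prints as "M <path>"; when
# -C/--copies is in effect the copy source is written on the next line,
# indented by two spaces. Sample values:
char, end = 'M', '\n'
fmt = "%s %%s%s" % (char, end)          # -> "M %s\n"
line = fmt % 'foo.py'                   # -> "M foo.py\n"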
2779 def tag(ui, repo, name1, *names, **opts):
2786 def tag(ui, repo, name1, *names, **opts):
2780 """add one or more tags for the current or given revision
2787 """add one or more tags for the current or given revision
2781
2788
2782 Name a particular revision using <name>.
2789 Name a particular revision using <name>.
2783
2790
2784 Tags are used to name particular revisions of the repository and are
2791 Tags are used to name particular revisions of the repository and are
2785 very useful for comparing different revisions, going back to significant
2792 very useful for comparing different revisions, going back to significant
2786 earlier versions, or marking branch points as releases, etc.
2793 earlier versions, or marking branch points as releases, etc.
2787
2794
2788 If no revision is given, the parent of the working directory is used,
2795 If no revision is given, the parent of the working directory is used,
2789 or tip if no revision is checked out.
2796 or tip if no revision is checked out.
2790
2797
2791 To facilitate version control, distribution, and merging of tags,
2798 To facilitate version control, distribution, and merging of tags,
2792 they are stored as a file named ".hgtags" which is managed
2799 they are stored as a file named ".hgtags" which is managed
2793 similarly to other project files and can be hand-edited if
2800 similarly to other project files and can be hand-edited if
2794 necessary. The file '.hg/localtags' is used for local tags (not
2801 necessary. The file '.hg/localtags' is used for local tags (not
2795 shared among repositories).
2802 shared among repositories).
2796
2803
2797 See 'hg help dates' for a list of formats valid for -d/--date.
2804 See 'hg help dates' for a list of formats valid for -d/--date.
2798 """
2805 """
2799
2806
2800 rev_ = "."
2807 rev_ = "."
2801 names = (name1,) + names
2808 names = (name1,) + names
2802 if len(names) != len(dict.fromkeys(names)):
2809 if len(names) != len(dict.fromkeys(names)):
2803 raise util.Abort(_('tag names must be unique'))
2810 raise util.Abort(_('tag names must be unique'))
2804 for n in names:
2811 for n in names:
2805 if n in ['tip', '.', 'null']:
2812 if n in ['tip', '.', 'null']:
2806 raise util.Abort(_('the name \'%s\' is reserved') % n)
2813 raise util.Abort(_('the name \'%s\' is reserved') % n)
2807 if opts.get('rev') and opts.get('remove'):
2814 if opts.get('rev') and opts.get('remove'):
2808 raise util.Abort(_("--rev and --remove are incompatible"))
2815 raise util.Abort(_("--rev and --remove are incompatible"))
2809 if opts.get('rev'):
2816 if opts.get('rev'):
2810 rev_ = opts['rev']
2817 rev_ = opts['rev']
2811 message = opts.get('message')
2818 message = opts.get('message')
2812 if opts.get('remove'):
2819 if opts.get('remove'):
2813 expectedtype = opts.get('local') and 'local' or 'global'
2820 expectedtype = opts.get('local') and 'local' or 'global'
2814 for n in names:
2821 for n in names:
2815 if not repo.tagtype(n):
2822 if not repo.tagtype(n):
2816 raise util.Abort(_('tag \'%s\' does not exist') % n)
2823 raise util.Abort(_('tag \'%s\' does not exist') % n)
2817 if repo.tagtype(n) != expectedtype:
2824 if repo.tagtype(n) != expectedtype:
2818 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2825 raise util.Abort(_('tag \'%s\' is not a %s tag') %
2819 (n, expectedtype))
2826 (n, expectedtype))
2820 rev_ = nullid
2827 rev_ = nullid
2821 if not message:
2828 if not message:
2822 message = _('Removed tag %s') % ', '.join(names)
2829 message = _('Removed tag %s') % ', '.join(names)
2823 elif not opts.get('force'):
2830 elif not opts.get('force'):
2824 for n in names:
2831 for n in names:
2825 if n in repo.tags():
2832 if n in repo.tags():
2826 raise util.Abort(_('tag \'%s\' already exists '
2833 raise util.Abort(_('tag \'%s\' already exists '
2827 '(use -f to force)') % n)
2834 '(use -f to force)') % n)
2828 if not rev_ and repo.dirstate.parents()[1] != nullid:
2835 if not rev_ and repo.dirstate.parents()[1] != nullid:
2829 raise util.Abort(_('uncommitted merge - please provide a '
2836 raise util.Abort(_('uncommitted merge - please provide a '
2830 'specific revision'))
2837 'specific revision'))
2831 r = repo[rev_].node()
2838 r = repo[rev_].node()
2832
2839
2833 if not message:
2840 if not message:
2834 message = (_('Added tag %s for changeset %s') %
2841 message = (_('Added tag %s for changeset %s') %
2835 (', '.join(names), short(r)))
2842 (', '.join(names), short(r)))
2836
2843
2837 date = opts.get('date')
2844 date = opts.get('date')
2838 if date:
2845 if date:
2839 date = util.parsedate(date)
2846 date = util.parsedate(date)
2840
2847
2841 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2848 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2842
2849
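# Hedged usage sketch for the command above (shell invocations shown as
# comments; they are not run by this module):
#   $ hg tag -r 3 v1.0          # record tag v1.0 for revision 3 in .hgtags
#   $ hg tag -l wip             # local tag, stored in .hg/localtags only
#   $ hg tag --remove v1.0      # commit a removal of the global tag v1.0
# Reusing an existing name is refused unless -f/--force is given, and the
# reserved names 'tip', '.' and 'null' are always rejected.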
2843 def tags(ui, repo):
2850 def tags(ui, repo):
2844 """list repository tags
2851 """list repository tags
2845
2852
2846 This lists both regular and local tags. When the -v/--verbose switch
2853 This lists both regular and local tags. When the -v/--verbose switch
2847 is used, a third column "local" is printed for local tags.
2854 is used, a third column "local" is printed for local tags.
2848 """
2855 """
2849
2856
2850 l = repo.tagslist()
2857 l = repo.tagslist()
2851 l.reverse()
2858 l.reverse()
2852 hexfunc = ui.debugflag and hex or short
2859 hexfunc = ui.debugflag and hex or short
2853 tagtype = ""
2860 tagtype = ""
2854
2861
2855 for t, n in l:
2862 for t, n in l:
2856 if ui.quiet:
2863 if ui.quiet:
2857 ui.write("%s\n" % t)
2864 ui.write("%s\n" % t)
2858 continue
2865 continue
2859
2866
2860 try:
2867 try:
2861 hn = hexfunc(n)
2868 hn = hexfunc(n)
2862 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2869 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2863 except error.LookupError:
2870 except error.LookupError:
2864 r = " ?:%s" % hn
2871 r = " ?:%s" % hn
2865 else:
2872 else:
2866 spaces = " " * (30 - util.locallen(t))
2873 spaces = " " * (30 - util.locallen(t))
2867 if ui.verbose:
2874 if ui.verbose:
2868 if repo.tagtype(t) == 'local':
2875 if repo.tagtype(t) == 'local':
2869 tagtype = " local"
2876 tagtype = " local"
2870 else:
2877 else:
2871 tagtype = ""
2878 tagtype = ""
2872 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2879 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2873
2880
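# Illustrative only: in the non-quiet listing above the tag name is padded
# toward a 30-column field (util.locallen handles multibyte names; plain
# len() is used in this sketch) and the revision number is right-aligned in
# five columns:
t, hn = 'v1.0', '0a1b2c3d4e5f'
r = '%5d:%s' % (5, hn)
line = '%s%s %s\n' % (t, ' ' * (30 - len(t)), r)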
2874 def tip(ui, repo, **opts):
2881 def tip(ui, repo, **opts):
2875 """show the tip revision
2882 """show the tip revision
2876
2883
2877 The tip revision (usually just called the tip) is the most
2884 The tip revision (usually just called the tip) is the most
2878 recently added changeset in the repository, the most recently
2885 recently added changeset in the repository, the most recently
2879 changed head.
2886 changed head.
2880
2887
2881 If you have just made a commit, that commit will be the tip. If
2888 If you have just made a commit, that commit will be the tip. If
2882 you have just pulled changes from another repository, the tip of
2889 you have just pulled changes from another repository, the tip of
2883 that repository becomes the current tip. The "tip" tag is special
2890 that repository becomes the current tip. The "tip" tag is special
2884 and cannot be renamed or assigned to a different changeset.
2891 and cannot be renamed or assigned to a different changeset.
2885 """
2892 """
2886 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2893 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2887
2894
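# The tip is simply the changeset with the highest local revision number,
# which is why the lookup above indexes the repository at len(repo) - 1.
# Hedged CLI illustration (comments only, nothing executed here):
#   $ hg tip -q           # prints "rev:shorthash" of the tip changeset
#   $ hg log -r tip -q    # addresses the same changeset via the 'tip' tag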
2888 def unbundle(ui, repo, fname1, *fnames, **opts):
2895 def unbundle(ui, repo, fname1, *fnames, **opts):
2889 """apply one or more changegroup files
2896 """apply one or more changegroup files
2890
2897
2891 Apply one or more compressed changegroup files generated by the
2898 Apply one or more compressed changegroup files generated by the
2892 bundle command.
2899 bundle command.
2893 """
2900 """
2894 fnames = (fname1,) + fnames
2901 fnames = (fname1,) + fnames
2895
2902
2896 lock = None
2903 lock = None
2897 try:
2904 try:
2898 lock = repo.lock()
2905 lock = repo.lock()
2899 for fname in fnames:
2906 for fname in fnames:
2900 f = url.open(ui, fname)
2907 f = url.open(ui, fname)
2901 gen = changegroup.readbundle(f, fname)
2908 gen = changegroup.readbundle(f, fname)
2902 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2909 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2903 finally:
2910 finally:
2904 del lock
2911 del lock
2905
2912
2906 return postincoming(ui, repo, modheads, opts.get('update'), None)
2913 return postincoming(ui, repo, modheads, opts.get('update'), None)
2907
2914
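# Hedged usage sketch for the docstring above (shell commands shown as
# comments; nothing here is executed): a file written by 'hg bundle' can be
# carried to another clone and applied with unbundle:
#   $ hg bundle --base null everything.hg     # in the source repository
#   $ hg unbundle everything.hg               # in the receiving repository
#   $ hg unbundle -u everything.hg            # same, then update the working dir
# The file name 'everything.hg' is only an example.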
2908 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2915 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2909 """update working directory
2916 """update working directory
2910
2917
2911 Update the repository's working directory to the specified revision,
2918 Update the repository's working directory to the specified revision,
2912 or the tip of the current branch if none is specified. Use null as
2919 or the tip of the current branch if none is specified. Use null as
2913 the revision to remove the working copy (like 'hg clone -U').
2920 the revision to remove the working copy (like 'hg clone -U').
2914
2921
2915 When the working dir contains no uncommitted changes, it will be
2922 When the working dir contains no uncommitted changes, it will be
2916 replaced by the state of the requested revision from the repo. When
2923 replaced by the state of the requested revision from the repo. When
2917 the requested revision is on a different branch, the working dir
2924 the requested revision is on a different branch, the working dir
2918 will additionally be switched to that branch.
2925 will additionally be switched to that branch.
2919
2926
2920 When there are uncommitted changes, use option -C to discard them,
2927 When there are uncommitted changes, use option -C to discard them,
2921 forcibly replacing the state of the working dir with the requested
2928 forcibly replacing the state of the working dir with the requested
2922 revision.
2929 revision.
2923
2930
2924 When there are uncommitted changes and option -C is not used, and
2931 When there are uncommitted changes and option -C is not used, and
2925 the parent revision and requested revision are on the same branch,
2932 the parent revision and requested revision are on the same branch,
2926 and one of them is an ancestor of the other, then the new working
2933 and one of them is an ancestor of the other, then the new working
2927 directory will contain the requested revision merged with the
2934 directory will contain the requested revision merged with the
2928 uncommitted changes. Otherwise, the update will fail with a
2935 uncommitted changes. Otherwise, the update will fail with a
2929 suggestion to use 'merge' or 'update -C' instead.
2936 suggestion to use 'merge' or 'update -C' instead.
2930
2937
2931 If you want to update just one file to an older revision, use revert.
2938 If you want to update just one file to an older revision, use revert.
2932
2939
2933 See 'hg help dates' for a list of formats valid for --date.
2940 See 'hg help dates' for a list of formats valid for --date.
2934 """
2941 """
2935 if rev and node:
2942 if rev and node:
2936 raise util.Abort(_("please specify just one revision"))
2943 raise util.Abort(_("please specify just one revision"))
2937
2944
2938 if not rev:
2945 if not rev:
2939 rev = node
2946 rev = node
2940
2947
2941 if date:
2948 if date:
2942 if rev:
2949 if rev:
2943 raise util.Abort(_("you can't specify a revision and a date"))
2950 raise util.Abort(_("you can't specify a revision and a date"))
2944 rev = cmdutil.finddate(ui, repo, date)
2951 rev = cmdutil.finddate(ui, repo, date)
2945
2952
2946 if clean:
2953 if clean:
2947 return hg.clean(repo, rev)
2954 return hg.clean(repo, rev)
2948 else:
2955 else:
2949 return hg.update(repo, rev)
2956 return hg.update(repo, rev)
2950
2957
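# Hedged usage sketch of the rules described above (shell commands shown as
# comments, not executed by this module):
#   $ hg update                     # move to the tip of the current branch
#   $ hg update -C 1.0              # discard local changes, check out tag 1.0
#   $ hg update --date 2008-06-01   # tipmost revision matching that date
#   $ hg update null                # remove the working copy (like 'hg clone -U')
# As the checks above enforce, a revision and a date may not be combined.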
2951 def verify(ui, repo):
2958 def verify(ui, repo):
2952 """verify the integrity of the repository
2959 """verify the integrity of the repository
2953
2960
2954 Verify the integrity of the current repository.
2961 Verify the integrity of the current repository.
2955
2962
2956 This will perform an extensive check of the repository's
2963 This will perform an extensive check of the repository's
2957 integrity, validating the hashes and checksums of each entry in
2964 integrity, validating the hashes and checksums of each entry in
2958 the changelog, manifest, and tracked files, as well as the
2965 the changelog, manifest, and tracked files, as well as the
2959 integrity of their crosslinks and indices.
2966 integrity of their crosslinks and indices.
2960 """
2967 """
2961 return hg.verify(repo)
2968 return hg.verify(repo)
2962
2969
2963 def version_(ui):
2970 def version_(ui):
2964 """output version and copyright information"""
2971 """output version and copyright information"""
2965 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2972 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2966 % util.version())
2973 % util.version())
2967 ui.status(_(
2974 ui.status(_(
2968 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2975 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2969 "This is free software; see the source for copying conditions. "
2976 "This is free software; see the source for copying conditions. "
2970 "There is NO\nwarranty; "
2977 "There is NO\nwarranty; "
2971 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2978 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2972 ))
2979 ))
2973
2980
2974 # Command options and aliases are listed here, alphabetically
2981 # Command options and aliases are listed here, alphabetically
2975
2982
2976 globalopts = [
2983 globalopts = [
2977 ('R', 'repository', '',
2984 ('R', 'repository', '',
2978 _('repository root directory or symbolic path name')),
2985 _('repository root directory or symbolic path name')),
2979 ('', 'cwd', '', _('change working directory')),
2986 ('', 'cwd', '', _('change working directory')),
2980 ('y', 'noninteractive', None,
2987 ('y', 'noninteractive', None,
2981 _('do not prompt, assume \'yes\' for any required answers')),
2988 _('do not prompt, assume \'yes\' for any required answers')),
2982 ('q', 'quiet', None, _('suppress output')),
2989 ('q', 'quiet', None, _('suppress output')),
2983 ('v', 'verbose', None, _('enable additional output')),
2990 ('v', 'verbose', None, _('enable additional output')),
2984 ('', 'config', [], _('set/override config option')),
2991 ('', 'config', [], _('set/override config option')),
2985 ('', 'debug', None, _('enable debugging output')),
2992 ('', 'debug', None, _('enable debugging output')),
2986 ('', 'debugger', None, _('start debugger')),
2993 ('', 'debugger', None, _('start debugger')),
2987 ('', 'encoding', util._encoding, _('set the charset encoding')),
2994 ('', 'encoding', util._encoding, _('set the charset encoding')),
2988 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2995 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2989 ('', 'lsprof', None, _('print improved command execution profile')),
2996 ('', 'lsprof', None, _('print improved command execution profile')),
2990 ('', 'traceback', None, _('print traceback on exception')),
2997 ('', 'traceback', None, _('print traceback on exception')),
2991 ('', 'time', None, _('time how long the command takes')),
2998 ('', 'time', None, _('time how long the command takes')),
2992 ('', 'profile', None, _('print command execution profile')),
2999 ('', 'profile', None, _('print command execution profile')),
2993 ('', 'version', None, _('output version information and exit')),
3000 ('', 'version', None, _('output version information and exit')),
2994 ('h', 'help', None, _('display help and exit')),
3001 ('h', 'help', None, _('display help and exit')),
2995 ]
3002 ]
2996
3003
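# Every entry in these option lists is a tuple of (short option letter or '',
# long option name, default value, help text); the type of the default
# decides how the value is parsed (None/False for simple flags, '' for
# strings, [] for options that may be given several times, an int for
# numeric values). Illustrative sample only, not used anywhere:
sample_opt = ('v', 'verbose', None, _('enable additional output'))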
2997 dryrunopts = [('n', 'dry-run', None,
3004 dryrunopts = [('n', 'dry-run', None,
2998 _('do not perform actions, just print output'))]
3005 _('do not perform actions, just print output'))]
2999
3006
3000 remoteopts = [
3007 remoteopts = [
3001 ('e', 'ssh', '', _('specify ssh command to use')),
3008 ('e', 'ssh', '', _('specify ssh command to use')),
3002 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3009 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3003 ]
3010 ]
3004
3011
3005 walkopts = [
3012 walkopts = [
3006 ('I', 'include', [], _('include names matching the given patterns')),
3013 ('I', 'include', [], _('include names matching the given patterns')),
3007 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3014 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3008 ]
3015 ]
3009
3016
3010 commitopts = [
3017 commitopts = [
3011 ('m', 'message', '', _('use <text> as commit message')),
3018 ('m', 'message', '', _('use <text> as commit message')),
3012 ('l', 'logfile', '', _('read commit message from <file>')),
3019 ('l', 'logfile', '', _('read commit message from <file>')),
3013 ]
3020 ]
3014
3021
3015 commitopts2 = [
3022 commitopts2 = [
3016 ('d', 'date', '', _('record datecode as commit date')),
3023 ('d', 'date', '', _('record datecode as commit date')),
3017 ('u', 'user', '', _('record user as committer')),
3024 ('u', 'user', '', _('record user as committer')),
3018 ]
3025 ]
3019
3026
3020 templateopts = [
3027 templateopts = [
3021 ('', 'style', '', _('display using template map file')),
3028 ('', 'style', '', _('display using template map file')),
3022 ('', 'template', '', _('display with template')),
3029 ('', 'template', '', _('display with template')),
3023 ]
3030 ]
3024
3031
3025 logopts = [
3032 logopts = [
3026 ('p', 'patch', None, _('show patch')),
3033 ('p', 'patch', None, _('show patch')),
3027 ('l', 'limit', '', _('limit number of changes displayed')),
3034 ('l', 'limit', '', _('limit number of changes displayed')),
3028 ('M', 'no-merges', None, _('do not show merges')),
3035 ('M', 'no-merges', None, _('do not show merges')),
3029 ] + templateopts
3036 ] + templateopts
3030
3037
3031 diffopts = [
3038 diffopts = [
3032 ('a', 'text', None, _('treat all files as text')),
3039 ('a', 'text', None, _('treat all files as text')),
3033 ('g', 'git', None, _('use git extended diff format')),
3040 ('g', 'git', None, _('use git extended diff format')),
3034 ('', 'nodates', None, _("don't include dates in diff headers"))
3041 ('', 'nodates', None, _("don't include dates in diff headers"))
3035 ]
3042 ]
3036
3043
3037 diffopts2 = [
3044 diffopts2 = [
3038 ('p', 'show-function', None, _('show which function each change is in')),
3045 ('p', 'show-function', None, _('show which function each change is in')),
3039 ('w', 'ignore-all-space', None,
3046 ('w', 'ignore-all-space', None,
3040 _('ignore white space when comparing lines')),
3047 _('ignore white space when comparing lines')),
3041 ('b', 'ignore-space-change', None,
3048 ('b', 'ignore-space-change', None,
3042 _('ignore changes in the amount of white space')),
3049 _('ignore changes in the amount of white space')),
3043 ('B', 'ignore-blank-lines', None,
3050 ('B', 'ignore-blank-lines', None,
3044 _('ignore changes whose lines are all blank')),
3051 _('ignore changes whose lines are all blank')),
3045 ('U', 'unified', '', _('number of lines of context to show'))
3052 ('U', 'unified', '', _('number of lines of context to show'))
3046 ]
3053 ]
3047
3054
3048 similarityopts = [
3055 similarityopts = [
3049 ('s', 'similarity', '',
3056 ('s', 'similarity', '',
3050 _('guess renamed files by similarity (0<=s<=100)'))
3057 _('guess renamed files by similarity (0<=s<=100)'))
3051 ]
3058 ]
3052
3059
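# The command table below maps a command name to a tuple of (function, option
# list, optional usage synopsis). A leading '^' marks a command listed in the
# short help, and '|' separates a name from its aliases (e.g. "^commit|ci").
# Illustrative shape only, not a real entry:
#   "^example|ex": (example, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),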
3053 table = {
3060 table = {
3054 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3061 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3055 "addremove":
3062 "addremove":
3056 (addremove, similarityopts + walkopts + dryrunopts,
3063 (addremove, similarityopts + walkopts + dryrunopts,
3057 _('[OPTION]... [FILE]...')),
3064 _('[OPTION]... [FILE]...')),
3058 "^annotate|blame":
3065 "^annotate|blame":
3059 (annotate,
3066 (annotate,
3060 [('r', 'rev', '', _('annotate the specified revision')),
3067 [('r', 'rev', '', _('annotate the specified revision')),
3061 ('f', 'follow', None, _('follow file copies and renames')),
3068 ('f', 'follow', None, _('follow file copies and renames')),
3062 ('a', 'text', None, _('treat all files as text')),
3069 ('a', 'text', None, _('treat all files as text')),
3063 ('u', 'user', None, _('list the author (long with -v)')),
3070 ('u', 'user', None, _('list the author (long with -v)')),
3064 ('d', 'date', None, _('list the date (short with -q)')),
3071 ('d', 'date', None, _('list the date (short with -q)')),
3065 ('n', 'number', None, _('list the revision number (default)')),
3072 ('n', 'number', None, _('list the revision number (default)')),
3066 ('c', 'changeset', None, _('list the changeset')),
3073 ('c', 'changeset', None, _('list the changeset')),
3067 ('l', 'line-number', None,
3074 ('l', 'line-number', None,
3068 _('show line number at the first appearance'))
3075 _('show line number at the first appearance'))
3069 ] + walkopts,
3076 ] + walkopts,
3070 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3077 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3071 "archive":
3078 "archive":
3072 (archive,
3079 (archive,
3073 [('', 'no-decode', None, _('do not pass files through decoders')),
3080 [('', 'no-decode', None, _('do not pass files through decoders')),
3074 ('p', 'prefix', '', _('directory prefix for files in archive')),
3081 ('p', 'prefix', '', _('directory prefix for files in archive')),
3075 ('r', 'rev', '', _('revision to distribute')),
3082 ('r', 'rev', '', _('revision to distribute')),
3076 ('t', 'type', '', _('type of distribution to create')),
3083 ('t', 'type', '', _('type of distribution to create')),
3077 ] + walkopts,
3084 ] + walkopts,
3078 _('[OPTION]... DEST')),
3085 _('[OPTION]... DEST')),
3079 "backout":
3086 "backout":
3080 (backout,
3087 (backout,
3081 [('', 'merge', None,
3088 [('', 'merge', None,
3082 _('merge with old dirstate parent after backout')),
3089 _('merge with old dirstate parent after backout')),
3083 ('', 'parent', '', _('parent to choose when backing out merge')),
3090 ('', 'parent', '', _('parent to choose when backing out merge')),
3084 ('r', 'rev', '', _('revision to backout')),
3091 ('r', 'rev', '', _('revision to backout')),
3085 ] + walkopts + commitopts + commitopts2,
3092 ] + walkopts + commitopts + commitopts2,
3086 _('[OPTION]... [-r] REV')),
3093 _('[OPTION]... [-r] REV')),
3087 "bisect":
3094 "bisect":
3088 (bisect,
3095 (bisect,
3089 [('r', 'reset', False, _('reset bisect state')),
3096 [('r', 'reset', False, _('reset bisect state')),
3090 ('g', 'good', False, _('mark changeset good')),
3097 ('g', 'good', False, _('mark changeset good')),
3091 ('b', 'bad', False, _('mark changeset bad')),
3098 ('b', 'bad', False, _('mark changeset bad')),
3092 ('s', 'skip', False, _('skip testing changeset')),
3099 ('s', 'skip', False, _('skip testing changeset')),
3093 ('c', 'command', '', _('use command to check changeset state')),
3100 ('c', 'command', '', _('use command to check changeset state')),
3094 ('U', 'noupdate', False, _('do not update to target'))],
3101 ('U', 'noupdate', False, _('do not update to target'))],
3095 _("[-gbsr] [-c CMD] [REV]")),
3102 _("[-gbsr] [-c CMD] [REV]")),
3096 "branch":
3103 "branch":
3097 (branch,
3104 (branch,
3098 [('f', 'force', None,
3105 [('f', 'force', None,
3099 _('set branch name even if it shadows an existing branch')),
3106 _('set branch name even if it shadows an existing branch')),
3100 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3107 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3101 _('[-fC] [NAME]')),
3108 _('[-fC] [NAME]')),
3102 "branches":
3109 "branches":
3103 (branches,
3110 (branches,
3104 [('a', 'active', False,
3111 [('a', 'active', False,
3105 _('show only branches that have unmerged heads'))],
3112 _('show only branches that have unmerged heads'))],
3106 _('[-a]')),
3113 _('[-a]')),
3107 "bundle":
3114 "bundle":
3108 (bundle,
3115 (bundle,
3109 [('f', 'force', None,
3116 [('f', 'force', None,
3110 _('run even when remote repository is unrelated')),
3117 _('run even when remote repository is unrelated')),
3111 ('r', 'rev', [],
3118 ('r', 'rev', [],
3112 _('a changeset up to which you would like to bundle')),
3119 _('a changeset up to which you would like to bundle')),
3113 ('', 'base', [],
3120 ('', 'base', [],
3114 _('a base changeset to specify instead of a destination')),
3121 _('a base changeset to specify instead of a destination')),
3115 ('a', 'all', None, _('bundle all changesets in the repository')),
3122 ('a', 'all', None, _('bundle all changesets in the repository')),
3116 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3123 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3117 ] + remoteopts,
3124 ] + remoteopts,
3118 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3125 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3119 "cat":
3126 "cat":
3120 (cat,
3127 (cat,
3121 [('o', 'output', '', _('print output to file with formatted name')),
3128 [('o', 'output', '', _('print output to file with formatted name')),
3122 ('r', 'rev', '', _('print the given revision')),
3129 ('r', 'rev', '', _('print the given revision')),
3123 ('', 'decode', None, _('apply any matching decode filter')),
3130 ('', 'decode', None, _('apply any matching decode filter')),
3124 ] + walkopts,
3131 ] + walkopts,
3125 _('[OPTION]... FILE...')),
3132 _('[OPTION]... FILE...')),
3126 "^clone":
3133 "^clone":
3127 (clone,
3134 (clone,
3128 [('U', 'noupdate', None,
3135 [('U', 'noupdate', None,
3129 _('the clone will only contain a repository (no working copy)')),
3136 _('the clone will only contain a repository (no working copy)')),
3130 ('r', 'rev', [],
3137 ('r', 'rev', [],
3131 _('a changeset you would like to have after cloning')),
3138 _('a changeset you would like to have after cloning')),
3132 ('', 'pull', None, _('use pull protocol to copy metadata')),
3139 ('', 'pull', None, _('use pull protocol to copy metadata')),
3133 ('', 'uncompressed', None,
3140 ('', 'uncompressed', None,
3134 _('use uncompressed transfer (fast over LAN)')),
3141 _('use uncompressed transfer (fast over LAN)')),
3135 ] + remoteopts,
3142 ] + remoteopts,
3136 _('[OPTION]... SOURCE [DEST]')),
3143 _('[OPTION]... SOURCE [DEST]')),
3137 "^commit|ci":
3144 "^commit|ci":
3138 (commit,
3145 (commit,
3139 [('A', 'addremove', None,
3146 [('A', 'addremove', None,
3140 _('mark new/missing files as added/removed before committing')),
3147 _('mark new/missing files as added/removed before committing')),
3141 ('', 'close-branch', None,
3148 ('', 'close-branch', None,
3142 _('mark a branch as closed, hiding it from the branch list')),
3149 _('mark a branch as closed, hiding it from the branch list')),
3143 ] + walkopts + commitopts + commitopts2,
3150 ] + walkopts + commitopts + commitopts2,
3144 _('[OPTION]... [FILE]...')),
3151 _('[OPTION]... [FILE]...')),
3145 "copy|cp":
3152 "copy|cp":
3146 (copy,
3153 (copy,
3147 [('A', 'after', None, _('record a copy that has already occurred')),
3154 [('A', 'after', None, _('record a copy that has already occurred')),
3148 ('f', 'force', None,
3155 ('f', 'force', None,
3149 _('forcibly copy over an existing managed file')),
3156 _('forcibly copy over an existing managed file')),
3150 ] + walkopts + dryrunopts,
3157 ] + walkopts + dryrunopts,
3151 _('[OPTION]... [SOURCE]... DEST')),
3158 _('[OPTION]... [SOURCE]... DEST')),
3152 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3159 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3153 "debugcheckstate": (debugcheckstate, []),
3160 "debugcheckstate": (debugcheckstate, []),
3154 "debugcomplete":
3161 "debugcomplete":
3155 (debugcomplete,
3162 (debugcomplete,
3156 [('o', 'options', None, _('show the command options'))],
3163 [('o', 'options', None, _('show the command options'))],
3157 _('[-o] CMD')),
3164 _('[-o] CMD')),
3158 "debugdate":
3165 "debugdate":
3159 (debugdate,
3166 (debugdate,
3160 [('e', 'extended', None, _('try extended date formats'))],
3167 [('e', 'extended', None, _('try extended date formats'))],
3161 _('[-e] DATE [RANGE]')),
3168 _('[-e] DATE [RANGE]')),
3162 "debugdata": (debugdata, [], _('FILE REV')),
3169 "debugdata": (debugdata, [], _('FILE REV')),
3163 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3170 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3164 "debugindex": (debugindex, [], _('FILE')),
3171 "debugindex": (debugindex, [], _('FILE')),
3165 "debugindexdot": (debugindexdot, [], _('FILE')),
3172 "debugindexdot": (debugindexdot, [], _('FILE')),
3166 "debuginstall": (debuginstall, []),
3173 "debuginstall": (debuginstall, []),
3167 "debugrawcommit|rawcommit":
3174 "debugrawcommit|rawcommit":
3168 (rawcommit,
3175 (rawcommit,
3169 [('p', 'parent', [], _('parent')),
3176 [('p', 'parent', [], _('parent')),
3170 ('F', 'files', '', _('file list'))
3177 ('F', 'files', '', _('file list'))
3171 ] + commitopts + commitopts2,
3178 ] + commitopts + commitopts2,
3172 _('[OPTION]... [FILE]...')),
3179 _('[OPTION]... [FILE]...')),
3173 "debugrebuildstate":
3180 "debugrebuildstate":
3174 (debugrebuildstate,
3181 (debugrebuildstate,
3175 [('r', 'rev', '', _('revision to rebuild to'))],
3182 [('r', 'rev', '', _('revision to rebuild to'))],
3176 _('[-r REV] [REV]')),
3183 _('[-r REV] [REV]')),
3177 "debugrename":
3184 "debugrename":
3178 (debugrename,
3185 (debugrename,
3179 [('r', 'rev', '', _('revision to debug'))],
3186 [('r', 'rev', '', _('revision to debug'))],
3180 _('[-r REV] FILE')),
3187 _('[-r REV] FILE')),
3181 "debugsetparents":
3188 "debugsetparents":
3182 (debugsetparents, [], _('REV1 [REV2]')),
3189 (debugsetparents, [], _('REV1 [REV2]')),
3183 "debugstate":
3190 "debugstate":
3184 (debugstate,
3191 (debugstate,
3185 [('', 'nodates', None, _('do not display the saved mtime'))],
3192 [('', 'nodates', None, _('do not display the saved mtime'))],
3186 _('[OPTION]...')),
3193 _('[OPTION]...')),
3187 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3194 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3188 "^diff":
3195 "^diff":
3189 (diff,
3196 (diff,
3190 [('r', 'rev', [], _('revision')),
3197 [('r', 'rev', [], _('revision')),
3191 ('c', 'change', '', _('change made by revision'))
3198 ('c', 'change', '', _('change made by revision'))
3192 ] + diffopts + diffopts2 + walkopts,
3199 ] + diffopts + diffopts2 + walkopts,
3193 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3200 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3194 "^export":
3201 "^export":
3195 (export,
3202 (export,
3196 [('o', 'output', '', _('print output to file with formatted name')),
3203 [('o', 'output', '', _('print output to file with formatted name')),
3197 ('', 'switch-parent', None, _('diff against the second parent'))
3204 ('', 'switch-parent', None, _('diff against the second parent'))
3198 ] + diffopts,
3205 ] + diffopts,
3199 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3206 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3200 "grep":
3207 "grep":
3201 (grep,
3208 (grep,
3202 [('0', 'print0', None, _('end fields with NUL')),
3209 [('0', 'print0', None, _('end fields with NUL')),
3203 ('', 'all', None, _('print all revisions that match')),
3210 ('', 'all', None, _('print all revisions that match')),
3204 ('f', 'follow', None,
3211 ('f', 'follow', None,
3205 _('follow changeset history, or file history across copies and renames')),
3212 _('follow changeset history, or file history across copies and renames')),
3206 ('i', 'ignore-case', None, _('ignore case when matching')),
3213 ('i', 'ignore-case', None, _('ignore case when matching')),
3207 ('l', 'files-with-matches', None,
3214 ('l', 'files-with-matches', None,
3208 _('print only filenames and revs that match')),
3215 _('print only filenames and revs that match')),
3209 ('n', 'line-number', None, _('print matching line numbers')),
3216 ('n', 'line-number', None, _('print matching line numbers')),
3210 ('r', 'rev', [], _('search in given revision range')),
3217 ('r', 'rev', [], _('search in given revision range')),
3211 ('u', 'user', None, _('list the author (long with -v)')),
3218 ('u', 'user', None, _('list the author (long with -v)')),
3212 ('d', 'date', None, _('list the date (short with -q)')),
3219 ('d', 'date', None, _('list the date (short with -q)')),
3213 ] + walkopts,
3220 ] + walkopts,
3214 _('[OPTION]... PATTERN [FILE]...')),
3221 _('[OPTION]... PATTERN [FILE]...')),
3215 "heads":
3222 "heads":
3216 (heads,
3223 (heads,
3217 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3224 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3225 ('a', 'active', False,
3226 _('show only the active heads from open branches')),
3218 ] + templateopts,
3227 ] + templateopts,
3219 _('[-r REV] [REV]...')),
3228 _('[-r REV] [REV]...')),
3220 "help": (help_, [], _('[TOPIC]')),
3229 "help": (help_, [], _('[TOPIC]')),
3221 "identify|id":
3230 "identify|id":
3222 (identify,
3231 (identify,
3223 [('r', 'rev', '', _('identify the specified rev')),
3232 [('r', 'rev', '', _('identify the specified rev')),
3224 ('n', 'num', None, _('show local revision number')),
3233 ('n', 'num', None, _('show local revision number')),
3225 ('i', 'id', None, _('show global revision id')),
3234 ('i', 'id', None, _('show global revision id')),
3226 ('b', 'branch', None, _('show branch')),
3235 ('b', 'branch', None, _('show branch')),
3227 ('t', 'tags', None, _('show tags'))],
3236 ('t', 'tags', None, _('show tags'))],
3228 _('[-nibt] [-r REV] [SOURCE]')),
3237 _('[-nibt] [-r REV] [SOURCE]')),
3229 "import|patch":
3238 "import|patch":
3230 (import_,
3239 (import_,
3231 [('p', 'strip', 1,
3240 [('p', 'strip', 1,
3232 _('directory strip option for patch. This has the same\n'
3241 _('directory strip option for patch. This has the same\n'
3233 'meaning as the corresponding patch option')),
3242 'meaning as the corresponding patch option')),
3234 ('b', 'base', '', _('base path')),
3243 ('b', 'base', '', _('base path')),
3235 ('f', 'force', None,
3244 ('f', 'force', None,
3236 _('skip check for outstanding uncommitted changes')),
3245 _('skip check for outstanding uncommitted changes')),
3237 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3246 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3238 ('', 'exact', None,
3247 ('', 'exact', None,
3239 _('apply patch to the nodes from which it was generated')),
3248 _('apply patch to the nodes from which it was generated')),
3240 ('', 'import-branch', None,
3249 ('', 'import-branch', None,
3241 _('use any branch information in patch (implied by --exact)'))] +
3250 _('use any branch information in patch (implied by --exact)'))] +
3242 commitopts + commitopts2 + similarityopts,
3251 commitopts + commitopts2 + similarityopts,
3243 _('[OPTION]... PATCH...')),
3252 _('[OPTION]... PATCH...')),
3244 "incoming|in":
3253 "incoming|in":
3245 (incoming,
3254 (incoming,
3246 [('f', 'force', None,
3255 [('f', 'force', None,
3247 _('run even when remote repository is unrelated')),
3256 _('run even when remote repository is unrelated')),
3248 ('n', 'newest-first', None, _('show newest record first')),
3257 ('n', 'newest-first', None, _('show newest record first')),
3249 ('', 'bundle', '', _('file to store the bundles into')),
3258 ('', 'bundle', '', _('file to store the bundles into')),
3250 ('r', 'rev', [],
3259 ('r', 'rev', [],
3251 _('a specific revision up to which you would like to pull')),
3260 _('a specific revision up to which you would like to pull')),
3252 ] + logopts + remoteopts,
3261 ] + logopts + remoteopts,
3253 _('[-p] [-n] [-M] [-f] [-r REV]...'
3262 _('[-p] [-n] [-M] [-f] [-r REV]...'
3254 ' [--bundle FILENAME] [SOURCE]')),
3263 ' [--bundle FILENAME] [SOURCE]')),
3255 "^init":
3264 "^init":
3256 (init,
3265 (init,
3257 remoteopts,
3266 remoteopts,
3258 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3267 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3259 "locate":
3268 "locate":
3260 (locate,
3269 (locate,
3261 [('r', 'rev', '', _('search the repository as it stood at rev')),
3270 [('r', 'rev', '', _('search the repository as it stood at rev')),
3262 ('0', 'print0', None,
3271 ('0', 'print0', None,
3263 _('end filenames with NUL, for use with xargs')),
3272 _('end filenames with NUL, for use with xargs')),
3264 ('f', 'fullpath', None,
3273 ('f', 'fullpath', None,
3265 _('print complete paths from the filesystem root')),
3274 _('print complete paths from the filesystem root')),
3266 ] + walkopts,
3275 ] + walkopts,
3267 _('[OPTION]... [PATTERN]...')),
3276 _('[OPTION]... [PATTERN]...')),
3268 "^log|history":
3277 "^log|history":
3269 (log,
3278 (log,
3270 [('f', 'follow', None,
3279 [('f', 'follow', None,
3271 _('follow changeset history, or file history across copies and renames')),
3280 _('follow changeset history, or file history across copies and renames')),
3272 ('', 'follow-first', None,
3281 ('', 'follow-first', None,
3273 _('only follow the first parent of merge changesets')),
3282 _('only follow the first parent of merge changesets')),
3274 ('d', 'date', '', _('show revs matching date spec')),
3283 ('d', 'date', '', _('show revs matching date spec')),
3275 ('C', 'copies', None, _('show copied files')),
3284 ('C', 'copies', None, _('show copied files')),
3276 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3285 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3277 ('r', 'rev', [], _('show the specified revision or range')),
3286 ('r', 'rev', [], _('show the specified revision or range')),
3278 ('', 'removed', None, _('include revs where files were removed')),
3287 ('', 'removed', None, _('include revs where files were removed')),
3279 ('m', 'only-merges', None, _('show only merges')),
3288 ('m', 'only-merges', None, _('show only merges')),
3280 ('u', 'user', [], _('revs committed by user')),
3289 ('u', 'user', [], _('revs committed by user')),
3281 ('b', 'only-branch', [],
3290 ('b', 'only-branch', [],
3282 _('show only changesets within the given named branch')),
3291 _('show only changesets within the given named branch')),
3283 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3292 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3284 ] + logopts + walkopts,
3293 ] + logopts + walkopts,
3285 _('[OPTION]... [FILE]')),
3294 _('[OPTION]... [FILE]')),
3286 "manifest":
3295 "manifest":
3287 (manifest,
3296 (manifest,
3288 [('r', 'rev', '', _('revision to display'))],
3297 [('r', 'rev', '', _('revision to display'))],
3289 _('[-r REV]')),
3298 _('[-r REV]')),
3290 "^merge":
3299 "^merge":
3291 (merge,
3300 (merge,
3292 [('f', 'force', None, _('force a merge with outstanding changes')),
3301 [('f', 'force', None, _('force a merge with outstanding changes')),
3293 ('r', 'rev', '', _('revision to merge')),
3302 ('r', 'rev', '', _('revision to merge')),
3294 ],
3303 ],
3295 _('[-f] [[-r] REV]')),
3304 _('[-f] [[-r] REV]')),
3296 "outgoing|out":
3305 "outgoing|out":
3297 (outgoing,
3306 (outgoing,
3298 [('f', 'force', None,
3307 [('f', 'force', None,
3299 _('run even when remote repository is unrelated')),
3308 _('run even when remote repository is unrelated')),
3300 ('r', 'rev', [],
3309 ('r', 'rev', [],
3301 _('a specific revision up to which you would like to push')),
3310 _('a specific revision up to which you would like to push')),
3302 ('n', 'newest-first', None, _('show newest record first')),
3311 ('n', 'newest-first', None, _('show newest record first')),
3303 ] + logopts + remoteopts,
3312 ] + logopts + remoteopts,
3304 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3313 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3305 "^parents":
3314 "^parents":
3306 (parents,
3315 (parents,
3307 [('r', 'rev', '', _('show parents from the specified rev')),
3316 [('r', 'rev', '', _('show parents from the specified rev')),
3308 ] + templateopts,
3317 ] + templateopts,
3309 _('hg parents [-r REV] [FILE]')),
3318 _('hg parents [-r REV] [FILE]')),
3310 "paths": (paths, [], _('[NAME]')),
3319 "paths": (paths, [], _('[NAME]')),
3311 "^pull":
3320 "^pull":
3312 (pull,
3321 (pull,
3313 [('u', 'update', None,
3322 [('u', 'update', None,
3314 _('update to new tip if changesets were pulled')),
3323 _('update to new tip if changesets were pulled')),
3315 ('f', 'force', None,
3324 ('f', 'force', None,
3316 _('run even when remote repository is unrelated')),
3325 _('run even when remote repository is unrelated')),
3317 ('r', 'rev', [],
3326 ('r', 'rev', [],
3318 _('a specific revision up to which you would like to pull')),
3327 _('a specific revision up to which you would like to pull')),
3319 ] + remoteopts,
3328 ] + remoteopts,
3320 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3329 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3321 "^push":
3330 "^push":
3322 (push,
3331 (push,
3323 [('f', 'force', None, _('force push')),
3332 [('f', 'force', None, _('force push')),
3324 ('r', 'rev', [],
3333 ('r', 'rev', [],
3325 _('a specific revision up to which you would like to push')),
3334 _('a specific revision up to which you would like to push')),
3326 ] + remoteopts,
3335 ] + remoteopts,
3327 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3336 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3328 "recover": (recover, []),
3337 "recover": (recover, []),
3329 "^remove|rm":
3338 "^remove|rm":
3330 (remove,
3339 (remove,
3331 [('A', 'after', None, _('record delete for missing files')),
3340 [('A', 'after', None, _('record delete for missing files')),
3332 ('f', 'force', None,
3341 ('f', 'force', None,
3333 _('remove (and delete) file even if added or modified')),
3342 _('remove (and delete) file even if added or modified')),
3334 ] + walkopts,
3343 ] + walkopts,
3335 _('[OPTION]... FILE...')),
3344 _('[OPTION]... FILE...')),
3336 "rename|mv":
3345 "rename|mv":
3337 (rename,
3346 (rename,
3338 [('A', 'after', None, _('record a rename that has already occurred')),
3347 [('A', 'after', None, _('record a rename that has already occurred')),
3339 ('f', 'force', None,
3348 ('f', 'force', None,
3340 _('forcibly copy over an existing managed file')),
3349 _('forcibly copy over an existing managed file')),
3341 ] + walkopts + dryrunopts,
3350 ] + walkopts + dryrunopts,
3342 _('[OPTION]... SOURCE... DEST')),
3351 _('[OPTION]... SOURCE... DEST')),
3343 "resolve":
3352 "resolve":
3344 (resolve,
3353 (resolve,
3345 [('a', 'all', None, _('remerge all unresolved files')),
3354 [('a', 'all', None, _('remerge all unresolved files')),
3346 ('l', 'list', None, _('list state of files needing merge')),
3355 ('l', 'list', None, _('list state of files needing merge')),
3347 ('m', 'mark', None, _('mark files as resolved')),
3356 ('m', 'mark', None, _('mark files as resolved')),
3348 ('u', 'unmark', None, _('unmark files as resolved'))],
3357 ('u', 'unmark', None, _('unmark files as resolved'))],
3349 _('[OPTION]... [FILE]...')),
3358 _('[OPTION]... [FILE]...')),
3350 "revert":
3359 "revert":
3351 (revert,
3360 (revert,
3352 [('a', 'all', None, _('revert all changes when no arguments given')),
3361 [('a', 'all', None, _('revert all changes when no arguments given')),
3353 ('d', 'date', '', _('tipmost revision matching date')),
3362 ('d', 'date', '', _('tipmost revision matching date')),
3354 ('r', 'rev', '', _('revision to revert to')),
3363 ('r', 'rev', '', _('revision to revert to')),
3355 ('', 'no-backup', None, _('do not save backup copies of files')),
3364 ('', 'no-backup', None, _('do not save backup copies of files')),
3356 ] + walkopts + dryrunopts,
3365 ] + walkopts + dryrunopts,
3357 _('[OPTION]... [-r REV] [NAME]...')),
3366 _('[OPTION]... [-r REV] [NAME]...')),
3358 "rollback": (rollback, []),
3367 "rollback": (rollback, []),
3359 "root": (root, []),
3368 "root": (root, []),
3360 "^serve":
3369 "^serve":
3361 (serve,
3370 (serve,
3362 [('A', 'accesslog', '', _('name of access log file to write to')),
3371 [('A', 'accesslog', '', _('name of access log file to write to')),
3363 ('d', 'daemon', None, _('run server in background')),
3372 ('d', 'daemon', None, _('run server in background')),
3364 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3373 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3365 ('E', 'errorlog', '', _('name of error log file to write to')),
3374 ('E', 'errorlog', '', _('name of error log file to write to')),
3366 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3375 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3367 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3376 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3368 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3377 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3369 ('n', 'name', '',
3378 ('n', 'name', '',
3370 _('name to show in web pages (default: working dir)')),
3379 _('name to show in web pages (default: working dir)')),
3371 ('', 'webdir-conf', '', _('name of the webdir config file'
3380 ('', 'webdir-conf', '', _('name of the webdir config file'
3372 ' (serve more than one repo)')),
3381 ' (serve more than one repo)')),
3373 ('', 'pid-file', '', _('name of file to write process ID to')),
3382 ('', 'pid-file', '', _('name of file to write process ID to')),
3374 ('', 'stdio', None, _('for remote clients')),
3383 ('', 'stdio', None, _('for remote clients')),
3375 ('t', 'templates', '', _('web templates to use')),
3384 ('t', 'templates', '', _('web templates to use')),
3376 ('', 'style', '', _('template style to use')),
3385 ('', 'style', '', _('template style to use')),
3377 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3386 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3378 ('', 'certificate', '', _('SSL certificate file'))],
3387 ('', 'certificate', '', _('SSL certificate file'))],
3379 _('[OPTION]...')),
3388 _('[OPTION]...')),
3380 "showconfig|debugconfig":
3389 "showconfig|debugconfig":
3381 (showconfig,
3390 (showconfig,
3382 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3391 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3383 _('[-u] [NAME]...')),
3392 _('[-u] [NAME]...')),
3384 "^status|st":
3393 "^status|st":
3385 (status,
3394 (status,
3386 [('A', 'all', None, _('show status of all files')),
3395 [('A', 'all', None, _('show status of all files')),
3387 ('m', 'modified', None, _('show only modified files')),
3396 ('m', 'modified', None, _('show only modified files')),
3388 ('a', 'added', None, _('show only added files')),
3397 ('a', 'added', None, _('show only added files')),
3389 ('r', 'removed', None, _('show only removed files')),
3398 ('r', 'removed', None, _('show only removed files')),
3390 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3399 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3391 ('c', 'clean', None, _('show only files without changes')),
3400 ('c', 'clean', None, _('show only files without changes')),
3392 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3401 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3393 ('i', 'ignored', None, _('show only ignored files')),
3402 ('i', 'ignored', None, _('show only ignored files')),
3394 ('n', 'no-status', None, _('hide status prefix')),
3403 ('n', 'no-status', None, _('hide status prefix')),
3395 ('C', 'copies', None, _('show source of copied files')),
3404 ('C', 'copies', None, _('show source of copied files')),
3396 ('0', 'print0', None,
3405 ('0', 'print0', None,
3397 _('end filenames with NUL, for use with xargs')),
3406 _('end filenames with NUL, for use with xargs')),
3398 ('', 'rev', [], _('show difference from revision')),
3407 ('', 'rev', [], _('show difference from revision')),
3399 ] + walkopts,
3408 ] + walkopts,
3400 _('[OPTION]... [FILE]...')),
3409 _('[OPTION]... [FILE]...')),
3401 "tag":
3410 "tag":
3402 (tag,
3411 (tag,
3403 [('f', 'force', None, _('replace existing tag')),
3412 [('f', 'force', None, _('replace existing tag')),
3404 ('l', 'local', None, _('make the tag local')),
3413 ('l', 'local', None, _('make the tag local')),
3405 ('r', 'rev', '', _('revision to tag')),
3414 ('r', 'rev', '', _('revision to tag')),
3406 ('', 'remove', None, _('remove a tag')),
3415 ('', 'remove', None, _('remove a tag')),
3407 # -l/--local is already there, commitopts cannot be used
3416 # -l/--local is already there, commitopts cannot be used
3408 ('m', 'message', '', _('use <text> as commit message')),
3417 ('m', 'message', '', _('use <text> as commit message')),
3409 ] + commitopts2,
3418 ] + commitopts2,
3410 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3419 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3411 "tags": (tags, []),
3420 "tags": (tags, []),
3412 "tip":
3421 "tip":
3413 (tip,
3422 (tip,
3414 [('p', 'patch', None, _('show patch')),
3423 [('p', 'patch', None, _('show patch')),
3415 ] + templateopts,
3424 ] + templateopts,
3416 _('[-p]')),
3425 _('[-p]')),
3417 "unbundle":
3426 "unbundle":
3418 (unbundle,
3427 (unbundle,
3419 [('u', 'update', None,
3428 [('u', 'update', None,
3420 _('update to new tip if changesets were unbundled'))],
3429 _('update to new tip if changesets were unbundled'))],
3421 _('[-u] FILE...')),
3430 _('[-u] FILE...')),
3422 "^update|up|checkout|co":
3431 "^update|up|checkout|co":
3423 (update,
3432 (update,
3424 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3433 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3425 ('d', 'date', '', _('tipmost revision matching date')),
3434 ('d', 'date', '', _('tipmost revision matching date')),
3426 ('r', 'rev', '', _('revision'))],
3435 ('r', 'rev', '', _('revision'))],
3427 _('[-C] [-d DATE] [[-r] REV]')),
3436 _('[-C] [-d DATE] [[-r] REV]')),
3428 "verify": (verify, []),
3437 "verify": (verify, []),
3429 "version": (version_, []),
3438 "version": (version_, []),
3430 }
3439 }
3431
3440
3432 norepo = ("clone init version help debugcomplete debugdata"
3441 norepo = ("clone init version help debugcomplete debugdata"
3433 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3442 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3434 optionalrepo = ("identify paths serve showconfig debugancestor")
3443 optionalrepo = ("identify paths serve showconfig debugancestor")
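Each entry in the command table above maps a name (a leading '^' marks it for the short help listing, '|' separates aliases) to a tuple of (callback, option list, synopsis); every option is a (short flag, long flag, default, help) tuple, and shared option lists such as walkopts or templateopts are appended to the per-command ones. A minimal sketch of that shape with a made-up command; real entries also wrap their help strings in _() for translation:

def nocommand(ui, repo, **opts):
    """illustrative callback: does nothing"""
    ui.status("nothing to do\n")

sketch_table = {
    "^nocommand|noop":
        (nocommand,
         [('q', 'quiet', None, 'suppress output'),   # (short, long, default, help)
          ('r', 'rev', '', 'revision to ignore')],
         '[-q] [-r REV]'),                           # synopsis shown by "hg help"
}
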
@@ -1,2135 +1,2156
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store
13 import os, time, util, extensions, hook, inspect, error
13 import os, time, util, extensions, hook, inspect, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = util.set(('lookup', 'changegroupsubset'))
18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 supported = ('revlogv1', 'store', 'fncache')
19 supported = ('revlogv1', 'store', 'fncache')
20
20
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.root = os.path.realpath(path)
23 self.root = os.path.realpath(path)
24 self.path = os.path.join(self.root, ".hg")
24 self.path = os.path.join(self.root, ".hg")
25 self.origroot = path
25 self.origroot = path
26 self.opener = util.opener(self.path)
26 self.opener = util.opener(self.path)
27 self.wopener = util.opener(self.root)
27 self.wopener = util.opener(self.root)
28
28
29 if not os.path.isdir(self.path):
29 if not os.path.isdir(self.path):
30 if create:
30 if create:
31 if not os.path.exists(path):
31 if not os.path.exists(path):
32 os.mkdir(path)
32 os.mkdir(path)
33 os.mkdir(self.path)
33 os.mkdir(self.path)
34 requirements = ["revlogv1"]
34 requirements = ["revlogv1"]
35 if parentui.configbool('format', 'usestore', True):
35 if parentui.configbool('format', 'usestore', True):
36 os.mkdir(os.path.join(self.path, "store"))
36 os.mkdir(os.path.join(self.path, "store"))
37 requirements.append("store")
37 requirements.append("store")
38 if parentui.configbool('format', 'usefncache', True):
38 if parentui.configbool('format', 'usefncache', True):
39 requirements.append("fncache")
39 requirements.append("fncache")
40 # create an invalid changelog
40 # create an invalid changelog
41 self.opener("00changelog.i", "a").write(
41 self.opener("00changelog.i", "a").write(
42 '\0\0\0\2' # represents revlogv2
42 '\0\0\0\2' # represents revlogv2
43 ' dummy changelog to prevent using the old repo layout'
43 ' dummy changelog to prevent using the old repo layout'
44 )
44 )
45 reqfile = self.opener("requires", "w")
45 reqfile = self.opener("requires", "w")
46 for r in requirements:
46 for r in requirements:
47 reqfile.write("%s\n" % r)
47 reqfile.write("%s\n" % r)
48 reqfile.close()
48 reqfile.close()
49 else:
49 else:
50 raise error.RepoError(_("repository %s not found") % path)
50 raise error.RepoError(_("repository %s not found") % path)
51 elif create:
51 elif create:
52 raise error.RepoError(_("repository %s already exists") % path)
52 raise error.RepoError(_("repository %s already exists") % path)
53 else:
53 else:
54 # find requirements
54 # find requirements
55 requirements = []
55 requirements = []
56 try:
56 try:
57 requirements = self.opener("requires").read().splitlines()
57 requirements = self.opener("requires").read().splitlines()
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise error.RepoError(_("requirement '%s' not supported") % r)
60 raise error.RepoError(_("requirement '%s' not supported") % r)
61 except IOError, inst:
61 except IOError, inst:
62 if inst.errno != errno.ENOENT:
62 if inst.errno != errno.ENOENT:
63 raise
63 raise
64
64
65 self.store = store.store(requirements, self.path, util.opener)
65 self.store = store.store(requirements, self.path, util.opener)
66 self.spath = self.store.path
66 self.spath = self.store.path
67 self.sopener = self.store.opener
67 self.sopener = self.store.opener
68 self.sjoin = self.store.join
68 self.sjoin = self.store.join
69 self.opener.createmode = self.store.createmode
69 self.opener.createmode = self.store.createmode
70
70
71 self.ui = ui.ui(parentui=parentui)
71 self.ui = ui.ui(parentui=parentui)
72 try:
72 try:
73 self.ui.readconfig(self.join("hgrc"), self.root)
73 self.ui.readconfig(self.join("hgrc"), self.root)
74 extensions.loadall(self.ui)
74 extensions.loadall(self.ui)
75 except IOError:
75 except IOError:
76 pass
76 pass
77
77
78 self.tagscache = None
78 self.tagscache = None
79 self._tagstypecache = None
79 self._tagstypecache = None
80 self.branchcache = None
80 self.branchcache = None
81 self._ubranchcache = None # UTF-8 version of branchcache
81 self._ubranchcache = None # UTF-8 version of branchcache
82 self._branchcachetip = None
82 self._branchcachetip = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.filterpats = {}
84 self.filterpats = {}
85 self._datafilters = {}
85 self._datafilters = {}
86 self._transref = self._lockref = self._wlockref = None
86 self._transref = self._lockref = self._wlockref = None
87
87
88 def __getattr__(self, name):
88 def __getattr__(self, name):
89 if name == 'changelog':
89 if name == 'changelog':
90 self.changelog = changelog.changelog(self.sopener)
90 self.changelog = changelog.changelog(self.sopener)
91 self.sopener.defversion = self.changelog.version
91 self.sopener.defversion = self.changelog.version
92 return self.changelog
92 return self.changelog
93 if name == 'manifest':
93 if name == 'manifest':
94 self.changelog
94 self.changelog
95 self.manifest = manifest.manifest(self.sopener)
95 self.manifest = manifest.manifest(self.sopener)
96 return self.manifest
96 return self.manifest
97 if name == 'dirstate':
97 if name == 'dirstate':
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
98 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 return self.dirstate
99 return self.dirstate
100 else:
100 else:
101 raise AttributeError(name)
101 raise AttributeError(name)
102
102
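The __getattr__ hook above lazily builds the changelog, manifest and dirstate the first time they are touched and then stores them as ordinary instance attributes, so every later access bypasses __getattr__ entirely. A self-contained sketch of the same caching pattern; the class and attribute names below are made up, not Mercurial APIs:

class Lazy(object):
    def __getattr__(self, name):
        # only invoked when normal attribute lookup fails
        if name == 'data':
            self.data = self._load()   # cache as a real instance attribute
            return self.data
        raise AttributeError(name)
    def _load(self):
        return [1, 2, 3]               # stands in for opening a revlog

l = Lazy()
assert l.data is l.data                # second access never reaches __getattr__
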
103 def __getitem__(self, changeid):
103 def __getitem__(self, changeid):
104 if changeid == None:
104 if changeid == None:
105 return context.workingctx(self)
105 return context.workingctx(self)
106 return context.changectx(self, changeid)
106 return context.changectx(self, changeid)
107
107
108 def __nonzero__(self):
108 def __nonzero__(self):
109 return True
109 return True
110
110
111 def __len__(self):
111 def __len__(self):
112 return len(self.changelog)
112 return len(self.changelog)
113
113
114 def __iter__(self):
114 def __iter__(self):
115 for i in xrange(len(self)):
115 for i in xrange(len(self)):
116 yield i
116 yield i
117
117
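With __getitem__, __len__ and __iter__ defined as above, a repository behaves like a sequence of revisions: indexing returns a changectx (or the working-directory context for None), len() is the number of changesets, and iteration yields revision numbers. A hedged usage sketch; the repository path is illustrative and hg.repository() is assumed to be the usual opener from mercurial.hg:

from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/path/to/repo')    # illustrative path
tipctx = repo[len(repo) - 1]                         # changectx of the highest revision
wctx = repo[None]                                    # working directory context
branches = [repo[r].branch() for r in repo]          # iteration yields 0 .. len-1
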
118 def url(self):
118 def url(self):
119 return 'file:' + self.root
119 return 'file:' + self.root
120
120
121 def hook(self, name, throw=False, **args):
121 def hook(self, name, throw=False, **args):
122 return hook.hook(self.ui, self, name, throw, **args)
122 return hook.hook(self.ui, self, name, throw, **args)
123
123
124 tag_disallowed = ':\r\n'
124 tag_disallowed = ':\r\n'
125
125
126 def _tag(self, names, node, message, local, user, date, parent=None,
126 def _tag(self, names, node, message, local, user, date, parent=None,
127 extra={}):
127 extra={}):
128 use_dirstate = parent is None
128 use_dirstate = parent is None
129
129
130 if isinstance(names, str):
130 if isinstance(names, str):
131 allchars = names
131 allchars = names
132 names = (names,)
132 names = (names,)
133 else:
133 else:
134 allchars = ''.join(names)
134 allchars = ''.join(names)
135 for c in self.tag_disallowed:
135 for c in self.tag_disallowed:
136 if c in allchars:
136 if c in allchars:
137 raise util.Abort(_('%r cannot be used in a tag name') % c)
137 raise util.Abort(_('%r cannot be used in a tag name') % c)
138
138
139 for name in names:
139 for name in names:
140 self.hook('pretag', throw=True, node=hex(node), tag=name,
140 self.hook('pretag', throw=True, node=hex(node), tag=name,
141 local=local)
141 local=local)
142
142
143 def writetags(fp, names, munge, prevtags):
143 def writetags(fp, names, munge, prevtags):
144 fp.seek(0, 2)
144 fp.seek(0, 2)
145 if prevtags and prevtags[-1] != '\n':
145 if prevtags and prevtags[-1] != '\n':
146 fp.write('\n')
146 fp.write('\n')
147 for name in names:
147 for name in names:
148 m = munge and munge(name) or name
148 m = munge and munge(name) or name
149 if self._tagstypecache and name in self._tagstypecache:
149 if self._tagstypecache and name in self._tagstypecache:
150 old = self.tagscache.get(name, nullid)
150 old = self.tagscache.get(name, nullid)
151 fp.write('%s %s\n' % (hex(old), m))
151 fp.write('%s %s\n' % (hex(old), m))
152 fp.write('%s %s\n' % (hex(node), m))
152 fp.write('%s %s\n' % (hex(node), m))
153 fp.close()
153 fp.close()
154
154
155 prevtags = ''
155 prevtags = ''
156 if local:
156 if local:
157 try:
157 try:
158 fp = self.opener('localtags', 'r+')
158 fp = self.opener('localtags', 'r+')
159 except IOError, err:
159 except IOError, err:
160 fp = self.opener('localtags', 'a')
160 fp = self.opener('localtags', 'a')
161 else:
161 else:
162 prevtags = fp.read()
162 prevtags = fp.read()
163
163
164 # local tags are stored in the current charset
164 # local tags are stored in the current charset
165 writetags(fp, names, None, prevtags)
165 writetags(fp, names, None, prevtags)
166 for name in names:
166 for name in names:
167 self.hook('tag', node=hex(node), tag=name, local=local)
167 self.hook('tag', node=hex(node), tag=name, local=local)
168 return
168 return
169
169
170 if use_dirstate:
170 if use_dirstate:
171 try:
171 try:
172 fp = self.wfile('.hgtags', 'rb+')
172 fp = self.wfile('.hgtags', 'rb+')
173 except IOError, err:
173 except IOError, err:
174 fp = self.wfile('.hgtags', 'ab')
174 fp = self.wfile('.hgtags', 'ab')
175 else:
175 else:
176 prevtags = fp.read()
176 prevtags = fp.read()
177 else:
177 else:
178 try:
178 try:
179 prevtags = self.filectx('.hgtags', parent).data()
179 prevtags = self.filectx('.hgtags', parent).data()
180 except error.LookupError:
180 except error.LookupError:
181 pass
181 pass
182 fp = self.wfile('.hgtags', 'wb')
182 fp = self.wfile('.hgtags', 'wb')
183 if prevtags:
183 if prevtags:
184 fp.write(prevtags)
184 fp.write(prevtags)
185
185
186 # committed tags are stored in UTF-8
186 # committed tags are stored in UTF-8
187 writetags(fp, names, util.fromlocal, prevtags)
187 writetags(fp, names, util.fromlocal, prevtags)
188
188
189 if use_dirstate and '.hgtags' not in self.dirstate:
189 if use_dirstate and '.hgtags' not in self.dirstate:
190 self.add(['.hgtags'])
190 self.add(['.hgtags'])
191
191
192 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
192 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
193 extra=extra)
193 extra=extra)
194
194
195 for name in names:
195 for name in names:
196 self.hook('tag', node=hex(node), tag=name, local=local)
196 self.hook('tag', node=hex(node), tag=name, local=local)
197
197
198 return tagnode
198 return tagnode
199
199
200 def tag(self, names, node, message, local, user, date):
200 def tag(self, names, node, message, local, user, date):
201 '''tag a revision with one or more symbolic names.
201 '''tag a revision with one or more symbolic names.
202
202
203 names is a list of strings or, when adding a single tag, names may be a
203 names is a list of strings or, when adding a single tag, names may be a
204 string.
204 string.
205
205
206 if local is True, the tags are stored in a per-repository file.
206 if local is True, the tags are stored in a per-repository file.
207 otherwise, they are stored in the .hgtags file, and a new
207 otherwise, they are stored in the .hgtags file, and a new
208 changeset is committed with the change.
208 changeset is committed with the change.
209
209
210 keyword arguments:
210 keyword arguments:
211
211
212 local: whether to store tags in non-version-controlled file
212 local: whether to store tags in non-version-controlled file
213 (default False)
213 (default False)
214
214
215 message: commit message to use if committing
215 message: commit message to use if committing
216
216
217 user: name of user to use if committing
217 user: name of user to use if committing
218
218
219 date: date tuple to use if committing'''
219 date: date tuple to use if committing'''
220
220
221 for x in self.status()[:5]:
221 for x in self.status()[:5]:
222 if '.hgtags' in x:
222 if '.hgtags' in x:
223 raise util.Abort(_('working copy of .hgtags is changed '
223 raise util.Abort(_('working copy of .hgtags is changed '
224 '(please commit .hgtags manually)'))
224 '(please commit .hgtags manually)'))
225
225
226 self._tag(names, node, message, local, user, date)
226 self._tag(names, node, message, local, user, date)
227
227
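A hedged sketch of driving the tagging API above: a list of names creates several tags on one node in a single .hgtags commit, while local=True writes to the uncommitted .hg/localtags file and commits nothing. The repository path and user string are illustrative:

from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/path/to/repo')    # illustrative path
node = repo.changelog.tip()
# one changeset committing both names to .hgtags
repo.tag(['v1.0', 'stable'], node, 'Added tags v1.0/stable', False,
         'example <user@example.com>', None)
# a local tag: stored in .hg/localtags only, tagtype() reports it as 'local'
repo.tag('wip', node, '', True, 'example <user@example.com>', None)
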
228 def tags(self):
228 def tags(self):
229 '''return a mapping of tag to node'''
229 '''return a mapping of tag to node'''
230 if self.tagscache:
230 if self.tagscache:
231 return self.tagscache
231 return self.tagscache
232
232
233 globaltags = {}
233 globaltags = {}
234 tagtypes = {}
234 tagtypes = {}
235
235
236 def readtags(lines, fn, tagtype):
236 def readtags(lines, fn, tagtype):
237 filetags = {}
237 filetags = {}
238 count = 0
238 count = 0
239
239
240 def warn(msg):
240 def warn(msg):
241 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
241 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
242
242
243 for l in lines:
243 for l in lines:
244 count += 1
244 count += 1
245 if not l:
245 if not l:
246 continue
246 continue
247 s = l.split(" ", 1)
247 s = l.split(" ", 1)
248 if len(s) != 2:
248 if len(s) != 2:
249 warn(_("cannot parse entry"))
249 warn(_("cannot parse entry"))
250 continue
250 continue
251 node, key = s
251 node, key = s
252 key = util.tolocal(key.strip()) # stored in UTF-8
252 key = util.tolocal(key.strip()) # stored in UTF-8
253 try:
253 try:
254 bin_n = bin(node)
254 bin_n = bin(node)
255 except TypeError:
255 except TypeError:
256 warn(_("node '%s' is not well formed") % node)
256 warn(_("node '%s' is not well formed") % node)
257 continue
257 continue
258 if bin_n not in self.changelog.nodemap:
258 if bin_n not in self.changelog.nodemap:
259 warn(_("tag '%s' refers to unknown node") % key)
259 warn(_("tag '%s' refers to unknown node") % key)
260 continue
260 continue
261
261
262 h = []
262 h = []
263 if key in filetags:
263 if key in filetags:
264 n, h = filetags[key]
264 n, h = filetags[key]
265 h.append(n)
265 h.append(n)
266 filetags[key] = (bin_n, h)
266 filetags[key] = (bin_n, h)
267
267
268 for k, nh in filetags.iteritems():
268 for k, nh in filetags.iteritems():
269 if k not in globaltags:
269 if k not in globaltags:
270 globaltags[k] = nh
270 globaltags[k] = nh
271 tagtypes[k] = tagtype
271 tagtypes[k] = tagtype
272 continue
272 continue
273
273
274 # we prefer the global tag if:
274 # we prefer the global tag if:
275 # it supersedes us OR
275 # it supersedes us OR
276 # we mutually supersede and it has a higher rank
276 # we mutually supersede and it has a higher rank
277 # otherwise we win because we're tip-most
277 # otherwise we win because we're tip-most
278 an, ah = nh
278 an, ah = nh
279 bn, bh = globaltags[k]
279 bn, bh = globaltags[k]
280 if (bn != an and an in bh and
280 if (bn != an and an in bh and
281 (bn not in ah or len(bh) > len(ah))):
281 (bn not in ah or len(bh) > len(ah))):
282 an = bn
282 an = bn
283 ah.extend([n for n in bh if n not in ah])
283 ah.extend([n for n in bh if n not in ah])
284 globaltags[k] = an, ah
284 globaltags[k] = an, ah
285 tagtypes[k] = tagtype
285 tagtypes[k] = tagtype
286
286
287 # read the tags file from each head, ending with the tip
287 # read the tags file from each head, ending with the tip
288 f = None
288 f = None
289 for rev, node, fnode in self._hgtagsnodes():
289 for rev, node, fnode in self._hgtagsnodes():
290 f = (f and f.filectx(fnode) or
290 f = (f and f.filectx(fnode) or
291 self.filectx('.hgtags', fileid=fnode))
291 self.filectx('.hgtags', fileid=fnode))
292 readtags(f.data().splitlines(), f, "global")
292 readtags(f.data().splitlines(), f, "global")
293
293
294 try:
294 try:
295 data = util.fromlocal(self.opener("localtags").read())
295 data = util.fromlocal(self.opener("localtags").read())
296 # localtags are stored in the local character set
296 # localtags are stored in the local character set
297 # while the internal tag table is stored in UTF-8
297 # while the internal tag table is stored in UTF-8
298 readtags(data.splitlines(), "localtags", "local")
298 readtags(data.splitlines(), "localtags", "local")
299 except IOError:
299 except IOError:
300 pass
300 pass
301
301
302 self.tagscache = {}
302 self.tagscache = {}
303 self._tagstypecache = {}
303 self._tagstypecache = {}
304 for k, nh in globaltags.iteritems():
304 for k, nh in globaltags.iteritems():
305 n = nh[0]
305 n = nh[0]
306 if n != nullid:
306 if n != nullid:
307 self.tagscache[k] = n
307 self.tagscache[k] = n
308 self._tagstypecache[k] = tagtypes[k]
308 self._tagstypecache[k] = tagtypes[k]
309 self.tagscache['tip'] = self.changelog.tip()
309 self.tagscache['tip'] = self.changelog.tip()
310 return self.tagscache
310 return self.tagscache
311
311
312 def tagtype(self, tagname):
312 def tagtype(self, tagname):
313 '''
313 '''
314 return the type of the given tag. result can be:
314 return the type of the given tag. result can be:
315
315
316 'local' : a local tag
316 'local' : a local tag
317 'global' : a global tag
317 'global' : a global tag
318 None : tag does not exist
318 None : tag does not exist
319 '''
319 '''
320
320
321 self.tags()
321 self.tags()
322
322
323 return self._tagstypecache.get(tagname)
323 return self._tagstypecache.get(tagname)
324
324
325 def _hgtagsnodes(self):
325 def _hgtagsnodes(self):
326 heads = self.heads()
326 heads = self.heads()
327 heads.reverse()
327 heads.reverse()
328 last = {}
328 last = {}
329 ret = []
329 ret = []
330 for node in heads:
330 for node in heads:
331 c = self[node]
331 c = self[node]
332 rev = c.rev()
332 rev = c.rev()
333 try:
333 try:
334 fnode = c.filenode('.hgtags')
334 fnode = c.filenode('.hgtags')
335 except error.LookupError:
335 except error.LookupError:
336 continue
336 continue
337 ret.append((rev, node, fnode))
337 ret.append((rev, node, fnode))
338 if fnode in last:
338 if fnode in last:
339 ret[last[fnode]] = None
339 ret[last[fnode]] = None
340 last[fnode] = len(ret) - 1
340 last[fnode] = len(ret) - 1
341 return [item for item in ret if item]
341 return [item for item in ret if item]
342
342
343 def tagslist(self):
343 def tagslist(self):
344 '''return a list of tags ordered by revision'''
344 '''return a list of tags ordered by revision'''
345 l = []
345 l = []
346 for t, n in self.tags().iteritems():
346 for t, n in self.tags().iteritems():
347 try:
347 try:
348 r = self.changelog.rev(n)
348 r = self.changelog.rev(n)
349 except:
349 except:
350 r = -2 # sort to the beginning of the list if unknown
350 r = -2 # sort to the beginning of the list if unknown
351 l.append((r, t, n))
351 l.append((r, t, n))
352 return [(t, n) for r, t, n in util.sort(l)]
352 return [(t, n) for r, t, n in util.sort(l)]
353
353
354 def nodetags(self, node):
354 def nodetags(self, node):
355 '''return the tags associated with a node'''
355 '''return the tags associated with a node'''
356 if not self.nodetagscache:
356 if not self.nodetagscache:
357 self.nodetagscache = {}
357 self.nodetagscache = {}
358 for t, n in self.tags().iteritems():
358 for t, n in self.tags().iteritems():
359 self.nodetagscache.setdefault(n, []).append(t)
359 self.nodetagscache.setdefault(n, []).append(t)
360 return self.nodetagscache.get(node, [])
360 return self.nodetagscache.get(node, [])
361
361
362 def _branchtags(self, partial, lrev):
362 def _branchtags(self, partial, lrev):
363 # TODO: rename this function?
363 # TODO: rename this function?
364 tiprev = len(self) - 1
364 tiprev = len(self) - 1
365 if lrev != tiprev:
365 if lrev != tiprev:
366 self._updatebranchcache(partial, lrev+1, tiprev+1)
366 self._updatebranchcache(partial, lrev+1, tiprev+1)
367 self._writebranchcache(partial, self.changelog.tip(), tiprev)
367 self._writebranchcache(partial, self.changelog.tip(), tiprev)
368
368
369 return partial
369 return partial
370
370
371 def _branchheads(self):
371 def _branchheads(self):
372 tip = self.changelog.tip()
372 tip = self.changelog.tip()
373 if self.branchcache is not None and self._branchcachetip == tip:
373 if self.branchcache is not None and self._branchcachetip == tip:
374 return self.branchcache
374 return self.branchcache
375
375
376 oldtip = self._branchcachetip
376 oldtip = self._branchcachetip
377 self._branchcachetip = tip
377 self._branchcachetip = tip
378 if self.branchcache is None:
378 if self.branchcache is None:
379 self.branchcache = {} # avoid recursion in changectx
379 self.branchcache = {} # avoid recursion in changectx
380 else:
380 else:
381 self.branchcache.clear() # keep using the same dict
381 self.branchcache.clear() # keep using the same dict
382 if oldtip is None or oldtip not in self.changelog.nodemap:
382 if oldtip is None or oldtip not in self.changelog.nodemap:
383 partial, last, lrev = self._readbranchcache()
383 partial, last, lrev = self._readbranchcache()
384 else:
384 else:
385 lrev = self.changelog.rev(oldtip)
385 lrev = self.changelog.rev(oldtip)
386 partial = self._ubranchcache
386 partial = self._ubranchcache
387
387
388 self._branchtags(partial, lrev)
388 self._branchtags(partial, lrev)
389 # this private cache holds all heads (not just tips)
389 # this private cache holds all heads (not just tips)
390 self._ubranchcache = partial
390 self._ubranchcache = partial
391
391
392 # the branch cache is stored on disk as UTF-8, but in the local
392 # the branch cache is stored on disk as UTF-8, but in the local
393 # charset internally
393 # charset internally
394 for k, v in partial.iteritems():
394 for k, v in partial.iteritems():
395 self.branchcache[util.tolocal(k)] = v
395 self.branchcache[util.tolocal(k)] = v
396 return self.branchcache
396 return self.branchcache
397
397
398
398
399 def branchtags(self):
399 def branchtags(self):
400 '''return a dict where branch names map to the tipmost head of
400 '''return a dict where branch names map to the tipmost head of
401 the branch'''
401 the branch, open heads come before closed'''
402 return dict([(k, v[-1]) for (k, v) in self._branchheads().iteritems()])
402 bt = {}
403 for bn, heads in self._branchheads().iteritems():
404 head = None
405 for i in range(len(heads)-1, -1, -1):
406 h = heads[i]
407 if 'close' not in self.changelog.read(h)[5]:
408 head = h
409 break
410 # no open heads were found
411 if head is None:
412 head = heads[-1]
413 bt[bn] = head
414 return bt
415
403
416
404 def _readbranchcache(self):
417 def _readbranchcache(self):
405 partial = {}
418 partial = {}
406 try:
419 try:
407 f = self.opener("branchheads.cache")
420 f = self.opener("branchheads.cache")
408 lines = f.read().split('\n')
421 lines = f.read().split('\n')
409 f.close()
422 f.close()
410 except (IOError, OSError):
423 except (IOError, OSError):
411 return {}, nullid, nullrev
424 return {}, nullid, nullrev
412
425
413 try:
426 try:
414 last, lrev = lines.pop(0).split(" ", 1)
427 last, lrev = lines.pop(0).split(" ", 1)
415 last, lrev = bin(last), int(lrev)
428 last, lrev = bin(last), int(lrev)
416 if lrev >= len(self) or self[lrev].node() != last:
429 if lrev >= len(self) or self[lrev].node() != last:
417 # invalidate the cache
430 # invalidate the cache
418 raise ValueError('invalidating branch cache (tip differs)')
431 raise ValueError('invalidating branch cache (tip differs)')
419 for l in lines:
432 for l in lines:
420 if not l: continue
433 if not l: continue
421 node, label = l.split(" ", 1)
434 node, label = l.split(" ", 1)
422 partial.setdefault(label.strip(), []).append(bin(node))
435 partial.setdefault(label.strip(), []).append(bin(node))
423 except KeyboardInterrupt:
436 except KeyboardInterrupt:
424 raise
437 raise
425 except Exception, inst:
438 except Exception, inst:
426 if self.ui.debugflag:
439 if self.ui.debugflag:
427 self.ui.warn(str(inst), '\n')
440 self.ui.warn(str(inst), '\n')
428 partial, last, lrev = {}, nullid, nullrev
441 partial, last, lrev = {}, nullid, nullrev
429 return partial, last, lrev
442 return partial, last, lrev
430
443
431 def _writebranchcache(self, branches, tip, tiprev):
444 def _writebranchcache(self, branches, tip, tiprev):
432 try:
445 try:
433 f = self.opener("branchheads.cache", "w", atomictemp=True)
446 f = self.opener("branchheads.cache", "w", atomictemp=True)
434 f.write("%s %s\n" % (hex(tip), tiprev))
447 f.write("%s %s\n" % (hex(tip), tiprev))
435 for label, nodes in branches.iteritems():
448 for label, nodes in branches.iteritems():
436 for node in nodes:
449 for node in nodes:
437 f.write("%s %s\n" % (hex(node), label))
450 f.write("%s %s\n" % (hex(node), label))
438 f.rename()
451 f.rename()
439 except (IOError, OSError):
452 except (IOError, OSError):
440 pass
453 pass
441
454
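_readbranchcache() and _writebranchcache() above share a simple layout for .hg/branchheads.cache: a header line with the hex tip node and tip revision, then one "hexnode branchname" line per branch head. A sketch of parsing that layout; the node values are shortened placeholders (real ones are 40 hex digits) and the bin() conversion is left out:

sample = (
    "aa55..ff00 7656\n"       # tip node (hex) and tip revision number
    "11aa..22bb default\n"    # one line per head: node, space, branch name
    "33cc..44dd stable\n"
    "55ee..66ff stable\n"     # a branch may list several heads
)
lines = sample.split('\n')
last, lrev = lines.pop(0).split(' ', 1)
partial = {}
for l in lines:
    if not l:
        continue
    node, label = l.split(' ', 1)
    partial.setdefault(label.strip(), []).append(node)
assert partial['stable'] == ['33cc..44dd', '55ee..66ff']
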
442 def _updatebranchcache(self, partial, start, end):
455 def _updatebranchcache(self, partial, start, end):
443 for r in xrange(start, end):
456 for r in xrange(start, end):
444 c = self[r]
457 c = self[r]
445 b = c.branch()
458 b = c.branch()
446 bheads = partial.setdefault(b, [])
459 bheads = partial.setdefault(b, [])
447 bheads.append(c.node())
460 bheads.append(c.node())
448 for p in c.parents():
461 for p in c.parents():
449 pn = p.node()
462 pn = p.node()
450 if pn in bheads:
463 if pn in bheads:
451 bheads.remove(pn)
464 bheads.remove(pn)
452
465
453 def lookup(self, key):
466 def lookup(self, key):
454 if isinstance(key, int):
467 if isinstance(key, int):
455 return self.changelog.node(key)
468 return self.changelog.node(key)
456 elif key == '.':
469 elif key == '.':
457 return self.dirstate.parents()[0]
470 return self.dirstate.parents()[0]
458 elif key == 'null':
471 elif key == 'null':
459 return nullid
472 return nullid
460 elif key == 'tip':
473 elif key == 'tip':
461 return self.changelog.tip()
474 return self.changelog.tip()
462 n = self.changelog._match(key)
475 n = self.changelog._match(key)
463 if n:
476 if n:
464 return n
477 return n
465 if key in self.tags():
478 if key in self.tags():
466 return self.tags()[key]
479 return self.tags()[key]
467 if key in self.branchtags():
480 if key in self.branchtags():
468 return self.branchtags()[key]
481 return self.branchtags()[key]
469 n = self.changelog._partialmatch(key)
482 n = self.changelog._partialmatch(key)
470 if n:
483 if n:
471 return n
484 return n
472 try:
485 try:
473 if len(key) == 20:
486 if len(key) == 20:
474 key = hex(key)
487 key = hex(key)
475 except:
488 except:
476 pass
489 pass
477 raise error.RepoError(_("unknown revision '%s'") % key)
490 raise error.RepoError(_("unknown revision '%s'") % key)
478
491
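lookup() above resolves a user-supplied key by trying, in order: an integer revision, the special names '.', 'null' and 'tip', an exact changelog match, tag names, branch names (through branchtags(), so after this change a branch name resolves to its tip-most open head), a unique node prefix, and finally a raw binary node. A hedged usage sketch; the path is illustrative and the repository is assumed to be non-empty with a 'default' branch:

from mercurial import ui as uimod, hg

repo = hg.repository(uimod.ui(), '/path/to/repo')    # illustrative path
assert repo.lookup('tip') == repo.changelog.tip()
assert repo.lookup(0) == repo.changelog.node(0)      # integer revision number
assert repo.lookup('.') == repo.dirstate.parents()[0]
node = repo.lookup('default')                        # tip-most open head of that branch
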
479 def local(self):
492 def local(self):
480 return True
493 return True
481
494
482 def join(self, f):
495 def join(self, f):
483 return os.path.join(self.path, f)
496 return os.path.join(self.path, f)
484
497
485 def wjoin(self, f):
498 def wjoin(self, f):
486 return os.path.join(self.root, f)
499 return os.path.join(self.root, f)
487
500
488 def rjoin(self, f):
501 def rjoin(self, f):
489 return os.path.join(self.root, util.pconvert(f))
502 return os.path.join(self.root, util.pconvert(f))
490
503
491 def file(self, f):
504 def file(self, f):
492 if f[0] == '/':
505 if f[0] == '/':
493 f = f[1:]
506 f = f[1:]
494 return filelog.filelog(self.sopener, f)
507 return filelog.filelog(self.sopener, f)
495
508
496 def changectx(self, changeid):
509 def changectx(self, changeid):
497 return self[changeid]
510 return self[changeid]
498
511
499 def parents(self, changeid=None):
512 def parents(self, changeid=None):
500 '''get list of changectxs for parents of changeid'''
513 '''get list of changectxs for parents of changeid'''
501 return self[changeid].parents()
514 return self[changeid].parents()
502
515
503 def filectx(self, path, changeid=None, fileid=None):
516 def filectx(self, path, changeid=None, fileid=None):
504 """changeid can be a changeset revision, node, or tag.
517 """changeid can be a changeset revision, node, or tag.
505 fileid can be a file revision or node."""
518 fileid can be a file revision or node."""
506 return context.filectx(self, path, changeid, fileid)
519 return context.filectx(self, path, changeid, fileid)
507
520
508 def getcwd(self):
521 def getcwd(self):
509 return self.dirstate.getcwd()
522 return self.dirstate.getcwd()
510
523
511 def pathto(self, f, cwd=None):
524 def pathto(self, f, cwd=None):
512 return self.dirstate.pathto(f, cwd)
525 return self.dirstate.pathto(f, cwd)
513
526
514 def wfile(self, f, mode='r'):
527 def wfile(self, f, mode='r'):
515 return self.wopener(f, mode)
528 return self.wopener(f, mode)
516
529
517 def _link(self, f):
530 def _link(self, f):
518 return os.path.islink(self.wjoin(f))
531 return os.path.islink(self.wjoin(f))
519
532
520 def _filter(self, filter, filename, data):
533 def _filter(self, filter, filename, data):
521 if filter not in self.filterpats:
534 if filter not in self.filterpats:
522 l = []
535 l = []
523 for pat, cmd in self.ui.configitems(filter):
536 for pat, cmd in self.ui.configitems(filter):
524 if cmd == '!':
537 if cmd == '!':
525 continue
538 continue
526 mf = util.matcher(self.root, "", [pat], [], [])[1]
539 mf = util.matcher(self.root, "", [pat], [], [])[1]
527 fn = None
540 fn = None
528 params = cmd
541 params = cmd
529 for name, filterfn in self._datafilters.iteritems():
542 for name, filterfn in self._datafilters.iteritems():
530 if cmd.startswith(name):
543 if cmd.startswith(name):
531 fn = filterfn
544 fn = filterfn
532 params = cmd[len(name):].lstrip()
545 params = cmd[len(name):].lstrip()
533 break
546 break
534 if not fn:
547 if not fn:
535 fn = lambda s, c, **kwargs: util.filter(s, c)
548 fn = lambda s, c, **kwargs: util.filter(s, c)
536 # Wrap old filters not supporting keyword arguments
549 # Wrap old filters not supporting keyword arguments
537 if not inspect.getargspec(fn)[2]:
550 if not inspect.getargspec(fn)[2]:
538 oldfn = fn
551 oldfn = fn
539 fn = lambda s, c, **kwargs: oldfn(s, c)
552 fn = lambda s, c, **kwargs: oldfn(s, c)
540 l.append((mf, fn, params))
553 l.append((mf, fn, params))
541 self.filterpats[filter] = l
554 self.filterpats[filter] = l
542
555
543 for mf, fn, cmd in self.filterpats[filter]:
556 for mf, fn, cmd in self.filterpats[filter]:
544 if mf(filename):
557 if mf(filename):
545 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
558 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
546 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
559 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
547 break
560 break
548
561
549 return data
562 return data
550
563
551 def adddatafilter(self, name, filter):
564 def adddatafilter(self, name, filter):
552 self._datafilters[name] = filter
565 self._datafilters[name] = filter
553
566
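_filter() above matches a file against the patterns configured in the hgrc [encode]/[decode] sections and pipes its data either through an external command or through an in-process filter registered with adddatafilter(). A hedged sketch of registering such a filter from an extension; the 'upper:' name and behaviour are made up (real users of this hook, such as the win32text extension, follow the same shape):

def uppercase(s, cmd, **kwargs):
    # _filter() passes ui, repo and filename as keyword arguments
    return s.upper()

def reposetup(ui, repo):
    # standard extension hook: register the named filter on local repositories
    if repo.local():
        repo.adddatafilter('upper:', uppercase)

# matching hgrc configuration (illustrative):
#   [encode]
#   **.txt = upper:
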
554 def wread(self, filename):
567 def wread(self, filename):
555 if self._link(filename):
568 if self._link(filename):
556 data = os.readlink(self.wjoin(filename))
569 data = os.readlink(self.wjoin(filename))
557 else:
570 else:
558 data = self.wopener(filename, 'r').read()
571 data = self.wopener(filename, 'r').read()
559 return self._filter("encode", filename, data)
572 return self._filter("encode", filename, data)
560
573
561 def wwrite(self, filename, data, flags):
574 def wwrite(self, filename, data, flags):
562 data = self._filter("decode", filename, data)
575 data = self._filter("decode", filename, data)
563 try:
576 try:
564 os.unlink(self.wjoin(filename))
577 os.unlink(self.wjoin(filename))
565 except OSError:
578 except OSError:
566 pass
579 pass
567 if 'l' in flags:
580 if 'l' in flags:
568 self.wopener.symlink(data, filename)
581 self.wopener.symlink(data, filename)
569 else:
582 else:
570 self.wopener(filename, 'w').write(data)
583 self.wopener(filename, 'w').write(data)
571 if 'x' in flags:
584 if 'x' in flags:
572 util.set_flags(self.wjoin(filename), False, True)
585 util.set_flags(self.wjoin(filename), False, True)
573
586
574 def wwritedata(self, filename, data):
587 def wwritedata(self, filename, data):
575 return self._filter("decode", filename, data)
588 return self._filter("decode", filename, data)
576
589
577 def transaction(self):
590 def transaction(self):
578 if self._transref and self._transref():
591 if self._transref and self._transref():
579 return self._transref().nest()
592 return self._transref().nest()
580
593
581 # abort here if the journal already exists
594 # abort here if the journal already exists
582 if os.path.exists(self.sjoin("journal")):
595 if os.path.exists(self.sjoin("journal")):
583 raise error.RepoError(_("journal already exists - run hg recover"))
596 raise error.RepoError(_("journal already exists - run hg recover"))
584
597
585 # save dirstate for rollback
598 # save dirstate for rollback
586 try:
599 try:
587 ds = self.opener("dirstate").read()
600 ds = self.opener("dirstate").read()
588 except IOError:
601 except IOError:
589 ds = ""
602 ds = ""
590 self.opener("journal.dirstate", "w").write(ds)
603 self.opener("journal.dirstate", "w").write(ds)
591 self.opener("journal.branch", "w").write(self.dirstate.branch())
604 self.opener("journal.branch", "w").write(self.dirstate.branch())
592
605
593 renames = [(self.sjoin("journal"), self.sjoin("undo")),
606 renames = [(self.sjoin("journal"), self.sjoin("undo")),
594 (self.join("journal.dirstate"), self.join("undo.dirstate")),
607 (self.join("journal.dirstate"), self.join("undo.dirstate")),
595 (self.join("journal.branch"), self.join("undo.branch"))]
608 (self.join("journal.branch"), self.join("undo.branch"))]
596 tr = transaction.transaction(self.ui.warn, self.sopener,
609 tr = transaction.transaction(self.ui.warn, self.sopener,
597 self.sjoin("journal"),
610 self.sjoin("journal"),
598 aftertrans(renames),
611 aftertrans(renames),
599 self.store.createmode)
612 self.store.createmode)
600 self._transref = weakref.ref(tr)
613 self._transref = weakref.ref(tr)
601 return tr
614 return tr
602
615
603 def recover(self):
616 def recover(self):
604 l = self.lock()
617 l = self.lock()
605 try:
618 try:
606 if os.path.exists(self.sjoin("journal")):
619 if os.path.exists(self.sjoin("journal")):
607 self.ui.status(_("rolling back interrupted transaction\n"))
620 self.ui.status(_("rolling back interrupted transaction\n"))
608 transaction.rollback(self.sopener, self.sjoin("journal"))
621 transaction.rollback(self.sopener, self.sjoin("journal"))
609 self.invalidate()
622 self.invalidate()
610 return True
623 return True
611 else:
624 else:
612 self.ui.warn(_("no interrupted transaction available\n"))
625 self.ui.warn(_("no interrupted transaction available\n"))
613 return False
626 return False
614 finally:
627 finally:
615 del l
628 del l
616
629
617 def rollback(self):
630 def rollback(self):
618 wlock = lock = None
631 wlock = lock = None
619 try:
632 try:
620 wlock = self.wlock()
633 wlock = self.wlock()
621 lock = self.lock()
634 lock = self.lock()
622 if os.path.exists(self.sjoin("undo")):
635 if os.path.exists(self.sjoin("undo")):
623 self.ui.status(_("rolling back last transaction\n"))
636 self.ui.status(_("rolling back last transaction\n"))
624 transaction.rollback(self.sopener, self.sjoin("undo"))
637 transaction.rollback(self.sopener, self.sjoin("undo"))
625 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
638 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
626 try:
639 try:
627 branch = self.opener("undo.branch").read()
640 branch = self.opener("undo.branch").read()
628 self.dirstate.setbranch(branch)
641 self.dirstate.setbranch(branch)
629 except IOError:
642 except IOError:
630 self.ui.warn(_("Named branch could not be reset, "
643 self.ui.warn(_("Named branch could not be reset, "
631 "current branch still is: %s\n")
644 "current branch still is: %s\n")
632 % util.tolocal(self.dirstate.branch()))
645 % util.tolocal(self.dirstate.branch()))
633 self.invalidate()
646 self.invalidate()
634 self.dirstate.invalidate()
647 self.dirstate.invalidate()
635 else:
648 else:
636 self.ui.warn(_("no rollback information available\n"))
649 self.ui.warn(_("no rollback information available\n"))
637 finally:
650 finally:
638 del lock, wlock
651 del lock, wlock
639
652
640 def invalidate(self):
653 def invalidate(self):
641 for a in "changelog manifest".split():
654 for a in "changelog manifest".split():
642 if a in self.__dict__:
655 if a in self.__dict__:
643 delattr(self, a)
656 delattr(self, a)
644 self.tagscache = None
657 self.tagscache = None
645 self._tagstypecache = None
658 self._tagstypecache = None
646 self.nodetagscache = None
659 self.nodetagscache = None
647 self.branchcache = None
660 self.branchcache = None
648 self._ubranchcache = None
661 self._ubranchcache = None
649 self._branchcachetip = None
662 self._branchcachetip = None
650
663
651 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
664 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
652 try:
665 try:
653 l = lock.lock(lockname, 0, releasefn, desc=desc)
666 l = lock.lock(lockname, 0, releasefn, desc=desc)
654 except error.LockHeld, inst:
667 except error.LockHeld, inst:
655 if not wait:
668 if not wait:
656 raise
669 raise
657 self.ui.warn(_("waiting for lock on %s held by %r\n") %
670 self.ui.warn(_("waiting for lock on %s held by %r\n") %
658 (desc, inst.locker))
671 (desc, inst.locker))
659 # default to 600 seconds timeout
672 # default to 600 seconds timeout
660 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
673 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
661 releasefn, desc=desc)
674 releasefn, desc=desc)
662 if acquirefn:
675 if acquirefn:
663 acquirefn()
676 acquirefn()
664 return l
677 return l
665
678
666 def lock(self, wait=True):
679 def lock(self, wait=True):
667 if self._lockref and self._lockref():
680 if self._lockref and self._lockref():
668 return self._lockref()
681 return self._lockref()
669
682
670 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
683 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
671 _('repository %s') % self.origroot)
684 _('repository %s') % self.origroot)
672 self._lockref = weakref.ref(l)
685 self._lockref = weakref.ref(l)
673 return l
686 return l
674
687
675 def wlock(self, wait=True):
688 def wlock(self, wait=True):
676 if self._wlockref and self._wlockref():
689 if self._wlockref and self._wlockref():
677 return self._wlockref()
690 return self._wlockref()
678
691
679 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
692 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
680 self.dirstate.invalidate, _('working directory of %s') %
693 self.dirstate.invalidate, _('working directory of %s') %
681 self.origroot)
694 self.origroot)
682 self._wlockref = weakref.ref(l)
695 self._wlockref = weakref.ref(l)
683 return l
696 return l
684
697
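lock() and wlock() above hand out reusable, weakly-referenced locks; callers needing both (as rollback() and commit() do) take the working-directory lock first, then the store lock, and release them by dropping the references. A hedged sketch of that calling convention, assuming repo is an open localrepository:

wlock = lock = None
try:
    wlock = repo.wlock()    # working-directory lock first
    lock = repo.lock()      # then the store lock
    # ... modify the store and/or the dirstate here ...
finally:
    del lock, wlock         # dropping the references releases the locks
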
685 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
698 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
686 """
699 """
687 commit an individual file as part of a larger transaction
700 commit an individual file as part of a larger transaction
688 """
701 """
689
702
690 fn = fctx.path()
703 fn = fctx.path()
691 t = fctx.data()
704 t = fctx.data()
692 fl = self.file(fn)
705 fl = self.file(fn)
693 fp1 = manifest1.get(fn, nullid)
706 fp1 = manifest1.get(fn, nullid)
694 fp2 = manifest2.get(fn, nullid)
707 fp2 = manifest2.get(fn, nullid)
695
708
696 meta = {}
709 meta = {}
697 cp = fctx.renamed()
710 cp = fctx.renamed()
698 if cp and cp[0] != fn:
711 if cp and cp[0] != fn:
699 # Mark the new revision of this file as a copy of another
712 # Mark the new revision of this file as a copy of another
700 # file. This copy data will effectively act as a parent
713 # file. This copy data will effectively act as a parent
701 # of this new revision. If this is a merge, the first
714 # of this new revision. If this is a merge, the first
702 # parent will be the nullid (meaning "look up the copy data")
715 # parent will be the nullid (meaning "look up the copy data")
703 # and the second one will be the other parent. For example:
716 # and the second one will be the other parent. For example:
704 #
717 #
705 # 0 --- 1 --- 3 rev1 changes file foo
718 # 0 --- 1 --- 3 rev1 changes file foo
706 # \ / rev2 renames foo to bar and changes it
719 # \ / rev2 renames foo to bar and changes it
707 # \- 2 -/ rev3 should have bar with all changes and
720 # \- 2 -/ rev3 should have bar with all changes and
708 # should record that bar descends from
721 # should record that bar descends from
709 # bar in rev2 and foo in rev1
722 # bar in rev2 and foo in rev1
710 #
723 #
711 # this allows this merge to succeed:
724 # this allows this merge to succeed:
712 #
725 #
713 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
726 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
714 # \ / merging rev3 and rev4 should use bar@rev2
727 # \ / merging rev3 and rev4 should use bar@rev2
715 # \- 2 --- 4 as the merge base
728 # \- 2 --- 4 as the merge base
716 #
729 #
717
730
718 cf = cp[0]
731 cf = cp[0]
719 cr = manifest1.get(cf)
732 cr = manifest1.get(cf)
720 nfp = fp2
733 nfp = fp2
721
734
722 if manifest2: # branch merge
735 if manifest2: # branch merge
723 if fp2 == nullid: # copied on remote side
736 if fp2 == nullid: # copied on remote side
724 if fp1 != nullid or cf in manifest2:
737 if fp1 != nullid or cf in manifest2:
725 cr = manifest2[cf]
738 cr = manifest2[cf]
726 nfp = fp1
739 nfp = fp1
727
740
728 # find source in nearest ancestor if we've lost track
741 # find source in nearest ancestor if we've lost track
729 if not cr:
742 if not cr:
730 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
743 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
731 (fn, cf))
744 (fn, cf))
732 for a in self['.'].ancestors():
745 for a in self['.'].ancestors():
733 if cf in a:
746 if cf in a:
734 cr = a[cf].filenode()
747 cr = a[cf].filenode()
735 break
748 break
736
749
737 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
750 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
738 meta["copy"] = cf
751 meta["copy"] = cf
739 meta["copyrev"] = hex(cr)
752 meta["copyrev"] = hex(cr)
740 fp1, fp2 = nullid, nfp
753 fp1, fp2 = nullid, nfp
741 elif fp2 != nullid:
754 elif fp2 != nullid:
742 # is one parent an ancestor of the other?
755 # is one parent an ancestor of the other?
743 fpa = fl.ancestor(fp1, fp2)
756 fpa = fl.ancestor(fp1, fp2)
744 if fpa == fp1:
757 if fpa == fp1:
745 fp1, fp2 = fp2, nullid
758 fp1, fp2 = fp2, nullid
746 elif fpa == fp2:
759 elif fpa == fp2:
747 fp2 = nullid
760 fp2 = nullid
748
761
749 # is the file unmodified from the parent? report existing entry
762 # is the file unmodified from the parent? report existing entry
750 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
763 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
751 return fp1
764 return fp1
752
765
753 changelist.append(fn)
766 changelist.append(fn)
754 return fl.add(t, meta, tr, linkrev, fp1, fp2)
767 return fl.add(t, meta, tr, linkrev, fp1, fp2)
755
768
756 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
769 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
757 if p1 is None:
770 if p1 is None:
758 p1, p2 = self.dirstate.parents()
771 p1, p2 = self.dirstate.parents()
759 return self.commit(files=files, text=text, user=user, date=date,
772 return self.commit(files=files, text=text, user=user, date=date,
760 p1=p1, p2=p2, extra=extra, empty_ok=True)
773 p1=p1, p2=p2, extra=extra, empty_ok=True)
761
774
762 def commit(self, files=None, text="", user=None, date=None,
775 def commit(self, files=None, text="", user=None, date=None,
763 match=None, force=False, force_editor=False,
776 match=None, force=False, force_editor=False,
764 p1=None, p2=None, extra={}, empty_ok=False):
777 p1=None, p2=None, extra={}, empty_ok=False):
765 wlock = lock = None
778 wlock = lock = None
766 if extra.get("close"):
779 if extra.get("close"):
767 force = True
780 force = True
768 if files:
781 if files:
769 files = util.unique(files)
782 files = util.unique(files)
770 try:
783 try:
771 wlock = self.wlock()
784 wlock = self.wlock()
772 lock = self.lock()
785 lock = self.lock()
773 use_dirstate = (p1 is None) # not rawcommit
786 use_dirstate = (p1 is None) # not rawcommit
774
787
775 if use_dirstate:
788 if use_dirstate:
776 p1, p2 = self.dirstate.parents()
789 p1, p2 = self.dirstate.parents()
777 update_dirstate = True
790 update_dirstate = True
778
791
779 if (not force and p2 != nullid and
792 if (not force and p2 != nullid and
780 (match and (match.files() or match.anypats()))):
793 (match and (match.files() or match.anypats()))):
781 raise util.Abort(_('cannot partially commit a merge '
794 raise util.Abort(_('cannot partially commit a merge '
782 '(do not specify files or patterns)'))
795 '(do not specify files or patterns)'))
783
796
784 if files:
797 if files:
785 modified, removed = [], []
798 modified, removed = [], []
786 for f in files:
799 for f in files:
787 s = self.dirstate[f]
800 s = self.dirstate[f]
788 if s in 'nma':
801 if s in 'nma':
789 modified.append(f)
802 modified.append(f)
790 elif s == 'r':
803 elif s == 'r':
791 removed.append(f)
804 removed.append(f)
792 else:
805 else:
793 self.ui.warn(_("%s not tracked!\n") % f)
806 self.ui.warn(_("%s not tracked!\n") % f)
794 changes = [modified, [], removed, [], []]
807 changes = [modified, [], removed, [], []]
795 else:
808 else:
796 changes = self.status(match=match)
809 changes = self.status(match=match)
797 else:
810 else:
798 p1, p2 = p1, p2 or nullid
811 p1, p2 = p1, p2 or nullid
799 update_dirstate = (self.dirstate.parents()[0] == p1)
812 update_dirstate = (self.dirstate.parents()[0] == p1)
800 changes = [files, [], [], [], []]
813 changes = [files, [], [], [], []]
801
814
802 ms = merge_.mergestate(self)
815 ms = merge_.mergestate(self)
803 for f in changes[0]:
816 for f in changes[0]:
804 if f in ms and ms[f] == 'u':
817 if f in ms and ms[f] == 'u':
805 raise util.Abort(_("unresolved merge conflicts "
818 raise util.Abort(_("unresolved merge conflicts "
806 "(see hg resolve)"))
819 "(see hg resolve)"))
807 wctx = context.workingctx(self, (p1, p2), text, user, date,
820 wctx = context.workingctx(self, (p1, p2), text, user, date,
808 extra, changes)
821 extra, changes)
809 return self._commitctx(wctx, force, force_editor, empty_ok,
822 return self._commitctx(wctx, force, force_editor, empty_ok,
810 use_dirstate, update_dirstate)
823 use_dirstate, update_dirstate)
811 finally:
824 finally:
812 del lock, wlock
825 del lock, wlock
813
826
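Tying back to the branchtags() change: commit() above treats an extra dict containing 'close' as a branch-closing commit (it sets force, so an otherwise empty commit is allowed, and _commitctx refuses to close 'default'); once such a head exists, branchtags() stops preferring it. A hedged sketch, assuming repo is an open localrepository whose working directory is on a non-default named branch:

# mark the current head of the working directory's branch as closed
repo.commit(text='close this head', user='example <user@example.com>',
            extra={'close': '1'})
# branchtags() now maps the branch to an older open head, if one exists;
# the closed head is returned only when every head of the branch is closed
heads = repo.branchtags()
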
814 def commitctx(self, ctx):
827 def commitctx(self, ctx):
815 """Add a new revision to current repository.
828 """Add a new revision to current repository.
816
829
817 Revision information is passed in the context.memctx argument.
830 Revision information is passed in the context.memctx argument.
818 commitctx() does not touch the working directory.
831 commitctx() does not touch the working directory.
819 """
832 """
820 wlock = lock = None
833 wlock = lock = None
821 try:
834 try:
822 wlock = self.wlock()
835 wlock = self.wlock()
823 lock = self.lock()
836 lock = self.lock()
824 return self._commitctx(ctx, force=True, force_editor=False,
837 return self._commitctx(ctx, force=True, force_editor=False,
825 empty_ok=True, use_dirstate=False,
838 empty_ok=True, use_dirstate=False,
826 update_dirstate=False)
839 update_dirstate=False)
827 finally:
840 finally:
828 del lock, wlock
841 del lock, wlock
829
842
830 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
843 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
831 use_dirstate=True, update_dirstate=True):
844 use_dirstate=True, update_dirstate=True):
832 tr = None
845 tr = None
833 valid = 0 # don't save the dirstate if this isn't set
846 valid = 0 # don't save the dirstate if this isn't set
834 try:
847 try:
835 commit = util.sort(wctx.modified() + wctx.added())
848 commit = util.sort(wctx.modified() + wctx.added())
836 remove = wctx.removed()
849 remove = wctx.removed()
837 extra = wctx.extra().copy()
850 extra = wctx.extra().copy()
838 branchname = extra['branch']
851 branchname = extra['branch']
839 user = wctx.user()
852 user = wctx.user()
840 text = wctx.description()
853 text = wctx.description()
841
854
842 if branchname == 'default' and extra.get('close'):
855 if branchname == 'default' and extra.get('close'):
843 raise util.Abort(_('closing the default branch is invalid'))
856 raise util.Abort(_('closing the default branch is invalid'))
844 p1, p2 = [p.node() for p in wctx.parents()]
857 p1, p2 = [p.node() for p in wctx.parents()]
845 c1 = self.changelog.read(p1)
858 c1 = self.changelog.read(p1)
846 c2 = self.changelog.read(p2)
859 c2 = self.changelog.read(p2)
847 m1 = self.manifest.read(c1[0]).copy()
860 m1 = self.manifest.read(c1[0]).copy()
848 m2 = self.manifest.read(c2[0])
861 m2 = self.manifest.read(c2[0])
849
862
850 if use_dirstate:
863 if use_dirstate:
851 oldname = c1[5].get("branch") # stored in UTF-8
864 oldname = c1[5].get("branch") # stored in UTF-8
852 if (not commit and not remove and not force and p2 == nullid
865 if (not commit and not remove and not force and p2 == nullid
853 and branchname == oldname):
866 and branchname == oldname):
854 self.ui.status(_("nothing changed\n"))
867 self.ui.status(_("nothing changed\n"))
855 return None
868 return None
856
869
857 xp1 = hex(p1)
870 xp1 = hex(p1)
858 if p2 == nullid: xp2 = ''
871 if p2 == nullid: xp2 = ''
859 else: xp2 = hex(p2)
872 else: xp2 = hex(p2)
860
873
861 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
874 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
862
875
863 tr = self.transaction()
876 tr = self.transaction()
864 trp = weakref.proxy(tr)
877 trp = weakref.proxy(tr)
865
878
866 # check in files
879 # check in files
867 new = {}
880 new = {}
868 changed = []
881 changed = []
869 linkrev = len(self)
882 linkrev = len(self)
870 for f in commit:
883 for f in commit:
871 self.ui.note(f + "\n")
884 self.ui.note(f + "\n")
872 try:
885 try:
873 fctx = wctx.filectx(f)
886 fctx = wctx.filectx(f)
874 newflags = fctx.flags()
887 newflags = fctx.flags()
875 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
888 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
876 if ((not changed or changed[-1] != f) and
889 if ((not changed or changed[-1] != f) and
877 m2.get(f) != new[f]):
890 m2.get(f) != new[f]):
878 # mention the file in the changelog if some
891 # mention the file in the changelog if some
879 # flag changed, even if there was no content
892 # flag changed, even if there was no content
880 # change.
893 # change.
881 if m1.flags(f) != newflags:
894 if m1.flags(f) != newflags:
882 changed.append(f)
895 changed.append(f)
883 m1.set(f, newflags)
896 m1.set(f, newflags)
884 if use_dirstate:
897 if use_dirstate:
885 self.dirstate.normal(f)
898 self.dirstate.normal(f)
886
899
887 except (OSError, IOError):
900 except (OSError, IOError):
888 if use_dirstate:
901 if use_dirstate:
889 self.ui.warn(_("trouble committing %s!\n") % f)
902 self.ui.warn(_("trouble committing %s!\n") % f)
890 raise
903 raise
891 else:
904 else:
892 remove.append(f)
905 remove.append(f)
893
906
894 updated, added = [], []
907 updated, added = [], []
895 for f in util.sort(changed):
908 for f in util.sort(changed):
896 if f in m1 or f in m2:
909 if f in m1 or f in m2:
897 updated.append(f)
910 updated.append(f)
898 else:
911 else:
899 added.append(f)
912 added.append(f)
900
913
901 # update manifest
914 # update manifest
902 m1.update(new)
915 m1.update(new)
903 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
916 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
904 removed1 = []
917 removed1 = []
905
918
906 for f in removed:
919 for f in removed:
907 if f in m1:
920 if f in m1:
908 del m1[f]
921 del m1[f]
909 removed1.append(f)
922 removed1.append(f)
910 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
923 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
911 (new, removed1))
924 (new, removed1))
912
925
913 # add changeset
926 # add changeset
914 if (not empty_ok and not text) or force_editor:
927 if (not empty_ok and not text) or force_editor:
915 edittext = []
928 edittext = []
916 if text:
929 if text:
917 edittext.append(text)
930 edittext.append(text)
918 edittext.append("")
931 edittext.append("")
919 edittext.append("") # Empty line between message and comments.
932 edittext.append("") # Empty line between message and comments.
920 edittext.append(_("HG: Enter commit message."
933 edittext.append(_("HG: Enter commit message."
921 " Lines beginning with 'HG:' are removed."))
934 " Lines beginning with 'HG:' are removed."))
922 edittext.append("HG: --")
935 edittext.append("HG: --")
923 edittext.append("HG: user: %s" % user)
936 edittext.append("HG: user: %s" % user)
924 if p2 != nullid:
937 if p2 != nullid:
925 edittext.append("HG: branch merge")
938 edittext.append("HG: branch merge")
926 if branchname:
939 if branchname:
927 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
940 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
928 edittext.extend(["HG: added %s" % f for f in added])
941 edittext.extend(["HG: added %s" % f for f in added])
929 edittext.extend(["HG: changed %s" % f for f in updated])
942 edittext.extend(["HG: changed %s" % f for f in updated])
930 edittext.extend(["HG: removed %s" % f for f in removed])
943 edittext.extend(["HG: removed %s" % f for f in removed])
931 if not added and not updated and not removed:
944 if not added and not updated and not removed:
932 edittext.append("HG: no files changed")
945 edittext.append("HG: no files changed")
933 edittext.append("")
946 edittext.append("")
934 # run editor in the repository root
947 # run editor in the repository root
935 olddir = os.getcwd()
948 olddir = os.getcwd()
936 os.chdir(self.root)
949 os.chdir(self.root)
937 text = self.ui.edit("\n".join(edittext), user)
950 text = self.ui.edit("\n".join(edittext), user)
938 os.chdir(olddir)
951 os.chdir(olddir)
939
952
940 lines = [line.rstrip() for line in text.rstrip().splitlines()]
953 lines = [line.rstrip() for line in text.rstrip().splitlines()]
941 while lines and not lines[0]:
954 while lines and not lines[0]:
942 del lines[0]
955 del lines[0]
943 if not lines and use_dirstate:
956 if not lines and use_dirstate:
944 raise util.Abort(_("empty commit message"))
957 raise util.Abort(_("empty commit message"))
945 text = '\n'.join(lines)
958 text = '\n'.join(lines)
946
959
947 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
960 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
948 user, wctx.date(), extra)
961 user, wctx.date(), extra)
949 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
962 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
950 parent2=xp2)
963 parent2=xp2)
951 tr.close()
964 tr.close()
952
965
953 if self.branchcache:
966 if self.branchcache:
954 self.branchtags()
967 self.branchtags()
955
968
956 if use_dirstate or update_dirstate:
969 if use_dirstate or update_dirstate:
957 self.dirstate.setparents(n)
970 self.dirstate.setparents(n)
958 if use_dirstate:
971 if use_dirstate:
959 for f in removed:
972 for f in removed:
960 self.dirstate.forget(f)
973 self.dirstate.forget(f)
961 valid = 1 # our dirstate updates are complete
974 valid = 1 # our dirstate updates are complete
962
975
963 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
976 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
964 return n
977 return n
965 finally:
978 finally:
966 if not valid: # don't save our updated dirstate
979 if not valid: # don't save our updated dirstate
967 self.dirstate.invalidate()
980 self.dirstate.invalidate()
968 del tr
981 del tr
969
982
970 def walk(self, match, node=None):
983 def walk(self, match, node=None):
971 '''
984 '''
972 walk recursively through the directory tree or a given
985 walk recursively through the directory tree or a given
973 changeset, finding all files matched by the match
986 changeset, finding all files matched by the match
974 function
987 function
975 '''
988 '''
976 return self[node].walk(match)
989 return self[node].walk(match)
977
990
978 def status(self, node1='.', node2=None, match=None,
991 def status(self, node1='.', node2=None, match=None,
979 ignored=False, clean=False, unknown=False):
992 ignored=False, clean=False, unknown=False):
980 """return status of files between two nodes or node and working directory
993 """return status of files between two nodes or node and working directory
981
994
982 If node1 is None, use the first dirstate parent instead.
995 If node1 is None, use the first dirstate parent instead.
983 If node2 is None, compare node1 with working directory.
996 If node2 is None, compare node1 with working directory.
984 """
997 """
985
998
986 def mfmatches(ctx):
999 def mfmatches(ctx):
987 mf = ctx.manifest().copy()
1000 mf = ctx.manifest().copy()
988 for fn in mf.keys():
1001 for fn in mf.keys():
989 if not match(fn):
1002 if not match(fn):
990 del mf[fn]
1003 del mf[fn]
991 return mf
1004 return mf
992
1005
993 if isinstance(node1, context.changectx):
1006 if isinstance(node1, context.changectx):
994 ctx1 = node1
1007 ctx1 = node1
995 else:
1008 else:
996 ctx1 = self[node1]
1009 ctx1 = self[node1]
997 if isinstance(node2, context.changectx):
1010 if isinstance(node2, context.changectx):
998 ctx2 = node2
1011 ctx2 = node2
999 else:
1012 else:
1000 ctx2 = self[node2]
1013 ctx2 = self[node2]
1001
1014
1002 working = ctx2.rev() is None
1015 working = ctx2.rev() is None
1003 parentworking = working and ctx1 == self['.']
1016 parentworking = working and ctx1 == self['.']
1004 match = match or match_.always(self.root, self.getcwd())
1017 match = match or match_.always(self.root, self.getcwd())
1005 listignored, listclean, listunknown = ignored, clean, unknown
1018 listignored, listclean, listunknown = ignored, clean, unknown
1006
1019
1007 # load earliest manifest first for caching reasons
1020 # load earliest manifest first for caching reasons
1008 if not working and ctx2.rev() < ctx1.rev():
1021 if not working and ctx2.rev() < ctx1.rev():
1009 ctx2.manifest()
1022 ctx2.manifest()
1010
1023
1011 if not parentworking:
1024 if not parentworking:
1012 def bad(f, msg):
1025 def bad(f, msg):
1013 if f not in ctx1:
1026 if f not in ctx1:
1014 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1027 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1015 return False
1028 return False
1016 match.bad = bad
1029 match.bad = bad
1017
1030
1018 if working: # we need to scan the working dir
1031 if working: # we need to scan the working dir
1019 s = self.dirstate.status(match, listignored, listclean, listunknown)
1032 s = self.dirstate.status(match, listignored, listclean, listunknown)
1020 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1033 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1021
1034
1022 # check for any possibly clean files
1035 # check for any possibly clean files
1023 if parentworking and cmp:
1036 if parentworking and cmp:
1024 fixup = []
1037 fixup = []
1025 # do a full compare of any files that might have changed
1038 # do a full compare of any files that might have changed
1026 for f in cmp:
1039 for f in cmp:
1027 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1040 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1028 or ctx1[f].cmp(ctx2[f].data())):
1041 or ctx1[f].cmp(ctx2[f].data())):
1029 modified.append(f)
1042 modified.append(f)
1030 else:
1043 else:
1031 fixup.append(f)
1044 fixup.append(f)
1032
1045
1033 if listclean:
1046 if listclean:
1034 clean += fixup
1047 clean += fixup
1035
1048
1036 # update dirstate for files that are actually clean
1049 # update dirstate for files that are actually clean
1037 if fixup:
1050 if fixup:
1038 wlock = None
1051 wlock = None
1039 try:
1052 try:
1040 try:
1053 try:
1041 wlock = self.wlock(False)
1054 wlock = self.wlock(False)
1042 for f in fixup:
1055 for f in fixup:
1043 self.dirstate.normal(f)
1056 self.dirstate.normal(f)
1044 except lock.LockError:
1057 except lock.LockError:
1045 pass
1058 pass
1046 finally:
1059 finally:
1047 del wlock
1060 del wlock
1048
1061
1049 if not parentworking:
1062 if not parentworking:
1050 mf1 = mfmatches(ctx1)
1063 mf1 = mfmatches(ctx1)
1051 if working:
1064 if working:
1052 # we are comparing working dir against non-parent
1065 # we are comparing working dir against non-parent
1053 # generate a pseudo-manifest for the working dir
1066 # generate a pseudo-manifest for the working dir
1054 mf2 = mfmatches(self['.'])
1067 mf2 = mfmatches(self['.'])
1055 for f in cmp + modified + added:
1068 for f in cmp + modified + added:
1056 mf2[f] = None
1069 mf2[f] = None
1057 mf2.set(f, ctx2.flags(f))
1070 mf2.set(f, ctx2.flags(f))
1058 for f in removed:
1071 for f in removed:
1059 if f in mf2:
1072 if f in mf2:
1060 del mf2[f]
1073 del mf2[f]
1061 else:
1074 else:
1062 # we are comparing two revisions
1075 # we are comparing two revisions
1063 deleted, unknown, ignored = [], [], []
1076 deleted, unknown, ignored = [], [], []
1064 mf2 = mfmatches(ctx2)
1077 mf2 = mfmatches(ctx2)
1065
1078
1066 modified, added, clean = [], [], []
1079 modified, added, clean = [], [], []
1067 for fn in mf2:
1080 for fn in mf2:
1068 if fn in mf1:
1081 if fn in mf1:
1069 if (mf1.flags(fn) != mf2.flags(fn) or
1082 if (mf1.flags(fn) != mf2.flags(fn) or
1070 (mf1[fn] != mf2[fn] and
1083 (mf1[fn] != mf2[fn] and
1071 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1084 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1072 modified.append(fn)
1085 modified.append(fn)
1073 elif listclean:
1086 elif listclean:
1074 clean.append(fn)
1087 clean.append(fn)
1075 del mf1[fn]
1088 del mf1[fn]
1076 else:
1089 else:
1077 added.append(fn)
1090 added.append(fn)
1078 removed = mf1.keys()
1091 removed = mf1.keys()
1079
1092
1080 r = modified, added, removed, deleted, unknown, ignored, clean
1093 r = modified, added, removed, deleted, unknown, ignored, clean
1081 [l.sort() for l in r]
1094 [l.sort() for l in r]
1082 return r
1095 return r
1083
1096
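status() assembles its result in a fixed order; a hedged usage sketch of unpacking that 7-tuple (variable names illustrative only, not part of this changeset):

    # Illustrative only: the tuple comes back in the order assembled above.
    modified, added, removed, deleted, unknown, ignored, clean = repo.status(
        node1='.', node2=None, ignored=True, clean=True, unknown=True)
    for f in modified:
        repo.ui.write('M %s\n' % f)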
1084 def add(self, list):
1097 def add(self, list):
1085 wlock = self.wlock()
1098 wlock = self.wlock()
1086 try:
1099 try:
1087 rejected = []
1100 rejected = []
1088 for f in list:
1101 for f in list:
1089 p = self.wjoin(f)
1102 p = self.wjoin(f)
1090 try:
1103 try:
1091 st = os.lstat(p)
1104 st = os.lstat(p)
1092 except:
1105 except:
1093 self.ui.warn(_("%s does not exist!\n") % f)
1106 self.ui.warn(_("%s does not exist!\n") % f)
1094 rejected.append(f)
1107 rejected.append(f)
1095 continue
1108 continue
1096 if st.st_size > 10000000:
1109 if st.st_size > 10000000:
1097 self.ui.warn(_("%s: files over 10MB may cause memory and"
1110 self.ui.warn(_("%s: files over 10MB may cause memory and"
1098 " performance problems\n"
1111 " performance problems\n"
1099 "(use 'hg revert %s' to unadd the file)\n")
1112 "(use 'hg revert %s' to unadd the file)\n")
1100 % (f, f))
1113 % (f, f))
1101 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1114 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1102 self.ui.warn(_("%s not added: only files and symlinks "
1115 self.ui.warn(_("%s not added: only files and symlinks "
1103 "supported currently\n") % f)
1116 "supported currently\n") % f)
1104 rejected.append(p)
1117 rejected.append(p)
1105 elif self.dirstate[f] in 'amn':
1118 elif self.dirstate[f] in 'amn':
1106 self.ui.warn(_("%s already tracked!\n") % f)
1119 self.ui.warn(_("%s already tracked!\n") % f)
1107 elif self.dirstate[f] == 'r':
1120 elif self.dirstate[f] == 'r':
1108 self.dirstate.normallookup(f)
1121 self.dirstate.normallookup(f)
1109 else:
1122 else:
1110 self.dirstate.add(f)
1123 self.dirstate.add(f)
1111 return rejected
1124 return rejected
1112 finally:
1125 finally:
1113 del wlock
1126 del wlock
1114
1127
1115 def forget(self, list):
1128 def forget(self, list):
1116 wlock = self.wlock()
1129 wlock = self.wlock()
1117 try:
1130 try:
1118 for f in list:
1131 for f in list:
1119 if self.dirstate[f] != 'a':
1132 if self.dirstate[f] != 'a':
1120 self.ui.warn(_("%s not added!\n") % f)
1133 self.ui.warn(_("%s not added!\n") % f)
1121 else:
1134 else:
1122 self.dirstate.forget(f)
1135 self.dirstate.forget(f)
1123 finally:
1136 finally:
1124 del wlock
1137 del wlock
1125
1138
1126 def remove(self, list, unlink=False):
1139 def remove(self, list, unlink=False):
1127 wlock = None
1140 wlock = None
1128 try:
1141 try:
1129 if unlink:
1142 if unlink:
1130 for f in list:
1143 for f in list:
1131 try:
1144 try:
1132 util.unlink(self.wjoin(f))
1145 util.unlink(self.wjoin(f))
1133 except OSError, inst:
1146 except OSError, inst:
1134 if inst.errno != errno.ENOENT:
1147 if inst.errno != errno.ENOENT:
1135 raise
1148 raise
1136 wlock = self.wlock()
1149 wlock = self.wlock()
1137 for f in list:
1150 for f in list:
1138 if unlink and os.path.exists(self.wjoin(f)):
1151 if unlink and os.path.exists(self.wjoin(f)):
1139 self.ui.warn(_("%s still exists!\n") % f)
1152 self.ui.warn(_("%s still exists!\n") % f)
1140 elif self.dirstate[f] == 'a':
1153 elif self.dirstate[f] == 'a':
1141 self.dirstate.forget(f)
1154 self.dirstate.forget(f)
1142 elif f not in self.dirstate:
1155 elif f not in self.dirstate:
1143 self.ui.warn(_("%s not tracked!\n") % f)
1156 self.ui.warn(_("%s not tracked!\n") % f)
1144 else:
1157 else:
1145 self.dirstate.remove(f)
1158 self.dirstate.remove(f)
1146 finally:
1159 finally:
1147 del wlock
1160 del wlock
1148
1161
1149 def undelete(self, list):
1162 def undelete(self, list):
1150 wlock = None
1163 wlock = None
1151 try:
1164 try:
1152 manifests = [self.manifest.read(self.changelog.read(p)[0])
1165 manifests = [self.manifest.read(self.changelog.read(p)[0])
1153 for p in self.dirstate.parents() if p != nullid]
1166 for p in self.dirstate.parents() if p != nullid]
1154 wlock = self.wlock()
1167 wlock = self.wlock()
1155 for f in list:
1168 for f in list:
1156 if self.dirstate[f] != 'r':
1169 if self.dirstate[f] != 'r':
1157 self.ui.warn(_("%s not removed!\n") % f)
1170 self.ui.warn(_("%s not removed!\n") % f)
1158 else:
1171 else:
1159 m = f in manifests[0] and manifests[0] or manifests[1]
1172 m = f in manifests[0] and manifests[0] or manifests[1]
1160 t = self.file(f).read(m[f])
1173 t = self.file(f).read(m[f])
1161 self.wwrite(f, t, m.flags(f))
1174 self.wwrite(f, t, m.flags(f))
1162 self.dirstate.normal(f)
1175 self.dirstate.normal(f)
1163 finally:
1176 finally:
1164 del wlock
1177 del wlock
1165
1178
1166 def copy(self, source, dest):
1179 def copy(self, source, dest):
1167 wlock = None
1180 wlock = None
1168 try:
1181 try:
1169 p = self.wjoin(dest)
1182 p = self.wjoin(dest)
1170 if not (os.path.exists(p) or os.path.islink(p)):
1183 if not (os.path.exists(p) or os.path.islink(p)):
1171 self.ui.warn(_("%s does not exist!\n") % dest)
1184 self.ui.warn(_("%s does not exist!\n") % dest)
1172 elif not (os.path.isfile(p) or os.path.islink(p)):
1185 elif not (os.path.isfile(p) or os.path.islink(p)):
1173 self.ui.warn(_("copy failed: %s is not a file or a "
1186 self.ui.warn(_("copy failed: %s is not a file or a "
1174 "symbolic link\n") % dest)
1187 "symbolic link\n") % dest)
1175 else:
1188 else:
1176 wlock = self.wlock()
1189 wlock = self.wlock()
1177 if self.dirstate[dest] in '?r':
1190 if self.dirstate[dest] in '?r':
1178 self.dirstate.add(dest)
1191 self.dirstate.add(dest)
1179 self.dirstate.copy(source, dest)
1192 self.dirstate.copy(source, dest)
1180 finally:
1193 finally:
1181 del wlock
1194 del wlock
1182
1195
1183 def heads(self, start=None):
1196 def heads(self, start=None, closed=True):
1184 heads = self.changelog.heads(start)
1197 heads = self.changelog.heads(start)
1198 def display(head):
1199 if closed:
1200 return True
1201 extras = self.changelog.read(head)[5]
1202 return ('close' not in extras)
1185 # sort the output in rev descending order
1203 # sort the output in rev descending order
1186 heads = [(-self.changelog.rev(h), h) for h in heads]
1204 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1187 return [n for (r, n) in util.sort(heads)]
1205 return [n for (r, n) in util.sort(heads)]
1188
1206
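The closed-head filtering added above keys off the 'close' marker that a closing commit stores in the changeset's extra dict, which is field 5 of the changelog entry. A minimal standalone sketch of the same check, offered as an illustration only (open_heads is a hypothetical helper, not part of this changeset):

    # Hypothetical helper: keep only heads whose changelog entry carries no
    # 'close' marker in its extra dict, mirroring display() in heads() above.
    def open_heads(repo, nodes):
        kept = []
        for node in nodes:
            extras = repo.changelog.read(node)[5]  # extra dict is field 5
            if 'close' not in extras:
                kept.append(node)
        return kept

Calling heads(closed=False) above yields the same nodes, already sorted in descending revision order.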
1189 def branchheads(self, branch=None, start=None):
1207 def branchheads(self, branch=None, start=None, closed=True):
1190 if branch is None:
1208 if branch is None:
1191 branch = self[None].branch()
1209 branch = self[None].branch()
1192 branches = self._branchheads()
1210 branches = self._branchheads()
1193 if branch not in branches:
1211 if branch not in branches:
1194 return []
1212 return []
1195 bheads = branches[branch]
1213 bheads = branches[branch]
1196 # the cache returns heads ordered lowest to highest
1214 # the cache returns heads ordered lowest to highest
1197 bheads.reverse()
1215 bheads.reverse()
1198 if start is not None:
1216 if start is not None:
1199 # filter out the heads that cannot be reached from startrev
1217 # filter out the heads that cannot be reached from startrev
1200 bheads = self.changelog.nodesbetween([start], bheads)[2]
1218 bheads = self.changelog.nodesbetween([start], bheads)[2]
1219 if not closed:
1220 bheads = [h for h in bheads if
1221 ('close' not in self.changelog.read(h)[5])]
1201 return bheads
1222 return bheads
1202
1223
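A hedged usage sketch tying the new closed argument to the "open and closed branch heads" distinction this changeset introduces (illustrative only; hex is Mercurial's node-to-hex helper from the node module):

    # Illustrative only: print the open (not closed) heads of the current branch.
    for node in repo.branchheads(closed=False):
        repo.ui.write('%s\n' % hex(node))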
1203 def branches(self, nodes):
1224 def branches(self, nodes):
1204 if not nodes:
1225 if not nodes:
1205 nodes = [self.changelog.tip()]
1226 nodes = [self.changelog.tip()]
1206 b = []
1227 b = []
1207 for n in nodes:
1228 for n in nodes:
1208 t = n
1229 t = n
1209 while 1:
1230 while 1:
1210 p = self.changelog.parents(n)
1231 p = self.changelog.parents(n)
1211 if p[1] != nullid or p[0] == nullid:
1232 if p[1] != nullid or p[0] == nullid:
1212 b.append((t, n, p[0], p[1]))
1233 b.append((t, n, p[0], p[1]))
1213 break
1234 break
1214 n = p[0]
1235 n = p[0]
1215 return b
1236 return b
1216
1237
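A brief hedged note on the tuples branches() returns, anticipating how the discovery code below describes them:

    # Each tuple appended above is (head, root, first parent, second parent) for
    # one linear segment of history; findcommonincoming() below calls such a
    # segment a 'branch' and walks these tuples to find what the remote shares.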
1217 def between(self, pairs):
1238 def between(self, pairs):
1218 r = []
1239 r = []
1219
1240
1220 for top, bottom in pairs:
1241 for top, bottom in pairs:
1221 n, l, i = top, [], 0
1242 n, l, i = top, [], 0
1222 f = 1
1243 f = 1
1223
1244
1224 while n != bottom:
1245 while n != bottom:
1225 p = self.changelog.parents(n)[0]
1246 p = self.changelog.parents(n)[0]
1226 if i == f:
1247 if i == f:
1227 l.append(n)
1248 l.append(n)
1228 f = f * 2
1249 f = f * 2
1229 n = p
1250 n = p
1230 i += 1
1251 i += 1
1231
1252
1232 r.append(l)
1253 r.append(l)
1233
1254
1234 return r
1255 return r
1235
1256
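between() walks first parents from each top toward its bottom and records nodes at exponentially growing distances (1, 2, 4, 8, ...), which is what lets the discovery code below binary-search long linear runs of history. A hedged sketch of just the sampling pattern (sample_distances is hypothetical, not part of this changeset):

    # Hypothetical sketch: which distances from 'top' get recorded by the loop above.
    def sample_distances(length):
        i, f, out = 0, 1, []
        while i < length:
            if i == f:
                out.append(i)
                f *= 2
            i += 1
        return out
    # sample_distances(20) -> [1, 2, 4, 8, 16]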
1236 def findincoming(self, remote, base=None, heads=None, force=False):
1257 def findincoming(self, remote, base=None, heads=None, force=False):
1237 """Return list of roots of the subsets of missing nodes from remote
1258 """Return list of roots of the subsets of missing nodes from remote
1238
1259
1239 If base dict is specified, assume that these nodes and their parents
1260 If base dict is specified, assume that these nodes and their parents
1240 exist on the remote side and that no child of a node of base exists
1261 exist on the remote side and that no child of a node of base exists
1241 in both remote and self.
1262 in both remote and self.
1242 Furthermore, base will be updated to include the nodes that exist
1263 Furthermore, base will be updated to include the nodes that exist
1243 in both self and remote but have no children in self or remote.
1264 in both self and remote but have no children in self or remote.
1244 If a list of heads is specified, return only nodes which are heads
1265 If a list of heads is specified, return only nodes which are heads
1245 or ancestors of these heads.
1266 or ancestors of these heads.
1246
1267
1247 All the ancestors of base are in self and in remote.
1268 All the ancestors of base are in self and in remote.
1248 All the descendants of the list returned are missing in self.
1269 All the descendants of the list returned are missing in self.
1249 (and so we know that the rest of the nodes are missing in remote, see
1270 (and so we know that the rest of the nodes are missing in remote, see
1250 outgoing)
1271 outgoing)
1251 """
1272 """
1252 return self.findcommonincoming(remote, base, heads, force)[1]
1273 return self.findcommonincoming(remote, base, heads, force)[1]
1253
1274
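findincoming() is a thin wrapper over the richer API defined just below; a hedged illustration of the relationship (remote stands for any repository peer):

    # Illustrative only: findcommonincoming() returns (common, missing roots,
    # remote heads); findincoming() simply returns element [1] of that triple,
    # which is how pull() further below consumes it as 'fetch'.
    common, fetch, rheads = repo.findcommonincoming(remote)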
1254 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1275 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1255 """Return a tuple (common, missing roots, heads) used to identify
1276 """Return a tuple (common, missing roots, heads) used to identify
1256 missing nodes from remote.
1277 missing nodes from remote.
1257
1278
1258 If base dict is specified, assume that these nodes and their parents
1279 If base dict is specified, assume that these nodes and their parents
1259 exist on the remote side and that no child of a node of base exists
1280 exist on the remote side and that no child of a node of base exists
1260 in both remote and self.
1281 in both remote and self.
1261 Furthermore, base will be updated to include the nodes that exist
1282 Furthermore, base will be updated to include the nodes that exist
1262 in both self and remote but have no children in self or remote.
1283 in both self and remote but have no children in self or remote.
1263 If a list of heads is specified, return only nodes which are heads
1284 If a list of heads is specified, return only nodes which are heads
1264 or ancestors of these heads.
1285 or ancestors of these heads.
1265
1286
1266 All the ancestors of base are in self and in remote.
1287 All the ancestors of base are in self and in remote.
1267 """
1288 """
1268 m = self.changelog.nodemap
1289 m = self.changelog.nodemap
1269 search = []
1290 search = []
1270 fetch = {}
1291 fetch = {}
1271 seen = {}
1292 seen = {}
1272 seenbranch = {}
1293 seenbranch = {}
1273 if base == None:
1294 if base == None:
1274 base = {}
1295 base = {}
1275
1296
1276 if not heads:
1297 if not heads:
1277 heads = remote.heads()
1298 heads = remote.heads()
1278
1299
1279 if self.changelog.tip() == nullid:
1300 if self.changelog.tip() == nullid:
1280 base[nullid] = 1
1301 base[nullid] = 1
1281 if heads != [nullid]:
1302 if heads != [nullid]:
1282 return [nullid], [nullid], list(heads)
1303 return [nullid], [nullid], list(heads)
1283 return [nullid], [], []
1304 return [nullid], [], []
1284
1305
1285 # assume we're closer to the tip than the root
1306 # assume we're closer to the tip than the root
1286 # and start by examining the heads
1307 # and start by examining the heads
1287 self.ui.status(_("searching for changes\n"))
1308 self.ui.status(_("searching for changes\n"))
1288
1309
1289 unknown = []
1310 unknown = []
1290 for h in heads:
1311 for h in heads:
1291 if h not in m:
1312 if h not in m:
1292 unknown.append(h)
1313 unknown.append(h)
1293 else:
1314 else:
1294 base[h] = 1
1315 base[h] = 1
1295
1316
1296 heads = unknown
1317 heads = unknown
1297 if not unknown:
1318 if not unknown:
1298 return base.keys(), [], []
1319 return base.keys(), [], []
1299
1320
1300 req = dict.fromkeys(unknown)
1321 req = dict.fromkeys(unknown)
1301 reqcnt = 0
1322 reqcnt = 0
1302
1323
1303 # search through remote branches
1324 # search through remote branches
1304 # a 'branch' here is a linear segment of history, with four parts:
1325 # a 'branch' here is a linear segment of history, with four parts:
1305 # head, root, first parent, second parent
1326 # head, root, first parent, second parent
1306 # (a branch always has two parents (or none) by definition)
1327 # (a branch always has two parents (or none) by definition)
1307 unknown = remote.branches(unknown)
1328 unknown = remote.branches(unknown)
1308 while unknown:
1329 while unknown:
1309 r = []
1330 r = []
1310 while unknown:
1331 while unknown:
1311 n = unknown.pop(0)
1332 n = unknown.pop(0)
1312 if n[0] in seen:
1333 if n[0] in seen:
1313 continue
1334 continue
1314
1335
1315 self.ui.debug(_("examining %s:%s\n")
1336 self.ui.debug(_("examining %s:%s\n")
1316 % (short(n[0]), short(n[1])))
1337 % (short(n[0]), short(n[1])))
1317 if n[0] == nullid: # found the end of the branch
1338 if n[0] == nullid: # found the end of the branch
1318 pass
1339 pass
1319 elif n in seenbranch:
1340 elif n in seenbranch:
1320 self.ui.debug(_("branch already found\n"))
1341 self.ui.debug(_("branch already found\n"))
1321 continue
1342 continue
1322 elif n[1] and n[1] in m: # do we know the base?
1343 elif n[1] and n[1] in m: # do we know the base?
1323 self.ui.debug(_("found incomplete branch %s:%s\n")
1344 self.ui.debug(_("found incomplete branch %s:%s\n")
1324 % (short(n[0]), short(n[1])))
1345 % (short(n[0]), short(n[1])))
1325 search.append(n[0:2]) # schedule branch range for scanning
1346 search.append(n[0:2]) # schedule branch range for scanning
1326 seenbranch[n] = 1
1347 seenbranch[n] = 1
1327 else:
1348 else:
1328 if n[1] not in seen and n[1] not in fetch:
1349 if n[1] not in seen and n[1] not in fetch:
1329 if n[2] in m and n[3] in m:
1350 if n[2] in m and n[3] in m:
1330 self.ui.debug(_("found new changeset %s\n") %
1351 self.ui.debug(_("found new changeset %s\n") %
1331 short(n[1]))
1352 short(n[1]))
1332 fetch[n[1]] = 1 # earliest unknown
1353 fetch[n[1]] = 1 # earliest unknown
1333 for p in n[2:4]:
1354 for p in n[2:4]:
1334 if p in m:
1355 if p in m:
1335 base[p] = 1 # latest known
1356 base[p] = 1 # latest known
1336
1357
1337 for p in n[2:4]:
1358 for p in n[2:4]:
1338 if p not in req and p not in m:
1359 if p not in req and p not in m:
1339 r.append(p)
1360 r.append(p)
1340 req[p] = 1
1361 req[p] = 1
1341 seen[n[0]] = 1
1362 seen[n[0]] = 1
1342
1363
1343 if r:
1364 if r:
1344 reqcnt += 1
1365 reqcnt += 1
1345 self.ui.debug(_("request %d: %s\n") %
1366 self.ui.debug(_("request %d: %s\n") %
1346 (reqcnt, " ".join(map(short, r))))
1367 (reqcnt, " ".join(map(short, r))))
1347 for p in xrange(0, len(r), 10):
1368 for p in xrange(0, len(r), 10):
1348 for b in remote.branches(r[p:p+10]):
1369 for b in remote.branches(r[p:p+10]):
1349 self.ui.debug(_("received %s:%s\n") %
1370 self.ui.debug(_("received %s:%s\n") %
1350 (short(b[0]), short(b[1])))
1371 (short(b[0]), short(b[1])))
1351 unknown.append(b)
1372 unknown.append(b)
1352
1373
1353 # do binary search on the branches we found
1374 # do binary search on the branches we found
1354 while search:
1375 while search:
1355 newsearch = []
1376 newsearch = []
1356 reqcnt += 1
1377 reqcnt += 1
1357 for n, l in zip(search, remote.between(search)):
1378 for n, l in zip(search, remote.between(search)):
1358 l.append(n[1])
1379 l.append(n[1])
1359 p = n[0]
1380 p = n[0]
1360 f = 1
1381 f = 1
1361 for i in l:
1382 for i in l:
1362 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1383 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1363 if i in m:
1384 if i in m:
1364 if f <= 2:
1385 if f <= 2:
1365 self.ui.debug(_("found new branch changeset %s\n") %
1386 self.ui.debug(_("found new branch changeset %s\n") %
1366 short(p))
1387 short(p))
1367 fetch[p] = 1
1388 fetch[p] = 1
1368 base[i] = 1
1389 base[i] = 1
1369 else:
1390 else:
1370 self.ui.debug(_("narrowed branch search to %s:%s\n")
1391 self.ui.debug(_("narrowed branch search to %s:%s\n")
1371 % (short(p), short(i)))
1392 % (short(p), short(i)))
1372 newsearch.append((p, i))
1393 newsearch.append((p, i))
1373 break
1394 break
1374 p, f = i, f * 2
1395 p, f = i, f * 2
1375 search = newsearch
1396 search = newsearch
1376
1397
1377 # sanity check our fetch list
1398 # sanity check our fetch list
1378 for f in fetch.keys():
1399 for f in fetch.keys():
1379 if f in m:
1400 if f in m:
1380 raise error.RepoError(_("already have changeset ")
1401 raise error.RepoError(_("already have changeset ")
1381 + short(f[:4]))
1402 + short(f[:4]))
1382
1403
1383 if base.keys() == [nullid]:
1404 if base.keys() == [nullid]:
1384 if force:
1405 if force:
1385 self.ui.warn(_("warning: repository is unrelated\n"))
1406 self.ui.warn(_("warning: repository is unrelated\n"))
1386 else:
1407 else:
1387 raise util.Abort(_("repository is unrelated"))
1408 raise util.Abort(_("repository is unrelated"))
1388
1409
1389 self.ui.debug(_("found new changesets starting at ") +
1410 self.ui.debug(_("found new changesets starting at ") +
1390 " ".join([short(f) for f in fetch]) + "\n")
1411 " ".join([short(f) for f in fetch]) + "\n")
1391
1412
1392 self.ui.debug(_("%d total queries\n") % reqcnt)
1413 self.ui.debug(_("%d total queries\n") % reqcnt)
1393
1414
1394 return base.keys(), fetch.keys(), heads
1415 return base.keys(), fetch.keys(), heads
1395
1416
1396 def findoutgoing(self, remote, base=None, heads=None, force=False):
1417 def findoutgoing(self, remote, base=None, heads=None, force=False):
1397 """Return list of nodes that are roots of subsets not in remote
1418 """Return list of nodes that are roots of subsets not in remote
1398
1419
1399 If base dict is specified, assume that these nodes and their parents
1420 If base dict is specified, assume that these nodes and their parents
1400 exist on the remote side.
1421 exist on the remote side.
1401 If a list of heads is specified, return only nodes which are heads
1422 If a list of heads is specified, return only nodes which are heads
1402 or ancestors of these heads, and return a second element which
1423 or ancestors of these heads, and return a second element which
1403 contains all remote heads which get new children.
1424 contains all remote heads which get new children.
1404 """
1425 """
1405 if base == None:
1426 if base == None:
1406 base = {}
1427 base = {}
1407 self.findincoming(remote, base, heads, force=force)
1428 self.findincoming(remote, base, heads, force=force)
1408
1429
1409 self.ui.debug(_("common changesets up to ")
1430 self.ui.debug(_("common changesets up to ")
1410 + " ".join(map(short, base.keys())) + "\n")
1431 + " ".join(map(short, base.keys())) + "\n")
1411
1432
1412 remain = dict.fromkeys(self.changelog.nodemap)
1433 remain = dict.fromkeys(self.changelog.nodemap)
1413
1434
1414 # prune everything remote has from the tree
1435 # prune everything remote has from the tree
1415 del remain[nullid]
1436 del remain[nullid]
1416 remove = base.keys()
1437 remove = base.keys()
1417 while remove:
1438 while remove:
1418 n = remove.pop(0)
1439 n = remove.pop(0)
1419 if n in remain:
1440 if n in remain:
1420 del remain[n]
1441 del remain[n]
1421 for p in self.changelog.parents(n):
1442 for p in self.changelog.parents(n):
1422 remove.append(p)
1443 remove.append(p)
1423
1444
1424 # find every node whose parents have been pruned
1445 # find every node whose parents have been pruned
1425 subset = []
1446 subset = []
1426 # find every remote head that will get new children
1447 # find every remote head that will get new children
1427 updated_heads = {}
1448 updated_heads = {}
1428 for n in remain:
1449 for n in remain:
1429 p1, p2 = self.changelog.parents(n)
1450 p1, p2 = self.changelog.parents(n)
1430 if p1 not in remain and p2 not in remain:
1451 if p1 not in remain and p2 not in remain:
1431 subset.append(n)
1452 subset.append(n)
1432 if heads:
1453 if heads:
1433 if p1 in heads:
1454 if p1 in heads:
1434 updated_heads[p1] = True
1455 updated_heads[p1] = True
1435 if p2 in heads:
1456 if p2 in heads:
1436 updated_heads[p2] = True
1457 updated_heads[p2] = True
1437
1458
1438 # this is the set of all roots we have to push
1459 # this is the set of all roots we have to push
1439 if heads:
1460 if heads:
1440 return subset, updated_heads.keys()
1461 return subset, updated_heads.keys()
1441 else:
1462 else:
1442 return subset
1463 return subset
1443
1464
1444 def pull(self, remote, heads=None, force=False):
1465 def pull(self, remote, heads=None, force=False):
1445 lock = self.lock()
1466 lock = self.lock()
1446 try:
1467 try:
1447 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1468 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1448 force=force)
1469 force=force)
1449 if fetch == [nullid]:
1470 if fetch == [nullid]:
1450 self.ui.status(_("requesting all changes\n"))
1471 self.ui.status(_("requesting all changes\n"))
1451
1472
1452 if not fetch:
1473 if not fetch:
1453 self.ui.status(_("no changes found\n"))
1474 self.ui.status(_("no changes found\n"))
1454 return 0
1475 return 0
1455
1476
1456 if heads is None and remote.capable('changegroupsubset'):
1477 if heads is None and remote.capable('changegroupsubset'):
1457 heads = rheads
1478 heads = rheads
1458
1479
1459 if heads is None:
1480 if heads is None:
1460 cg = remote.changegroup(fetch, 'pull')
1481 cg = remote.changegroup(fetch, 'pull')
1461 else:
1482 else:
1462 if not remote.capable('changegroupsubset'):
1483 if not remote.capable('changegroupsubset'):
1463 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1484 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1464 cg = remote.changegroupsubset(fetch, heads, 'pull')
1485 cg = remote.changegroupsubset(fetch, heads, 'pull')
1465 return self.addchangegroup(cg, 'pull', remote.url())
1486 return self.addchangegroup(cg, 'pull', remote.url())
1466 finally:
1487 finally:
1467 del lock
1488 del lock
1468
1489
1469 def push(self, remote, force=False, revs=None):
1490 def push(self, remote, force=False, revs=None):
1470 # there are two ways to push to remote repo:
1491 # there are two ways to push to remote repo:
1471 #
1492 #
1472 # addchangegroup assumes local user can lock remote
1493 # addchangegroup assumes local user can lock remote
1473 # repo (local filesystem, old ssh servers).
1494 # repo (local filesystem, old ssh servers).
1474 #
1495 #
1475 # unbundle assumes local user cannot lock remote repo (new ssh
1496 # unbundle assumes local user cannot lock remote repo (new ssh
1476 # servers, http servers).
1497 # servers, http servers).
1477
1498
1478 if remote.capable('unbundle'):
1499 if remote.capable('unbundle'):
1479 return self.push_unbundle(remote, force, revs)
1500 return self.push_unbundle(remote, force, revs)
1480 return self.push_addchangegroup(remote, force, revs)
1501 return self.push_addchangegroup(remote, force, revs)
1481
1502
1482 def prepush(self, remote, force, revs):
1503 def prepush(self, remote, force, revs):
1483 common = {}
1504 common = {}
1484 remote_heads = remote.heads()
1505 remote_heads = remote.heads()
1485 inc = self.findincoming(remote, common, remote_heads, force=force)
1506 inc = self.findincoming(remote, common, remote_heads, force=force)
1486
1507
1487 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1508 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1488 if revs is not None:
1509 if revs is not None:
1489 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1510 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1490 else:
1511 else:
1491 bases, heads = update, self.changelog.heads()
1512 bases, heads = update, self.changelog.heads()
1492
1513
1493 if not bases:
1514 if not bases:
1494 self.ui.status(_("no changes found\n"))
1515 self.ui.status(_("no changes found\n"))
1495 return None, 1
1516 return None, 1
1496 elif not force:
1517 elif not force:
1497 # check if we're creating new remote heads
1518 # check if we're creating new remote heads
1498 # to be a remote head after push, node must be either
1519 # to be a remote head after push, node must be either
1499 # - unknown locally
1520 # - unknown locally
1500 # - a local outgoing head descended from update
1521 # - a local outgoing head descended from update
1501 # - a remote head that's known locally and not
1522 # - a remote head that's known locally and not
1502 # ancestral to an outgoing head
1523 # ancestral to an outgoing head
1503
1524
1504 warn = 0
1525 warn = 0
1505
1526
1506 if remote_heads == [nullid]:
1527 if remote_heads == [nullid]:
1507 warn = 0
1528 warn = 0
1508 elif not revs and len(heads) > len(remote_heads):
1529 elif not revs and len(heads) > len(remote_heads):
1509 warn = 1
1530 warn = 1
1510 else:
1531 else:
1511 newheads = list(heads)
1532 newheads = list(heads)
1512 for r in remote_heads:
1533 for r in remote_heads:
1513 if r in self.changelog.nodemap:
1534 if r in self.changelog.nodemap:
1514 desc = self.changelog.heads(r, heads)
1535 desc = self.changelog.heads(r, heads)
1515 l = [h for h in heads if h in desc]
1536 l = [h for h in heads if h in desc]
1516 if not l:
1537 if not l:
1517 newheads.append(r)
1538 newheads.append(r)
1518 else:
1539 else:
1519 newheads.append(r)
1540 newheads.append(r)
1520 if len(newheads) > len(remote_heads):
1541 if len(newheads) > len(remote_heads):
1521 warn = 1
1542 warn = 1
1522
1543
1523 if warn:
1544 if warn:
1524 self.ui.warn(_("abort: push creates new remote heads!\n"))
1545 self.ui.warn(_("abort: push creates new remote heads!\n"))
1525 self.ui.status(_("(did you forget to merge?"
1546 self.ui.status(_("(did you forget to merge?"
1526 " use push -f to force)\n"))
1547 " use push -f to force)\n"))
1527 return None, 0
1548 return None, 0
1528 elif inc:
1549 elif inc:
1529 self.ui.warn(_("note: unsynced remote changes!\n"))
1550 self.ui.warn(_("note: unsynced remote changes!\n"))
1530
1551
1531
1552
1532 if revs is None:
1553 if revs is None:
1533 # use the fast path, no race possible on push
1554 # use the fast path, no race possible on push
1534 cg = self._changegroup(common.keys(), 'push')
1555 cg = self._changegroup(common.keys(), 'push')
1535 else:
1556 else:
1536 cg = self.changegroupsubset(update, revs, 'push')
1557 cg = self.changegroupsubset(update, revs, 'push')
1537 return cg, remote_heads
1558 return cg, remote_heads
1538
1559
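A hedged summary of prepush()'s return contract as the two push helpers below consume it:

    # (None, 1)            -> nothing to push ('no changes found')
    # (None, 0)            -> push refused because it would create new remote heads
    # (changegroup, heads) -> bundle to send, plus the remote heads seen at the start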
1539 def push_addchangegroup(self, remote, force, revs):
1560 def push_addchangegroup(self, remote, force, revs):
1540 lock = remote.lock()
1561 lock = remote.lock()
1541 try:
1562 try:
1542 ret = self.prepush(remote, force, revs)
1563 ret = self.prepush(remote, force, revs)
1543 if ret[0] is not None:
1564 if ret[0] is not None:
1544 cg, remote_heads = ret
1565 cg, remote_heads = ret
1545 return remote.addchangegroup(cg, 'push', self.url())
1566 return remote.addchangegroup(cg, 'push', self.url())
1546 return ret[1]
1567 return ret[1]
1547 finally:
1568 finally:
1548 del lock
1569 del lock
1549
1570
1550 def push_unbundle(self, remote, force, revs):
1571 def push_unbundle(self, remote, force, revs):
1551 # local repo finds heads on server, finds out what revs it
1572 # local repo finds heads on server, finds out what revs it
1552 # must push. once revs transferred, if server finds it has
1573 # must push. once revs transferred, if server finds it has
1553 # different heads (someone else won commit/push race), server
1574 # different heads (someone else won commit/push race), server
1554 # aborts.
1575 # aborts.
1555
1576
1556 ret = self.prepush(remote, force, revs)
1577 ret = self.prepush(remote, force, revs)
1557 if ret[0] is not None:
1578 if ret[0] is not None:
1558 cg, remote_heads = ret
1579 cg, remote_heads = ret
1559 if force: remote_heads = ['force']
1580 if force: remote_heads = ['force']
1560 return remote.unbundle(cg, remote_heads, 'push')
1581 return remote.unbundle(cg, remote_heads, 'push')
1561 return ret[1]
1582 return ret[1]
1562
1583
1563 def changegroupinfo(self, nodes, source):
1584 def changegroupinfo(self, nodes, source):
1564 if self.ui.verbose or source == 'bundle':
1585 if self.ui.verbose or source == 'bundle':
1565 self.ui.status(_("%d changesets found\n") % len(nodes))
1586 self.ui.status(_("%d changesets found\n") % len(nodes))
1566 if self.ui.debugflag:
1587 if self.ui.debugflag:
1567 self.ui.debug(_("list of changesets:\n"))
1588 self.ui.debug(_("list of changesets:\n"))
1568 for node in nodes:
1589 for node in nodes:
1569 self.ui.debug("%s\n" % hex(node))
1590 self.ui.debug("%s\n" % hex(node))
1570
1591
1571 def changegroupsubset(self, bases, heads, source, extranodes=None):
1592 def changegroupsubset(self, bases, heads, source, extranodes=None):
1572 """This function generates a changegroup consisting of all the nodes
1593 """This function generates a changegroup consisting of all the nodes
1573 that are descendants of any of the bases, and ancestors of any of
1594 that are descendants of any of the bases, and ancestors of any of
1574 the heads.
1595 the heads.
1575
1596
1576 It is fairly complex as determining which filenodes and which
1597 It is fairly complex as determining which filenodes and which
1577 manifest nodes need to be included for the changeset to be complete
1598 manifest nodes need to be included for the changeset to be complete
1578 is non-trivial.
1599 is non-trivial.
1579
1600
1580 Another wrinkle is doing the reverse, figuring out which changeset in
1601 Another wrinkle is doing the reverse, figuring out which changeset in
1581 the changegroup a particular filenode or manifestnode belongs to.
1602 the changegroup a particular filenode or manifestnode belongs to.
1582
1603
1583 The caller can specify some nodes that must be included in the
1604 The caller can specify some nodes that must be included in the
1584 changegroup using the extranodes argument. It should be a dict
1605 changegroup using the extranodes argument. It should be a dict
1585 where the keys are the filenames (or 1 for the manifest), and the
1606 where the keys are the filenames (or 1 for the manifest), and the
1586 values are lists of (node, linknode) tuples, where node is a wanted
1607 values are lists of (node, linknode) tuples, where node is a wanted
1587 node and linknode is the changelog node that should be transmitted as
1608 node and linknode is the changelog node that should be transmitted as
1588 the linkrev.
1609 the linkrev.
1589 """
1610 """
1590
1611
1591 if extranodes is None:
1612 if extranodes is None:
1592 # can we go through the fast path ?
1613 # can we go through the fast path ?
1593 heads.sort()
1614 heads.sort()
1594 allheads = self.heads()
1615 allheads = self.heads()
1595 allheads.sort()
1616 allheads.sort()
1596 if heads == allheads:
1617 if heads == allheads:
1597 common = []
1618 common = []
1598 # parents of bases are known from both sides
1619 # parents of bases are known from both sides
1599 for n in bases:
1620 for n in bases:
1600 for p in self.changelog.parents(n):
1621 for p in self.changelog.parents(n):
1601 if p != nullid:
1622 if p != nullid:
1602 common.append(p)
1623 common.append(p)
1603 return self._changegroup(common, source)
1624 return self._changegroup(common, source)
1604
1625
1605 self.hook('preoutgoing', throw=True, source=source)
1626 self.hook('preoutgoing', throw=True, source=source)
1606
1627
1607 # Set up some initial variables
1628 # Set up some initial variables
1608 # Make it easy to refer to self.changelog
1629 # Make it easy to refer to self.changelog
1609 cl = self.changelog
1630 cl = self.changelog
1610 # msng is short for missing - compute the list of changesets in this
1631 # msng is short for missing - compute the list of changesets in this
1611 # changegroup.
1632 # changegroup.
1612 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1633 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1613 self.changegroupinfo(msng_cl_lst, source)
1634 self.changegroupinfo(msng_cl_lst, source)
1614 # Some bases may turn out to be superfluous, and some heads may be
1635 # Some bases may turn out to be superfluous, and some heads may be
1615 # too. nodesbetween will return the minimal set of bases and heads
1636 # too. nodesbetween will return the minimal set of bases and heads
1616 # necessary to re-create the changegroup.
1637 # necessary to re-create the changegroup.
1617
1638
1618 # Known heads are the list of heads that it is assumed the recipient
1639 # Known heads are the list of heads that it is assumed the recipient
1619 # of this changegroup will know about.
1640 # of this changegroup will know about.
1620 knownheads = {}
1641 knownheads = {}
1621 # We assume that all parents of bases are known heads.
1642 # We assume that all parents of bases are known heads.
1622 for n in bases:
1643 for n in bases:
1623 for p in cl.parents(n):
1644 for p in cl.parents(n):
1624 if p != nullid:
1645 if p != nullid:
1625 knownheads[p] = 1
1646 knownheads[p] = 1
1626 knownheads = knownheads.keys()
1647 knownheads = knownheads.keys()
1627 if knownheads:
1648 if knownheads:
1628 # Now that we know what heads are known, we can compute which
1649 # Now that we know what heads are known, we can compute which
1629 # changesets are known. The recipient must know about all
1650 # changesets are known. The recipient must know about all
1630 # changesets required to reach the known heads from the null
1651 # changesets required to reach the known heads from the null
1631 # changeset.
1652 # changeset.
1632 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1653 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1633 junk = None
1654 junk = None
1634 # Transform the list into an ersatz set.
1655 # Transform the list into an ersatz set.
1635 has_cl_set = dict.fromkeys(has_cl_set)
1656 has_cl_set = dict.fromkeys(has_cl_set)
1636 else:
1657 else:
1637 # If there were no known heads, the recipient cannot be assumed to
1658 # If there were no known heads, the recipient cannot be assumed to
1638 # know about any changesets.
1659 # know about any changesets.
1639 has_cl_set = {}
1660 has_cl_set = {}
1640
1661
1641 # Make it easy to refer to self.manifest
1662 # Make it easy to refer to self.manifest
1642 mnfst = self.manifest
1663 mnfst = self.manifest
1643 # We don't know which manifests are missing yet
1664 # We don't know which manifests are missing yet
1644 msng_mnfst_set = {}
1665 msng_mnfst_set = {}
1645 # Nor do we know which filenodes are missing.
1666 # Nor do we know which filenodes are missing.
1646 msng_filenode_set = {}
1667 msng_filenode_set = {}
1647
1668
1648 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1669 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1649 junk = None
1670 junk = None
1650
1671
1651 # A changeset always belongs to itself, so the changenode lookup
1672 # A changeset always belongs to itself, so the changenode lookup
1652 # function for a changenode is identity.
1673 # function for a changenode is identity.
1653 def identity(x):
1674 def identity(x):
1654 return x
1675 return x
1655
1676
1656 # A function generating function. Sets up an environment for the
1677 # A function generating function. Sets up an environment for the
1657 # inner function.
1678 # inner function.
1658 def cmp_by_rev_func(revlog):
1679 def cmp_by_rev_func(revlog):
1659 # Compare two nodes by their revision number in the environment's
1680 # Compare two nodes by their revision number in the environment's
1660 # revision history. Since the revision number both represents the
1681 # revision history. Since the revision number both represents the
1661 # most efficient order to read the nodes in, and represents a
1682 # most efficient order to read the nodes in, and represents a
1662 # topological sorting of the nodes, this function is often useful.
1683 # topological sorting of the nodes, this function is often useful.
1663 def cmp_by_rev(a, b):
1684 def cmp_by_rev(a, b):
1664 return cmp(revlog.rev(a), revlog.rev(b))
1685 return cmp(revlog.rev(a), revlog.rev(b))
1665 return cmp_by_rev
1686 return cmp_by_rev
1666
1687
1667 # If we determine that a particular file or manifest node must be a
1688 # If we determine that a particular file or manifest node must be a
1668 # node that the recipient of the changegroup will already have, we can
1689 # node that the recipient of the changegroup will already have, we can
1669 # also assume the recipient will have all the parents. This function
1690 # also assume the recipient will have all the parents. This function
1670 # prunes them from the set of missing nodes.
1691 # prunes them from the set of missing nodes.
1671 def prune_parents(revlog, hasset, msngset):
1692 def prune_parents(revlog, hasset, msngset):
1672 haslst = hasset.keys()
1693 haslst = hasset.keys()
1673 haslst.sort(cmp_by_rev_func(revlog))
1694 haslst.sort(cmp_by_rev_func(revlog))
1674 for node in haslst:
1695 for node in haslst:
1675 parentlst = [p for p in revlog.parents(node) if p != nullid]
1696 parentlst = [p for p in revlog.parents(node) if p != nullid]
1676 while parentlst:
1697 while parentlst:
1677 n = parentlst.pop()
1698 n = parentlst.pop()
1678 if n not in hasset:
1699 if n not in hasset:
1679 hasset[n] = 1
1700 hasset[n] = 1
1680 p = [p for p in revlog.parents(n) if p != nullid]
1701 p = [p for p in revlog.parents(n) if p != nullid]
1681 parentlst.extend(p)
1702 parentlst.extend(p)
1682 for n in hasset:
1703 for n in hasset:
1683 msngset.pop(n, None)
1704 msngset.pop(n, None)
1684
1705
1685 # This is a function generating function used to set up an environment
1706 # This is a function generating function used to set up an environment
1686 # for the inner function to execute in.
1707 # for the inner function to execute in.
1687 def manifest_and_file_collector(changedfileset):
1708 def manifest_and_file_collector(changedfileset):
1688 # This is an information gathering function that gathers
1709 # This is an information gathering function that gathers
1689 # information from each changeset node that goes out as part of
1710 # information from each changeset node that goes out as part of
1690 # the changegroup. The information gathered is a list of which
1711 # the changegroup. The information gathered is a list of which
1691 # manifest nodes are potentially required (the recipient may
1712 # manifest nodes are potentially required (the recipient may
1692 # already have them) and the total list of all files which were
1713 # already have them) and the total list of all files which were
1693 # changed in any changeset in the changegroup.
1714 # changed in any changeset in the changegroup.
1694 #
1715 #
1695 # We also remember, for each manifest, the first changenode we saw
1716 # We also remember, for each manifest, the first changenode we saw
1696 # reference it, so we can later determine which changenode 'owns'
1717 # reference it, so we can later determine which changenode 'owns'
1697 # the manifest.
1718 # the manifest.
1698 def collect_manifests_and_files(clnode):
1719 def collect_manifests_and_files(clnode):
1699 c = cl.read(clnode)
1720 c = cl.read(clnode)
1700 for f in c[3]:
1721 for f in c[3]:
1701 # This is to make sure we only have one instance of each
1722 # This is to make sure we only have one instance of each
1702 # filename string for each filename.
1723 # filename string for each filename.
1703 changedfileset.setdefault(f, f)
1724 changedfileset.setdefault(f, f)
1704 msng_mnfst_set.setdefault(c[0], clnode)
1725 msng_mnfst_set.setdefault(c[0], clnode)
1705 return collect_manifests_and_files
1726 return collect_manifests_and_files
1706
1727
1707 # Figure out which manifest nodes (of the ones we think might be part
1728 # Figure out which manifest nodes (of the ones we think might be part
1708 # of the changegroup) the recipient must know about and remove them
1729 # of the changegroup) the recipient must know about and remove them
1709 # from the changegroup.
1730 # from the changegroup.
1710 def prune_manifests():
1731 def prune_manifests():
1711 has_mnfst_set = {}
1732 has_mnfst_set = {}
1712 for n in msng_mnfst_set:
1733 for n in msng_mnfst_set:
1713 # If a 'missing' manifest thinks it belongs to a changenode
1734 # If a 'missing' manifest thinks it belongs to a changenode
1714 # the recipient is assumed to have, obviously the recipient
1735 # the recipient is assumed to have, obviously the recipient
1715 # must have that manifest.
1736 # must have that manifest.
1716 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1737 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1717 if linknode in has_cl_set:
1738 if linknode in has_cl_set:
1718 has_mnfst_set[n] = 1
1739 has_mnfst_set[n] = 1
1719 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1740 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1720
1741
1721 # Use the information collected in collect_manifests_and_files to say
1742 # Use the information collected in collect_manifests_and_files to say
1722 # which changenode any manifestnode belongs to.
1743 # which changenode any manifestnode belongs to.
1723 def lookup_manifest_link(mnfstnode):
1744 def lookup_manifest_link(mnfstnode):
1724 return msng_mnfst_set[mnfstnode]
1745 return msng_mnfst_set[mnfstnode]
1725
1746
1726 # A function generating function that sets up the initial environment
1747 # A function generating function that sets up the initial environment
1727 # for the inner function.
1748 # for the inner function.
1728 def filenode_collector(changedfiles):
1749 def filenode_collector(changedfiles):
1729 next_rev = [0]
1750 next_rev = [0]
1730 # This gathers information from each manifestnode included in the
1751 # This gathers information from each manifestnode included in the
1731 # changegroup about which filenodes the manifest node references
1752 # changegroup about which filenodes the manifest node references
1732 # so we can include those in the changegroup too.
1753 # so we can include those in the changegroup too.
1733 #
1754 #
1734 # It also remembers which changenode each filenode belongs to. It
1755 # It also remembers which changenode each filenode belongs to. It
1735 # does this by assuming that a filenode belongs to the changenode
1756 # does this by assuming that a filenode belongs to the changenode
1736 # the first manifest that references it belongs to.
1757 # the first manifest that references it belongs to.
1737 def collect_msng_filenodes(mnfstnode):
1758 def collect_msng_filenodes(mnfstnode):
1738 r = mnfst.rev(mnfstnode)
1759 r = mnfst.rev(mnfstnode)
1739 if r == next_rev[0]:
1760 if r == next_rev[0]:
1740 # If this rev directly follows the last one we looked at,
1761 # If this rev directly follows the last one we looked at,
1741 # we only need to read the delta.
1762 # we only need to read the delta.
1742 deltamf = mnfst.readdelta(mnfstnode)
1763 deltamf = mnfst.readdelta(mnfstnode)
1743 # For each line in the delta
1764 # For each line in the delta
1744 for f, fnode in deltamf.iteritems():
1765 for f, fnode in deltamf.iteritems():
1745 f = changedfiles.get(f, None)
1766 f = changedfiles.get(f, None)
1746 # And if the file is in the list of files we care
1767 # And if the file is in the list of files we care
1747 # about.
1768 # about.
1748 if f is not None:
1769 if f is not None:
1749 # Get the changenode this manifest belongs to
1770 # Get the changenode this manifest belongs to
1750 clnode = msng_mnfst_set[mnfstnode]
1771 clnode = msng_mnfst_set[mnfstnode]
1751 # Create the set of filenodes for the file if
1772 # Create the set of filenodes for the file if
1752 # there isn't one already.
1773 # there isn't one already.
1753 ndset = msng_filenode_set.setdefault(f, {})
1774 ndset = msng_filenode_set.setdefault(f, {})
1754 # And set the filenode's changelog node to the
1775 # And set the filenode's changelog node to the
1755 # manifest's if it hasn't been set already.
1776 # manifest's if it hasn't been set already.
1756 ndset.setdefault(fnode, clnode)
1777 ndset.setdefault(fnode, clnode)
1757 else:
1778 else:
1758 # Otherwise we need a full manifest.
1779 # Otherwise we need a full manifest.
1759 m = mnfst.read(mnfstnode)
1780 m = mnfst.read(mnfstnode)
1760 # For every file we care about.
1781 # For every file we care about.
1761 for f in changedfiles:
1782 for f in changedfiles:
1762 fnode = m.get(f, None)
1783 fnode = m.get(f, None)
1763 # If it's in the manifest
1784 # If it's in the manifest
1764 if fnode is not None:
1785 if fnode is not None:
1765 # See comments above.
1786 # See comments above.
1766 clnode = msng_mnfst_set[mnfstnode]
1787 clnode = msng_mnfst_set[mnfstnode]
1767 ndset = msng_filenode_set.setdefault(f, {})
1788 ndset = msng_filenode_set.setdefault(f, {})
1768 ndset.setdefault(fnode, clnode)
1789 ndset.setdefault(fnode, clnode)
1769 # Remember the revision we hope to see next.
1790 # Remember the revision we hope to see next.
1770 next_rev[0] = r + 1
1791 next_rev[0] = r + 1
1771 return collect_msng_filenodes
1792 return collect_msng_filenodes
1772
1793
1773 # We have a list of filenodes we think we need for a file; let's remove
1794 # We have a list of filenodes we think we need for a file; let's remove
1774 # all those we know the recipient must have.
1795 # all those we know the recipient must have.
1775 def prune_filenodes(f, filerevlog):
1796 def prune_filenodes(f, filerevlog):
1776 msngset = msng_filenode_set[f]
1797 msngset = msng_filenode_set[f]
1777 hasset = {}
1798 hasset = {}
1778 # If a 'missing' filenode thinks it belongs to a changenode we
1799 # If a 'missing' filenode thinks it belongs to a changenode we
1779 # assume the recipient must have, then the recipient must have
1800 # assume the recipient must have, then the recipient must have
1780 # that filenode.
1801 # that filenode.
1781 for n in msngset:
1802 for n in msngset:
1782 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1803 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1783 if clnode in has_cl_set:
1804 if clnode in has_cl_set:
1784 hasset[n] = 1
1805 hasset[n] = 1
1785 prune_parents(filerevlog, hasset, msngset)
1806 prune_parents(filerevlog, hasset, msngset)
1786
1807
1787 # A function generating function that sets up a context for the
1808 # A function generating function that sets up a context for the
1788 # inner function.
1809 # inner function.
1789 def lookup_filenode_link_func(fname):
1810 def lookup_filenode_link_func(fname):
1790 msngset = msng_filenode_set[fname]
1811 msngset = msng_filenode_set[fname]
1791 # Lookup the changenode the filenode belongs to.
1812 # Lookup the changenode the filenode belongs to.
1792 def lookup_filenode_link(fnode):
1813 def lookup_filenode_link(fnode):
1793 return msngset[fnode]
1814 return msngset[fnode]
1794 return lookup_filenode_link
1815 return lookup_filenode_link
1795
1816
1796 # Add the nodes that were explicitly requested.
1817 # Add the nodes that were explicitly requested.
1797 def add_extra_nodes(name, nodes):
1818 def add_extra_nodes(name, nodes):
1798 if not extranodes or name not in extranodes:
1819 if not extranodes or name not in extranodes:
1799 return
1820 return
1800
1821
1801 for node, linknode in extranodes[name]:
1822 for node, linknode in extranodes[name]:
1802 if node not in nodes:
1823 if node not in nodes:
1803 nodes[node] = linknode
1824 nodes[node] = linknode
1804
1825
1805 # Now that we have all these utility functions to help out and
1826 # Now that we have all these utility functions to help out and
1806 # logically divide up the task, generate the group.
1827 # logically divide up the task, generate the group.
1807 def gengroup():
1828 def gengroup():
1808 # The set of changed files starts empty.
1829 # The set of changed files starts empty.
1809 changedfiles = {}
1830 changedfiles = {}
1810 # Create a changenode group generator that will call our functions
1831 # Create a changenode group generator that will call our functions
1811 # back to lookup the owning changenode and collect information.
1832 # back to lookup the owning changenode and collect information.
1812 group = cl.group(msng_cl_lst, identity,
1833 group = cl.group(msng_cl_lst, identity,
1813 manifest_and_file_collector(changedfiles))
1834 manifest_and_file_collector(changedfiles))
1814 for chnk in group:
1835 for chnk in group:
1815 yield chnk
1836 yield chnk
1816
1837
1817 # The list of manifests has been collected by the generator
1838 # The list of manifests has been collected by the generator
1818 # calling our functions back.
1839 # calling our functions back.
1819 prune_manifests()
1840 prune_manifests()
1820 add_extra_nodes(1, msng_mnfst_set)
1841 add_extra_nodes(1, msng_mnfst_set)
1821 msng_mnfst_lst = msng_mnfst_set.keys()
1842 msng_mnfst_lst = msng_mnfst_set.keys()
1822 # Sort the manifestnodes by revision number.
1843 # Sort the manifestnodes by revision number.
1823 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1844 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1824 # Create a generator for the manifestnodes that calls our lookup
1845 # Create a generator for the manifestnodes that calls our lookup
1825 # and data collection functions back.
1846 # and data collection functions back.
1826 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1847 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1827 filenode_collector(changedfiles))
1848 filenode_collector(changedfiles))
1828 for chnk in group:
1849 for chnk in group:
1829 yield chnk
1850 yield chnk
1830
1851
1831 # These are no longer needed, dereference and toss the memory for
1852 # These are no longer needed, dereference and toss the memory for
1832 # them.
1853 # them.
1833 msng_mnfst_lst = None
1854 msng_mnfst_lst = None
1834 msng_mnfst_set.clear()
1855 msng_mnfst_set.clear()
1835
1856
1836 if extranodes:
1857 if extranodes:
1837 for fname in extranodes:
1858 for fname in extranodes:
1838 if isinstance(fname, int):
1859 if isinstance(fname, int):
1839 continue
1860 continue
1840 msng_filenode_set.setdefault(fname, {})
1861 msng_filenode_set.setdefault(fname, {})
1841 changedfiles[fname] = 1
1862 changedfiles[fname] = 1
1842 # Go through all our files in order sorted by name.
1863 # Go through all our files in order sorted by name.
1843 for fname in util.sort(changedfiles):
1864 for fname in util.sort(changedfiles):
1844 filerevlog = self.file(fname)
1865 filerevlog = self.file(fname)
1845 if not len(filerevlog):
1866 if not len(filerevlog):
1846 raise util.Abort(_("empty or missing revlog for %s") % fname)
1867 raise util.Abort(_("empty or missing revlog for %s") % fname)
1847 # Toss out the filenodes that the recipient isn't really
1868 # Toss out the filenodes that the recipient isn't really
1848 # missing.
1869 # missing.
1849 if fname in msng_filenode_set:
1870 if fname in msng_filenode_set:
1850 prune_filenodes(fname, filerevlog)
1871 prune_filenodes(fname, filerevlog)
1851 add_extra_nodes(fname, msng_filenode_set[fname])
1872 add_extra_nodes(fname, msng_filenode_set[fname])
1852 msng_filenode_lst = msng_filenode_set[fname].keys()
1873 msng_filenode_lst = msng_filenode_set[fname].keys()
1853 else:
1874 else:
1854 msng_filenode_lst = []
1875 msng_filenode_lst = []
1855 # If any filenodes are left, generate the group for them,
1876 # If any filenodes are left, generate the group for them,
1856 # otherwise don't bother.
1877 # otherwise don't bother.
1857 if len(msng_filenode_lst) > 0:
1878 if len(msng_filenode_lst) > 0:
1858 yield changegroup.chunkheader(len(fname))
1879 yield changegroup.chunkheader(len(fname))
1859 yield fname
1880 yield fname
1860 # Sort the filenodes by their revision #
1881 # Sort the filenodes by their revision #
1861 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1882 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1862 # Create a group generator and only pass in a changenode
1883 # Create a group generator and only pass in a changenode
1863 # lookup function as we need to collect no information
1884 # lookup function as we need to collect no information
1864 # from filenodes.
1885 # from filenodes.
1865 group = filerevlog.group(msng_filenode_lst,
1886 group = filerevlog.group(msng_filenode_lst,
1866 lookup_filenode_link_func(fname))
1887 lookup_filenode_link_func(fname))
1867 for chnk in group:
1888 for chnk in group:
1868 yield chnk
1889 yield chnk
1869 if fname in msng_filenode_set:
1890 if fname in msng_filenode_set:
1870 # Don't need this anymore, toss it to free memory.
1891 # Don't need this anymore, toss it to free memory.
1871 del msng_filenode_set[fname]
1892 del msng_filenode_set[fname]
1872 # Signal that no more groups are left.
1893 # Signal that no more groups are left.
1873 yield changegroup.closechunk()
1894 yield changegroup.closechunk()
1874
1895
1875 if msng_cl_lst:
1896 if msng_cl_lst:
1876 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1897 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1877
1898
1878 return util.chunkbuffer(gengroup())
1899 return util.chunkbuffer(gengroup())
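
The generator above frames its output with changegroup.chunkheader() and ends each group with changegroup.closechunk(), and addchangegroup() further down consumes the same stream via changegroup.getchunk(). As a hedged sketch only (helper names are hypothetical, inferred from how those calls are used here rather than copied from the changegroup module), the framing can be pictured as a 4-byte big-endian length that counts itself, with a short or empty length marking the end of a group:

import struct

def sketch_chunkheader(payloadlen):
    # the length field includes its own four bytes plus the payload
    return struct.pack(">l", payloadlen + 4)

def sketch_closechunk():
    # an "empty" chunk signals the end of the current group
    return struct.pack(">l", 0)

def sketch_getchunk(fp):
    # read one chunk from a file-like object framed as above
    header = fp.read(4)
    if not header:
        return ""
    length = struct.unpack(">l", header)[0]
    if length <= 4:
        return ""        # end-of-group marker
    return fp.read(length - 4)
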
1879
1900
1880 def changegroup(self, basenodes, source):
1901 def changegroup(self, basenodes, source):
1881 # to avoid a race we use changegroupsubset() (issue1320)
1902 # to avoid a race we use changegroupsubset() (issue1320)
1882 return self.changegroupsubset(basenodes, self.heads(), source)
1903 return self.changegroupsubset(basenodes, self.heads(), source)
1883
1904
1884 def _changegroup(self, common, source):
1905 def _changegroup(self, common, source):
1885 """Generate a changegroup of all nodes that we have that a recipient
1906 """Generate a changegroup of all nodes that we have that a recipient
1886 doesn't.
1907 doesn't.
1887
1908
1888 This is much easier than the previous function as we can assume that
1909 This is much easier than the previous function as we can assume that
1889 the recipient has any changenode we aren't sending them.
1910 the recipient has any changenode we aren't sending them.
1890
1911
1891 common is the set of common nodes between remote and self"""
1912 common is the set of common nodes between remote and self"""
1892
1913
1893 self.hook('preoutgoing', throw=True, source=source)
1914 self.hook('preoutgoing', throw=True, source=source)
1894
1915
1895 cl = self.changelog
1916 cl = self.changelog
1896 nodes = cl.findmissing(common)
1917 nodes = cl.findmissing(common)
1897 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1918 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1898 self.changegroupinfo(nodes, source)
1919 self.changegroupinfo(nodes, source)
1899
1920
1900 def identity(x):
1921 def identity(x):
1901 return x
1922 return x
1902
1923
1903 def gennodelst(log):
1924 def gennodelst(log):
1904 for r in log:
1925 for r in log:
1905 if log.linkrev(r) in revset:
1926 if log.linkrev(r) in revset:
1906 yield log.node(r)
1927 yield log.node(r)
1907
1928
1908 def changed_file_collector(changedfileset):
1929 def changed_file_collector(changedfileset):
1909 def collect_changed_files(clnode):
1930 def collect_changed_files(clnode):
1910 c = cl.read(clnode)
1931 c = cl.read(clnode)
1911 for fname in c[3]:
1932 for fname in c[3]:
1912 changedfileset[fname] = 1
1933 changedfileset[fname] = 1
1913 return collect_changed_files
1934 return collect_changed_files
1914
1935
1915 def lookuprevlink_func(revlog):
1936 def lookuprevlink_func(revlog):
1916 def lookuprevlink(n):
1937 def lookuprevlink(n):
1917 return cl.node(revlog.linkrev(revlog.rev(n)))
1938 return cl.node(revlog.linkrev(revlog.rev(n)))
1918 return lookuprevlink
1939 return lookuprevlink
1919
1940
1920 def gengroup():
1941 def gengroup():
1921 # construct a list of all changed files
1942 # construct a list of all changed files
1922 changedfiles = {}
1943 changedfiles = {}
1923
1944
1924 for chnk in cl.group(nodes, identity,
1945 for chnk in cl.group(nodes, identity,
1925 changed_file_collector(changedfiles)):
1946 changed_file_collector(changedfiles)):
1926 yield chnk
1947 yield chnk
1927
1948
1928 mnfst = self.manifest
1949 mnfst = self.manifest
1929 nodeiter = gennodelst(mnfst)
1950 nodeiter = gennodelst(mnfst)
1930 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1951 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1931 yield chnk
1952 yield chnk
1932
1953
1933 for fname in util.sort(changedfiles):
1954 for fname in util.sort(changedfiles):
1934 filerevlog = self.file(fname)
1955 filerevlog = self.file(fname)
1935 if not len(filerevlog):
1956 if not len(filerevlog):
1936 raise util.Abort(_("empty or missing revlog for %s") % fname)
1957 raise util.Abort(_("empty or missing revlog for %s") % fname)
1937 nodeiter = gennodelst(filerevlog)
1958 nodeiter = gennodelst(filerevlog)
1938 nodeiter = list(nodeiter)
1959 nodeiter = list(nodeiter)
1939 if nodeiter:
1960 if nodeiter:
1940 yield changegroup.chunkheader(len(fname))
1961 yield changegroup.chunkheader(len(fname))
1941 yield fname
1962 yield fname
1942 lookup = lookuprevlink_func(filerevlog)
1963 lookup = lookuprevlink_func(filerevlog)
1943 for chnk in filerevlog.group(nodeiter, lookup):
1964 for chnk in filerevlog.group(nodeiter, lookup):
1944 yield chnk
1965 yield chnk
1945
1966
1946 yield changegroup.closechunk()
1967 yield changegroup.closechunk()
1947
1968
1948 if nodes:
1969 if nodes:
1949 self.hook('outgoing', node=hex(nodes[0]), source=source)
1970 self.hook('outgoing', node=hex(nodes[0]), source=source)
1950
1971
1951 return util.chunkbuffer(gengroup())
1972 return util.chunkbuffer(gengroup())
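
Both changegroup generators are wrapped in util.chunkbuffer() before being returned. The class below is a deliberately simplified, hypothetical stand-in for that wrapper, shown only to make its role clear: it turns a generator of strings into the file-like object that addchangegroup() reads with a plain read(n):

class chunkbuffersketch(object):
    """Simplified, illustrative stand-in for util.chunkbuffer."""
    def __init__(self, gen):
        self._iter = iter(gen)
        self._buf = ''
    def read(self, n):
        # pull chunks from the generator until n bytes are buffered (or EOF)
        while len(self._buf) < n:
            try:
                self._buf += self._iter.next()
            except StopIteration:
                break
        data, self._buf = self._buf[:n], self._buf[n:]
        return data
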
1952
1973
1953 def addchangegroup(self, source, srctype, url, emptyok=False):
1974 def addchangegroup(self, source, srctype, url, emptyok=False):
1954 """add changegroup to repo.
1975 """add changegroup to repo.
1955
1976
1956 return values:
1977 return values:
1957 - nothing changed or no source: 0
1978 - nothing changed or no source: 0
1958 - more heads than before: 1+added heads (2..n)
1979 - more heads than before: 1+added heads (2..n)
1959 - fewer heads than before: -1-removed heads (-2..-n)
1980 - fewer heads than before: -1-removed heads (-2..-n)
1960 - number of heads stays the same: 1
1981 - number of heads stays the same: 1
1961 """
1982 """
1962 def csmap(x):
1983 def csmap(x):
1963 self.ui.debug(_("add changeset %s\n") % short(x))
1984 self.ui.debug(_("add changeset %s\n") % short(x))
1964 return len(cl)
1985 return len(cl)
1965
1986
1966 def revmap(x):
1987 def revmap(x):
1967 return cl.rev(x)
1988 return cl.rev(x)
1968
1989
1969 if not source:
1990 if not source:
1970 return 0
1991 return 0
1971
1992
1972 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1993 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1973
1994
1974 changesets = files = revisions = 0
1995 changesets = files = revisions = 0
1975
1996
1976 # write changelog data to temp files so concurrent readers will not see
1997 # write changelog data to temp files so concurrent readers will not see
1977 # inconsistent view
1998 # inconsistent view
1978 cl = self.changelog
1999 cl = self.changelog
1979 cl.delayupdate()
2000 cl.delayupdate()
1980 oldheads = len(cl.heads())
2001 oldheads = len(cl.heads())
1981
2002
1982 tr = self.transaction()
2003 tr = self.transaction()
1983 try:
2004 try:
1984 trp = weakref.proxy(tr)
2005 trp = weakref.proxy(tr)
1985 # pull off the changeset group
2006 # pull off the changeset group
1986 self.ui.status(_("adding changesets\n"))
2007 self.ui.status(_("adding changesets\n"))
1987 cor = len(cl) - 1
2008 cor = len(cl) - 1
1988 chunkiter = changegroup.chunkiter(source)
2009 chunkiter = changegroup.chunkiter(source)
1989 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2010 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1990 raise util.Abort(_("received changelog group is empty"))
2011 raise util.Abort(_("received changelog group is empty"))
1991 cnr = len(cl) - 1
2012 cnr = len(cl) - 1
1992 changesets = cnr - cor
2013 changesets = cnr - cor
1993
2014
1994 # pull off the manifest group
2015 # pull off the manifest group
1995 self.ui.status(_("adding manifests\n"))
2016 self.ui.status(_("adding manifests\n"))
1996 chunkiter = changegroup.chunkiter(source)
2017 chunkiter = changegroup.chunkiter(source)
1997 # no need to check for empty manifest group here:
2018 # no need to check for empty manifest group here:
1998 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2019 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1999 # no new manifest will be created and the manifest group will
2020 # no new manifest will be created and the manifest group will
2000 # be empty during the pull
2021 # be empty during the pull
2001 self.manifest.addgroup(chunkiter, revmap, trp)
2022 self.manifest.addgroup(chunkiter, revmap, trp)
2002
2023
2003 # process the files
2024 # process the files
2004 self.ui.status(_("adding file changes\n"))
2025 self.ui.status(_("adding file changes\n"))
2005 while 1:
2026 while 1:
2006 f = changegroup.getchunk(source)
2027 f = changegroup.getchunk(source)
2007 if not f:
2028 if not f:
2008 break
2029 break
2009 self.ui.debug(_("adding %s revisions\n") % f)
2030 self.ui.debug(_("adding %s revisions\n") % f)
2010 fl = self.file(f)
2031 fl = self.file(f)
2011 o = len(fl)
2032 o = len(fl)
2012 chunkiter = changegroup.chunkiter(source)
2033 chunkiter = changegroup.chunkiter(source)
2013 if fl.addgroup(chunkiter, revmap, trp) is None:
2034 if fl.addgroup(chunkiter, revmap, trp) is None:
2014 raise util.Abort(_("received file revlog group is empty"))
2035 raise util.Abort(_("received file revlog group is empty"))
2015 revisions += len(fl) - o
2036 revisions += len(fl) - o
2016 files += 1
2037 files += 1
2017
2038
2018 # make changelog see real files again
2039 # make changelog see real files again
2019 cl.finalize(trp)
2040 cl.finalize(trp)
2020
2041
2021 newheads = len(self.changelog.heads())
2042 newheads = len(self.changelog.heads())
2022 heads = ""
2043 heads = ""
2023 if oldheads and newheads != oldheads:
2044 if oldheads and newheads != oldheads:
2024 heads = _(" (%+d heads)") % (newheads - oldheads)
2045 heads = _(" (%+d heads)") % (newheads - oldheads)
2025
2046
2026 self.ui.status(_("added %d changesets"
2047 self.ui.status(_("added %d changesets"
2027 " with %d changes to %d files%s\n")
2048 " with %d changes to %d files%s\n")
2028 % (changesets, revisions, files, heads))
2049 % (changesets, revisions, files, heads))
2029
2050
2030 if changesets > 0:
2051 if changesets > 0:
2031 self.hook('pretxnchangegroup', throw=True,
2052 self.hook('pretxnchangegroup', throw=True,
2032 node=hex(self.changelog.node(cor+1)), source=srctype,
2053 node=hex(self.changelog.node(cor+1)), source=srctype,
2033 url=url)
2054 url=url)
2034
2055
2035 tr.close()
2056 tr.close()
2036 finally:
2057 finally:
2037 del tr
2058 del tr
2038
2059
2039 if changesets > 0:
2060 if changesets > 0:
2040 # forcefully update the on-disk branch cache
2061 # forcefully update the on-disk branch cache
2041 self.ui.debug(_("updating the branch cache\n"))
2062 self.ui.debug(_("updating the branch cache\n"))
2042 self.branchtags()
2063 self.branchtags()
2043 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2064 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2044 source=srctype, url=url)
2065 source=srctype, url=url)
2045
2066
2046 for i in xrange(cor + 1, cnr + 1):
2067 for i in xrange(cor + 1, cnr + 1):
2047 self.hook("incoming", node=hex(self.changelog.node(i)),
2068 self.hook("incoming", node=hex(self.changelog.node(i)),
2048 source=srctype, url=url)
2069 source=srctype, url=url)
2049
2070
2050 # never return 0 here:
2071 # never return 0 here:
2051 if newheads < oldheads:
2072 if newheads < oldheads:
2052 return newheads - oldheads - 1
2073 return newheads - oldheads - 1
2053 else:
2074 else:
2054 return newheads - oldheads + 1
2075 return newheads - oldheads + 1
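
The return-value convention documented in the docstring above is compact enough to misread; the helper below is purely illustrative (not part of the repository API) and just decodes it for a hypothetical caller:

def describe_addchangegroup_result(ret):
    # interprets the documented convention: 0, 1, 2..n, or -2..-n
    if ret == 0:
        return 'nothing changed or no source'
    if ret == 1:
        return 'changesets added, number of heads unchanged'
    if ret > 1:
        return 'changesets added, %d new head(s)' % (ret - 1)
    return 'changesets added, %d head(s) removed' % (-ret - 1)
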
2055
2076
2056
2077
2057 def stream_in(self, remote):
2078 def stream_in(self, remote):
2058 fp = remote.stream_out()
2079 fp = remote.stream_out()
2059 l = fp.readline()
2080 l = fp.readline()
2060 try:
2081 try:
2061 resp = int(l)
2082 resp = int(l)
2062 except ValueError:
2083 except ValueError:
2063 raise error.ResponseError(
2084 raise error.ResponseError(
2064 _('Unexpected response from remote server:'), l)
2085 _('Unexpected response from remote server:'), l)
2065 if resp == 1:
2086 if resp == 1:
2066 raise util.Abort(_('operation forbidden by server'))
2087 raise util.Abort(_('operation forbidden by server'))
2067 elif resp == 2:
2088 elif resp == 2:
2068 raise util.Abort(_('locking the remote repository failed'))
2089 raise util.Abort(_('locking the remote repository failed'))
2069 elif resp != 0:
2090 elif resp != 0:
2070 raise util.Abort(_('the server sent an unknown error code'))
2091 raise util.Abort(_('the server sent an unknown error code'))
2071 self.ui.status(_('streaming all changes\n'))
2092 self.ui.status(_('streaming all changes\n'))
2072 l = fp.readline()
2093 l = fp.readline()
2073 try:
2094 try:
2074 total_files, total_bytes = map(int, l.split(' ', 1))
2095 total_files, total_bytes = map(int, l.split(' ', 1))
2075 except (ValueError, TypeError):
2096 except (ValueError, TypeError):
2076 raise error.ResponseError(
2097 raise error.ResponseError(
2077 _('Unexpected response from remote server:'), l)
2098 _('Unexpected response from remote server:'), l)
2078 self.ui.status(_('%d files to transfer, %s of data\n') %
2099 self.ui.status(_('%d files to transfer, %s of data\n') %
2079 (total_files, util.bytecount(total_bytes)))
2100 (total_files, util.bytecount(total_bytes)))
2080 start = time.time()
2101 start = time.time()
2081 for i in xrange(total_files):
2102 for i in xrange(total_files):
2082 # XXX doesn't support '\n' or '\r' in filenames
2103 # XXX doesn't support '\n' or '\r' in filenames
2083 l = fp.readline()
2104 l = fp.readline()
2084 try:
2105 try:
2085 name, size = l.split('\0', 1)
2106 name, size = l.split('\0', 1)
2086 size = int(size)
2107 size = int(size)
2087 except (ValueError, TypeError):
2108 except (ValueError, TypeError):
2088 raise error.ResponseError(
2109 raise error.ResponseError(
2089 _('Unexpected response from remote server:'), l)
2110 _('Unexpected response from remote server:'), l)
2090 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2111 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2091 ofp = self.sopener(name, 'w')
2112 ofp = self.sopener(name, 'w')
2092 for chunk in util.filechunkiter(fp, limit=size):
2113 for chunk in util.filechunkiter(fp, limit=size):
2093 ofp.write(chunk)
2114 ofp.write(chunk)
2094 ofp.close()
2115 ofp.close()
2095 elapsed = time.time() - start
2116 elapsed = time.time() - start
2096 if elapsed <= 0:
2117 if elapsed <= 0:
2097 elapsed = 0.001
2118 elapsed = 0.001
2098 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2119 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2099 (util.bytecount(total_bytes), elapsed,
2120 (util.bytecount(total_bytes), elapsed,
2100 util.bytecount(total_bytes / elapsed)))
2121 util.bytecount(total_bytes / elapsed)))
2101 self.invalidate()
2122 self.invalidate()
2102 return len(self.heads()) + 1
2123 return len(self.heads()) + 1
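
For orientation, the data stream_in() parses above has a simple line-oriented shape: a numeric status line, a "file count, byte count" line, then for each file a name\0size header followed by that many raw bytes. The generator below only illustrates that layout with made-up data; it is not the server-side stream_out() implementation:

def fake_stream_out(entries):
    # entries: list of (store_path, data) pairs -- hypothetical test data
    yield '0\n'                                  # status 0 == stream allowed
    total = sum(len(data) for _, data in entries)
    yield '%d %d\n' % (len(entries), total)      # file count, total bytes
    for name, data in entries:
        yield '%s\0%d\n' % (name, len(data))     # file name, NUL, size
        yield data                               # raw file contents
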
2103
2124
2104 def clone(self, remote, heads=[], stream=False):
2125 def clone(self, remote, heads=[], stream=False):
2105 '''clone remote repository.
2126 '''clone remote repository.
2106
2127
2107 keyword arguments:
2128 keyword arguments:
2108 heads: list of revs to clone (forces use of pull)
2129 heads: list of revs to clone (forces use of pull)
2109 stream: use streaming clone if possible'''
2130 stream: use streaming clone if possible'''
2110
2131
2111 # now, all clients that can request uncompressed clones can
2132 # now, all clients that can request uncompressed clones can
2112 # read repo formats supported by all servers that can serve
2133 # read repo formats supported by all servers that can serve
2113 # them.
2134 # them.
2114
2135
2115 # if revlog format changes, client will have to check version
2136 # if revlog format changes, client will have to check version
2116 # and format flags on "stream" capability, and use
2137 # and format flags on "stream" capability, and use
2117 # uncompressed only if compatible.
2138 # uncompressed only if compatible.
2118
2139
2119 if stream and not heads and remote.capable('stream'):
2140 if stream and not heads and remote.capable('stream'):
2120 return self.stream_in(remote)
2141 return self.stream_in(remote)
2121 return self.pull(remote, heads)
2142 return self.pull(remote, heads)
2122
2143
2123 # used to avoid circular references so destructors work
2144 # used to avoid circular references so destructors work
2124 def aftertrans(files):
2145 def aftertrans(files):
2125 renamefiles = [tuple(t) for t in files]
2146 renamefiles = [tuple(t) for t in files]
2126 def a():
2147 def a():
2127 for src, dest in renamefiles:
2148 for src, dest in renamefiles:
2128 util.rename(src, dest)
2149 util.rename(src, dest)
2129 return a
2150 return a
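
A brief, hypothetical usage sketch of aftertrans(): because the returned closure captures only plain (src, dest) tuples and never the repository itself, handing it to a transaction as its post-close callback does not create the circular reference the comment above warns about. The file names and the commented-out transaction call are illustrative, not taken from this file:

renames = [('store/journal', 'store/undo'),
           ('store/journal.dirstate', 'store/undo.dirstate')]
undo_renames = aftertrans(renames)
# e.g. passed to a transaction and run once it closes cleanly:
# tr = transaction.transaction(report, opener, journalpath, undo_renames)
# ... later ...
# undo_renames()   # performs the queued util.rename() calls
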
2130
2151
2131 def instance(ui, path, create):
2152 def instance(ui, path, create):
2132 return localrepository(ui, util.drop_scheme('file', path), create)
2153 return localrepository(ui, util.drop_scheme('file', path), create)
2133
2154
2134 def islocal(path):
2155 def islocal(path):
2135 return True
2156 return True