resolve: move reset to localrepo.commit...
Matt Mackall
r7848:89e05c02 default
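The actual change in this hunk is the removal of old lines 648-649, where commands.commit built a merge_.mergestate(repo) and called ms.reset(node) after a successful commit; per the commit message, that reset now happens inside localrepo.commit instead (the localrepo.py side is not part of this file's diff). Below is a minimal, hypothetical sketch of the pattern — cleanup moved from the command layer into the repository object's own commit method — using toy classes rather than Mercurial's real APIs:

# Toy illustration of the pattern behind this commit, not Mercurial's code:
# the command layer used to clear the merge state after committing; moving
# that reset into the repository's own commit() gives every caller the same
# behaviour. Class and method names here are hypothetical.

class MergeState(object):
    def __init__(self):
        self.unresolved = {'a.txt': 'conflict recorded during merge'}

    def reset(self, node=None):
        # forget recorded conflicts once `node` has been committed
        self.unresolved.clear()


class Repo(object):
    def __init__(self):
        self.mergestate = MergeState()
        self.tip = 0

    def commit(self, text):
        self.tip += 1
        node = self.tip
        # the reset now lives here instead of in the commit command, so
        # other callers of commit() no longer have to remember to do it
        self.mergestate.reset(node)
        return node


repo = Repo()
repo.commit('merge result')
assert not repo.mergestate.unresolved
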
@@ -1,3425 +1,3422 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _, gettext
9 from i18n import _, gettext
10 import os, re, sys
10 import os, re, sys
11 import hg, util, revlog, bundlerepo, extensions, copies, context, error
11 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import difflib, patch, time, help, mdiff, tempfile, url
12 import difflib, patch, time, help, mdiff, tempfile, url
13 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
13 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
14 import merge as merge_
14 import merge as merge_
15
15
16 # Commands start here, listed alphabetically
16 # Commands start here, listed alphabetically
17
17
18 def add(ui, repo, *pats, **opts):
18 def add(ui, repo, *pats, **opts):
19 """add the specified files on the next commit
19 """add the specified files on the next commit
20
20
21 Schedule files to be version controlled and added to the repository.
21 Schedule files to be version controlled and added to the repository.
22
22
23 The files will be added to the repository at the next commit. To
23 The files will be added to the repository at the next commit. To
24 undo an add before that, see hg revert.
24 undo an add before that, see hg revert.
25
25
26 If no names are given, add all files to the repository.
26 If no names are given, add all files to the repository.
27 """
27 """
28
28
29 rejected = None
29 rejected = None
30 exacts = {}
30 exacts = {}
31 names = []
31 names = []
32 m = cmdutil.match(repo, pats, opts)
32 m = cmdutil.match(repo, pats, opts)
33 m.bad = lambda x,y: True
33 m.bad = lambda x,y: True
34 for abs in repo.walk(m):
34 for abs in repo.walk(m):
35 if m.exact(abs):
35 if m.exact(abs):
36 if ui.verbose:
36 if ui.verbose:
37 ui.status(_('adding %s\n') % m.rel(abs))
37 ui.status(_('adding %s\n') % m.rel(abs))
38 names.append(abs)
38 names.append(abs)
39 exacts[abs] = 1
39 exacts[abs] = 1
40 elif abs not in repo.dirstate:
40 elif abs not in repo.dirstate:
41 ui.status(_('adding %s\n') % m.rel(abs))
41 ui.status(_('adding %s\n') % m.rel(abs))
42 names.append(abs)
42 names.append(abs)
43 if not opts.get('dry_run'):
43 if not opts.get('dry_run'):
44 rejected = repo.add(names)
44 rejected = repo.add(names)
45 rejected = [p for p in rejected if p in exacts]
45 rejected = [p for p in rejected if p in exacts]
46 return rejected and 1 or 0
46 return rejected and 1 or 0
47
47
48 def addremove(ui, repo, *pats, **opts):
48 def addremove(ui, repo, *pats, **opts):
49 """add all new files, delete all missing files
49 """add all new files, delete all missing files
50
50
51 Add all new files and remove all missing files from the repository.
51 Add all new files and remove all missing files from the repository.
52
52
53 New files are ignored if they match any of the patterns in .hgignore. As
53 New files are ignored if they match any of the patterns in .hgignore. As
54 with add, these changes take effect at the next commit.
54 with add, these changes take effect at the next commit.
55
55
56 Use the -s option to detect renamed files. With a parameter > 0,
56 Use the -s option to detect renamed files. With a parameter > 0,
57 this compares every removed file with every added file and records
57 this compares every removed file with every added file and records
58 those similar enough as renames. This option takes a percentage
58 those similar enough as renames. This option takes a percentage
59 between 0 (disabled) and 100 (files must be identical) as its
59 between 0 (disabled) and 100 (files must be identical) as its
60 parameter. Detecting renamed files this way can be expensive.
60 parameter. Detecting renamed files this way can be expensive.
61 """
61 """
62 try:
62 try:
63 sim = float(opts.get('similarity') or 0)
63 sim = float(opts.get('similarity') or 0)
64 except ValueError:
64 except ValueError:
65 raise util.Abort(_('similarity must be a number'))
65 raise util.Abort(_('similarity must be a number'))
66 if sim < 0 or sim > 100:
66 if sim < 0 or sim > 100:
67 raise util.Abort(_('similarity must be between 0 and 100'))
67 raise util.Abort(_('similarity must be between 0 and 100'))
68 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
68 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
69
69
70 def annotate(ui, repo, *pats, **opts):
70 def annotate(ui, repo, *pats, **opts):
71 """show changeset information per file line
71 """show changeset information per file line
72
72
73 List changes in files, showing the revision id responsible for each line
73 List changes in files, showing the revision id responsible for each line
74
74
75 This command is useful to discover who did a change or when a change took
75 This command is useful to discover who did a change or when a change took
76 place.
76 place.
77
77
78 Without the -a option, annotate will avoid processing files it
78 Without the -a option, annotate will avoid processing files it
79 detects as binary. With -a, annotate will generate an annotation
79 detects as binary. With -a, annotate will generate an annotation
80 anyway, probably with undesirable results.
80 anyway, probably with undesirable results.
81 """
81 """
82 datefunc = ui.quiet and util.shortdate or util.datestr
82 datefunc = ui.quiet and util.shortdate or util.datestr
83 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
83 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
84
84
85 if not pats:
85 if not pats:
86 raise util.Abort(_('at least one file name or pattern required'))
86 raise util.Abort(_('at least one file name or pattern required'))
87
87
88 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
88 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
89 ('number', lambda x: str(x[0].rev())),
89 ('number', lambda x: str(x[0].rev())),
90 ('changeset', lambda x: short(x[0].node())),
90 ('changeset', lambda x: short(x[0].node())),
91 ('date', getdate),
91 ('date', getdate),
92 ('follow', lambda x: x[0].path()),
92 ('follow', lambda x: x[0].path()),
93 ]
93 ]
94
94
95 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
95 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
96 and not opts.get('follow')):
96 and not opts.get('follow')):
97 opts['number'] = 1
97 opts['number'] = 1
98
98
99 linenumber = opts.get('line_number') is not None
99 linenumber = opts.get('line_number') is not None
100 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
100 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
101 raise util.Abort(_('at least one of -n/-c is required for -l'))
101 raise util.Abort(_('at least one of -n/-c is required for -l'))
102
102
103 funcmap = [func for op, func in opmap if opts.get(op)]
103 funcmap = [func for op, func in opmap if opts.get(op)]
104 if linenumber:
104 if linenumber:
105 lastfunc = funcmap[-1]
105 lastfunc = funcmap[-1]
106 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
106 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
107
107
108 ctx = repo[opts.get('rev')]
108 ctx = repo[opts.get('rev')]
109
109
110 m = cmdutil.match(repo, pats, opts)
110 m = cmdutil.match(repo, pats, opts)
111 for abs in ctx.walk(m):
111 for abs in ctx.walk(m):
112 fctx = ctx[abs]
112 fctx = ctx[abs]
113 if not opts.get('text') and util.binary(fctx.data()):
113 if not opts.get('text') and util.binary(fctx.data()):
114 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
114 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
115 continue
115 continue
116
116
117 lines = fctx.annotate(follow=opts.get('follow'),
117 lines = fctx.annotate(follow=opts.get('follow'),
118 linenumber=linenumber)
118 linenumber=linenumber)
119 pieces = []
119 pieces = []
120
120
121 for f in funcmap:
121 for f in funcmap:
122 l = [f(n) for n, dummy in lines]
122 l = [f(n) for n, dummy in lines]
123 if l:
123 if l:
124 ml = max(map(len, l))
124 ml = max(map(len, l))
125 pieces.append(["%*s" % (ml, x) for x in l])
125 pieces.append(["%*s" % (ml, x) for x in l])
126
126
127 if pieces:
127 if pieces:
128 for p, l in zip(zip(*pieces), lines):
128 for p, l in zip(zip(*pieces), lines):
129 ui.write("%s: %s" % (" ".join(p), l[1]))
129 ui.write("%s: %s" % (" ".join(p), l[1]))
130
130
131 def archive(ui, repo, dest, **opts):
131 def archive(ui, repo, dest, **opts):
132 '''create unversioned archive of a repository revision
132 '''create unversioned archive of a repository revision
133
133
134 By default, the revision used is the parent of the working
134 By default, the revision used is the parent of the working
135 directory; use "-r" to specify a different revision.
135 directory; use "-r" to specify a different revision.
136
136
137 To specify the type of archive to create, use "-t". Valid
137 To specify the type of archive to create, use "-t". Valid
138 types are:
138 types are:
139
139
140 "files" (default): a directory full of files
140 "files" (default): a directory full of files
141 "tar": tar archive, uncompressed
141 "tar": tar archive, uncompressed
142 "tbz2": tar archive, compressed using bzip2
142 "tbz2": tar archive, compressed using bzip2
143 "tgz": tar archive, compressed using gzip
143 "tgz": tar archive, compressed using gzip
144 "uzip": zip archive, uncompressed
144 "uzip": zip archive, uncompressed
145 "zip": zip archive, compressed using deflate
145 "zip": zip archive, compressed using deflate
146
146
147 The exact name of the destination archive or directory is given
147 The exact name of the destination archive or directory is given
148 using a format string; see "hg help export" for details.
148 using a format string; see "hg help export" for details.
149
149
150 Each member added to an archive file has a directory prefix
150 Each member added to an archive file has a directory prefix
151 prepended. Use "-p" to specify a format string for the prefix.
151 prepended. Use "-p" to specify a format string for the prefix.
152 The default is the basename of the archive, with suffixes removed.
152 The default is the basename of the archive, with suffixes removed.
153 '''
153 '''
154
154
155 ctx = repo[opts.get('rev')]
155 ctx = repo[opts.get('rev')]
156 if not ctx:
156 if not ctx:
157 raise util.Abort(_('no working directory: please specify a revision'))
157 raise util.Abort(_('no working directory: please specify a revision'))
158 node = ctx.node()
158 node = ctx.node()
159 dest = cmdutil.make_filename(repo, dest, node)
159 dest = cmdutil.make_filename(repo, dest, node)
160 if os.path.realpath(dest) == repo.root:
160 if os.path.realpath(dest) == repo.root:
161 raise util.Abort(_('repository root cannot be destination'))
161 raise util.Abort(_('repository root cannot be destination'))
162 matchfn = cmdutil.match(repo, [], opts)
162 matchfn = cmdutil.match(repo, [], opts)
163 kind = opts.get('type') or 'files'
163 kind = opts.get('type') or 'files'
164 prefix = opts.get('prefix')
164 prefix = opts.get('prefix')
165 if dest == '-':
165 if dest == '-':
166 if kind == 'files':
166 if kind == 'files':
167 raise util.Abort(_('cannot archive plain files to stdout'))
167 raise util.Abort(_('cannot archive plain files to stdout'))
168 dest = sys.stdout
168 dest = sys.stdout
169 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
169 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
170 prefix = cmdutil.make_filename(repo, prefix, node)
170 prefix = cmdutil.make_filename(repo, prefix, node)
171 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
171 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
172 matchfn, prefix)
172 matchfn, prefix)
173
173
174 def backout(ui, repo, node=None, rev=None, **opts):
174 def backout(ui, repo, node=None, rev=None, **opts):
175 '''reverse effect of earlier changeset
175 '''reverse effect of earlier changeset
176
176
177 Commit the backed out changes as a new changeset. The new
177 Commit the backed out changes as a new changeset. The new
178 changeset is a child of the backed out changeset.
178 changeset is a child of the backed out changeset.
179
179
180 If you back out a changeset other than the tip, a new head is
180 If you back out a changeset other than the tip, a new head is
181 created. This head will be the new tip and you should merge this
181 created. This head will be the new tip and you should merge this
182 backout changeset with another head (current one by default).
182 backout changeset with another head (current one by default).
183
183
184 The --merge option remembers the parent of the working directory
184 The --merge option remembers the parent of the working directory
185 before starting the backout, then merges the new head with that
185 before starting the backout, then merges the new head with that
186 changeset afterwards. This saves you from doing the merge by
186 changeset afterwards. This saves you from doing the merge by
187 hand. The result of this merge is not committed, as with a normal
187 hand. The result of this merge is not committed, as with a normal
188 merge.
188 merge.
189
189
190 See \'hg help dates\' for a list of formats valid for -d/--date.
190 See \'hg help dates\' for a list of formats valid for -d/--date.
191 '''
191 '''
192 if rev and node:
192 if rev and node:
193 raise util.Abort(_("please specify just one revision"))
193 raise util.Abort(_("please specify just one revision"))
194
194
195 if not rev:
195 if not rev:
196 rev = node
196 rev = node
197
197
198 if not rev:
198 if not rev:
199 raise util.Abort(_("please specify a revision to backout"))
199 raise util.Abort(_("please specify a revision to backout"))
200
200
201 date = opts.get('date')
201 date = opts.get('date')
202 if date:
202 if date:
203 opts['date'] = util.parsedate(date)
203 opts['date'] = util.parsedate(date)
204
204
205 cmdutil.bail_if_changed(repo)
205 cmdutil.bail_if_changed(repo)
206 node = repo.lookup(rev)
206 node = repo.lookup(rev)
207
207
208 op1, op2 = repo.dirstate.parents()
208 op1, op2 = repo.dirstate.parents()
209 a = repo.changelog.ancestor(op1, node)
209 a = repo.changelog.ancestor(op1, node)
210 if a != node:
210 if a != node:
211 raise util.Abort(_('cannot back out change on a different branch'))
211 raise util.Abort(_('cannot back out change on a different branch'))
212
212
213 p1, p2 = repo.changelog.parents(node)
213 p1, p2 = repo.changelog.parents(node)
214 if p1 == nullid:
214 if p1 == nullid:
215 raise util.Abort(_('cannot back out a change with no parents'))
215 raise util.Abort(_('cannot back out a change with no parents'))
216 if p2 != nullid:
216 if p2 != nullid:
217 if not opts.get('parent'):
217 if not opts.get('parent'):
218 raise util.Abort(_('cannot back out a merge changeset without '
218 raise util.Abort(_('cannot back out a merge changeset without '
219 '--parent'))
219 '--parent'))
220 p = repo.lookup(opts['parent'])
220 p = repo.lookup(opts['parent'])
221 if p not in (p1, p2):
221 if p not in (p1, p2):
222 raise util.Abort(_('%s is not a parent of %s') %
222 raise util.Abort(_('%s is not a parent of %s') %
223 (short(p), short(node)))
223 (short(p), short(node)))
224 parent = p
224 parent = p
225 else:
225 else:
226 if opts.get('parent'):
226 if opts.get('parent'):
227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
228 parent = p1
228 parent = p1
229
229
230 # the backout should appear on the same branch
230 # the backout should appear on the same branch
231 branch = repo.dirstate.branch()
231 branch = repo.dirstate.branch()
232 hg.clean(repo, node, show_stats=False)
232 hg.clean(repo, node, show_stats=False)
233 repo.dirstate.setbranch(branch)
233 repo.dirstate.setbranch(branch)
234 revert_opts = opts.copy()
234 revert_opts = opts.copy()
235 revert_opts['date'] = None
235 revert_opts['date'] = None
236 revert_opts['all'] = True
236 revert_opts['all'] = True
237 revert_opts['rev'] = hex(parent)
237 revert_opts['rev'] = hex(parent)
238 revert_opts['no_backup'] = None
238 revert_opts['no_backup'] = None
239 revert(ui, repo, **revert_opts)
239 revert(ui, repo, **revert_opts)
240 commit_opts = opts.copy()
240 commit_opts = opts.copy()
241 commit_opts['addremove'] = False
241 commit_opts['addremove'] = False
242 if not commit_opts['message'] and not commit_opts['logfile']:
242 if not commit_opts['message'] and not commit_opts['logfile']:
243 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
243 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
244 commit_opts['force_editor'] = True
244 commit_opts['force_editor'] = True
245 commit(ui, repo, **commit_opts)
245 commit(ui, repo, **commit_opts)
246 def nice(node):
246 def nice(node):
247 return '%d:%s' % (repo.changelog.rev(node), short(node))
247 return '%d:%s' % (repo.changelog.rev(node), short(node))
248 ui.status(_('changeset %s backs out changeset %s\n') %
248 ui.status(_('changeset %s backs out changeset %s\n') %
249 (nice(repo.changelog.tip()), nice(node)))
249 (nice(repo.changelog.tip()), nice(node)))
250 if op1 != node:
250 if op1 != node:
251 hg.clean(repo, op1, show_stats=False)
251 hg.clean(repo, op1, show_stats=False)
252 if opts.get('merge'):
252 if opts.get('merge'):
253 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
253 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
254 hg.merge(repo, hex(repo.changelog.tip()))
254 hg.merge(repo, hex(repo.changelog.tip()))
255 else:
255 else:
256 ui.status(_('the backout changeset is a new head - '
256 ui.status(_('the backout changeset is a new head - '
257 'do not forget to merge\n'))
257 'do not forget to merge\n'))
258 ui.status(_('(use "backout --merge" '
258 ui.status(_('(use "backout --merge" '
259 'if you want to auto-merge)\n'))
259 'if you want to auto-merge)\n'))
260
260
261 def bisect(ui, repo, rev=None, extra=None, command=None,
261 def bisect(ui, repo, rev=None, extra=None, command=None,
262 reset=None, good=None, bad=None, skip=None, noupdate=None):
262 reset=None, good=None, bad=None, skip=None, noupdate=None):
263 """subdivision search of changesets
263 """subdivision search of changesets
264
264
265 This command helps to find changesets which introduce problems.
265 This command helps to find changesets which introduce problems.
266 To use, mark the earliest changeset you know exhibits the problem
266 To use, mark the earliest changeset you know exhibits the problem
267 as bad, then mark the latest changeset which is free from the
267 as bad, then mark the latest changeset which is free from the
268 problem as good. Bisect will update your working directory to a
268 problem as good. Bisect will update your working directory to a
269 revision for testing (unless the --noupdate option is specified).
269 revision for testing (unless the --noupdate option is specified).
270 Once you have performed tests, mark the working directory as bad
270 Once you have performed tests, mark the working directory as bad
271 or good and bisect will either update to another candidate changeset
271 or good and bisect will either update to another candidate changeset
272 or announce that it has found the bad revision.
272 or announce that it has found the bad revision.
273
273
274 As a shortcut, you can also use the revision argument to mark a
274 As a shortcut, you can also use the revision argument to mark a
275 revision as good or bad without checking it out first.
275 revision as good or bad without checking it out first.
276
276
277 If you supply a command it will be used for automatic bisection. Its exit
277 If you supply a command it will be used for automatic bisection. Its exit
278 status will be used as flag to mark revision as bad or good. In case exit
278 status will be used as flag to mark revision as bad or good. In case exit
279 status is 0 the revision is marked as good, 125 - skipped, 127 (command not
279 status is 0 the revision is marked as good, 125 - skipped, 127 (command not
280 found) - bisection will be aborted; any other status bigger than 0 will
280 found) - bisection will be aborted; any other status bigger than 0 will
281 mark revision as bad.
281 mark revision as bad.
282 """
282 """
283 def print_result(nodes, good):
283 def print_result(nodes, good):
284 displayer = cmdutil.show_changeset(ui, repo, {})
284 displayer = cmdutil.show_changeset(ui, repo, {})
285 transition = (good and "good" or "bad")
285 transition = (good and "good" or "bad")
286 if len(nodes) == 1:
286 if len(nodes) == 1:
287 # narrowed it down to a single revision
287 # narrowed it down to a single revision
288 ui.write(_("The first %s revision is:\n") % transition)
288 ui.write(_("The first %s revision is:\n") % transition)
289 displayer.show(repo[nodes[0]])
289 displayer.show(repo[nodes[0]])
290 else:
290 else:
291 # multiple possible revisions
291 # multiple possible revisions
292 ui.write(_("Due to skipped revisions, the first "
292 ui.write(_("Due to skipped revisions, the first "
293 "%s revision could be any of:\n") % transition)
293 "%s revision could be any of:\n") % transition)
294 for n in nodes:
294 for n in nodes:
295 displayer.show(repo[n])
295 displayer.show(repo[n])
296
296
297 def check_state(state, interactive=True):
297 def check_state(state, interactive=True):
298 if not state['good'] or not state['bad']:
298 if not state['good'] or not state['bad']:
299 if (good or bad or skip or reset) and interactive:
299 if (good or bad or skip or reset) and interactive:
300 return
300 return
301 if not state['good']:
301 if not state['good']:
302 raise util.Abort(_('cannot bisect (no known good revisions)'))
302 raise util.Abort(_('cannot bisect (no known good revisions)'))
303 else:
303 else:
304 raise util.Abort(_('cannot bisect (no known bad revisions)'))
304 raise util.Abort(_('cannot bisect (no known bad revisions)'))
305 return True
305 return True
306
306
307 # backward compatibility
307 # backward compatibility
308 if rev in "good bad reset init".split():
308 if rev in "good bad reset init".split():
309 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
309 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
310 cmd, rev, extra = rev, extra, None
310 cmd, rev, extra = rev, extra, None
311 if cmd == "good":
311 if cmd == "good":
312 good = True
312 good = True
313 elif cmd == "bad":
313 elif cmd == "bad":
314 bad = True
314 bad = True
315 else:
315 else:
316 reset = True
316 reset = True
317 elif extra or good + bad + skip + reset + bool(command) > 1:
317 elif extra or good + bad + skip + reset + bool(command) > 1:
318 raise util.Abort(_('incompatible arguments'))
318 raise util.Abort(_('incompatible arguments'))
319
319
320 if reset:
320 if reset:
321 p = repo.join("bisect.state")
321 p = repo.join("bisect.state")
322 if os.path.exists(p):
322 if os.path.exists(p):
323 os.unlink(p)
323 os.unlink(p)
324 return
324 return
325
325
326 state = hbisect.load_state(repo)
326 state = hbisect.load_state(repo)
327
327
328 if command:
328 if command:
329 commandpath = util.find_exe(command)
329 commandpath = util.find_exe(command)
330 changesets = 1
330 changesets = 1
331 try:
331 try:
332 while changesets:
332 while changesets:
333 # update state
333 # update state
334 status = os.spawnl(os.P_WAIT, commandpath)
334 status = os.spawnl(os.P_WAIT, commandpath)
335 if status == 125:
335 if status == 125:
336 transition = "skip"
336 transition = "skip"
337 elif status == 0:
337 elif status == 0:
338 transition = "good"
338 transition = "good"
339 # status < 0 means process was killed
339 # status < 0 means process was killed
340 elif status == 127:
340 elif status == 127:
341 raise util.Abort(_("failed to execute %s") % command)
341 raise util.Abort(_("failed to execute %s") % command)
342 elif status < 0:
342 elif status < 0:
343 raise util.Abort(_("%s killed") % command)
343 raise util.Abort(_("%s killed") % command)
344 else:
344 else:
345 transition = "bad"
345 transition = "bad"
346 node = repo.lookup(rev or '.')
346 node = repo.lookup(rev or '.')
347 state[transition].append(node)
347 state[transition].append(node)
348 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
348 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
349 check_state(state, interactive=False)
349 check_state(state, interactive=False)
350 # bisect
350 # bisect
351 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
351 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
352 # update to next check
352 # update to next check
353 cmdutil.bail_if_changed(repo)
353 cmdutil.bail_if_changed(repo)
354 hg.clean(repo, nodes[0], show_stats=False)
354 hg.clean(repo, nodes[0], show_stats=False)
355 finally:
355 finally:
356 hbisect.save_state(repo, state)
356 hbisect.save_state(repo, state)
357 return print_result(nodes, not status)
357 return print_result(nodes, not status)
358
358
359 # update state
359 # update state
360 node = repo.lookup(rev or '.')
360 node = repo.lookup(rev or '.')
361 if good:
361 if good:
362 state['good'].append(node)
362 state['good'].append(node)
363 elif bad:
363 elif bad:
364 state['bad'].append(node)
364 state['bad'].append(node)
365 elif skip:
365 elif skip:
366 state['skip'].append(node)
366 state['skip'].append(node)
367
367
368 hbisect.save_state(repo, state)
368 hbisect.save_state(repo, state)
369
369
370 if not check_state(state):
370 if not check_state(state):
371 return
371 return
372
372
373 # actually bisect
373 # actually bisect
374 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
374 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
375 if changesets == 0:
375 if changesets == 0:
376 print_result(nodes, good)
376 print_result(nodes, good)
377 else:
377 else:
378 assert len(nodes) == 1 # only a single node can be tested next
378 assert len(nodes) == 1 # only a single node can be tested next
379 node = nodes[0]
379 node = nodes[0]
380 # compute the approximate number of remaining tests
380 # compute the approximate number of remaining tests
381 tests, size = 0, 2
381 tests, size = 0, 2
382 while size <= changesets:
382 while size <= changesets:
383 tests, size = tests + 1, size * 2
383 tests, size = tests + 1, size * 2
384 rev = repo.changelog.rev(node)
384 rev = repo.changelog.rev(node)
385 ui.write(_("Testing changeset %s:%s "
385 ui.write(_("Testing changeset %s:%s "
386 "(%s changesets remaining, ~%s tests)\n")
386 "(%s changesets remaining, ~%s tests)\n")
387 % (rev, short(node), changesets, tests))
387 % (rev, short(node), changesets, tests))
388 if not noupdate:
388 if not noupdate:
389 cmdutil.bail_if_changed(repo)
389 cmdutil.bail_if_changed(repo)
390 return hg.clean(repo, node)
390 return hg.clean(repo, node)
391
391
392 def branch(ui, repo, label=None, **opts):
392 def branch(ui, repo, label=None, **opts):
393 """set or show the current branch name
393 """set or show the current branch name
394
394
395 With no argument, show the current branch name. With one argument,
395 With no argument, show the current branch name. With one argument,
396 set the working directory branch name (the branch does not exist in
396 set the working directory branch name (the branch does not exist in
397 the repository until the next commit).
397 the repository until the next commit).
398
398
399 Unless --force is specified, branch will not let you set a
399 Unless --force is specified, branch will not let you set a
400 branch name that shadows an existing branch.
400 branch name that shadows an existing branch.
401
401
402 Use --clean to reset the working directory branch to that of the
402 Use --clean to reset the working directory branch to that of the
403 parent of the working directory, negating a previous branch change.
403 parent of the working directory, negating a previous branch change.
404
404
405 Use the command 'hg update' to switch to an existing branch.
405 Use the command 'hg update' to switch to an existing branch.
406 """
406 """
407
407
408 if opts.get('clean'):
408 if opts.get('clean'):
409 label = repo[None].parents()[0].branch()
409 label = repo[None].parents()[0].branch()
410 repo.dirstate.setbranch(label)
410 repo.dirstate.setbranch(label)
411 ui.status(_('reset working directory to branch %s\n') % label)
411 ui.status(_('reset working directory to branch %s\n') % label)
412 elif label:
412 elif label:
413 if not opts.get('force') and label in repo.branchtags():
413 if not opts.get('force') and label in repo.branchtags():
414 if label not in [p.branch() for p in repo.parents()]:
414 if label not in [p.branch() for p in repo.parents()]:
415 raise util.Abort(_('a branch of the same name already exists'
415 raise util.Abort(_('a branch of the same name already exists'
416 ' (use --force to override)'))
416 ' (use --force to override)'))
417 repo.dirstate.setbranch(util.fromlocal(label))
417 repo.dirstate.setbranch(util.fromlocal(label))
418 ui.status(_('marked working directory as branch %s\n') % label)
418 ui.status(_('marked working directory as branch %s\n') % label)
419 else:
419 else:
420 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
420 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
421
421
422 def branches(ui, repo, active=False):
422 def branches(ui, repo, active=False):
423 """list repository named branches
423 """list repository named branches
424
424
425 List the repository's named branches, indicating which ones are
425 List the repository's named branches, indicating which ones are
426 inactive. If active is specified, only show active branches.
426 inactive. If active is specified, only show active branches.
427
427
428 A branch is considered active if it contains repository heads.
428 A branch is considered active if it contains repository heads.
429
429
430 Use the command 'hg update' to switch to an existing branch.
430 Use the command 'hg update' to switch to an existing branch.
431 """
431 """
432 hexfunc = ui.debugflag and hex or short
432 hexfunc = ui.debugflag and hex or short
433 activebranches = [util.tolocal(repo[n].branch())
433 activebranches = [util.tolocal(repo[n].branch())
434 for n in repo.heads(closed=False)]
434 for n in repo.heads(closed=False)]
435 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
435 branches = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
436 for tag, node in repo.branchtags().items()])
436 for tag, node in repo.branchtags().items()])
437 branches.reverse()
437 branches.reverse()
438
438
439 for isactive, node, tag in branches:
439 for isactive, node, tag in branches:
440 if (not active) or isactive:
440 if (not active) or isactive:
441 if ui.quiet:
441 if ui.quiet:
442 ui.write("%s\n" % tag)
442 ui.write("%s\n" % tag)
443 else:
443 else:
444 hn = repo.lookup(node)
444 hn = repo.lookup(node)
445 if isactive:
445 if isactive:
446 notice = ''
446 notice = ''
447 elif hn not in repo.branchheads(tag, closed=False):
447 elif hn not in repo.branchheads(tag, closed=False):
448 notice = ' (closed)'
448 notice = ' (closed)'
449 else:
449 else:
450 notice = ' (inactive)'
450 notice = ' (inactive)'
451 rev = str(node).rjust(31 - util.colwidth(tag))
451 rev = str(node).rjust(31 - util.colwidth(tag))
452 data = tag, rev, hexfunc(hn), notice
452 data = tag, rev, hexfunc(hn), notice
453 ui.write("%s %s:%s%s\n" % data)
453 ui.write("%s %s:%s%s\n" % data)
454
454
455 def bundle(ui, repo, fname, dest=None, **opts):
455 def bundle(ui, repo, fname, dest=None, **opts):
456 """create a changegroup file
456 """create a changegroup file
457
457
458 Generate a compressed changegroup file collecting changesets not
458 Generate a compressed changegroup file collecting changesets not
459 known to be in another repository.
459 known to be in another repository.
460
460
461 If no destination repository is specified the destination is
461 If no destination repository is specified the destination is
462 assumed to have all the nodes specified by one or more --base
462 assumed to have all the nodes specified by one or more --base
463 parameters. To create a bundle containing all changesets, use
463 parameters. To create a bundle containing all changesets, use
464 --all (or --base null). To change the compression method applied,
464 --all (or --base null). To change the compression method applied,
465 use the -t option (by default, bundles are compressed using bz2).
465 use the -t option (by default, bundles are compressed using bz2).
466
466
467 The bundle file can then be transferred using conventional means and
467 The bundle file can then be transferred using conventional means and
468 applied to another repository with the unbundle or pull command.
468 applied to another repository with the unbundle or pull command.
469 This is useful when direct push and pull are not available or when
469 This is useful when direct push and pull are not available or when
470 exporting an entire repository is undesirable.
470 exporting an entire repository is undesirable.
471
471
472 Applying bundles preserves all changeset contents including
472 Applying bundles preserves all changeset contents including
473 permissions, copy/rename information, and revision history.
473 permissions, copy/rename information, and revision history.
474 """
474 """
475 revs = opts.get('rev') or None
475 revs = opts.get('rev') or None
476 if revs:
476 if revs:
477 revs = [repo.lookup(rev) for rev in revs]
477 revs = [repo.lookup(rev) for rev in revs]
478 if opts.get('all'):
478 if opts.get('all'):
479 base = ['null']
479 base = ['null']
480 else:
480 else:
481 base = opts.get('base')
481 base = opts.get('base')
482 if base:
482 if base:
483 if dest:
483 if dest:
484 raise util.Abort(_("--base is incompatible with specifying "
484 raise util.Abort(_("--base is incompatible with specifying "
485 "a destination"))
485 "a destination"))
486 base = [repo.lookup(rev) for rev in base]
486 base = [repo.lookup(rev) for rev in base]
487 # create the right base
487 # create the right base
488 # XXX: nodesbetween / changegroup* should be "fixed" instead
488 # XXX: nodesbetween / changegroup* should be "fixed" instead
489 o = []
489 o = []
490 has = {nullid: None}
490 has = {nullid: None}
491 for n in base:
491 for n in base:
492 has.update(repo.changelog.reachable(n))
492 has.update(repo.changelog.reachable(n))
493 if revs:
493 if revs:
494 visit = list(revs)
494 visit = list(revs)
495 else:
495 else:
496 visit = repo.changelog.heads()
496 visit = repo.changelog.heads()
497 seen = {}
497 seen = {}
498 while visit:
498 while visit:
499 n = visit.pop(0)
499 n = visit.pop(0)
500 parents = [p for p in repo.changelog.parents(n) if p not in has]
500 parents = [p for p in repo.changelog.parents(n) if p not in has]
501 if len(parents) == 0:
501 if len(parents) == 0:
502 o.insert(0, n)
502 o.insert(0, n)
503 else:
503 else:
504 for p in parents:
504 for p in parents:
505 if p not in seen:
505 if p not in seen:
506 seen[p] = 1
506 seen[p] = 1
507 visit.append(p)
507 visit.append(p)
508 else:
508 else:
509 cmdutil.setremoteconfig(ui, opts)
509 cmdutil.setremoteconfig(ui, opts)
510 dest, revs, checkout = hg.parseurl(
510 dest, revs, checkout = hg.parseurl(
511 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
511 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
512 other = hg.repository(ui, dest)
512 other = hg.repository(ui, dest)
513 o = repo.findoutgoing(other, force=opts.get('force'))
513 o = repo.findoutgoing(other, force=opts.get('force'))
514
514
515 if revs:
515 if revs:
516 cg = repo.changegroupsubset(o, revs, 'bundle')
516 cg = repo.changegroupsubset(o, revs, 'bundle')
517 else:
517 else:
518 cg = repo.changegroup(o, 'bundle')
518 cg = repo.changegroup(o, 'bundle')
519
519
520 bundletype = opts.get('type', 'bzip2').lower()
520 bundletype = opts.get('type', 'bzip2').lower()
521 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
521 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
522 bundletype = btypes.get(bundletype)
522 bundletype = btypes.get(bundletype)
523 if bundletype not in changegroup.bundletypes:
523 if bundletype not in changegroup.bundletypes:
524 raise util.Abort(_('unknown bundle type specified with --type'))
524 raise util.Abort(_('unknown bundle type specified with --type'))
525
525
526 changegroup.writebundle(cg, fname, bundletype)
526 changegroup.writebundle(cg, fname, bundletype)
527
527
528 def cat(ui, repo, file1, *pats, **opts):
528 def cat(ui, repo, file1, *pats, **opts):
529 """output the current or given revision of files
529 """output the current or given revision of files
530
530
531 Print the specified files as they were at the given revision.
531 Print the specified files as they were at the given revision.
532 If no revision is given, the parent of the working directory is used,
532 If no revision is given, the parent of the working directory is used,
533 or tip if no revision is checked out.
533 or tip if no revision is checked out.
534
534
535 Output may be to a file, in which case the name of the file is
535 Output may be to a file, in which case the name of the file is
536 given using a format string. The formatting rules are the same as
536 given using a format string. The formatting rules are the same as
537 for the export command, with the following additions:
537 for the export command, with the following additions:
538
538
539 %s basename of file being printed
539 %s basename of file being printed
540 %d dirname of file being printed, or '.' if in repo root
540 %d dirname of file being printed, or '.' if in repo root
541 %p root-relative path name of file being printed
541 %p root-relative path name of file being printed
542 """
542 """
543 ctx = repo[opts.get('rev')]
543 ctx = repo[opts.get('rev')]
544 err = 1
544 err = 1
545 m = cmdutil.match(repo, (file1,) + pats, opts)
545 m = cmdutil.match(repo, (file1,) + pats, opts)
546 for abs in ctx.walk(m):
546 for abs in ctx.walk(m):
547 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
547 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
548 data = ctx[abs].data()
548 data = ctx[abs].data()
549 if opts.get('decode'):
549 if opts.get('decode'):
550 data = repo.wwritedata(abs, data)
550 data = repo.wwritedata(abs, data)
551 fp.write(data)
551 fp.write(data)
552 err = 0
552 err = 0
553 return err
553 return err
554
554
555 def clone(ui, source, dest=None, **opts):
555 def clone(ui, source, dest=None, **opts):
556 """make a copy of an existing repository
556 """make a copy of an existing repository
557
557
558 Create a copy of an existing repository in a new directory.
558 Create a copy of an existing repository in a new directory.
559
559
560 If no destination directory name is specified, it defaults to the
560 If no destination directory name is specified, it defaults to the
561 basename of the source.
561 basename of the source.
562
562
563 The location of the source is added to the new repository's
563 The location of the source is added to the new repository's
564 .hg/hgrc file, as the default to be used for future pulls.
564 .hg/hgrc file, as the default to be used for future pulls.
565
565
566 For efficiency, hardlinks are used for cloning whenever the source
566 For efficiency, hardlinks are used for cloning whenever the source
567 and destination are on the same filesystem (note this applies only
567 and destination are on the same filesystem (note this applies only
568 to the repository data, not to the checked out files). Some
568 to the repository data, not to the checked out files). Some
569 filesystems, such as AFS, implement hardlinking incorrectly, but
569 filesystems, such as AFS, implement hardlinking incorrectly, but
570 do not report errors. In these cases, use the --pull option to
570 do not report errors. In these cases, use the --pull option to
571 avoid hardlinking.
571 avoid hardlinking.
572
572
573 In some cases, you can clone repositories and checked out files
573 In some cases, you can clone repositories and checked out files
574 using full hardlinks with
574 using full hardlinks with
575
575
576 $ cp -al REPO REPOCLONE
576 $ cp -al REPO REPOCLONE
577
577
578 This is the fastest way to clone, but it is not always safe. The
578 This is the fastest way to clone, but it is not always safe. The
579 operation is not atomic (making sure REPO is not modified during
579 operation is not atomic (making sure REPO is not modified during
580 the operation is up to you) and you have to make sure your editor
580 the operation is up to you) and you have to make sure your editor
581 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
581 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
582 this is not compatible with certain extensions that place their
582 this is not compatible with certain extensions that place their
583 metadata under the .hg directory, such as mq.
583 metadata under the .hg directory, such as mq.
584
584
585 If you use the -r option to clone up to a specific revision, no
585 If you use the -r option to clone up to a specific revision, no
586 subsequent revisions will be present in the cloned repository.
586 subsequent revisions will be present in the cloned repository.
587 This option implies --pull, even on local repositories.
587 This option implies --pull, even on local repositories.
588
588
589 If the -U option is used, the new clone will contain only a repository
589 If the -U option is used, the new clone will contain only a repository
590 (.hg) and no working copy (the working copy parent is the null revision).
590 (.hg) and no working copy (the working copy parent is the null revision).
591
591
592 See 'hg help urls' for valid source format details.
592 See 'hg help urls' for valid source format details.
593
593
594 It is possible to specify an ssh:// URL as the destination, but no
594 It is possible to specify an ssh:// URL as the destination, but no
595 .hg/hgrc and working directory will be created on the remote side.
595 .hg/hgrc and working directory will be created on the remote side.
596 Look at the help text for urls for important details about ssh:// URLs.
596 Look at the help text for urls for important details about ssh:// URLs.
597 """
597 """
598 cmdutil.setremoteconfig(ui, opts)
598 cmdutil.setremoteconfig(ui, opts)
599 hg.clone(ui, source, dest,
599 hg.clone(ui, source, dest,
600 pull=opts.get('pull'),
600 pull=opts.get('pull'),
601 stream=opts.get('uncompressed'),
601 stream=opts.get('uncompressed'),
602 rev=opts.get('rev'),
602 rev=opts.get('rev'),
603 update=not opts.get('noupdate'))
603 update=not opts.get('noupdate'))
604
604
605 def commit(ui, repo, *pats, **opts):
605 def commit(ui, repo, *pats, **opts):
606 """commit the specified files or all outstanding changes
606 """commit the specified files or all outstanding changes
607
607
608 Commit changes to the given files into the repository.
608 Commit changes to the given files into the repository.
609
609
610 If a list of files is omitted, all changes reported by "hg status"
610 If a list of files is omitted, all changes reported by "hg status"
611 will be committed.
611 will be committed.
612
612
613 If you are committing the result of a merge, do not provide any
613 If you are committing the result of a merge, do not provide any
614 file names or -I/-X filters.
614 file names or -I/-X filters.
615
615
616 If no commit message is specified, the configured editor is started to
616 If no commit message is specified, the configured editor is started to
617 prompt you for a message.
617 prompt you for a message.
618
618
619 See 'hg help dates' for a list of formats valid for -d/--date.
619 See 'hg help dates' for a list of formats valid for -d/--date.
620 """
620 """
621 extra = {}
621 extra = {}
622 if opts.get('close_branch'):
622 if opts.get('close_branch'):
623 extra['close'] = 1
623 extra['close'] = 1
624 def commitfunc(ui, repo, message, match, opts):
624 def commitfunc(ui, repo, message, match, opts):
625 return repo.commit(match.files(), message, opts.get('user'),
625 return repo.commit(match.files(), message, opts.get('user'),
626 opts.get('date'), match, force_editor=opts.get('force_editor'),
626 opts.get('date'), match, force_editor=opts.get('force_editor'),
627 extra=extra)
627 extra=extra)
628
628
629 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
629 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
630 if not node:
630 if not node:
631 return
631 return
632 cl = repo.changelog
632 cl = repo.changelog
633 rev = cl.rev(node)
633 rev = cl.rev(node)
634 parents = cl.parentrevs(rev)
634 parents = cl.parentrevs(rev)
635 if rev - 1 in parents:
635 if rev - 1 in parents:
636 # one of the parents was the old tip
636 # one of the parents was the old tip
637 pass
637 pass
638 elif (parents == (nullrev, nullrev) or
638 elif (parents == (nullrev, nullrev) or
639 len(cl.heads(cl.node(parents[0]))) > 1 and
639 len(cl.heads(cl.node(parents[0]))) > 1 and
640 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
640 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
641 ui.status(_('created new head\n'))
641 ui.status(_('created new head\n'))
642
642
643 if ui.debugflag:
643 if ui.debugflag:
644 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
644 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
645 elif ui.verbose:
645 elif ui.verbose:
646 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
646 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
647
647
648 ms = merge_.mergestate(repo)
649 ms.reset(node)
650
651 def copy(ui, repo, *pats, **opts):
648 def copy(ui, repo, *pats, **opts):
652 """mark files as copied for the next commit
649 """mark files as copied for the next commit
653
650
654 Mark dest as having copies of source files. If dest is a
651 Mark dest as having copies of source files. If dest is a
655 directory, copies are put in that directory. If dest is a file,
652 directory, copies are put in that directory. If dest is a file,
656 the source must be a single file.
653 the source must be a single file.
657
654
658 By default, this command copies the contents of files as they
655 By default, this command copies the contents of files as they
659 stand in the working directory. If invoked with --after, the
656 stand in the working directory. If invoked with --after, the
660 operation is recorded, but no copying is performed.
657 operation is recorded, but no copying is performed.
661
658
662 This command takes effect with the next commit. To undo a copy
659 This command takes effect with the next commit. To undo a copy
663 before that, see hg revert.
660 before that, see hg revert.
664 """
661 """
665 wlock = repo.wlock(False)
662 wlock = repo.wlock(False)
666 try:
663 try:
667 return cmdutil.copy(ui, repo, pats, opts)
664 return cmdutil.copy(ui, repo, pats, opts)
668 finally:
665 finally:
669 del wlock
666 del wlock
670
667
671 def debugancestor(ui, repo, *args):
668 def debugancestor(ui, repo, *args):
672 """find the ancestor revision of two revisions in a given index"""
669 """find the ancestor revision of two revisions in a given index"""
673 if len(args) == 3:
670 if len(args) == 3:
674 index, rev1, rev2 = args
671 index, rev1, rev2 = args
675 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
672 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
676 lookup = r.lookup
673 lookup = r.lookup
677 elif len(args) == 2:
674 elif len(args) == 2:
678 if not repo:
675 if not repo:
679 raise util.Abort(_("There is no Mercurial repository here "
676 raise util.Abort(_("There is no Mercurial repository here "
680 "(.hg not found)"))
677 "(.hg not found)"))
681 rev1, rev2 = args
678 rev1, rev2 = args
682 r = repo.changelog
679 r = repo.changelog
683 lookup = repo.lookup
680 lookup = repo.lookup
684 else:
681 else:
685 raise util.Abort(_('either two or three arguments required'))
682 raise util.Abort(_('either two or three arguments required'))
686 a = r.ancestor(lookup(rev1), lookup(rev2))
683 a = r.ancestor(lookup(rev1), lookup(rev2))
687 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
684 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
688
685
689 def debugcomplete(ui, cmd='', **opts):
686 def debugcomplete(ui, cmd='', **opts):
690 """returns the completion list associated with the given command"""
687 """returns the completion list associated with the given command"""
691
688
692 if opts.get('options'):
689 if opts.get('options'):
693 options = []
690 options = []
694 otables = [globalopts]
691 otables = [globalopts]
695 if cmd:
692 if cmd:
696 aliases, entry = cmdutil.findcmd(cmd, table, False)
693 aliases, entry = cmdutil.findcmd(cmd, table, False)
697 otables.append(entry[1])
694 otables.append(entry[1])
698 for t in otables:
695 for t in otables:
699 for o in t:
696 for o in t:
700 if o[0]:
697 if o[0]:
701 options.append('-%s' % o[0])
698 options.append('-%s' % o[0])
702 options.append('--%s' % o[1])
699 options.append('--%s' % o[1])
703 ui.write("%s\n" % "\n".join(options))
700 ui.write("%s\n" % "\n".join(options))
704 return
701 return
705
702
706 cmdlist = cmdutil.findpossible(cmd, table)
703 cmdlist = cmdutil.findpossible(cmd, table)
707 if ui.verbose:
704 if ui.verbose:
708 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
705 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
709 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
706 ui.write("%s\n" % "\n".join(util.sort(cmdlist)))
710
707
711 def debugfsinfo(ui, path = "."):
708 def debugfsinfo(ui, path = "."):
712 file('.debugfsinfo', 'w').write('')
709 file('.debugfsinfo', 'w').write('')
713 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
710 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
714 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
711 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
715 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
712 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
716 and 'yes' or 'no'))
713 and 'yes' or 'no'))
717 os.unlink('.debugfsinfo')
714 os.unlink('.debugfsinfo')
718
715
719 def debugrebuildstate(ui, repo, rev="tip"):
716 def debugrebuildstate(ui, repo, rev="tip"):
720 """rebuild the dirstate as it would look like for the given revision"""
717 """rebuild the dirstate as it would look like for the given revision"""
721 ctx = repo[rev]
718 ctx = repo[rev]
722 wlock = repo.wlock()
719 wlock = repo.wlock()
723 try:
720 try:
724 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
721 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
725 finally:
722 finally:
726 del wlock
723 del wlock
727
724
728 def debugcheckstate(ui, repo):
725 def debugcheckstate(ui, repo):
729 """validate the correctness of the current dirstate"""
726 """validate the correctness of the current dirstate"""
730 parent1, parent2 = repo.dirstate.parents()
727 parent1, parent2 = repo.dirstate.parents()
731 m1 = repo[parent1].manifest()
728 m1 = repo[parent1].manifest()
732 m2 = repo[parent2].manifest()
729 m2 = repo[parent2].manifest()
733 errors = 0
730 errors = 0
734 for f in repo.dirstate:
731 for f in repo.dirstate:
735 state = repo.dirstate[f]
732 state = repo.dirstate[f]
736 if state in "nr" and f not in m1:
733 if state in "nr" and f not in m1:
737 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
734 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
738 errors += 1
735 errors += 1
739 if state in "a" and f in m1:
736 if state in "a" and f in m1:
740 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
737 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
741 errors += 1
738 errors += 1
742 if state in "m" and f not in m1 and f not in m2:
739 if state in "m" and f not in m1 and f not in m2:
743 ui.warn(_("%s in state %s, but not in either manifest\n") %
740 ui.warn(_("%s in state %s, but not in either manifest\n") %
744 (f, state))
741 (f, state))
745 errors += 1
742 errors += 1
746 for f in m1:
743 for f in m1:
747 state = repo.dirstate[f]
744 state = repo.dirstate[f]
748 if state not in "nrm":
745 if state not in "nrm":
749 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
746 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
750 errors += 1
747 errors += 1
751 if errors:
748 if errors:
752 error = _(".hg/dirstate inconsistent with current parent's manifest")
749 error = _(".hg/dirstate inconsistent with current parent's manifest")
753 raise util.Abort(error)
750 raise util.Abort(error)
754
751
755 def showconfig(ui, repo, *values, **opts):
752 def showconfig(ui, repo, *values, **opts):
756 """show combined config settings from all hgrc files
753 """show combined config settings from all hgrc files
757
754
758 With no args, print names and values of all config items.
755 With no args, print names and values of all config items.
759
756
760 With one arg of the form section.name, print just the value of
757 With one arg of the form section.name, print just the value of
761 that config item.
758 that config item.
762
759
763 With multiple args, print names and values of all config items
760 With multiple args, print names and values of all config items
764 with matching section names."""
761 with matching section names."""
765
762
766 untrusted = bool(opts.get('untrusted'))
763 untrusted = bool(opts.get('untrusted'))
767 if values:
764 if values:
768 if len([v for v in values if '.' in v]) > 1:
765 if len([v for v in values if '.' in v]) > 1:
769 raise util.Abort(_('only one config item permitted'))
766 raise util.Abort(_('only one config item permitted'))
770 for section, name, value in ui.walkconfig(untrusted=untrusted):
767 for section, name, value in ui.walkconfig(untrusted=untrusted):
771 sectname = section + '.' + name
768 sectname = section + '.' + name
772 if values:
769 if values:
773 for v in values:
770 for v in values:
774 if v == section:
771 if v == section:
775 ui.write('%s=%s\n' % (sectname, value))
772 ui.write('%s=%s\n' % (sectname, value))
776 elif v == sectname:
773 elif v == sectname:
777 ui.write(value, '\n')
774 ui.write(value, '\n')
778 else:
775 else:
779 ui.write('%s=%s\n' % (sectname, value))
776 ui.write('%s=%s\n' % (sectname, value))
780
777
781 def debugsetparents(ui, repo, rev1, rev2=None):
778 def debugsetparents(ui, repo, rev1, rev2=None):
782 """manually set the parents of the current working directory
779 """manually set the parents of the current working directory
783
780
784 This is useful for writing repository conversion tools, but should
781 This is useful for writing repository conversion tools, but should
785 be used with care.
782 be used with care.
786 """
783 """
787
784
788 if not rev2:
785 if not rev2:
789 rev2 = hex(nullid)
786 rev2 = hex(nullid)
790
787
791 wlock = repo.wlock()
788 wlock = repo.wlock()
792 try:
789 try:
793 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
790 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
794 finally:
791 finally:
795 del wlock
792 del wlock
796
793
797 def debugstate(ui, repo, nodates=None):
794 def debugstate(ui, repo, nodates=None):
798 """show the contents of the current dirstate"""
795 """show the contents of the current dirstate"""
799 timestr = ""
796 timestr = ""
800 showdate = not nodates
797 showdate = not nodates
801 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
798 for file_, ent in util.sort(repo.dirstate._map.iteritems()):
802 if showdate:
799 if showdate:
803 if ent[3] == -1:
800 if ent[3] == -1:
804 # Pad or slice to locale representation
801 # Pad or slice to locale representation
805 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
802 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
806 timestr = 'unset'
803 timestr = 'unset'
807 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
804 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
808 else:
805 else:
809 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
806 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
810 if ent[1] & 020000:
807 if ent[1] & 020000:
811 mode = 'lnk'
808 mode = 'lnk'
812 else:
809 else:
813 mode = '%3o' % (ent[1] & 0777)
810 mode = '%3o' % (ent[1] & 0777)
814 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
811 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
815 for f in repo.dirstate.copies():
812 for f in repo.dirstate.copies():
816 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
813 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
817
814
818 def debugdata(ui, file_, rev):
815 def debugdata(ui, file_, rev):
819 """dump the contents of a data file revision"""
816 """dump the contents of a data file revision"""
820 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
817 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
821 try:
818 try:
822 ui.write(r.revision(r.lookup(rev)))
819 ui.write(r.revision(r.lookup(rev)))
823 except KeyError:
820 except KeyError:
824 raise util.Abort(_('invalid revision identifier %s') % rev)
821 raise util.Abort(_('invalid revision identifier %s') % rev)
825
822
826 def debugdate(ui, date, range=None, **opts):
823 def debugdate(ui, date, range=None, **opts):
827 """parse and display a date"""
824 """parse and display a date"""
828 if opts["extended"]:
825 if opts["extended"]:
829 d = util.parsedate(date, util.extendeddateformats)
826 d = util.parsedate(date, util.extendeddateformats)
830 else:
827 else:
831 d = util.parsedate(date)
828 d = util.parsedate(date)
832 ui.write("internal: %s %s\n" % d)
829 ui.write("internal: %s %s\n" % d)
833 ui.write("standard: %s\n" % util.datestr(d))
830 ui.write("standard: %s\n" % util.datestr(d))
834 if range:
831 if range:
835 m = util.matchdate(range)
832 m = util.matchdate(range)
836 ui.write("match: %s\n" % m(d[0]))
833 ui.write("match: %s\n" % m(d[0]))
837
834
838 def debugindex(ui, file_):
835 def debugindex(ui, file_):
839 """dump the contents of an index file"""
836 """dump the contents of an index file"""
840 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
837 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
841 ui.write(" rev offset length base linkrev"
838 ui.write(" rev offset length base linkrev"
842 " nodeid p1 p2\n")
839 " nodeid p1 p2\n")
843 for i in r:
840 for i in r:
844 node = r.node(i)
841 node = r.node(i)
845 try:
842 try:
846 pp = r.parents(node)
843 pp = r.parents(node)
847 except:
844 except:
848 pp = [nullid, nullid]
845 pp = [nullid, nullid]
849 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
846 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
850 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
847 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
851 short(node), short(pp[0]), short(pp[1])))
848 short(node), short(pp[0]), short(pp[1])))
852
849
853 def debugindexdot(ui, file_):
850 def debugindexdot(ui, file_):
854 """dump an index DAG as a .dot file"""
851 """dump an index DAG as a .dot file"""
855 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
852 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
856 ui.write("digraph G {\n")
853 ui.write("digraph G {\n")
857 for i in r:
854 for i in r:
858 node = r.node(i)
855 node = r.node(i)
859 pp = r.parents(node)
856 pp = r.parents(node)
860 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
857 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
861 if pp[1] != nullid:
858 if pp[1] != nullid:
862 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
859 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
863 ui.write("}\n")
860 ui.write("}\n")
864
861
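# A possible way to use the .dot output above, assuming Graphviz ('dot') is
# installed and the repository uses the store layout (paths may differ):
#
#   $ hg debugindexdot .hg/store/00manifest.i | dot -Tpng > dag.png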
865 def debuginstall(ui):
862 def debuginstall(ui):
866 '''test Mercurial installation'''
863 '''test Mercurial installation'''
867
864
868 def writetemp(contents):
865 def writetemp(contents):
869 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
866 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
870 f = os.fdopen(fd, "wb")
867 f = os.fdopen(fd, "wb")
871 f.write(contents)
868 f.write(contents)
872 f.close()
869 f.close()
873 return name
870 return name
874
871
875 problems = 0
872 problems = 0
876
873
877 # encoding
874 # encoding
878 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
875 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
879 try:
876 try:
880 util.fromlocal("test")
877 util.fromlocal("test")
881 except util.Abort, inst:
878 except util.Abort, inst:
882 ui.write(" %s\n" % inst)
879 ui.write(" %s\n" % inst)
883 ui.write(_(" (check that your locale is properly set)\n"))
880 ui.write(_(" (check that your locale is properly set)\n"))
884 problems += 1
881 problems += 1
885
882
886 # compiled modules
883 # compiled modules
887 ui.status(_("Checking extensions...\n"))
884 ui.status(_("Checking extensions...\n"))
888 try:
885 try:
889 import bdiff, mpatch, base85
886 import bdiff, mpatch, base85
890 except Exception, inst:
887 except Exception, inst:
891 ui.write(" %s\n" % inst)
888 ui.write(" %s\n" % inst)
892 ui.write(_(" One or more extensions could not be found"))
889 ui.write(_(" One or more extensions could not be found"))
893 ui.write(_(" (check that you compiled the extensions)\n"))
890 ui.write(_(" (check that you compiled the extensions)\n"))
894 problems += 1
891 problems += 1
895
892
896 # templates
893 # templates
897 ui.status(_("Checking templates...\n"))
894 ui.status(_("Checking templates...\n"))
898 try:
895 try:
899 import templater
896 import templater
900 t = templater.templater(templater.templatepath("map-cmdline.default"))
897 t = templater.templater(templater.templatepath("map-cmdline.default"))
901 except Exception, inst:
898 except Exception, inst:
902 ui.write(" %s\n" % inst)
899 ui.write(" %s\n" % inst)
903 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
900 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
904 problems += 1
901 problems += 1
905
902
906 # patch
903 # patch
907 ui.status(_("Checking patch...\n"))
904 ui.status(_("Checking patch...\n"))
908 patchproblems = 0
905 patchproblems = 0
909 a = "1\n2\n3\n4\n"
906 a = "1\n2\n3\n4\n"
910 b = "1\n2\n3\ninsert\n4\n"
907 b = "1\n2\n3\ninsert\n4\n"
911 fa = writetemp(a)
908 fa = writetemp(a)
912 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
909 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
913 os.path.basename(fa))
910 os.path.basename(fa))
914 fd = writetemp(d)
911 fd = writetemp(d)
915
912
916 files = {}
913 files = {}
917 try:
914 try:
918 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
915 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
919 except util.Abort, e:
916 except util.Abort, e:
920 ui.write(_(" patch call failed:\n"))
917 ui.write(_(" patch call failed:\n"))
921 ui.write(" " + str(e) + "\n")
918 ui.write(" " + str(e) + "\n")
922 patchproblems += 1
919 patchproblems += 1
923 else:
920 else:
924 if list(files) != [os.path.basename(fa)]:
921 if list(files) != [os.path.basename(fa)]:
925 ui.write(_(" unexpected patch output!\n"))
922 ui.write(_(" unexpected patch output!\n"))
926 patchproblems += 1
923 patchproblems += 1
927 a = file(fa).read()
924 a = file(fa).read()
928 if a != b:
925 if a != b:
929 ui.write(_(" patch test failed!\n"))
926 ui.write(_(" patch test failed!\n"))
930 patchproblems += 1
927 patchproblems += 1
931
928
932 if patchproblems:
929 if patchproblems:
933 if ui.config('ui', 'patch'):
930 if ui.config('ui', 'patch'):
934 ui.write(_(" (Current patch tool may be incompatible with patch,"
931 ui.write(_(" (Current patch tool may be incompatible with patch,"
935 " or misconfigured. Please check your .hgrc file)\n"))
932 " or misconfigured. Please check your .hgrc file)\n"))
936 else:
933 else:
937 ui.write(_(" Internal patcher failure, please report this error"
934 ui.write(_(" Internal patcher failure, please report this error"
938 " to http://www.selenic.com/mercurial/bts\n"))
935 " to http://www.selenic.com/mercurial/bts\n"))
939 problems += patchproblems
936 problems += patchproblems
940
937
941 os.unlink(fa)
938 os.unlink(fa)
942 os.unlink(fd)
939 os.unlink(fd)
943
940
944 # editor
941 # editor
945 ui.status(_("Checking commit editor...\n"))
942 ui.status(_("Checking commit editor...\n"))
946 editor = ui.geteditor()
943 editor = ui.geteditor()
947 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
944 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
948 if not cmdpath:
945 if not cmdpath:
949 if editor == 'vi':
946 if editor == 'vi':
950 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
947 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
951 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
948 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
952 else:
949 else:
953 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
950 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
954 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
951 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
955 problems += 1
952 problems += 1
956
953
957 # check username
954 # check username
958 ui.status(_("Checking username...\n"))
955 ui.status(_("Checking username...\n"))
959 user = os.environ.get("HGUSER")
956 user = os.environ.get("HGUSER")
960 if user is None:
957 if user is None:
961 user = ui.config("ui", "username")
958 user = ui.config("ui", "username")
962 if user is None:
959 if user is None:
963 user = os.environ.get("EMAIL")
960 user = os.environ.get("EMAIL")
964 if not user:
961 if not user:
965 ui.warn(" ")
962 ui.warn(" ")
966 ui.username()
963 ui.username()
967 ui.write(_(" (specify a username in your .hgrc file)\n"))
964 ui.write(_(" (specify a username in your .hgrc file)\n"))
968
965
969 if not problems:
966 if not problems:
970 ui.status(_("No problems detected\n"))
967 ui.status(_("No problems detected\n"))
971 else:
968 else:
972 ui.write(_("%s problems detected,"
969 ui.write(_("%s problems detected,"
973 " please check your install!\n") % problems)
970 " please check your install!\n") % problems)
974
971
975 return problems
972 return problems
976
973
977 def debugrename(ui, repo, file1, *pats, **opts):
974 def debugrename(ui, repo, file1, *pats, **opts):
978 """dump rename information"""
975 """dump rename information"""
979
976
980 ctx = repo[opts.get('rev')]
977 ctx = repo[opts.get('rev')]
981 m = cmdutil.match(repo, (file1,) + pats, opts)
978 m = cmdutil.match(repo, (file1,) + pats, opts)
982 for abs in ctx.walk(m):
979 for abs in ctx.walk(m):
983 fctx = ctx[abs]
980 fctx = ctx[abs]
984 o = fctx.filelog().renamed(fctx.filenode())
981 o = fctx.filelog().renamed(fctx.filenode())
985 rel = m.rel(abs)
982 rel = m.rel(abs)
986 if o:
983 if o:
987 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
984 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
988 else:
985 else:
989 ui.write(_("%s not renamed\n") % rel)
986 ui.write(_("%s not renamed\n") % rel)
990
987
991 def debugwalk(ui, repo, *pats, **opts):
988 def debugwalk(ui, repo, *pats, **opts):
992 """show how files match on given patterns"""
989 """show how files match on given patterns"""
993 m = cmdutil.match(repo, pats, opts)
990 m = cmdutil.match(repo, pats, opts)
994 items = list(repo.walk(m))
991 items = list(repo.walk(m))
995 if not items:
992 if not items:
996 return
993 return
997 fmt = 'f %%-%ds %%-%ds %%s' % (
994 fmt = 'f %%-%ds %%-%ds %%s' % (
998 max([len(abs) for abs in items]),
995 max([len(abs) for abs in items]),
999 max([len(m.rel(abs)) for abs in items]))
996 max([len(m.rel(abs)) for abs in items]))
1000 for abs in items:
997 for abs in items:
1001 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
998 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1002 ui.write("%s\n" % line.rstrip())
999 ui.write("%s\n" % line.rstrip())
1003
1000
1004 def diff(ui, repo, *pats, **opts):
1001 def diff(ui, repo, *pats, **opts):
1005 """diff repository (or selected files)
1002 """diff repository (or selected files)
1006
1003
1007 Show differences between revisions for the specified files.
1004 Show differences between revisions for the specified files.
1008
1005
1009 Differences between files are shown using the unified diff format.
1006 Differences between files are shown using the unified diff format.
1010
1007
1011 NOTE: diff may generate unexpected results for merges, as it will
1008 NOTE: diff may generate unexpected results for merges, as it will
1012 default to comparing against the working directory's first parent
1009 default to comparing against the working directory's first parent
1013 changeset if no revisions are specified.
1010 changeset if no revisions are specified.
1014
1011
1015 When two revision arguments are given, changes are shown
1012 When two revision arguments are given, changes are shown
1016 between those revisions. If only one revision is specified, then
1013 between those revisions. If only one revision is specified, then
1017 that revision is compared to the working directory; when no
1014 that revision is compared to the working directory; when no
1018 revisions are specified, the working directory files are compared
1015 revisions are specified, the working directory files are compared
1019 to its parent.
1016 to its parent.
1020
1017
1021 Without the -a option, diff will avoid generating diffs of files
1018 Without the -a option, diff will avoid generating diffs of files
1022 it detects as binary. With -a, diff will generate a diff anyway,
1019 it detects as binary. With -a, diff will generate a diff anyway,
1023 probably with undesirable results.
1020 probably with undesirable results.
1024
1021
1025 Use the --git option to generate diffs in the git extended diff
1022 Use the --git option to generate diffs in the git extended diff
1026 format. For more information, read hg help diffs.
1023 format. For more information, read hg help diffs.
1027 """
1024 """
1028
1025
1029 revs = opts.get('rev')
1026 revs = opts.get('rev')
1030 change = opts.get('change')
1027 change = opts.get('change')
1031
1028
1032 if revs and change:
1029 if revs and change:
1033 msg = _('cannot specify --rev and --change at the same time')
1030 msg = _('cannot specify --rev and --change at the same time')
1034 raise util.Abort(msg)
1031 raise util.Abort(msg)
1035 elif change:
1032 elif change:
1036 node2 = repo.lookup(change)
1033 node2 = repo.lookup(change)
1037 node1 = repo[node2].parents()[0].node()
1034 node1 = repo[node2].parents()[0].node()
1038 else:
1035 else:
1039 node1, node2 = cmdutil.revpair(repo, revs)
1036 node1, node2 = cmdutil.revpair(repo, revs)
1040
1037
1041 m = cmdutil.match(repo, pats, opts)
1038 m = cmdutil.match(repo, pats, opts)
1042 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1039 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1043 for chunk in it:
1040 for chunk in it:
1044 repo.ui.write(chunk)
1041 repo.ui.write(chunk)
1045
1042
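# Illustrative invocations of the diff command documented above; the revision
# names are placeholders, not revisions from any particular repository:
#
#   $ hg diff                   # working directory against its first parent
#   $ hg diff -r 1.0 -r 1.1     # changes between two revisions
#   $ hg diff --git -r 1.0      # git extended format against the working dir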
1046 def export(ui, repo, *changesets, **opts):
1043 def export(ui, repo, *changesets, **opts):
1047 """dump the header and diffs for one or more changesets
1044 """dump the header and diffs for one or more changesets
1048
1045
1049 Print the changeset header and diffs for one or more revisions.
1046 Print the changeset header and diffs for one or more revisions.
1050
1047
1051 The information shown in the changeset header is: author,
1048 The information shown in the changeset header is: author,
1052 changeset hash, parent(s) and commit comment.
1049 changeset hash, parent(s) and commit comment.
1053
1050
1054 NOTE: export may generate unexpected diff output for merge changesets,
1051 NOTE: export may generate unexpected diff output for merge changesets,
1055 as it will compare the merge changeset against its first parent only.
1052 as it will compare the merge changeset against its first parent only.
1056
1053
1057 Output may be to a file, in which case the name of the file is
1054 Output may be to a file, in which case the name of the file is
1058 given using a format string. The formatting rules are as follows:
1055 given using a format string. The formatting rules are as follows:
1059
1056
1060 %% literal "%" character
1057 %% literal "%" character
1061 %H changeset hash (40 bytes of hexadecimal)
1058 %H changeset hash (40 bytes of hexadecimal)
1062 %N number of patches being generated
1059 %N number of patches being generated
1063 %R changeset revision number
1060 %R changeset revision number
1064 %b basename of the exporting repository
1061 %b basename of the exporting repository
1065 %h short-form changeset hash (12 bytes of hexadecimal)
1062 %h short-form changeset hash (12 bytes of hexadecimal)
1066 %n zero-padded sequence number, starting at 1
1063 %n zero-padded sequence number, starting at 1
1067 %r zero-padded changeset revision number
1064 %r zero-padded changeset revision number
1068
1065
1069 Without the -a option, export will avoid generating diffs of files
1066 Without the -a option, export will avoid generating diffs of files
1070 it detects as binary. With -a, export will generate a diff anyway,
1067 it detects as binary. With -a, export will generate a diff anyway,
1071 probably with undesirable results.
1068 probably with undesirable results.
1072
1069
1073 Use the --git option to generate diffs in the git extended diff
1070 Use the --git option to generate diffs in the git extended diff
1074 format. Read the diffs help topic for more information.
1071 format. Read the diffs help topic for more information.
1075
1072
1076 With the --switch-parent option, the diff will be against the second
1073 With the --switch-parent option, the diff will be against the second
1077 parent. It can be useful to review a merge.
1074 parent. It can be useful to review a merge.
1078 """
1075 """
1079 if not changesets:
1076 if not changesets:
1080 raise util.Abort(_("export requires at least one changeset"))
1077 raise util.Abort(_("export requires at least one changeset"))
1081 revs = cmdutil.revrange(repo, changesets)
1078 revs = cmdutil.revrange(repo, changesets)
1082 if len(revs) > 1:
1079 if len(revs) > 1:
1083 ui.note(_('exporting patches:\n'))
1080 ui.note(_('exporting patches:\n'))
1084 else:
1081 else:
1085 ui.note(_('exporting patch:\n'))
1082 ui.note(_('exporting patch:\n'))
1086 patch.export(repo, revs, template=opts.get('output'),
1083 patch.export(repo, revs, template=opts.get('output'),
1087 switch_parent=opts.get('switch_parent'),
1084 switch_parent=opts.get('switch_parent'),
1088 opts=patch.diffopts(ui, opts))
1085 opts=patch.diffopts(ui, opts))
1089
1086
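# A sketch of the output format string documented above; the repository name
# and revision range are hypothetical:
#
#   $ hg export -o "%b-%n-of-%N.patch" 100:105
#
# would write files like "myrepo-1-of-6.patch" through "myrepo-6-of-6.patch",
# one per exported changeset.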
1090 def grep(ui, repo, pattern, *pats, **opts):
1087 def grep(ui, repo, pattern, *pats, **opts):
1091 """search for a pattern in specified files and revisions
1088 """search for a pattern in specified files and revisions
1092
1089
1093 Search revisions of files for a regular expression.
1090 Search revisions of files for a regular expression.
1094
1091
1095 This command behaves differently than Unix grep. It only accepts
1092 This command behaves differently than Unix grep. It only accepts
1096 Python/Perl regexps. It searches repository history, not the
1093 Python/Perl regexps. It searches repository history, not the
1097 working directory. It always prints the revision number in which
1094 working directory. It always prints the revision number in which
1098 a match appears.
1095 a match appears.
1099
1096
1100 By default, grep only prints output for the first revision of a
1097 By default, grep only prints output for the first revision of a
1101 file in which it finds a match. To get it to print every revision
1098 file in which it finds a match. To get it to print every revision
1102 that contains a change in match status ("-" for a match that
1099 that contains a change in match status ("-" for a match that
1103 becomes a non-match, or "+" for a non-match that becomes a match),
1100 becomes a non-match, or "+" for a non-match that becomes a match),
1104 use the --all flag.
1101 use the --all flag.
1105 """
1102 """
1106 reflags = 0
1103 reflags = 0
1107 if opts.get('ignore_case'):
1104 if opts.get('ignore_case'):
1108 reflags |= re.I
1105 reflags |= re.I
1109 try:
1106 try:
1110 regexp = re.compile(pattern, reflags)
1107 regexp = re.compile(pattern, reflags)
1111 except Exception, inst:
1108 except Exception, inst:
1112 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1109 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1113 return None
1110 return None
1114 sep, eol = ':', '\n'
1111 sep, eol = ':', '\n'
1115 if opts.get('print0'):
1112 if opts.get('print0'):
1116 sep = eol = '\0'
1113 sep = eol = '\0'
1117
1114
1118 fcache = {}
1115 fcache = {}
1119 def getfile(fn):
1116 def getfile(fn):
1120 if fn not in fcache:
1117 if fn not in fcache:
1121 fcache[fn] = repo.file(fn)
1118 fcache[fn] = repo.file(fn)
1122 return fcache[fn]
1119 return fcache[fn]
1123
1120
1124 def matchlines(body):
1121 def matchlines(body):
1125 begin = 0
1122 begin = 0
1126 linenum = 0
1123 linenum = 0
1127 while True:
1124 while True:
1128 match = regexp.search(body, begin)
1125 match = regexp.search(body, begin)
1129 if not match:
1126 if not match:
1130 break
1127 break
1131 mstart, mend = match.span()
1128 mstart, mend = match.span()
1132 linenum += body.count('\n', begin, mstart) + 1
1129 linenum += body.count('\n', begin, mstart) + 1
1133 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1130 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1134 begin = body.find('\n', mend) + 1 or len(body)
1131 begin = body.find('\n', mend) + 1 or len(body)
1135 lend = begin - 1
1132 lend = begin - 1
1136 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1133 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137
1134
1138 class linestate(object):
1135 class linestate(object):
1139 def __init__(self, line, linenum, colstart, colend):
1136 def __init__(self, line, linenum, colstart, colend):
1140 self.line = line
1137 self.line = line
1141 self.linenum = linenum
1138 self.linenum = linenum
1142 self.colstart = colstart
1139 self.colstart = colstart
1143 self.colend = colend
1140 self.colend = colend
1144
1141
1145 def __hash__(self):
1142 def __hash__(self):
1146 return hash((self.linenum, self.line))
1143 return hash((self.linenum, self.line))
1147
1144
1148 def __eq__(self, other):
1145 def __eq__(self, other):
1149 return self.line == other.line
1146 return self.line == other.line
1150
1147
1151 matches = {}
1148 matches = {}
1152 copies = {}
1149 copies = {}
1153 def grepbody(fn, rev, body):
1150 def grepbody(fn, rev, body):
1154 matches[rev].setdefault(fn, [])
1151 matches[rev].setdefault(fn, [])
1155 m = matches[rev][fn]
1152 m = matches[rev][fn]
1156 for lnum, cstart, cend, line in matchlines(body):
1153 for lnum, cstart, cend, line in matchlines(body):
1157 s = linestate(line, lnum, cstart, cend)
1154 s = linestate(line, lnum, cstart, cend)
1158 m.append(s)
1155 m.append(s)
1159
1156
1160 def difflinestates(a, b):
1157 def difflinestates(a, b):
1161 sm = difflib.SequenceMatcher(None, a, b)
1158 sm = difflib.SequenceMatcher(None, a, b)
1162 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1159 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1163 if tag == 'insert':
1160 if tag == 'insert':
1164 for i in xrange(blo, bhi):
1161 for i in xrange(blo, bhi):
1165 yield ('+', b[i])
1162 yield ('+', b[i])
1166 elif tag == 'delete':
1163 elif tag == 'delete':
1167 for i in xrange(alo, ahi):
1164 for i in xrange(alo, ahi):
1168 yield ('-', a[i])
1165 yield ('-', a[i])
1169 elif tag == 'replace':
1166 elif tag == 'replace':
1170 for i in xrange(alo, ahi):
1167 for i in xrange(alo, ahi):
1171 yield ('-', a[i])
1168 yield ('-', a[i])
1172 for i in xrange(blo, bhi):
1169 for i in xrange(blo, bhi):
1173 yield ('+', b[i])
1170 yield ('+', b[i])
1174
1171
1175 prev = {}
1172 prev = {}
1176 def display(fn, rev, states, prevstates):
1173 def display(fn, rev, states, prevstates):
1177 datefunc = ui.quiet and util.shortdate or util.datestr
1174 datefunc = ui.quiet and util.shortdate or util.datestr
1178 found = False
1175 found = False
1179 filerevmatches = {}
1176 filerevmatches = {}
1180 r = prev.get(fn, -1)
1177 r = prev.get(fn, -1)
1181 if opts.get('all'):
1178 if opts.get('all'):
1182 iter = difflinestates(states, prevstates)
1179 iter = difflinestates(states, prevstates)
1183 else:
1180 else:
1184 iter = [('', l) for l in prevstates]
1181 iter = [('', l) for l in prevstates]
1185 for change, l in iter:
1182 for change, l in iter:
1186 cols = [fn, str(r)]
1183 cols = [fn, str(r)]
1187 if opts.get('line_number'):
1184 if opts.get('line_number'):
1188 cols.append(str(l.linenum))
1185 cols.append(str(l.linenum))
1189 if opts.get('all'):
1186 if opts.get('all'):
1190 cols.append(change)
1187 cols.append(change)
1191 if opts.get('user'):
1188 if opts.get('user'):
1192 cols.append(ui.shortuser(get(r)[1]))
1189 cols.append(ui.shortuser(get(r)[1]))
1193 if opts.get('date'):
1190 if opts.get('date'):
1194 cols.append(datefunc(get(r)[2]))
1191 cols.append(datefunc(get(r)[2]))
1195 if opts.get('files_with_matches'):
1192 if opts.get('files_with_matches'):
1196 c = (fn, r)
1193 c = (fn, r)
1197 if c in filerevmatches:
1194 if c in filerevmatches:
1198 continue
1195 continue
1199 filerevmatches[c] = 1
1196 filerevmatches[c] = 1
1200 else:
1197 else:
1201 cols.append(l.line)
1198 cols.append(l.line)
1202 ui.write(sep.join(cols), eol)
1199 ui.write(sep.join(cols), eol)
1203 found = True
1200 found = True
1204 return found
1201 return found
1205
1202
1206 fstate = {}
1203 fstate = {}
1207 skip = {}
1204 skip = {}
1208 get = util.cachefunc(lambda r: repo[r].changeset())
1205 get = util.cachefunc(lambda r: repo[r].changeset())
1209 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1206 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1210 found = False
1207 found = False
1211 follow = opts.get('follow')
1208 follow = opts.get('follow')
1212 for st, rev, fns in changeiter:
1209 for st, rev, fns in changeiter:
1213 if st == 'window':
1210 if st == 'window':
1214 matches.clear()
1211 matches.clear()
1215 elif st == 'add':
1212 elif st == 'add':
1216 ctx = repo[rev]
1213 ctx = repo[rev]
1217 matches[rev] = {}
1214 matches[rev] = {}
1218 for fn in fns:
1215 for fn in fns:
1219 if fn in skip:
1216 if fn in skip:
1220 continue
1217 continue
1221 try:
1218 try:
1222 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1219 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1223 fstate.setdefault(fn, [])
1220 fstate.setdefault(fn, [])
1224 if follow:
1221 if follow:
1225 copied = getfile(fn).renamed(ctx.filenode(fn))
1222 copied = getfile(fn).renamed(ctx.filenode(fn))
1226 if copied:
1223 if copied:
1227 copies.setdefault(rev, {})[fn] = copied[0]
1224 copies.setdefault(rev, {})[fn] = copied[0]
1228 except error.LookupError:
1225 except error.LookupError:
1229 pass
1226 pass
1230 elif st == 'iter':
1227 elif st == 'iter':
1231 for fn, m in util.sort(matches[rev].items()):
1228 for fn, m in util.sort(matches[rev].items()):
1232 copy = copies.get(rev, {}).get(fn)
1229 copy = copies.get(rev, {}).get(fn)
1233 if fn in skip:
1230 if fn in skip:
1234 if copy:
1231 if copy:
1235 skip[copy] = True
1232 skip[copy] = True
1236 continue
1233 continue
1237 if fn in prev or fstate[fn]:
1234 if fn in prev or fstate[fn]:
1238 r = display(fn, rev, m, fstate[fn])
1235 r = display(fn, rev, m, fstate[fn])
1239 found = found or r
1236 found = found or r
1240 if r and not opts.get('all'):
1237 if r and not opts.get('all'):
1241 skip[fn] = True
1238 skip[fn] = True
1242 if copy:
1239 if copy:
1243 skip[copy] = True
1240 skip[copy] = True
1244 fstate[fn] = m
1241 fstate[fn] = m
1245 if copy:
1242 if copy:
1246 fstate[copy] = m
1243 fstate[copy] = m
1247 prev[fn] = rev
1244 prev[fn] = rev
1248
1245
1249 for fn, state in util.sort(fstate.items()):
1246 for fn, state in util.sort(fstate.items()):
1250 if fn in skip:
1247 if fn in skip:
1251 continue
1248 continue
1252 if fn not in copies.get(prev[fn], {}):
1249 if fn not in copies.get(prev[fn], {}):
1253 found = display(fn, rev, {}, state) or found
1250 found = display(fn, rev, {}, state) or found
1254 return (not found and 1) or 0
1251 return (not found and 1) or 0
1255
1252
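# Illustrative invocations of grep as documented above; the pattern and path
# are placeholders:
#
#   $ hg grep -n TODO src/       # first revision of each file with a match
#   $ hg grep --all TODO src/    # every revision where the match appears
#                                # ("+") or disappears ("-")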
1256 def heads(ui, repo, *branchrevs, **opts):
1253 def heads(ui, repo, *branchrevs, **opts):
1257 """show current repository heads or show branch heads
1254 """show current repository heads or show branch heads
1258
1255
1259 With no arguments, show all repository head changesets.
1256 With no arguments, show all repository head changesets.
1260
1257
1261 If branch or revisions names are given this will show the heads of
1258 If branch or revisions names are given this will show the heads of
1262 the specified branches or the branches those revisions are tagged
1259 the specified branches or the branches those revisions are tagged
1263 with.
1260 with.
1264
1261
1265 Repository "heads" are changesets that don't have child
1262 Repository "heads" are changesets that don't have child
1266 changesets. They are where development generally takes place and
1263 changesets. They are where development generally takes place and
1267 are the usual targets for update and merge operations.
1264 are the usual targets for update and merge operations.
1268
1265
1269 Branch heads are changesets that have a given branch tag, but have
1266 Branch heads are changesets that have a given branch tag, but have
1270 no child changesets with that tag. They are usually where
1267 no child changesets with that tag. They are usually where
1271 development on the given branch takes place.
1268 development on the given branch takes place.
1272 """
1269 """
1273 if opts.get('rev'):
1270 if opts.get('rev'):
1274 start = repo.lookup(opts['rev'])
1271 start = repo.lookup(opts['rev'])
1275 else:
1272 else:
1276 start = None
1273 start = None
1277 closed = not opts.get('active')
1274 closed = not opts.get('active')
1278 if not branchrevs:
1275 if not branchrevs:
1279 # Assume we're looking for repo-wide heads if no revs were specified.
1276 # Assume we're looking for repo-wide heads if no revs were specified.
1280 heads = repo.heads(start, closed=closed)
1277 heads = repo.heads(start, closed=closed)
1281 else:
1278 else:
1282 heads = []
1279 heads = []
1283 visitedset = util.set()
1280 visitedset = util.set()
1284 for branchrev in branchrevs:
1281 for branchrev in branchrevs:
1285 branch = repo[branchrev].branch()
1282 branch = repo[branchrev].branch()
1286 if branch in visitedset:
1283 if branch in visitedset:
1287 continue
1284 continue
1288 visitedset.add(branch)
1285 visitedset.add(branch)
1289 bheads = repo.branchheads(branch, start, closed=closed)
1286 bheads = repo.branchheads(branch, start, closed=closed)
1290 if not bheads:
1287 if not bheads:
1291 if branch != branchrev:
1288 if branch != branchrev:
1292 ui.warn(_("no changes on branch %s containing %s are "
1289 ui.warn(_("no changes on branch %s containing %s are "
1293 "reachable from %s\n")
1290 "reachable from %s\n")
1294 % (branch, branchrev, opts.get('rev')))
1291 % (branch, branchrev, opts.get('rev')))
1295 else:
1292 else:
1296 ui.warn(_("no changes on branch %s are reachable from %s\n")
1293 ui.warn(_("no changes on branch %s are reachable from %s\n")
1297 % (branch, opts.get('rev')))
1294 % (branch, opts.get('rev')))
1298 heads.extend(bheads)
1295 heads.extend(bheads)
1299 if not heads:
1296 if not heads:
1300 return 1
1297 return 1
1301 displayer = cmdutil.show_changeset(ui, repo, opts)
1298 displayer = cmdutil.show_changeset(ui, repo, opts)
1302 for n in heads:
1299 for n in heads:
1303 displayer.show(repo[n])
1300 displayer.show(repo[n])
1304
1301
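# Illustrative invocations of heads as documented above; the branch names are
# placeholders:
#
#   $ hg heads                   # all repository heads
#   $ hg heads default stable    # heads of the named branches only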
1305 def help_(ui, name=None, with_version=False):
1302 def help_(ui, name=None, with_version=False):
1306 """show help for a given topic or a help overview
1303 """show help for a given topic or a help overview
1307
1304
1308 With no arguments, print a list of commands and short help.
1305 With no arguments, print a list of commands and short help.
1309
1306
1310 Given a topic, extension, or command name, print help for that topic."""
1307 Given a topic, extension, or command name, print help for that topic."""
1311 option_lists = []
1308 option_lists = []
1312
1309
1313 def addglobalopts(aliases):
1310 def addglobalopts(aliases):
1314 if ui.verbose:
1311 if ui.verbose:
1315 option_lists.append((_("global options:"), globalopts))
1312 option_lists.append((_("global options:"), globalopts))
1316 if name == 'shortlist':
1313 if name == 'shortlist':
1317 option_lists.append((_('use "hg help" for the full list '
1314 option_lists.append((_('use "hg help" for the full list '
1318 'of commands'), ()))
1315 'of commands'), ()))
1319 else:
1316 else:
1320 if name == 'shortlist':
1317 if name == 'shortlist':
1321 msg = _('use "hg help" for the full list of commands '
1318 msg = _('use "hg help" for the full list of commands '
1322 'or "hg -v" for details')
1319 'or "hg -v" for details')
1323 elif aliases:
1320 elif aliases:
1324 msg = _('use "hg -v help%s" to show aliases and '
1321 msg = _('use "hg -v help%s" to show aliases and '
1325 'global options') % (name and " " + name or "")
1322 'global options') % (name and " " + name or "")
1326 else:
1323 else:
1327 msg = _('use "hg -v help %s" to show global options') % name
1324 msg = _('use "hg -v help %s" to show global options') % name
1328 option_lists.append((msg, ()))
1325 option_lists.append((msg, ()))
1329
1326
1330 def helpcmd(name):
1327 def helpcmd(name):
1331 if with_version:
1328 if with_version:
1332 version_(ui)
1329 version_(ui)
1333 ui.write('\n')
1330 ui.write('\n')
1334
1331
1335 try:
1332 try:
1336 aliases, i = cmdutil.findcmd(name, table, False)
1333 aliases, i = cmdutil.findcmd(name, table, False)
1337 except error.AmbiguousCommand, inst:
1334 except error.AmbiguousCommand, inst:
1338 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1335 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1339 helplist(_('list of commands:\n\n'), select)
1336 helplist(_('list of commands:\n\n'), select)
1340 return
1337 return
1341
1338
1342 # synopsis
1339 # synopsis
1343 if len(i) > 2:
1340 if len(i) > 2:
1344 if i[2].startswith('hg'):
1341 if i[2].startswith('hg'):
1345 ui.write("%s\n" % i[2])
1342 ui.write("%s\n" % i[2])
1346 else:
1343 else:
1347 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1344 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1348 else:
1345 else:
1349 ui.write('hg %s\n' % aliases[0])
1346 ui.write('hg %s\n' % aliases[0])
1350
1347
1351 # aliases
1348 # aliases
1352 if not ui.quiet and len(aliases) > 1:
1349 if not ui.quiet and len(aliases) > 1:
1353 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1350 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1354
1351
1355 # description
1352 # description
1356 doc = gettext(i[0].__doc__)
1353 doc = gettext(i[0].__doc__)
1357 if not doc:
1354 if not doc:
1358 doc = _("(no help text available)")
1355 doc = _("(no help text available)")
1359 if ui.quiet:
1356 if ui.quiet:
1360 doc = doc.splitlines(0)[0]
1357 doc = doc.splitlines(0)[0]
1361 ui.write("\n%s\n" % doc.rstrip())
1358 ui.write("\n%s\n" % doc.rstrip())
1362
1359
1363 if not ui.quiet:
1360 if not ui.quiet:
1364 # options
1361 # options
1365 if i[1]:
1362 if i[1]:
1366 option_lists.append((_("options:\n"), i[1]))
1363 option_lists.append((_("options:\n"), i[1]))
1367
1364
1368 addglobalopts(False)
1365 addglobalopts(False)
1369
1366
1370 def helplist(header, select=None):
1367 def helplist(header, select=None):
1371 h = {}
1368 h = {}
1372 cmds = {}
1369 cmds = {}
1373 for c, e in table.iteritems():
1370 for c, e in table.iteritems():
1374 f = c.split("|", 1)[0]
1371 f = c.split("|", 1)[0]
1375 if select and not select(f):
1372 if select and not select(f):
1376 continue
1373 continue
1377 if (not select and name != 'shortlist' and
1374 if (not select and name != 'shortlist' and
1378 e[0].__module__ != __name__):
1375 e[0].__module__ != __name__):
1379 continue
1376 continue
1380 if name == "shortlist" and not f.startswith("^"):
1377 if name == "shortlist" and not f.startswith("^"):
1381 continue
1378 continue
1382 f = f.lstrip("^")
1379 f = f.lstrip("^")
1383 if not ui.debugflag and f.startswith("debug"):
1380 if not ui.debugflag and f.startswith("debug"):
1384 continue
1381 continue
1385 doc = gettext(e[0].__doc__)
1382 doc = gettext(e[0].__doc__)
1386 if not doc:
1383 if not doc:
1387 doc = _("(no help text available)")
1384 doc = _("(no help text available)")
1388 h[f] = doc.splitlines(0)[0].rstrip()
1385 h[f] = doc.splitlines(0)[0].rstrip()
1389 cmds[f] = c.lstrip("^")
1386 cmds[f] = c.lstrip("^")
1390
1387
1391 if not h:
1388 if not h:
1392 ui.status(_('no commands defined\n'))
1389 ui.status(_('no commands defined\n'))
1393 return
1390 return
1394
1391
1395 ui.status(header)
1392 ui.status(header)
1396 fns = util.sort(h)
1393 fns = util.sort(h)
1397 m = max(map(len, fns))
1394 m = max(map(len, fns))
1398 for f in fns:
1395 for f in fns:
1399 if ui.verbose:
1396 if ui.verbose:
1400 commands = cmds[f].replace("|",", ")
1397 commands = cmds[f].replace("|",", ")
1401 ui.write(" %s:\n %s\n"%(commands, h[f]))
1398 ui.write(" %s:\n %s\n"%(commands, h[f]))
1402 else:
1399 else:
1403 ui.write(' %-*s %s\n' % (m, f, h[f]))
1400 ui.write(' %-*s %s\n' % (m, f, h[f]))
1404
1401
1405 exts = list(extensions.extensions())
1402 exts = list(extensions.extensions())
1406 if exts and name != 'shortlist':
1403 if exts and name != 'shortlist':
1407 ui.write(_('\nenabled extensions:\n\n'))
1404 ui.write(_('\nenabled extensions:\n\n'))
1408 maxlength = 0
1405 maxlength = 0
1409 exthelps = []
1406 exthelps = []
1410 for ename, ext in exts:
1407 for ename, ext in exts:
1411 doc = (ext.__doc__ or _('(no help text available)'))
1408 doc = (ext.__doc__ or _('(no help text available)'))
1412 ename = ename.split('.')[-1]
1409 ename = ename.split('.')[-1]
1413 maxlength = max(len(ename), maxlength)
1410 maxlength = max(len(ename), maxlength)
1414 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1411 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1415 for ename, text in exthelps:
1412 for ename, text in exthelps:
1416 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1413 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1417
1414
1418 if not ui.quiet:
1415 if not ui.quiet:
1419 addglobalopts(True)
1416 addglobalopts(True)
1420
1417
1421 def helptopic(name):
1418 def helptopic(name):
1422 for names, header, doc in help.helptable:
1419 for names, header, doc in help.helptable:
1423 if name in names:
1420 if name in names:
1424 break
1421 break
1425 else:
1422 else:
1426 raise error.UnknownCommand(name)
1423 raise error.UnknownCommand(name)
1427
1424
1428 # description
1425 # description
1429 if not doc:
1426 if not doc:
1430 doc = _("(no help text available)")
1427 doc = _("(no help text available)")
1431 if callable(doc):
1428 if callable(doc):
1432 doc = doc()
1429 doc = doc()
1433
1430
1434 ui.write("%s\n" % header)
1431 ui.write("%s\n" % header)
1435 ui.write("%s\n" % doc.rstrip())
1432 ui.write("%s\n" % doc.rstrip())
1436
1433
1437 def helpext(name):
1434 def helpext(name):
1438 try:
1435 try:
1439 mod = extensions.find(name)
1436 mod = extensions.find(name)
1440 except KeyError:
1437 except KeyError:
1441 raise error.UnknownCommand(name)
1438 raise error.UnknownCommand(name)
1442
1439
1443 doc = gettext(mod.__doc__) or _('no help text available')
1440 doc = gettext(mod.__doc__) or _('no help text available')
1444 doc = doc.splitlines(0)
1441 doc = doc.splitlines(0)
1445 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1442 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1446 for d in doc[1:]:
1443 for d in doc[1:]:
1447 ui.write(d, '\n')
1444 ui.write(d, '\n')
1448
1445
1449 ui.status('\n')
1446 ui.status('\n')
1450
1447
1451 try:
1448 try:
1452 ct = mod.cmdtable
1449 ct = mod.cmdtable
1453 except AttributeError:
1450 except AttributeError:
1454 ct = {}
1451 ct = {}
1455
1452
1456 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1453 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1457 helplist(_('list of commands:\n\n'), modcmds.has_key)
1454 helplist(_('list of commands:\n\n'), modcmds.has_key)
1458
1455
1459 if name and name != 'shortlist':
1456 if name and name != 'shortlist':
1460 i = None
1457 i = None
1461 for f in (helptopic, helpcmd, helpext):
1458 for f in (helptopic, helpcmd, helpext):
1462 try:
1459 try:
1463 f(name)
1460 f(name)
1464 i = None
1461 i = None
1465 break
1462 break
1466 except error.UnknownCommand, inst:
1463 except error.UnknownCommand, inst:
1467 i = inst
1464 i = inst
1468 if i:
1465 if i:
1469 raise i
1466 raise i
1470
1467
1471 else:
1468 else:
1472 # program name
1469 # program name
1473 if ui.verbose or with_version:
1470 if ui.verbose or with_version:
1474 version_(ui)
1471 version_(ui)
1475 else:
1472 else:
1476 ui.status(_("Mercurial Distributed SCM\n"))
1473 ui.status(_("Mercurial Distributed SCM\n"))
1477 ui.status('\n')
1474 ui.status('\n')
1478
1475
1479 # list of commands
1476 # list of commands
1480 if name == "shortlist":
1477 if name == "shortlist":
1481 header = _('basic commands:\n\n')
1478 header = _('basic commands:\n\n')
1482 else:
1479 else:
1483 header = _('list of commands:\n\n')
1480 header = _('list of commands:\n\n')
1484
1481
1485 helplist(header)
1482 helplist(header)
1486
1483
1487 # list all option lists
1484 # list all option lists
1488 opt_output = []
1485 opt_output = []
1489 for title, options in option_lists:
1486 for title, options in option_lists:
1490 opt_output.append(("\n%s" % title, None))
1487 opt_output.append(("\n%s" % title, None))
1491 for shortopt, longopt, default, desc in options:
1488 for shortopt, longopt, default, desc in options:
1492 if "DEPRECATED" in desc and not ui.verbose: continue
1489 if "DEPRECATED" in desc and not ui.verbose: continue
1493 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1490 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1494 longopt and " --%s" % longopt),
1491 longopt and " --%s" % longopt),
1495 "%s%s" % (desc,
1492 "%s%s" % (desc,
1496 default
1493 default
1497 and _(" (default: %s)") % default
1494 and _(" (default: %s)") % default
1498 or "")))
1495 or "")))
1499
1496
1500 if not name:
1497 if not name:
1501 ui.write(_("\nadditional help topics:\n\n"))
1498 ui.write(_("\nadditional help topics:\n\n"))
1502 topics = []
1499 topics = []
1503 for names, header, doc in help.helptable:
1500 for names, header, doc in help.helptable:
1504 names = [(-len(name), name) for name in names]
1501 names = [(-len(name), name) for name in names]
1505 names.sort()
1502 names.sort()
1506 topics.append((names[0][1], header))
1503 topics.append((names[0][1], header))
1507 topics_len = max([len(s[0]) for s in topics])
1504 topics_len = max([len(s[0]) for s in topics])
1508 for t, desc in topics:
1505 for t, desc in topics:
1509 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1506 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1510
1507
1511 if opt_output:
1508 if opt_output:
1512 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1509 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1513 for first, second in opt_output:
1510 for first, second in opt_output:
1514 if second:
1511 if second:
1515 ui.write(" %-*s %s\n" % (opts_len, first, second))
1512 ui.write(" %-*s %s\n" % (opts_len, first, second))
1516 else:
1513 else:
1517 ui.write("%s\n" % first)
1514 ui.write("%s\n" % first)
1518
1515
1519 def identify(ui, repo, source=None,
1516 def identify(ui, repo, source=None,
1520 rev=None, num=None, id=None, branch=None, tags=None):
1517 rev=None, num=None, id=None, branch=None, tags=None):
1521 """identify the working copy or specified revision
1518 """identify the working copy or specified revision
1522
1519
1523 With no revision, print a summary of the current state of the repo.
1520 With no revision, print a summary of the current state of the repo.
1524
1521
1525 With a path, do a lookup in another repository.
1522 With a path, do a lookup in another repository.
1526
1523
1527 This summary identifies the repository state using one or two parent
1524 This summary identifies the repository state using one or two parent
1528 hash identifiers, followed by a "+" if there are uncommitted changes
1525 hash identifiers, followed by a "+" if there are uncommitted changes
1529 in the working directory, a list of tags for this revision and a branch
1526 in the working directory, a list of tags for this revision and a branch
1530 name for non-default branches.
1527 name for non-default branches.
1531 """
1528 """
1532
1529
1533 if not repo and not source:
1530 if not repo and not source:
1534 raise util.Abort(_("There is no Mercurial repository here "
1531 raise util.Abort(_("There is no Mercurial repository here "
1535 "(.hg not found)"))
1532 "(.hg not found)"))
1536
1533
1537 hexfunc = ui.debugflag and hex or short
1534 hexfunc = ui.debugflag and hex or short
1538 default = not (num or id or branch or tags)
1535 default = not (num or id or branch or tags)
1539 output = []
1536 output = []
1540
1537
1541 revs = []
1538 revs = []
1542 if source:
1539 if source:
1543 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1540 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1544 repo = hg.repository(ui, source)
1541 repo = hg.repository(ui, source)
1545
1542
1546 if not repo.local():
1543 if not repo.local():
1547 if not rev and revs:
1544 if not rev and revs:
1548 rev = revs[0]
1545 rev = revs[0]
1549 if not rev:
1546 if not rev:
1550 rev = "tip"
1547 rev = "tip"
1551 if num or branch or tags:
1548 if num or branch or tags:
1552 raise util.Abort(
1549 raise util.Abort(
1553 "can't query remote revision number, branch, or tags")
1550 "can't query remote revision number, branch, or tags")
1554 output = [hexfunc(repo.lookup(rev))]
1551 output = [hexfunc(repo.lookup(rev))]
1555 elif not rev:
1552 elif not rev:
1556 ctx = repo[None]
1553 ctx = repo[None]
1557 parents = ctx.parents()
1554 parents = ctx.parents()
1558 changed = False
1555 changed = False
1559 if default or id or num:
1556 if default or id or num:
1560 changed = ctx.files() + ctx.deleted()
1557 changed = ctx.files() + ctx.deleted()
1561 if default or id:
1558 if default or id:
1562 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1559 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1563 (changed) and "+" or "")]
1560 (changed) and "+" or "")]
1564 if num:
1561 if num:
1565 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1562 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1566 (changed) and "+" or ""))
1563 (changed) and "+" or ""))
1567 else:
1564 else:
1568 ctx = repo[rev]
1565 ctx = repo[rev]
1569 if default or id:
1566 if default or id:
1570 output = [hexfunc(ctx.node())]
1567 output = [hexfunc(ctx.node())]
1571 if num:
1568 if num:
1572 output.append(str(ctx.rev()))
1569 output.append(str(ctx.rev()))
1573
1570
1574 if repo.local() and default and not ui.quiet:
1571 if repo.local() and default and not ui.quiet:
1575 b = util.tolocal(ctx.branch())
1572 b = util.tolocal(ctx.branch())
1576 if b != 'default':
1573 if b != 'default':
1577 output.append("(%s)" % b)
1574 output.append("(%s)" % b)
1578
1575
1579 # multiple tags for a single parent separated by '/'
1576 # multiple tags for a single parent separated by '/'
1580 t = "/".join(ctx.tags())
1577 t = "/".join(ctx.tags())
1581 if t:
1578 if t:
1582 output.append(t)
1579 output.append(t)
1583
1580
1584 if branch:
1581 if branch:
1585 output.append(util.tolocal(ctx.branch()))
1582 output.append(util.tolocal(ctx.branch()))
1586
1583
1587 if tags:
1584 if tags:
1588 output.extend(ctx.tags())
1585 output.extend(ctx.tags())
1589
1586
1590 ui.write("%s\n" % ' '.join(output))
1587 ui.write("%s\n" % ' '.join(output))
1591
1588
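# A sketch of typical identify output as described above; the hash, branch and
# tag are made up:
#
#   $ hg identify
#   fa1b2c3d4e5f+ (mybranch) tip
#
# The trailing "+" marks uncommitted changes, the parenthesized branch is shown
# only for non-default branches, and the tags for the revision come last.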
1592 def import_(ui, repo, patch1, *patches, **opts):
1589 def import_(ui, repo, patch1, *patches, **opts):
1593 """import an ordered set of patches
1590 """import an ordered set of patches
1594
1591
1595 Import a list of patches and commit them individually.
1592 Import a list of patches and commit them individually.
1596
1593
1597 If there are outstanding changes in the working directory, import
1594 If there are outstanding changes in the working directory, import
1598 will abort unless given the -f flag.
1595 will abort unless given the -f flag.
1599
1596
1600 You can import a patch straight from a mail message. Even patches
1597 You can import a patch straight from a mail message. Even patches
1601 as attachments work (the body part must be of type text/plain or
1598 as attachments work (the body part must be of type text/plain or
1602 text/x-patch to be used). The From and Subject headers of the email
1599 text/x-patch to be used). The From and Subject headers of the email
1603 message are used as the default committer and commit message. All
1600 message are used as the default committer and commit message. All
1604 text/plain body parts before the first diff are added to the commit
1601 text/plain body parts before the first diff are added to the commit
1605 message.
1602 message.
1606
1603
1607 If the imported patch was generated by hg export, user and description
1604 If the imported patch was generated by hg export, user and description
1608 from patch override values from message headers and body. Values
1605 from patch override values from message headers and body. Values
1609 given on command line with -m and -u override these.
1606 given on command line with -m and -u override these.
1610
1607
1611 If --exact is specified, import will set the working directory
1608 If --exact is specified, import will set the working directory
1612 to the parent of each patch before applying it, and will abort
1609 to the parent of each patch before applying it, and will abort
1613 if the resulting changeset has a different ID than the one
1610 if the resulting changeset has a different ID than the one
1614 recorded in the patch. This may happen due to character set
1611 recorded in the patch. This may happen due to character set
1615 problems or other deficiencies in the text patch format.
1612 problems or other deficiencies in the text patch format.
1616
1613
1617 With --similarity, hg will attempt to discover renames and copies
1614 With --similarity, hg will attempt to discover renames and copies
1618 in the patch in the same way as 'addremove'.
1615 in the patch in the same way as 'addremove'.
1619
1616
1620 To read a patch from standard input, use patch name "-".
1617 To read a patch from standard input, use patch name "-".
1621 See 'hg help dates' for a list of formats valid for -d/--date.
1618 See 'hg help dates' for a list of formats valid for -d/--date.
1622 """
1619 """
1623 patches = (patch1,) + patches
1620 patches = (patch1,) + patches
1624
1621
1625 date = opts.get('date')
1622 date = opts.get('date')
1626 if date:
1623 if date:
1627 opts['date'] = util.parsedate(date)
1624 opts['date'] = util.parsedate(date)
1628
1625
1629 try:
1626 try:
1630 sim = float(opts.get('similarity') or 0)
1627 sim = float(opts.get('similarity') or 0)
1631 except ValueError:
1628 except ValueError:
1632 raise util.Abort(_('similarity must be a number'))
1629 raise util.Abort(_('similarity must be a number'))
1633 if sim < 0 or sim > 100:
1630 if sim < 0 or sim > 100:
1634 raise util.Abort(_('similarity must be between 0 and 100'))
1631 raise util.Abort(_('similarity must be between 0 and 100'))
1635
1632
1636 if opts.get('exact') or not opts.get('force'):
1633 if opts.get('exact') or not opts.get('force'):
1637 cmdutil.bail_if_changed(repo)
1634 cmdutil.bail_if_changed(repo)
1638
1635
1639 d = opts["base"]
1636 d = opts["base"]
1640 strip = opts["strip"]
1637 strip = opts["strip"]
1641 wlock = lock = None
1638 wlock = lock = None
1642 try:
1639 try:
1643 wlock = repo.wlock()
1640 wlock = repo.wlock()
1644 lock = repo.lock()
1641 lock = repo.lock()
1645 for p in patches:
1642 for p in patches:
1646 pf = os.path.join(d, p)
1643 pf = os.path.join(d, p)
1647
1644
1648 if pf == '-':
1645 if pf == '-':
1649 ui.status(_("applying patch from stdin\n"))
1646 ui.status(_("applying patch from stdin\n"))
1650 pf = sys.stdin
1647 pf = sys.stdin
1651 else:
1648 else:
1652 ui.status(_("applying %s\n") % p)
1649 ui.status(_("applying %s\n") % p)
1653 pf = url.open(ui, pf)
1650 pf = url.open(ui, pf)
1654 data = patch.extract(ui, pf)
1651 data = patch.extract(ui, pf)
1655 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1652 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1656
1653
1657 if tmpname is None:
1654 if tmpname is None:
1658 raise util.Abort(_('no diffs found'))
1655 raise util.Abort(_('no diffs found'))
1659
1656
1660 try:
1657 try:
1661 cmdline_message = cmdutil.logmessage(opts)
1658 cmdline_message = cmdutil.logmessage(opts)
1662 if cmdline_message:
1659 if cmdline_message:
1663 # pickup the cmdline msg
1660 # pickup the cmdline msg
1664 message = cmdline_message
1661 message = cmdline_message
1665 elif message:
1662 elif message:
1666 # pickup the patch msg
1663 # pickup the patch msg
1667 message = message.strip()
1664 message = message.strip()
1668 else:
1665 else:
1669 # launch the editor
1666 # launch the editor
1670 message = None
1667 message = None
1671 ui.debug(_('message:\n%s\n') % message)
1668 ui.debug(_('message:\n%s\n') % message)
1672
1669
1673 wp = repo.parents()
1670 wp = repo.parents()
1674 if opts.get('exact'):
1671 if opts.get('exact'):
1675 if not nodeid or not p1:
1672 if not nodeid or not p1:
1676 raise util.Abort(_('not a mercurial patch'))
1673 raise util.Abort(_('not a mercurial patch'))
1677 p1 = repo.lookup(p1)
1674 p1 = repo.lookup(p1)
1678 p2 = repo.lookup(p2 or hex(nullid))
1675 p2 = repo.lookup(p2 or hex(nullid))
1679
1676
1680 if p1 != wp[0].node():
1677 if p1 != wp[0].node():
1681 hg.clean(repo, p1)
1678 hg.clean(repo, p1)
1682 repo.dirstate.setparents(p1, p2)
1679 repo.dirstate.setparents(p1, p2)
1683 elif p2:
1680 elif p2:
1684 try:
1681 try:
1685 p1 = repo.lookup(p1)
1682 p1 = repo.lookup(p1)
1686 p2 = repo.lookup(p2)
1683 p2 = repo.lookup(p2)
1687 if p1 == wp[0].node():
1684 if p1 == wp[0].node():
1688 repo.dirstate.setparents(p1, p2)
1685 repo.dirstate.setparents(p1, p2)
1689 except error.RepoError:
1686 except error.RepoError:
1690 pass
1687 pass
1691 if opts.get('exact') or opts.get('import_branch'):
1688 if opts.get('exact') or opts.get('import_branch'):
1692 repo.dirstate.setbranch(branch or 'default')
1689 repo.dirstate.setbranch(branch or 'default')
1693
1690
1694 files = {}
1691 files = {}
1695 try:
1692 try:
1696 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1693 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1697 files=files)
1694 files=files)
1698 finally:
1695 finally:
1699 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1696 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1700 if not opts.get('no_commit'):
1697 if not opts.get('no_commit'):
1701 n = repo.commit(files, message, opts.get('user') or user,
1698 n = repo.commit(files, message, opts.get('user') or user,
1702 opts.get('date') or date)
1699 opts.get('date') or date)
1703 if opts.get('exact'):
1700 if opts.get('exact'):
1704 if hex(n) != nodeid:
1701 if hex(n) != nodeid:
1705 repo.rollback()
1702 repo.rollback()
1706 raise util.Abort(_('patch is damaged'
1703 raise util.Abort(_('patch is damaged'
1707 ' or loses information'))
1704 ' or loses information'))
1708 # Force a dirstate write so that the next transaction
1705 # Force a dirstate write so that the next transaction
1709 # backs up an up-to-date file.
1706 # backs up an up-to-date file.
1710 repo.dirstate.write()
1707 repo.dirstate.write()
1711 finally:
1708 finally:
1712 os.unlink(tmpname)
1709 os.unlink(tmpname)
1713 finally:
1710 finally:
1714 del lock, wlock
1711 del lock, wlock
1715
1712
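# Illustrative invocations of import as documented above; the file names are
# hypothetical:
#
#   $ hg import fix-typo.patch             # apply and commit a single patch
#   $ hg import --exact ../exported.patch  # reset to the recorded parent and
#                                          # abort if the changeset ID differs
#   $ hg import --no-commit -              # apply a patch from stdin without
#                                          # committing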
1716 def incoming(ui, repo, source="default", **opts):
1713 def incoming(ui, repo, source="default", **opts):
1717 """show new changesets found in source
1714 """show new changesets found in source
1718
1715
1719 Show new changesets found in the specified path/URL or the default
1716 Show new changesets found in the specified path/URL or the default
1720 pull location. These are the changesets that would be pulled if a pull
1717 pull location. These are the changesets that would be pulled if a pull
1721 were requested.
1718 were requested.
1722
1719
1723 For a remote repository, using --bundle avoids downloading the changesets
1720 For a remote repository, using --bundle avoids downloading the changesets
1724 twice if the incoming command is followed by a pull.
1721 twice if the incoming command is followed by a pull.
1725
1722
1726 See pull for valid source format details.
1723 See pull for valid source format details.
1727 """
1724 """
1728 limit = cmdutil.loglimit(opts)
1725 limit = cmdutil.loglimit(opts)
1729 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1726 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1730 cmdutil.setremoteconfig(ui, opts)
1727 cmdutil.setremoteconfig(ui, opts)
1731
1728
1732 other = hg.repository(ui, source)
1729 other = hg.repository(ui, source)
1733 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1730 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1734 if revs:
1731 if revs:
1735 revs = [other.lookup(rev) for rev in revs]
1732 revs = [other.lookup(rev) for rev in revs]
1736 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1733 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1737 force=opts["force"])
1734 force=opts["force"])
1738 if not incoming:
1735 if not incoming:
1739 try:
1736 try:
1740 os.unlink(opts["bundle"])
1737 os.unlink(opts["bundle"])
1741 except:
1738 except:
1742 pass
1739 pass
1743 ui.status(_("no changes found\n"))
1740 ui.status(_("no changes found\n"))
1744 return 1
1741 return 1
1745
1742
1746 cleanup = None
1743 cleanup = None
1747 try:
1744 try:
1748 fname = opts["bundle"]
1745 fname = opts["bundle"]
1749 if fname or not other.local():
1746 if fname or not other.local():
1750 # create a bundle (uncompressed if other repo is not local)
1747 # create a bundle (uncompressed if other repo is not local)
1751
1748
1752 if revs is None and other.capable('changegroupsubset'):
1749 if revs is None and other.capable('changegroupsubset'):
1753 revs = rheads
1750 revs = rheads
1754
1751
1755 if revs is None:
1752 if revs is None:
1756 cg = other.changegroup(incoming, "incoming")
1753 cg = other.changegroup(incoming, "incoming")
1757 else:
1754 else:
1758 cg = other.changegroupsubset(incoming, revs, 'incoming')
1755 cg = other.changegroupsubset(incoming, revs, 'incoming')
1759 bundletype = other.local() and "HG10BZ" or "HG10UN"
1756 bundletype = other.local() and "HG10BZ" or "HG10UN"
1760 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1757 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1761 # keep written bundle?
1758 # keep written bundle?
1762 if opts["bundle"]:
1759 if opts["bundle"]:
1763 cleanup = None
1760 cleanup = None
1764 if not other.local():
1761 if not other.local():
1765 # use the created uncompressed bundlerepo
1762 # use the created uncompressed bundlerepo
1766 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1763 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1767
1764
1768 o = other.changelog.nodesbetween(incoming, revs)[0]
1765 o = other.changelog.nodesbetween(incoming, revs)[0]
1769 if opts.get('newest_first'):
1766 if opts.get('newest_first'):
1770 o.reverse()
1767 o.reverse()
1771 displayer = cmdutil.show_changeset(ui, other, opts)
1768 displayer = cmdutil.show_changeset(ui, other, opts)
1772 count = 0
1769 count = 0
1773 for n in o:
1770 for n in o:
1774 if count >= limit:
1771 if count >= limit:
1775 break
1772 break
1776 parents = [p for p in other.changelog.parents(n) if p != nullid]
1773 parents = [p for p in other.changelog.parents(n) if p != nullid]
1777 if opts.get('no_merges') and len(parents) == 2:
1774 if opts.get('no_merges') and len(parents) == 2:
1778 continue
1775 continue
1779 count += 1
1776 count += 1
1780 displayer.show(other[n])
1777 displayer.show(other[n])
1781 finally:
1778 finally:
1782 if hasattr(other, 'close'):
1779 if hasattr(other, 'close'):
1783 other.close()
1780 other.close()
1784 if cleanup:
1781 if cleanup:
1785 os.unlink(cleanup)
1782 os.unlink(cleanup)
1786
1783
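In outline, the non-local case above writes the incoming changesets to a bundle and reopens it through bundlerepo, so they can be displayed without a second transfer. A trimmed sketch assuming the same module-level names (changegroup, bundlerepo); the helper name is hypothetical.

def bundle_incoming(ui, repo, other, incoming, revs, fname):
    # fetch the incoming changegroup and write it to a bundle file
    if revs is None:
        cg = other.changegroup(incoming, "incoming")
    else:
        cg = other.changegroupsubset(incoming, revs, 'incoming')
    bundletype = other.local() and "HG10BZ" or "HG10UN"
    fname = changegroup.writebundle(cg, fname, bundletype)
    if not other.local():
        # read the changesets back from the bundle instead of the network
        other = bundlerepo.bundlerepository(ui, repo.root, fname)
    return other, fname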
1787 def init(ui, dest=".", **opts):
1784 def init(ui, dest=".", **opts):
1788 """create a new repository in the given directory
1785 """create a new repository in the given directory
1789
1786
1790 Initialize a new repository in the given directory. If the given
1787 Initialize a new repository in the given directory. If the given
1791 directory does not exist, it is created.
1788 directory does not exist, it is created.
1792
1789
1793 If no directory is given, the current directory is used.
1790 If no directory is given, the current directory is used.
1794
1791
1795 It is possible to specify an ssh:// URL as the destination.
1792 It is possible to specify an ssh:// URL as the destination.
1796 See 'hg help urls' for more information.
1793 See 'hg help urls' for more information.
1797 """
1794 """
1798 cmdutil.setremoteconfig(ui, opts)
1795 cmdutil.setremoteconfig(ui, opts)
1799 hg.repository(ui, dest, create=1)
1796 hg.repository(ui, dest, create=1)
1800
1797
1801 def locate(ui, repo, *pats, **opts):
1798 def locate(ui, repo, *pats, **opts):
1802 """locate files matching specific patterns
1799 """locate files matching specific patterns
1803
1800
1804 Print all files under Mercurial control whose names match the
1801 Print all files under Mercurial control whose names match the
1805 given patterns.
1802 given patterns.
1806
1803
1807 This command searches the entire repository by default. To search
1804 This command searches the entire repository by default. To search
1808 just the current directory and its subdirectories, use
1805 just the current directory and its subdirectories, use
1809 "--include .".
1806 "--include .".
1810
1807
1811 If no patterns are given to match, this command prints all file
1808 If no patterns are given to match, this command prints all file
1812 names.
1809 names.
1813
1810
1814 If you want to feed the output of this command into the "xargs"
1811 If you want to feed the output of this command into the "xargs"
1815 command, use the "-0" option to both this command and "xargs".
1812 command, use the "-0" option to both this command and "xargs".
1816 This will avoid the problem of "xargs" treating single filenames
1813 This will avoid the problem of "xargs" treating single filenames
1817 that contain white space as multiple filenames.
1814 that contain white space as multiple filenames.
1818 """
1815 """
1819 end = opts.get('print0') and '\0' or '\n'
1816 end = opts.get('print0') and '\0' or '\n'
1820 rev = opts.get('rev') or None
1817 rev = opts.get('rev') or None
1821
1818
1822 ret = 1
1819 ret = 1
1823 m = cmdutil.match(repo, pats, opts, default='relglob')
1820 m = cmdutil.match(repo, pats, opts, default='relglob')
1824 m.bad = lambda x,y: False
1821 m.bad = lambda x,y: False
1825 for abs in repo[rev].walk(m):
1822 for abs in repo[rev].walk(m):
1826 if not rev and abs not in repo.dirstate:
1823 if not rev and abs not in repo.dirstate:
1827 continue
1824 continue
1828 if opts.get('fullpath'):
1825 if opts.get('fullpath'):
1829 ui.write(repo.wjoin(abs), end)
1826 ui.write(repo.wjoin(abs), end)
1830 else:
1827 else:
1831 ui.write(((pats and m.rel(abs)) or abs), end)
1828 ui.write(((pats and m.rel(abs)) or abs), end)
1832 ret = 0
1829 ret = 0
1833
1830
1834 return ret
1831 return ret
1835
1832
1836 def log(ui, repo, *pats, **opts):
1833 def log(ui, repo, *pats, **opts):
1837 """show revision history of entire repository or files
1834 """show revision history of entire repository or files
1838
1835
1839 Print the revision history of the specified files or the entire
1836 Print the revision history of the specified files or the entire
1840 project.
1837 project.
1841
1838
1842 File history is shown without following rename or copy history of
1839 File history is shown without following rename or copy history of
1843 files. Use -f/--follow with a file name to follow history across
1840 files. Use -f/--follow with a file name to follow history across
1844 renames and copies. --follow without a file name will only show
1841 renames and copies. --follow without a file name will only show
1845 ancestors or descendants of the starting revision. --follow-first
1842 ancestors or descendants of the starting revision. --follow-first
1846 only follows the first parent of merge revisions.
1843 only follows the first parent of merge revisions.
1847
1844
1848 If no revision range is specified, the default is tip:0 unless
1845 If no revision range is specified, the default is tip:0 unless
1849 --follow is set, in which case the working directory parent is
1846 --follow is set, in which case the working directory parent is
1850 used as the starting revision.
1847 used as the starting revision.
1851
1848
1852 See 'hg help dates' for a list of formats valid for -d/--date.
1849 See 'hg help dates' for a list of formats valid for -d/--date.
1853
1850
1854 By default this command outputs: changeset id and hash, tags,
1851 By default this command outputs: changeset id and hash, tags,
1855 non-trivial parents, user, date and time, and a summary for each
1852 non-trivial parents, user, date and time, and a summary for each
1856 commit. When the -v/--verbose switch is used, the list of changed
1853 commit. When the -v/--verbose switch is used, the list of changed
1857 files and full commit message is shown.
1854 files and full commit message is shown.
1858
1855
1859 NOTE: log -p may generate unexpected diff output for merge
1856 NOTE: log -p may generate unexpected diff output for merge
1860 changesets, as it will only compare the merge changeset against
1857 changesets, as it will only compare the merge changeset against
1861 its first parent. Also, the files: list will only reflect files
1858 its first parent. Also, the files: list will only reflect files
1862 that are different from BOTH parents.
1859 that are different from BOTH parents.
1863
1860
1864 """
1861 """
1865
1862
1866 get = util.cachefunc(lambda r: repo[r].changeset())
1863 get = util.cachefunc(lambda r: repo[r].changeset())
1867 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1864 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1868
1865
1869 limit = cmdutil.loglimit(opts)
1866 limit = cmdutil.loglimit(opts)
1870 count = 0
1867 count = 0
1871
1868
1872 if opts.get('copies') and opts.get('rev'):
1869 if opts.get('copies') and opts.get('rev'):
1873 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1870 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1874 else:
1871 else:
1875 endrev = len(repo)
1872 endrev = len(repo)
1876 rcache = {}
1873 rcache = {}
1877 ncache = {}
1874 ncache = {}
1878 def getrenamed(fn, rev):
1875 def getrenamed(fn, rev):
1879 '''looks up all renames for a file (up to endrev) the first
1876 '''looks up all renames for a file (up to endrev) the first
1880 time the file is given. It indexes on the changerev and only
1877 time the file is given. It indexes on the changerev and only
1881 parses the manifest if linkrev != changerev.
1878 parses the manifest if linkrev != changerev.
1882 Returns rename info for fn at changerev rev.'''
1879 Returns rename info for fn at changerev rev.'''
1883 if fn not in rcache:
1880 if fn not in rcache:
1884 rcache[fn] = {}
1881 rcache[fn] = {}
1885 ncache[fn] = {}
1882 ncache[fn] = {}
1886 fl = repo.file(fn)
1883 fl = repo.file(fn)
1887 for i in fl:
1884 for i in fl:
1888 node = fl.node(i)
1885 node = fl.node(i)
1889 lr = fl.linkrev(i)
1886 lr = fl.linkrev(i)
1890 renamed = fl.renamed(node)
1887 renamed = fl.renamed(node)
1891 rcache[fn][lr] = renamed
1888 rcache[fn][lr] = renamed
1892 if renamed:
1889 if renamed:
1893 ncache[fn][node] = renamed
1890 ncache[fn][node] = renamed
1894 if lr >= endrev:
1891 if lr >= endrev:
1895 break
1892 break
1896 if rev in rcache[fn]:
1893 if rev in rcache[fn]:
1897 return rcache[fn][rev]
1894 return rcache[fn][rev]
1898
1895
1899 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1896 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1900 # filectx logic.
1897 # filectx logic.
1901
1898
1902 try:
1899 try:
1903 return repo[rev][fn].renamed()
1900 return repo[rev][fn].renamed()
1904 except error.LookupError:
1901 except error.LookupError:
1905 pass
1902 pass
1906 return None
1903 return None
1907
1904
1908 df = False
1905 df = False
1909 if opts["date"]:
1906 if opts["date"]:
1910 df = util.matchdate(opts["date"])
1907 df = util.matchdate(opts["date"])
1911
1908
1912 only_branches = opts.get('only_branch')
1909 only_branches = opts.get('only_branch')
1913
1910
1914 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1911 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1915 for st, rev, fns in changeiter:
1912 for st, rev, fns in changeiter:
1916 if st == 'add':
1913 if st == 'add':
1917 parents = [p for p in repo.changelog.parentrevs(rev)
1914 parents = [p for p in repo.changelog.parentrevs(rev)
1918 if p != nullrev]
1915 if p != nullrev]
1919 if opts.get('no_merges') and len(parents) == 2:
1916 if opts.get('no_merges') and len(parents) == 2:
1920 continue
1917 continue
1921 if opts.get('only_merges') and len(parents) != 2:
1918 if opts.get('only_merges') and len(parents) != 2:
1922 continue
1919 continue
1923
1920
1924 if only_branches:
1921 if only_branches:
1925 revbranch = get(rev)[5]['branch']
1922 revbranch = get(rev)[5]['branch']
1926 if revbranch not in only_branches:
1923 if revbranch not in only_branches:
1927 continue
1924 continue
1928
1925
1929 if df:
1926 if df:
1930 changes = get(rev)
1927 changes = get(rev)
1931 if not df(changes[2][0]):
1928 if not df(changes[2][0]):
1932 continue
1929 continue
1933
1930
1934 if opts.get('keyword'):
1931 if opts.get('keyword'):
1935 changes = get(rev)
1932 changes = get(rev)
1936 miss = 0
1933 miss = 0
1937 for k in [kw.lower() for kw in opts['keyword']]:
1934 for k in [kw.lower() for kw in opts['keyword']]:
1938 if not (k in changes[1].lower() or
1935 if not (k in changes[1].lower() or
1939 k in changes[4].lower() or
1936 k in changes[4].lower() or
1940 k in " ".join(changes[3]).lower()):
1937 k in " ".join(changes[3]).lower()):
1941 miss = 1
1938 miss = 1
1942 break
1939 break
1943 if miss:
1940 if miss:
1944 continue
1941 continue
1945
1942
1946 if opts['user']:
1943 if opts['user']:
1947 changes = get(rev)
1944 changes = get(rev)
1948 miss = 0
1945 miss = 0
1949 for k in opts['user']:
1946 for k in opts['user']:
1950 if k != changes[1]:
1947 if k != changes[1]:
1951 miss = 1
1948 miss = 1
1952 break
1949 break
1953 if miss:
1950 if miss:
1954 continue
1951 continue
1955
1952
1956 copies = []
1953 copies = []
1957 if opts.get('copies') and rev:
1954 if opts.get('copies') and rev:
1958 for fn in get(rev)[3]:
1955 for fn in get(rev)[3]:
1959 rename = getrenamed(fn, rev)
1956 rename = getrenamed(fn, rev)
1960 if rename:
1957 if rename:
1961 copies.append((fn, rename[0]))
1958 copies.append((fn, rename[0]))
1962 displayer.show(context.changectx(repo, rev), copies=copies)
1959 displayer.show(context.changectx(repo, rev), copies=copies)
1963 elif st == 'iter':
1960 elif st == 'iter':
1964 if count == limit: break
1961 if count == limit: break
1965 if displayer.flush(rev):
1962 if displayer.flush(rev):
1966 count += 1
1963 count += 1
1967
1964
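The getrenamed closure above amortizes rename lookups by filling a per-file cache keyed by linkrev on first use, falling back to the filectx path when the requested revision is not a linkrev. A stripped-down sketch of that pattern (the endrev early exit is omitted; the factory name is hypothetical):

def make_renamed_lookup(repo):
    rcache = {}                          # fn -> {linkrev: rename info}
    def renamed_at(fn, rev):
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                rcache[fn][fl.linkrev(i)] = fl.renamed(fl.node(i))
        if rev in rcache[fn]:
            return rcache[fn][rev]
        try:
            # rev is not a linkrev for fn: fall back to the filectx path
            return repo[rev][fn].renamed()
        except error.LookupError:
            return None
    return renamed_at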
1968 def manifest(ui, repo, node=None, rev=None):
1965 def manifest(ui, repo, node=None, rev=None):
1969 """output the current or given revision of the project manifest
1966 """output the current or given revision of the project manifest
1970
1967
1971 Print a list of version controlled files for the given revision.
1968 Print a list of version controlled files for the given revision.
1972 If no revision is given, the parent of the working directory is used,
1969 If no revision is given, the parent of the working directory is used,
1973 or tip if no revision is checked out.
1970 or tip if no revision is checked out.
1974
1971
1975 The manifest is the list of files being version controlled. If no revision
1972 The manifest is the list of files being version controlled. If no revision
1976 is given then the first parent of the working directory is used.
1973 is given then the first parent of the working directory is used.
1977
1974
1978 With the -v flag, print file permissions, symlink and executable bits. With
1975 With the -v flag, print file permissions, symlink and executable bits. With
1979 the --debug flag, print file revision hashes.
1976 the --debug flag, print file revision hashes.
1980 """
1977 """
1981
1978
1982 if rev and node:
1979 if rev and node:
1983 raise util.Abort(_("please specify just one revision"))
1980 raise util.Abort(_("please specify just one revision"))
1984
1981
1985 if not node:
1982 if not node:
1986 node = rev
1983 node = rev
1987
1984
1988 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1985 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
1989 ctx = repo[node]
1986 ctx = repo[node]
1990 for f in ctx:
1987 for f in ctx:
1991 if ui.debugflag:
1988 if ui.debugflag:
1992 ui.write("%40s " % hex(ctx.manifest()[f]))
1989 ui.write("%40s " % hex(ctx.manifest()[f]))
1993 if ui.verbose:
1990 if ui.verbose:
1994 ui.write(decor[ctx.flags(f)])
1991 ui.write(decor[ctx.flags(f)])
1995 ui.write("%s\n" % f)
1992 ui.write("%s\n" % f)
1996
1993
1997 def merge(ui, repo, node=None, force=None, rev=None):
1994 def merge(ui, repo, node=None, force=None, rev=None):
1998 """merge working directory with another revision
1995 """merge working directory with another revision
1999
1996
2000 Merge the contents of the current working directory and the
1997 Merge the contents of the current working directory and the
2001 requested revision. Files that changed between either parent are
1998 requested revision. Files that changed between either parent are
2002 marked as changed for the next commit and a commit must be
1999 marked as changed for the next commit and a commit must be
2003 performed before any further updates are allowed.
2000 performed before any further updates are allowed.
2004
2001
2005 If no revision is specified, the working directory's parent is a
2002 If no revision is specified, the working directory's parent is a
2006 head revision, and the current branch contains exactly one other head,
2003 head revision, and the current branch contains exactly one other head,
2007 then that other head is merged with by default. Otherwise, an explicit
2004 then that other head is merged with by default. Otherwise, an explicit
2008 revision with which to merge must be provided.
2005 revision with which to merge must be provided.
2009 """
2006 """
2010
2007
2011 if rev and node:
2008 if rev and node:
2012 raise util.Abort(_("please specify just one revision"))
2009 raise util.Abort(_("please specify just one revision"))
2013 if not node:
2010 if not node:
2014 node = rev
2011 node = rev
2015
2012
2016 if not node:
2013 if not node:
2017 branch = repo.changectx(None).branch()
2014 branch = repo.changectx(None).branch()
2018 bheads = repo.branchheads(branch)
2015 bheads = repo.branchheads(branch)
2019 if len(bheads) > 2:
2016 if len(bheads) > 2:
2020 raise util.Abort(_("branch '%s' has %d heads - "
2017 raise util.Abort(_("branch '%s' has %d heads - "
2021 "please merge with an explicit rev") %
2018 "please merge with an explicit rev") %
2022 (branch, len(bheads)))
2019 (branch, len(bheads)))
2023
2020
2024 parent = repo.dirstate.parents()[0]
2021 parent = repo.dirstate.parents()[0]
2025 if len(bheads) == 1:
2022 if len(bheads) == 1:
2026 if len(repo.heads()) > 1:
2023 if len(repo.heads()) > 1:
2027 raise util.Abort(_("branch '%s' has one head - "
2024 raise util.Abort(_("branch '%s' has one head - "
2028 "please merge with an explicit rev") %
2025 "please merge with an explicit rev") %
2029 branch)
2026 branch)
2030 msg = _('there is nothing to merge')
2027 msg = _('there is nothing to merge')
2031 if parent != repo.lookup(repo[None].branch()):
2028 if parent != repo.lookup(repo[None].branch()):
2032 msg = _('%s - use "hg update" instead') % msg
2029 msg = _('%s - use "hg update" instead') % msg
2033 raise util.Abort(msg)
2030 raise util.Abort(msg)
2034
2031
2035 if parent not in bheads:
2032 if parent not in bheads:
2036 raise util.Abort(_('working dir not at a head rev - '
2033 raise util.Abort(_('working dir not at a head rev - '
2037 'use "hg update" or merge with an explicit rev'))
2034 'use "hg update" or merge with an explicit rev'))
2038 node = parent == bheads[0] and bheads[-1] or bheads[0]
2035 node = parent == bheads[0] and bheads[-1] or bheads[0]
2039 return hg.merge(repo, node, force=force)
2036 return hg.merge(repo, node, force=force)
2040
2037
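With the checks above passed, the merge target is simply whichever of the two branch heads is not the working directory's parent. A one-line restatement (hypothetical helper name):

def other_branch_head(parent, bheads):
    # bheads holds exactly the two heads of the current branch at this point
    return bheads[-1] if parent == bheads[0] else bheads[0]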
2041 def outgoing(ui, repo, dest=None, **opts):
2038 def outgoing(ui, repo, dest=None, **opts):
2042 """show changesets not found in destination
2039 """show changesets not found in destination
2043
2040
2044 Show changesets not found in the specified destination repository or
2041 Show changesets not found in the specified destination repository or
2045 the default push location. These are the changesets that would be pushed
2042 the default push location. These are the changesets that would be pushed
2046 if a push were requested.
2043 if a push were requested.
2047
2044
2048 See pull for valid destination format details.
2045 See pull for valid destination format details.
2049 """
2046 """
2050 limit = cmdutil.loglimit(opts)
2047 limit = cmdutil.loglimit(opts)
2051 dest, revs, checkout = hg.parseurl(
2048 dest, revs, checkout = hg.parseurl(
2052 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2049 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2053 cmdutil.setremoteconfig(ui, opts)
2050 cmdutil.setremoteconfig(ui, opts)
2054 if revs:
2051 if revs:
2055 revs = [repo.lookup(rev) for rev in revs]
2052 revs = [repo.lookup(rev) for rev in revs]
2056
2053
2057 other = hg.repository(ui, dest)
2054 other = hg.repository(ui, dest)
2058 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2055 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2059 o = repo.findoutgoing(other, force=opts.get('force'))
2056 o = repo.findoutgoing(other, force=opts.get('force'))
2060 if not o:
2057 if not o:
2061 ui.status(_("no changes found\n"))
2058 ui.status(_("no changes found\n"))
2062 return 1
2059 return 1
2063 o = repo.changelog.nodesbetween(o, revs)[0]
2060 o = repo.changelog.nodesbetween(o, revs)[0]
2064 if opts.get('newest_first'):
2061 if opts.get('newest_first'):
2065 o.reverse()
2062 o.reverse()
2066 displayer = cmdutil.show_changeset(ui, repo, opts)
2063 displayer = cmdutil.show_changeset(ui, repo, opts)
2067 count = 0
2064 count = 0
2068 for n in o:
2065 for n in o:
2069 if count >= limit:
2066 if count >= limit:
2070 break
2067 break
2071 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2068 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2072 if opts.get('no_merges') and len(parents) == 2:
2069 if opts.get('no_merges') and len(parents) == 2:
2073 continue
2070 continue
2074 count += 1
2071 count += 1
2075 displayer.show(repo[n])
2072 displayer.show(repo[n])
2076
2073
2077 def parents(ui, repo, file_=None, **opts):
2074 def parents(ui, repo, file_=None, **opts):
2078 """show the parents of the working dir or revision
2075 """show the parents of the working dir or revision
2079
2076
2080 Print the working directory's parent revisions. If a
2077 Print the working directory's parent revisions. If a
2081 revision is given via --rev, the parent of that revision
2078 revision is given via --rev, the parent of that revision
2082 will be printed. If a file argument is given, the revision in
2079 will be printed. If a file argument is given, the revision in
2083 which the file was last changed (before the working directory
2080 which the file was last changed (before the working directory
2084 revision or the argument to --rev if given) is printed.
2081 revision or the argument to --rev if given) is printed.
2085 """
2082 """
2086 rev = opts.get('rev')
2083 rev = opts.get('rev')
2087 if rev:
2084 if rev:
2088 ctx = repo[rev]
2085 ctx = repo[rev]
2089 else:
2086 else:
2090 ctx = repo[None]
2087 ctx = repo[None]
2091
2088
2092 if file_:
2089 if file_:
2093 m = cmdutil.match(repo, (file_,), opts)
2090 m = cmdutil.match(repo, (file_,), opts)
2094 if m.anypats() or len(m.files()) != 1:
2091 if m.anypats() or len(m.files()) != 1:
2095 raise util.Abort(_('can only specify an explicit file name'))
2092 raise util.Abort(_('can only specify an explicit file name'))
2096 file_ = m.files()[0]
2093 file_ = m.files()[0]
2097 filenodes = []
2094 filenodes = []
2098 for cp in ctx.parents():
2095 for cp in ctx.parents():
2099 if not cp:
2096 if not cp:
2100 continue
2097 continue
2101 try:
2098 try:
2102 filenodes.append(cp.filenode(file_))
2099 filenodes.append(cp.filenode(file_))
2103 except error.LookupError:
2100 except error.LookupError:
2104 pass
2101 pass
2105 if not filenodes:
2102 if not filenodes:
2106 raise util.Abort(_("'%s' not found in manifest!") % file_)
2103 raise util.Abort(_("'%s' not found in manifest!") % file_)
2107 fl = repo.file(file_)
2104 fl = repo.file(file_)
2108 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2105 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2109 else:
2106 else:
2110 p = [cp.node() for cp in ctx.parents()]
2107 p = [cp.node() for cp in ctx.parents()]
2111
2108
2112 displayer = cmdutil.show_changeset(ui, repo, opts)
2109 displayer = cmdutil.show_changeset(ui, repo, opts)
2113 for n in p:
2110 for n in p:
2114 if n != nullid:
2111 if n != nullid:
2115 displayer.show(repo[n])
2112 displayer.show(repo[n])
2116
2113
2117 def paths(ui, repo, search=None):
2114 def paths(ui, repo, search=None):
2118 """show aliases for remote repositories
2115 """show aliases for remote repositories
2119
2116
2120 Show definition of symbolic path name NAME. If no name is given, show
2117 Show definition of symbolic path name NAME. If no name is given, show
2121 the definitions of all available names.
2118 the definitions of all available names.
2122
2119
2123 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2120 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2124 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2121 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2125
2122
2126 See 'hg help urls' for more information.
2123 See 'hg help urls' for more information.
2127 """
2124 """
2128 if search:
2125 if search:
2129 for name, path in ui.configitems("paths"):
2126 for name, path in ui.configitems("paths"):
2130 if name == search:
2127 if name == search:
2131 ui.write("%s\n" % url.hidepassword(path))
2128 ui.write("%s\n" % url.hidepassword(path))
2132 return
2129 return
2133 ui.warn(_("not found!\n"))
2130 ui.warn(_("not found!\n"))
2134 return 1
2131 return 1
2135 else:
2132 else:
2136 for name, path in ui.configitems("paths"):
2133 for name, path in ui.configitems("paths"):
2137 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2134 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2138
2135
2139 def postincoming(ui, repo, modheads, optupdate, checkout):
2136 def postincoming(ui, repo, modheads, optupdate, checkout):
2140 if modheads == 0:
2137 if modheads == 0:
2141 return
2138 return
2142 if optupdate:
2139 if optupdate:
2143 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2140 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2144 return hg.update(repo, checkout)
2141 return hg.update(repo, checkout)
2145 else:
2142 else:
2146 ui.status(_("not updating, since new heads added\n"))
2143 ui.status(_("not updating, since new heads added\n"))
2147 if modheads > 1:
2144 if modheads > 1:
2148 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2145 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2149 else:
2146 else:
2150 ui.status(_("(run 'hg update' to get a working copy)\n"))
2147 ui.status(_("(run 'hg update' to get a working copy)\n"))
2151
2148
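The update decision above reduces to: update the working directory only when the pull changed at most one head, the branch still has a single head, or an explicit checkout target was supplied. A condensed, hypothetical predicate:

def should_update(modheads, branchhead_count, checkout):
    # mirror of the condition used in postincoming above
    return modheads <= 1 or branchhead_count == 1 or bool(checkout)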
2152 def pull(ui, repo, source="default", **opts):
2149 def pull(ui, repo, source="default", **opts):
2153 """pull changes from the specified source
2150 """pull changes from the specified source
2154
2151
2155 Pull changes from a remote repository to a local one.
2152 Pull changes from a remote repository to a local one.
2156
2153
2157 This finds all changes from the repository at the specified path
2154 This finds all changes from the repository at the specified path
2158 or URL and adds them to the local repository. By default, this
2155 or URL and adds them to the local repository. By default, this
2159 does not update the copy of the project in the working directory.
2156 does not update the copy of the project in the working directory.
2160
2157
2161 If SOURCE is omitted, the 'default' path will be used.
2158 If SOURCE is omitted, the 'default' path will be used.
2162 See 'hg help urls' for more information.
2159 See 'hg help urls' for more information.
2163 """
2160 """
2164 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2161 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2165 cmdutil.setremoteconfig(ui, opts)
2162 cmdutil.setremoteconfig(ui, opts)
2166
2163
2167 other = hg.repository(ui, source)
2164 other = hg.repository(ui, source)
2168 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2165 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2169 if revs:
2166 if revs:
2170 try:
2167 try:
2171 revs = [other.lookup(rev) for rev in revs]
2168 revs = [other.lookup(rev) for rev in revs]
2172 except error.CapabilityError:
2169 except error.CapabilityError:
2173 err = _("Other repository doesn't support revision lookup, "
2170 err = _("Other repository doesn't support revision lookup, "
2174 "so a rev cannot be specified.")
2171 "so a rev cannot be specified.")
2175 raise util.Abort(err)
2172 raise util.Abort(err)
2176
2173
2177 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2174 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2178 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2175 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2179
2176
2180 def push(ui, repo, dest=None, **opts):
2177 def push(ui, repo, dest=None, **opts):
2181 """push changes to the specified destination
2178 """push changes to the specified destination
2182
2179
2183 Push changes from the local repository to the given destination.
2180 Push changes from the local repository to the given destination.
2184
2181
2185 This is the symmetrical operation for pull. It helps to move
2182 This is the symmetrical operation for pull. It helps to move
2186 changes from the current repository to a different one. If the
2183 changes from the current repository to a different one. If the
2187 destination is local, this is identical to a pull in that directory
2184 destination is local, this is identical to a pull in that directory
2188 from the current one.
2185 from the current one.
2189
2186
2190 By default, push will refuse to run if it detects the result would
2187 By default, push will refuse to run if it detects the result would
2191 increase the number of remote heads. This generally indicates that
2188 increase the number of remote heads. This generally indicates that
2192 the client has forgotten to pull and merge before pushing.
2189 the client has forgotten to pull and merge before pushing.
2193
2190
2194 If -r is used, the named changeset and all its ancestors will be pushed
2191 If -r is used, the named changeset and all its ancestors will be pushed
2195 to the remote repository.
2192 to the remote repository.
2196
2193
2197 Look at the help text for urls for important details about ssh:// URLs.
2194 Look at the help text for urls for important details about ssh:// URLs.
2198 If DESTINATION is omitted, a default path will be used.
2195 If DESTINATION is omitted, a default path will be used.
2199 See 'hg help urls' for more information.
2196 See 'hg help urls' for more information.
2200 """
2197 """
2201 dest, revs, checkout = hg.parseurl(
2198 dest, revs, checkout = hg.parseurl(
2202 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2199 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2203 cmdutil.setremoteconfig(ui, opts)
2200 cmdutil.setremoteconfig(ui, opts)
2204
2201
2205 other = hg.repository(ui, dest)
2202 other = hg.repository(ui, dest)
2206 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2203 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2207 if revs:
2204 if revs:
2208 revs = [repo.lookup(rev) for rev in revs]
2205 revs = [repo.lookup(rev) for rev in revs]
2209 r = repo.push(other, opts.get('force'), revs=revs)
2206 r = repo.push(other, opts.get('force'), revs=revs)
2210 return r == 0
2207 return r == 0
2211
2208
2212 def rawcommit(ui, repo, *pats, **opts):
2209 def rawcommit(ui, repo, *pats, **opts):
2213 """raw commit interface (DEPRECATED)
2210 """raw commit interface (DEPRECATED)
2214
2211
2215 (DEPRECATED)
2212 (DEPRECATED)
2216 Low-level commit, for use in helper scripts.
2213 Low-level commit, for use in helper scripts.
2217
2214
2218 This command is not intended to be used by normal users, as it is
2215 This command is not intended to be used by normal users, as it is
2219 primarily useful for importing from other SCMs.
2216 primarily useful for importing from other SCMs.
2220
2217
2221 This command is now deprecated and will be removed in a future
2218 This command is now deprecated and will be removed in a future
2222 release; please use debugsetparents and commit instead.
2219 release; please use debugsetparents and commit instead.
2223 """
2220 """
2224
2221
2225 ui.warn(_("(the rawcommit command is deprecated)\n"))
2222 ui.warn(_("(the rawcommit command is deprecated)\n"))
2226
2223
2227 message = cmdutil.logmessage(opts)
2224 message = cmdutil.logmessage(opts)
2228
2225
2229 files = cmdutil.match(repo, pats, opts).files()
2226 files = cmdutil.match(repo, pats, opts).files()
2230 if opts.get('files'):
2227 if opts.get('files'):
2231 files += open(opts['files']).read().splitlines()
2228 files += open(opts['files']).read().splitlines()
2232
2229
2233 parents = [repo.lookup(p) for p in opts['parent']]
2230 parents = [repo.lookup(p) for p in opts['parent']]
2234
2231
2235 try:
2232 try:
2236 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2233 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2237 except ValueError, inst:
2234 except ValueError, inst:
2238 raise util.Abort(str(inst))
2235 raise util.Abort(str(inst))
2239
2236
2240 def recover(ui, repo):
2237 def recover(ui, repo):
2241 """roll back an interrupted transaction
2238 """roll back an interrupted transaction
2242
2239
2243 Recover from an interrupted commit or pull.
2240 Recover from an interrupted commit or pull.
2244
2241
2245 This command tries to fix the repository status after an interrupted
2242 This command tries to fix the repository status after an interrupted
2246 operation. It should only be necessary when Mercurial suggests it.
2243 operation. It should only be necessary when Mercurial suggests it.
2247 """
2244 """
2248 if repo.recover():
2245 if repo.recover():
2249 return hg.verify(repo)
2246 return hg.verify(repo)
2250 return 1
2247 return 1
2251
2248
2252 def remove(ui, repo, *pats, **opts):
2249 def remove(ui, repo, *pats, **opts):
2253 """remove the specified files on the next commit
2250 """remove the specified files on the next commit
2254
2251
2255 Schedule the indicated files for removal from the repository.
2252 Schedule the indicated files for removal from the repository.
2256
2253
2257 This only removes files from the current branch, not from the entire
2254 This only removes files from the current branch, not from the entire
2258 project history. -A can be used to remove only files that have already
2255 project history. -A can be used to remove only files that have already
2259 been deleted, -f can be used to force deletion, and -Af can be used
2256 been deleted, -f can be used to force deletion, and -Af can be used
2260 to remove files from the next revision without deleting them.
2257 to remove files from the next revision without deleting them.
2261
2258
2262 The following table details the behavior of remove for different file
2259 The following table details the behavior of remove for different file
2263 states (columns) and option combinations (rows). The file states are
2260 states (columns) and option combinations (rows). The file states are
2264 Added, Clean, Modified and Missing (as reported by hg status). The
2261 Added, Clean, Modified and Missing (as reported by hg status). The
2265 actions are Warn, Remove (from branch) and Delete (from disk).
2262 actions are Warn, Remove (from branch) and Delete (from disk).
2266
2263
2267 A C M !
2264 A C M !
2268 none W RD W R
2265 none W RD W R
2269 -f R RD RD R
2266 -f R RD RD R
2270 -A W W W R
2267 -A W W W R
2271 -Af R R R R
2268 -Af R R R R
2272
2269
2273 This command schedules the files to be removed at the next commit.
2270 This command schedules the files to be removed at the next commit.
2274 To undo a remove before that, see hg revert.
2271 To undo a remove before that, see hg revert.
2275 """
2272 """
2276
2273
2277 after, force = opts.get('after'), opts.get('force')
2274 after, force = opts.get('after'), opts.get('force')
2278 if not pats and not after:
2275 if not pats and not after:
2279 raise util.Abort(_('no files specified'))
2276 raise util.Abort(_('no files specified'))
2280
2277
2281 m = cmdutil.match(repo, pats, opts)
2278 m = cmdutil.match(repo, pats, opts)
2282 s = repo.status(match=m, clean=True)
2279 s = repo.status(match=m, clean=True)
2283 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2280 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2284
2281
2285 def warn(files, reason):
2282 def warn(files, reason):
2286 for f in files:
2283 for f in files:
2287 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2284 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2288 % (m.rel(f), reason))
2285 % (m.rel(f), reason))
2289
2286
2290 if force:
2287 if force:
2291 remove, forget = modified + deleted + clean, added
2288 remove, forget = modified + deleted + clean, added
2292 elif after:
2289 elif after:
2293 remove, forget = deleted, []
2290 remove, forget = deleted, []
2294 warn(modified + added + clean, _('still exists'))
2291 warn(modified + added + clean, _('still exists'))
2295 else:
2292 else:
2296 remove, forget = deleted + clean, []
2293 remove, forget = deleted + clean, []
2297 warn(modified, _('is modified'))
2294 warn(modified, _('is modified'))
2298 warn(added, _('has been marked for add'))
2295 warn(added, _('has been marked for add'))
2299
2296
2300 for f in util.sort(remove + forget):
2297 for f in util.sort(remove + forget):
2301 if ui.verbose or not m.exact(f):
2298 if ui.verbose or not m.exact(f):
2302 ui.status(_('removing %s\n') % m.rel(f))
2299 ui.status(_('removing %s\n') % m.rel(f))
2303
2300
2304 repo.forget(forget)
2301 repo.forget(forget)
2305 repo.remove(remove, unlink=not after)
2302 repo.remove(remove, unlink=not after)
2306
2303
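The table in the docstring corresponds directly to the partition computed above; only -f and -A change which status buckets are removed from the branch and which pending adds are forgotten. A compact sketch (hypothetical helper name):

def removal_partition(modified, added, deleted, clean, force, after):
    # returns (files to remove from the branch, added files to forget)
    if force:                  # -f: take everything matched, drop pending adds
        return modified + deleted + clean, added
    if after:                  # -A: only files already missing from disk
        return deleted, []
    return deleted + clean, [] # default: modified/added files are only warned about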
2307 def rename(ui, repo, *pats, **opts):
2304 def rename(ui, repo, *pats, **opts):
2308 """rename files; equivalent of copy + remove
2305 """rename files; equivalent of copy + remove
2309
2306
2310 Mark dest as copies of sources; mark sources for deletion. If
2307 Mark dest as copies of sources; mark sources for deletion. If
2311 dest is a directory, copies are put in that directory. If dest is
2308 dest is a directory, copies are put in that directory. If dest is
2312 a file, there can only be one source.
2309 a file, there can only be one source.
2313
2310
2314 By default, this command copies the contents of files as they
2311 By default, this command copies the contents of files as they
2315 exist in the working directory. If invoked with --after, the
2312 exist in the working directory. If invoked with --after, the
2316 operation is recorded, but no copying is performed.
2313 operation is recorded, but no copying is performed.
2317
2314
2318 This command takes effect at the next commit. To undo a rename
2315 This command takes effect at the next commit. To undo a rename
2319 before that, see hg revert.
2316 before that, see hg revert.
2320 """
2317 """
2321 wlock = repo.wlock(False)
2318 wlock = repo.wlock(False)
2322 try:
2319 try:
2323 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2320 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2324 finally:
2321 finally:
2325 del wlock
2322 del wlock
2326
2323
2327 def resolve(ui, repo, *pats, **opts):
2324 def resolve(ui, repo, *pats, **opts):
2328 """retry file merges from a merge or update
2325 """retry file merges from a merge or update
2329
2326
2330 This command will cleanly retry unresolved file merges using file
2327 This command will cleanly retry unresolved file merges using file
2331 revisions preserved from the last update or merge. To attempt to
2328 revisions preserved from the last update or merge. To attempt to
2332 resolve all unresolved files, use the -a switch.
2329 resolve all unresolved files, use the -a switch.
2333
2330
2334 This command also allows listing resolved files and manually
2331 This command also allows listing resolved files and manually
2335 marking and unmarking files as resolved.
2332 marking and unmarking files as resolved.
2336
2333
2337 The codes used to show the status of files are:
2334 The codes used to show the status of files are:
2338 U = unresolved
2335 U = unresolved
2339 R = resolved
2336 R = resolved
2340 """
2337 """
2341
2338
2342 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2339 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2343
2340
2344 if (show and (mark or unmark)) or (mark and unmark):
2341 if (show and (mark or unmark)) or (mark and unmark):
2345 raise util.Abort(_("too many options specified"))
2342 raise util.Abort(_("too many options specified"))
2346 if pats and all:
2343 if pats and all:
2347 raise util.Abort(_("can't specify --all and patterns"))
2344 raise util.Abort(_("can't specify --all and patterns"))
2348 if not (all or pats or show or mark or unmark):
2345 if not (all or pats or show or mark or unmark):
2349 raise util.Abort(_('no files or directories specified; '
2346 raise util.Abort(_('no files or directories specified; '
2350 'use --all to remerge all files'))
2347 'use --all to remerge all files'))
2351
2348
2352 ms = merge_.mergestate(repo)
2349 ms = merge_.mergestate(repo)
2353 m = cmdutil.match(repo, pats, opts)
2350 m = cmdutil.match(repo, pats, opts)
2354
2351
2355 for f in ms:
2352 for f in ms:
2356 if m(f):
2353 if m(f):
2357 if show:
2354 if show:
2358 ui.write("%s %s\n" % (ms[f].upper(), f))
2355 ui.write("%s %s\n" % (ms[f].upper(), f))
2359 elif mark:
2356 elif mark:
2360 ms.mark(f, "r")
2357 ms.mark(f, "r")
2361 elif unmark:
2358 elif unmark:
2362 ms.mark(f, "u")
2359 ms.mark(f, "u")
2363 else:
2360 else:
2364 wctx = repo[None]
2361 wctx = repo[None]
2365 mctx = wctx.parents()[-1]
2362 mctx = wctx.parents()[-1]
2366
2363
2367 # backup pre-resolve (merge uses .orig for its own purposes)
2364 # backup pre-resolve (merge uses .orig for its own purposes)
2368 a = repo.wjoin(f)
2365 a = repo.wjoin(f)
2369 util.copyfile(a, a + ".resolve")
2366 util.copyfile(a, a + ".resolve")
2370
2367
2371 # resolve file
2368 # resolve file
2372 ms.resolve(f, wctx, mctx)
2369 ms.resolve(f, wctx, mctx)
2373
2370
2374 # replace filemerge's .orig file with our resolve file
2371 # replace filemerge's .orig file with our resolve file
2375 util.rename(a + ".resolve", a + ".orig")
2372 util.rename(a + ".resolve", a + ".orig")
2376
2373
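For each file that is actually re-merged, the branch above preserves the pre-resolve working copy so that filemerge's own .orig file does not clobber it. The three steps, condensed into a hypothetical helper with the same calls:

def remerge_with_backup(repo, ms, wctx, mctx, f):
    a = repo.wjoin(f)
    util.copyfile(a, a + ".resolve")           # snapshot the working copy
    ms.resolve(f, wctx, mctx)                  # re-run the file merge
    util.rename(a + ".resolve", a + ".orig")   # keep the snapshot as .orig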
2377 def revert(ui, repo, *pats, **opts):
2374 def revert(ui, repo, *pats, **opts):
2378 """restore individual files or dirs to an earlier state
2375 """restore individual files or dirs to an earlier state
2379
2376
2380 (use update -r to check out earlier revisions; revert does not
2377 (use update -r to check out earlier revisions; revert does not
2381 change the working dir parents)
2378 change the working dir parents)
2382
2379
2383 With no revision specified, revert the named files or directories
2380 With no revision specified, revert the named files or directories
2384 to the contents they had in the parent of the working directory.
2381 to the contents they had in the parent of the working directory.
2385 This restores the contents of the affected files to an unmodified
2382 This restores the contents of the affected files to an unmodified
2386 state and unschedules adds, removes, copies, and renames. If the
2383 state and unschedules adds, removes, copies, and renames. If the
2387 working directory has two parents, you must explicitly specify the
2384 working directory has two parents, you must explicitly specify the
2388 revision to revert to.
2385 revision to revert to.
2389
2386
2390 Using the -r option, revert the given files or directories to their
2387 Using the -r option, revert the given files or directories to their
2391 contents as of a specific revision. This can be helpful to "roll
2388 contents as of a specific revision. This can be helpful to "roll
2392 back" some or all of an earlier change.
2389 back" some or all of an earlier change.
2393 See 'hg help dates' for a list of formats valid for -d/--date.
2390 See 'hg help dates' for a list of formats valid for -d/--date.
2394
2391
2395 Revert modifies the working directory. It does not commit any
2392 Revert modifies the working directory. It does not commit any
2396 changes, or change the parent of the working directory. If you
2393 changes, or change the parent of the working directory. If you
2397 revert to a revision other than the parent of the working
2394 revert to a revision other than the parent of the working
2398 directory, the reverted files will thus appear modified
2395 directory, the reverted files will thus appear modified
2399 afterwards.
2396 afterwards.
2400
2397
2401 If a file has been deleted, it is restored. If the executable
2398 If a file has been deleted, it is restored. If the executable
2402 mode of a file was changed, it is reset.
2399 mode of a file was changed, it is reset.
2403
2400
2404 If names are given, all files matching the names are reverted.
2401 If names are given, all files matching the names are reverted.
2405 If no arguments are given, no files are reverted.
2402 If no arguments are given, no files are reverted.
2406
2403
2407 Modified files are saved with a .orig suffix before reverting.
2404 Modified files are saved with a .orig suffix before reverting.
2408 To disable these backups, use --no-backup.
2405 To disable these backups, use --no-backup.
2409 """
2406 """
2410
2407
2411 if opts["date"]:
2408 if opts["date"]:
2412 if opts["rev"]:
2409 if opts["rev"]:
2413 raise util.Abort(_("you can't specify a revision and a date"))
2410 raise util.Abort(_("you can't specify a revision and a date"))
2414 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2411 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2415
2412
2416 if not pats and not opts.get('all'):
2413 if not pats and not opts.get('all'):
2417 raise util.Abort(_('no files or directories specified; '
2414 raise util.Abort(_('no files or directories specified; '
2418 'use --all to revert the whole repo'))
2415 'use --all to revert the whole repo'))
2419
2416
2420 parent, p2 = repo.dirstate.parents()
2417 parent, p2 = repo.dirstate.parents()
2421 if not opts.get('rev') and p2 != nullid:
2418 if not opts.get('rev') and p2 != nullid:
2422 raise util.Abort(_('uncommitted merge - please provide a '
2419 raise util.Abort(_('uncommitted merge - please provide a '
2423 'specific revision'))
2420 'specific revision'))
2424 ctx = repo[opts.get('rev')]
2421 ctx = repo[opts.get('rev')]
2425 node = ctx.node()
2422 node = ctx.node()
2426 mf = ctx.manifest()
2423 mf = ctx.manifest()
2427 if node == parent:
2424 if node == parent:
2428 pmf = mf
2425 pmf = mf
2429 else:
2426 else:
2430 pmf = None
2427 pmf = None
2431
2428
2432 # need all matching names in dirstate and manifest of target rev,
2429 # need all matching names in dirstate and manifest of target rev,
2433 # so have to walk both. do not print errors if files exist in one
2430 # so have to walk both. do not print errors if files exist in one
2434 # but not other.
2431 # but not other.
2435
2432
2436 names = {}
2433 names = {}
2437
2434
2438 wlock = repo.wlock()
2435 wlock = repo.wlock()
2439 try:
2436 try:
2440 # walk dirstate.
2437 # walk dirstate.
2441 files = []
2438 files = []
2442
2439
2443 m = cmdutil.match(repo, pats, opts)
2440 m = cmdutil.match(repo, pats, opts)
2444 m.bad = lambda x,y: False
2441 m.bad = lambda x,y: False
2445 for abs in repo.walk(m):
2442 for abs in repo.walk(m):
2446 names[abs] = m.rel(abs), m.exact(abs)
2443 names[abs] = m.rel(abs), m.exact(abs)
2447
2444
2448 # walk target manifest.
2445 # walk target manifest.
2449
2446
2450 def badfn(path, msg):
2447 def badfn(path, msg):
2451 if path in names:
2448 if path in names:
2452 return False
2449 return False
2453 path_ = path + '/'
2450 path_ = path + '/'
2454 for f in names:
2451 for f in names:
2455 if f.startswith(path_):
2452 if f.startswith(path_):
2456 return False
2453 return False
2457 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2454 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2458 return False
2455 return False
2459
2456
2460 m = cmdutil.match(repo, pats, opts)
2457 m = cmdutil.match(repo, pats, opts)
2461 m.bad = badfn
2458 m.bad = badfn
2462 for abs in repo[node].walk(m):
2459 for abs in repo[node].walk(m):
2463 if abs not in names:
2460 if abs not in names:
2464 names[abs] = m.rel(abs), m.exact(abs)
2461 names[abs] = m.rel(abs), m.exact(abs)
2465
2462
2466 m = cmdutil.matchfiles(repo, names)
2463 m = cmdutil.matchfiles(repo, names)
2467 changes = repo.status(match=m)[:4]
2464 changes = repo.status(match=m)[:4]
2468 modified, added, removed, deleted = map(dict.fromkeys, changes)
2465 modified, added, removed, deleted = map(dict.fromkeys, changes)
2469
2466
2470 # if f is a rename, also revert the source
2467 # if f is a rename, also revert the source
2471 cwd = repo.getcwd()
2468 cwd = repo.getcwd()
2472 for f in added:
2469 for f in added:
2473 src = repo.dirstate.copied(f)
2470 src = repo.dirstate.copied(f)
2474 if src and src not in names and repo.dirstate[src] == 'r':
2471 if src and src not in names and repo.dirstate[src] == 'r':
2475 removed[src] = None
2472 removed[src] = None
2476 names[src] = (repo.pathto(src, cwd), True)
2473 names[src] = (repo.pathto(src, cwd), True)
2477
2474
2478 def removeforget(abs):
2475 def removeforget(abs):
2479 if repo.dirstate[abs] == 'a':
2476 if repo.dirstate[abs] == 'a':
2480 return _('forgetting %s\n')
2477 return _('forgetting %s\n')
2481 return _('removing %s\n')
2478 return _('removing %s\n')
2482
2479
2483 revert = ([], _('reverting %s\n'))
2480 revert = ([], _('reverting %s\n'))
2484 add = ([], _('adding %s\n'))
2481 add = ([], _('adding %s\n'))
2485 remove = ([], removeforget)
2482 remove = ([], removeforget)
2486 undelete = ([], _('undeleting %s\n'))
2483 undelete = ([], _('undeleting %s\n'))
2487
2484
2488 disptable = (
2485 disptable = (
2489 # dispatch table:
2486 # dispatch table:
2490 # file state
2487 # file state
2491 # action if in target manifest
2488 # action if in target manifest
2492 # action if not in target manifest
2489 # action if not in target manifest
2493 # make backup if in target manifest
2490 # make backup if in target manifest
2494 # make backup if not in target manifest
2491 # make backup if not in target manifest
2495 (modified, revert, remove, True, True),
2492 (modified, revert, remove, True, True),
2496 (added, revert, remove, True, False),
2493 (added, revert, remove, True, False),
2497 (removed, undelete, None, False, False),
2494 (removed, undelete, None, False, False),
2498 (deleted, revert, remove, False, False),
2495 (deleted, revert, remove, False, False),
2499 )
2496 )
2500
2497
2501 for abs, (rel, exact) in util.sort(names.items()):
2498 for abs, (rel, exact) in util.sort(names.items()):
2502 mfentry = mf.get(abs)
2499 mfentry = mf.get(abs)
2503 target = repo.wjoin(abs)
2500 target = repo.wjoin(abs)
2504 def handle(xlist, dobackup):
2501 def handle(xlist, dobackup):
2505 xlist[0].append(abs)
2502 xlist[0].append(abs)
2506 if dobackup and not opts.get('no_backup') and util.lexists(target):
2503 if dobackup and not opts.get('no_backup') and util.lexists(target):
2507 bakname = "%s.orig" % rel
2504 bakname = "%s.orig" % rel
2508 ui.note(_('saving current version of %s as %s\n') %
2505 ui.note(_('saving current version of %s as %s\n') %
2509 (rel, bakname))
2506 (rel, bakname))
2510 if not opts.get('dry_run'):
2507 if not opts.get('dry_run'):
2511 util.copyfile(target, bakname)
2508 util.copyfile(target, bakname)
2512 if ui.verbose or not exact:
2509 if ui.verbose or not exact:
2513 msg = xlist[1]
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock

def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

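    # Illustrative example: serve the repository on port 8080 and log to
    # files instead of stdout/stderr (the -p/-A/-E options are defined in
    # the command table below):
    #   $ hg serve -p 8080 -A access.log -E error.log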
    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)

def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or the
    source of a copy/move operation are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

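    # Illustrative output, e.g. from 'hg status -C' (file names hypothetical):
    #   M foo.c
    #   A bar.c
    #     foo.c
    # where the indented line is the copy source of the added file above it.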
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))

def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

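    # Illustrative examples (tag names and revisions are hypothetical):
    #   $ hg tag -r 3 -m 'Tagging release 1.0' v1.0
    #   $ hg tag --remove v1.0
    # Unless --local is given, each of these commits a change to .hgtags.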
    rev_ = "."
    names = (name1,) + names
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)

def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            r = "    ?:%s" % hn
        else:
            spaces = " " * (30 - util.colwidth(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))

def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])

def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        del lock

    return postincoming(ui, repo, modheads, opts.get('update'), None)

def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified. Use null as
    the revision to remove the working copy (like 'hg clone -U').

    When the working dir contains no uncommitted changes, it will be
    replaced by the state of the requested revision from the repo. When
    the requested revision is on a different branch, the working dir
    will additionally be switched to that branch.

    When there are uncommitted changes, use option -C to discard them,
    forcibly replacing the state of the working dir with the requested
    revision.

    When there are uncommitted changes and option -C is not used, and
    the parent revision and requested revision are on the same branch,
    and one of them is an ancestor of the other, then the new working
    directory will contain the requested revision merged with the
    uncommitted changes. Otherwise, the update will fail with a
    suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for --date.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]

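# Each entry below maps a command name to a tuple of (implementing function,
# option list, optional usage synopsis). A '|' in the key separates command
# aliases, and a leading '^' marks commands shown in the short help listing.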
table = {
    "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
    "addremove":
        (addremove, similarityopts + walkopts + dryrunopts,
         _('[OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('[OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('[OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('c', 'command', '', _('use command to check changeset state')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("[-gbsr] [-c CMD] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch')),
          ('C', 'clean', None, _('reset branch name to parent branch name'))],
         _('[-fC] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('[-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('[OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('[OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('', 'close-branch', None,
           _('mark a branch as closed, hiding it from the branch list')),
         ] + walkopts + commitopts + commitopts2,
         _('[OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, []),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('[-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
    "debugindex": (debugindex, [], _('FILE')),
    "debugindexdot": (debugindexdot, [], _('FILE')),
    "debuginstall": (debuginstall, []),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('[OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('[-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('[-r REV] FILE')),
    "debugsetparents":
        (debugsetparents, [], _('REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('[OPTION]...')),
    "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('c', 'change', '', _('change made by revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('[OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('[OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('a', 'active', False,
           _('show only the active heads from open branches')),
         ] + templateopts,
         _('[-r REV] [REV]...')),
    "help": (help_, [], _('[TOPIC]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('[-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2 + similarityopts,
         _('[OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('[-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('[-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('[OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('u', 'user', [], _('revs committed by user')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('[OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('[-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('[-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('[NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, []),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('[OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('a', 'all', None, _('remerge all unresolved files')),
          ('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))]
          + walkopts,
         _('[OPTION]... [FILE]...')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('[OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, []),
    "root": (root, []),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('[OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('[-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
3384 ('n', 'no-status', None, _('hide status prefix')),
3381 ('n', 'no-status', None, _('hide status prefix')),
3385 ('C', 'copies', None, _('show source of copied files')),
3382 ('C', 'copies', None, _('show source of copied files')),
3386 ('0', 'print0', None,
3383 ('0', 'print0', None,
3387 _('end filenames with NUL, for use with xargs')),
3384 _('end filenames with NUL, for use with xargs')),
3388 ('', 'rev', [], _('show difference from revision')),
3385 ('', 'rev', [], _('show difference from revision')),
3389 ] + walkopts,
3386 ] + walkopts,
3390 _('[OPTION]... [FILE]...')),
3387 _('[OPTION]... [FILE]...')),
3391 "tag":
3388 "tag":
3392 (tag,
3389 (tag,
3393 [('f', 'force', None, _('replace existing tag')),
3390 [('f', 'force', None, _('replace existing tag')),
3394 ('l', 'local', None, _('make the tag local')),
3391 ('l', 'local', None, _('make the tag local')),
3395 ('r', 'rev', '', _('revision to tag')),
3392 ('r', 'rev', '', _('revision to tag')),
3396 ('', 'remove', None, _('remove a tag')),
3393 ('', 'remove', None, _('remove a tag')),
3397 # -l/--local is already there, commitopts cannot be used
3394 # -l/--local is already there, commitopts cannot be used
3398 ('m', 'message', '', _('use <text> as commit message')),
3395 ('m', 'message', '', _('use <text> as commit message')),
3399 ] + commitopts2,
3396 ] + commitopts2,
3400 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3397 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3401 "tags": (tags, []),
3398 "tags": (tags, []),
3402 "tip":
3399 "tip":
3403 (tip,
3400 (tip,
3404 [('p', 'patch', None, _('show patch')),
3401 [('p', 'patch', None, _('show patch')),
3405 ('g', 'git', None, _('use git extended diff format')),
3402 ('g', 'git', None, _('use git extended diff format')),
3406 ] + templateopts,
3403 ] + templateopts,
3407 _('[-p]')),
3404 _('[-p]')),
3408 "unbundle":
3405 "unbundle":
3409 (unbundle,
3406 (unbundle,
3410 [('u', 'update', None,
3407 [('u', 'update', None,
3411 _('update to new tip if changesets were unbundled'))],
3408 _('update to new tip if changesets were unbundled'))],
3412 _('[-u] FILE...')),
3409 _('[-u] FILE...')),
3413 "^update|up|checkout|co":
3410 "^update|up|checkout|co":
3414 (update,
3411 (update,
3415 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3412 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3416 ('d', 'date', '', _('tipmost revision matching date')),
3413 ('d', 'date', '', _('tipmost revision matching date')),
3417 ('r', 'rev', '', _('revision'))],
3414 ('r', 'rev', '', _('revision'))],
3418 _('[-C] [-d DATE] [[-r] REV]')),
3415 _('[-C] [-d DATE] [[-r] REV]')),
3419 "verify": (verify, []),
3416 "verify": (verify, []),
3420 "version": (version_, []),
3417 "version": (version_, []),
3421 }
3418 }
3422
3419
3423 norepo = ("clone init version help debugcomplete debugdata"
3420 norepo = ("clone init version help debugcomplete debugdata"
3424 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3421 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3425 optionalrepo = ("identify paths serve showconfig debugancestor")
3422 optionalrepo = ("identify paths serve showconfig debugancestor")
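Each entry in the command table above maps a command name (aliases separated by "|", with a leading "^" marking commands listed in the short help) to a tuple of (function, option list, synopsis string); every option is itself a (short flag, long flag, default, help text) tuple, optionally extended with shared groups such as walkopts or remoteopts. A hypothetical minimal entry, purely to show the shape (frob and its flag are invented, not part of this file):

    "frob|fb":
        (frob,                                               # command function
         [('n', 'dry-run', None, _('do not really frob'))],  # (short, long, default, help)
         _('[OPTION]...')),                                   # synopsis shown in help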
@@ -1,2163 +1,2166 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store
13 import os, time, util, extensions, hook, inspect, error
13 import os, time, util, extensions, hook, inspect, error
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = util.set(('lookup', 'changegroupsubset'))
18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 supported = ('revlogv1', 'store', 'fncache')
19 supported = ('revlogv1', 'store', 'fncache')
20
20
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.root = os.path.realpath(path)
23 self.root = os.path.realpath(path)
24 self.path = os.path.join(self.root, ".hg")
24 self.path = os.path.join(self.root, ".hg")
25 self.origroot = path
25 self.origroot = path
26 self.opener = util.opener(self.path)
26 self.opener = util.opener(self.path)
27 self.wopener = util.opener(self.root)
27 self.wopener = util.opener(self.root)
28
28
29 if not os.path.isdir(self.path):
29 if not os.path.isdir(self.path):
30 if create:
30 if create:
31 if not os.path.exists(path):
31 if not os.path.exists(path):
32 os.mkdir(path)
32 os.mkdir(path)
33 os.mkdir(self.path)
33 os.mkdir(self.path)
34 requirements = ["revlogv1"]
34 requirements = ["revlogv1"]
35 if parentui.configbool('format', 'usestore', True):
35 if parentui.configbool('format', 'usestore', True):
36 os.mkdir(os.path.join(self.path, "store"))
36 os.mkdir(os.path.join(self.path, "store"))
37 requirements.append("store")
37 requirements.append("store")
38 if parentui.configbool('format', 'usefncache', True):
38 if parentui.configbool('format', 'usefncache', True):
39 requirements.append("fncache")
39 requirements.append("fncache")
40 # create an invalid changelog
40 # create an invalid changelog
41 self.opener("00changelog.i", "a").write(
41 self.opener("00changelog.i", "a").write(
42 '\0\0\0\2' # represents revlogv2
42 '\0\0\0\2' # represents revlogv2
43 ' dummy changelog to prevent using the old repo layout'
43 ' dummy changelog to prevent using the old repo layout'
44 )
44 )
45 reqfile = self.opener("requires", "w")
45 reqfile = self.opener("requires", "w")
46 for r in requirements:
46 for r in requirements:
47 reqfile.write("%s\n" % r)
47 reqfile.write("%s\n" % r)
48 reqfile.close()
48 reqfile.close()
49 else:
49 else:
50 raise error.RepoError(_("repository %s not found") % path)
50 raise error.RepoError(_("repository %s not found") % path)
51 elif create:
51 elif create:
52 raise error.RepoError(_("repository %s already exists") % path)
52 raise error.RepoError(_("repository %s already exists") % path)
53 else:
53 else:
54 # find requirements
54 # find requirements
55 requirements = []
55 requirements = []
56 try:
56 try:
57 requirements = self.opener("requires").read().splitlines()
57 requirements = self.opener("requires").read().splitlines()
58 for r in requirements:
58 for r in requirements:
59 if r not in self.supported:
59 if r not in self.supported:
60 raise error.RepoError(_("requirement '%s' not supported") % r)
60 raise error.RepoError(_("requirement '%s' not supported") % r)
61 except IOError, inst:
61 except IOError, inst:
62 if inst.errno != errno.ENOENT:
62 if inst.errno != errno.ENOENT:
63 raise
63 raise
64
64
65 self.store = store.store(requirements, self.path, util.opener)
65 self.store = store.store(requirements, self.path, util.opener)
66 self.spath = self.store.path
66 self.spath = self.store.path
67 self.sopener = self.store.opener
67 self.sopener = self.store.opener
68 self.sjoin = self.store.join
68 self.sjoin = self.store.join
69 self.opener.createmode = self.store.createmode
69 self.opener.createmode = self.store.createmode
70
70
71 self.ui = ui.ui(parentui=parentui)
71 self.ui = ui.ui(parentui=parentui)
72 try:
72 try:
73 self.ui.readconfig(self.join("hgrc"), self.root)
73 self.ui.readconfig(self.join("hgrc"), self.root)
74 extensions.loadall(self.ui)
74 extensions.loadall(self.ui)
75 except IOError:
75 except IOError:
76 pass
76 pass
77
77
78 self.tagscache = None
78 self.tagscache = None
79 self._tagstypecache = None
79 self._tagstypecache = None
80 self.branchcache = None
80 self.branchcache = None
81 self._ubranchcache = None # UTF-8 version of branchcache
81 self._ubranchcache = None # UTF-8 version of branchcache
82 self._branchcachetip = None
82 self._branchcachetip = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.filterpats = {}
84 self.filterpats = {}
85 self._datafilters = {}
85 self._datafilters = {}
86 self._transref = self._lockref = self._wlockref = None
86 self._transref = self._lockref = self._wlockref = None
87
87
88 def __getattr__(self, name):
88 def __getattr__(self, name):
89 if name == 'changelog':
89 if name == 'changelog':
90 self.changelog = changelog.changelog(self.sopener)
90 self.changelog = changelog.changelog(self.sopener)
91 if 'HG_PENDING' in os.environ:
91 if 'HG_PENDING' in os.environ:
92 p = os.environ['HG_PENDING']
92 p = os.environ['HG_PENDING']
93 if p.startswith(self.root):
93 if p.startswith(self.root):
94 self.changelog.readpending('00changelog.i.a')
94 self.changelog.readpending('00changelog.i.a')
95 self.sopener.defversion = self.changelog.version
95 self.sopener.defversion = self.changelog.version
96 return self.changelog
96 return self.changelog
97 if name == 'manifest':
97 if name == 'manifest':
98 self.changelog
98 self.changelog
99 self.manifest = manifest.manifest(self.sopener)
99 self.manifest = manifest.manifest(self.sopener)
100 return self.manifest
100 return self.manifest
101 if name == 'dirstate':
101 if name == 'dirstate':
102 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
102 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
103 return self.dirstate
103 return self.dirstate
104 else:
104 else:
105 raise AttributeError(name)
105 raise AttributeError(name)
106
106
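The three lazy attributes above only pass through __getattr__ once: each branch assigns the freshly built object onto self, so later lookups hit the instance dictionary directly. A small illustration (repo stands for any localrepository instance):

    repo.changelog   # first access: __getattr__ builds the changelog from the store
    repo.changelog   # later accesses: ordinary attribute lookup, nothing is rebuilt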
107 def __getitem__(self, changeid):
107 def __getitem__(self, changeid):
108 if changeid == None:
108 if changeid == None:
109 return context.workingctx(self)
109 return context.workingctx(self)
110 return context.changectx(self, changeid)
110 return context.changectx(self, changeid)
111
111
112 def __nonzero__(self):
112 def __nonzero__(self):
113 return True
113 return True
114
114
115 def __len__(self):
115 def __len__(self):
116 return len(self.changelog)
116 return len(self.changelog)
117
117
118 def __iter__(self):
118 def __iter__(self):
119 for i in xrange(len(self)):
119 for i in xrange(len(self)):
120 yield i
120 yield i
121
121
122 def url(self):
122 def url(self):
123 return 'file:' + self.root
123 return 'file:' + self.root
124
124
125 def hook(self, name, throw=False, **args):
125 def hook(self, name, throw=False, **args):
126 return hook.hook(self.ui, self, name, throw, **args)
126 return hook.hook(self.ui, self, name, throw, **args)
127
127
128 tag_disallowed = ':\r\n'
128 tag_disallowed = ':\r\n'
129
129
130 def _tag(self, names, node, message, local, user, date, parent=None,
130 def _tag(self, names, node, message, local, user, date, parent=None,
131 extra={}):
131 extra={}):
132 use_dirstate = parent is None
132 use_dirstate = parent is None
133
133
134 if isinstance(names, str):
134 if isinstance(names, str):
135 allchars = names
135 allchars = names
136 names = (names,)
136 names = (names,)
137 else:
137 else:
138 allchars = ''.join(names)
138 allchars = ''.join(names)
139 for c in self.tag_disallowed:
139 for c in self.tag_disallowed:
140 if c in allchars:
140 if c in allchars:
141 raise util.Abort(_('%r cannot be used in a tag name') % c)
141 raise util.Abort(_('%r cannot be used in a tag name') % c)
142
142
143 for name in names:
143 for name in names:
144 self.hook('pretag', throw=True, node=hex(node), tag=name,
144 self.hook('pretag', throw=True, node=hex(node), tag=name,
145 local=local)
145 local=local)
146
146
147 def writetags(fp, names, munge, prevtags):
147 def writetags(fp, names, munge, prevtags):
148 fp.seek(0, 2)
148 fp.seek(0, 2)
149 if prevtags and prevtags[-1] != '\n':
149 if prevtags and prevtags[-1] != '\n':
150 fp.write('\n')
150 fp.write('\n')
151 for name in names:
151 for name in names:
152 m = munge and munge(name) or name
152 m = munge and munge(name) or name
153 if self._tagstypecache and name in self._tagstypecache:
153 if self._tagstypecache and name in self._tagstypecache:
154 old = self.tagscache.get(name, nullid)
154 old = self.tagscache.get(name, nullid)
155 fp.write('%s %s\n' % (hex(old), m))
155 fp.write('%s %s\n' % (hex(old), m))
156 fp.write('%s %s\n' % (hex(node), m))
156 fp.write('%s %s\n' % (hex(node), m))
157 fp.close()
157 fp.close()
158
158
159 prevtags = ''
159 prevtags = ''
160 if local:
160 if local:
161 try:
161 try:
162 fp = self.opener('localtags', 'r+')
162 fp = self.opener('localtags', 'r+')
163 except IOError, err:
163 except IOError, err:
164 fp = self.opener('localtags', 'a')
164 fp = self.opener('localtags', 'a')
165 else:
165 else:
166 prevtags = fp.read()
166 prevtags = fp.read()
167
167
168 # local tags are stored in the current charset
168 # local tags are stored in the current charset
169 writetags(fp, names, None, prevtags)
169 writetags(fp, names, None, prevtags)
170 for name in names:
170 for name in names:
171 self.hook('tag', node=hex(node), tag=name, local=local)
171 self.hook('tag', node=hex(node), tag=name, local=local)
172 return
172 return
173
173
174 if use_dirstate:
174 if use_dirstate:
175 try:
175 try:
176 fp = self.wfile('.hgtags', 'rb+')
176 fp = self.wfile('.hgtags', 'rb+')
177 except IOError, err:
177 except IOError, err:
178 fp = self.wfile('.hgtags', 'ab')
178 fp = self.wfile('.hgtags', 'ab')
179 else:
179 else:
180 prevtags = fp.read()
180 prevtags = fp.read()
181 else:
181 else:
182 try:
182 try:
183 prevtags = self.filectx('.hgtags', parent).data()
183 prevtags = self.filectx('.hgtags', parent).data()
184 except error.LookupError:
184 except error.LookupError:
185 pass
185 pass
186 fp = self.wfile('.hgtags', 'wb')
186 fp = self.wfile('.hgtags', 'wb')
187 if prevtags:
187 if prevtags:
188 fp.write(prevtags)
188 fp.write(prevtags)
189
189
190 # committed tags are stored in UTF-8
190 # committed tags are stored in UTF-8
191 writetags(fp, names, util.fromlocal, prevtags)
191 writetags(fp, names, util.fromlocal, prevtags)
192
192
193 if use_dirstate and '.hgtags' not in self.dirstate:
193 if use_dirstate and '.hgtags' not in self.dirstate:
194 self.add(['.hgtags'])
194 self.add(['.hgtags'])
195
195
196 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
196 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
197 extra=extra)
197 extra=extra)
198
198
199 for name in names:
199 for name in names:
200 self.hook('tag', node=hex(node), tag=name, local=local)
200 self.hook('tag', node=hex(node), tag=name, local=local)
201
201
202 return tagnode
202 return tagnode
203
203
204 def tag(self, names, node, message, local, user, date):
204 def tag(self, names, node, message, local, user, date):
205 '''tag a revision with one or more symbolic names.
205 '''tag a revision with one or more symbolic names.
206
206
207 names is a list of strings or, when adding a single tag, names may be a
207 names is a list of strings or, when adding a single tag, names may be a
208 string.
208 string.
209
209
210 if local is True, the tags are stored in a per-repository file.
210 if local is True, the tags are stored in a per-repository file.
211 otherwise, they are stored in the .hgtags file, and a new
211 otherwise, they are stored in the .hgtags file, and a new
212 changeset is committed with the change.
212 changeset is committed with the change.
213
213
214 keyword arguments:
214 keyword arguments:
215
215
216 local: whether to store tags in non-version-controlled file
216 local: whether to store tags in non-version-controlled file
217 (default False)
217 (default False)
218
218
219 message: commit message to use if committing
219 message: commit message to use if committing
220
220
221 user: name of user to use if committing
221 user: name of user to use if committing
222
222
223 date: date tuple to use if committing'''
223 date: date tuple to use if committing'''
224
224
225 for x in self.status()[:5]:
225 for x in self.status()[:5]:
226 if '.hgtags' in x:
226 if '.hgtags' in x:
227 raise util.Abort(_('working copy of .hgtags is changed '
227 raise util.Abort(_('working copy of .hgtags is changed '
228 '(please commit .hgtags manually)'))
228 '(please commit .hgtags manually)'))
229
229
230 self.tags() # instantiate the cache
230 self.tags() # instantiate the cache
231 self._tag(names, node, message, local, user, date)
231 self._tag(names, node, message, local, user, date)
232
232
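The docstring above describes the public tagging entry point; a hedged illustration of one call, with an invented user and message (passing None for date lets the eventual commit use the current time):

    repo.tag('v1.0', repo['tip'].node(), 'Added tag v1.0',
             False, 'Alice <alice@example.com>', None)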
233 def tags(self):
233 def tags(self):
234 '''return a mapping of tag to node'''
234 '''return a mapping of tag to node'''
235 if self.tagscache:
235 if self.tagscache:
236 return self.tagscache
236 return self.tagscache
237
237
238 globaltags = {}
238 globaltags = {}
239 tagtypes = {}
239 tagtypes = {}
240
240
241 def readtags(lines, fn, tagtype):
241 def readtags(lines, fn, tagtype):
242 filetags = {}
242 filetags = {}
243 count = 0
243 count = 0
244
244
245 def warn(msg):
245 def warn(msg):
246 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
246 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
247
247
248 for l in lines:
248 for l in lines:
249 count += 1
249 count += 1
250 if not l:
250 if not l:
251 continue
251 continue
252 s = l.split(" ", 1)
252 s = l.split(" ", 1)
253 if len(s) != 2:
253 if len(s) != 2:
254 warn(_("cannot parse entry"))
254 warn(_("cannot parse entry"))
255 continue
255 continue
256 node, key = s
256 node, key = s
257 key = util.tolocal(key.strip()) # stored in UTF-8
257 key = util.tolocal(key.strip()) # stored in UTF-8
258 try:
258 try:
259 bin_n = bin(node)
259 bin_n = bin(node)
260 except TypeError:
260 except TypeError:
261 warn(_("node '%s' is not well formed") % node)
261 warn(_("node '%s' is not well formed") % node)
262 continue
262 continue
263 if bin_n not in self.changelog.nodemap:
263 if bin_n not in self.changelog.nodemap:
264 warn(_("tag '%s' refers to unknown node") % key)
264 warn(_("tag '%s' refers to unknown node") % key)
265 continue
265 continue
266
266
267 h = []
267 h = []
268 if key in filetags:
268 if key in filetags:
269 n, h = filetags[key]
269 n, h = filetags[key]
270 h.append(n)
270 h.append(n)
271 filetags[key] = (bin_n, h)
271 filetags[key] = (bin_n, h)
272
272
273 for k, nh in filetags.iteritems():
273 for k, nh in filetags.iteritems():
274 if k not in globaltags:
274 if k not in globaltags:
275 globaltags[k] = nh
275 globaltags[k] = nh
276 tagtypes[k] = tagtype
276 tagtypes[k] = tagtype
277 continue
277 continue
278
278
279 # we prefer the global tag if:
279 # we prefer the global tag if:
280 # it supersedes us OR
280 # it supersedes us OR
281 # mutual supersedes and it has a higher rank
281 # mutual supersedes and it has a higher rank
282 # otherwise we win because we're tip-most
282 # otherwise we win because we're tip-most
283 an, ah = nh
283 an, ah = nh
284 bn, bh = globaltags[k]
284 bn, bh = globaltags[k]
285 if (bn != an and an in bh and
285 if (bn != an and an in bh and
286 (bn not in ah or len(bh) > len(ah))):
286 (bn not in ah or len(bh) > len(ah))):
287 an = bn
287 an = bn
288 ah.extend([n for n in bh if n not in ah])
288 ah.extend([n for n in bh if n not in ah])
289 globaltags[k] = an, ah
289 globaltags[k] = an, ah
290 tagtypes[k] = tagtype
290 tagtypes[k] = tagtype
291
291
292 # read the tags file from each head, ending with the tip
292 # read the tags file from each head, ending with the tip
293 f = None
293 f = None
294 for rev, node, fnode in self._hgtagsnodes():
294 for rev, node, fnode in self._hgtagsnodes():
295 f = (f and f.filectx(fnode) or
295 f = (f and f.filectx(fnode) or
296 self.filectx('.hgtags', fileid=fnode))
296 self.filectx('.hgtags', fileid=fnode))
297 readtags(f.data().splitlines(), f, "global")
297 readtags(f.data().splitlines(), f, "global")
298
298
299 try:
299 try:
300 data = util.fromlocal(self.opener("localtags").read())
300 data = util.fromlocal(self.opener("localtags").read())
301 # localtags are stored in the local character set
301 # localtags are stored in the local character set
302 # while the internal tag table is stored in UTF-8
302 # while the internal tag table is stored in UTF-8
303 readtags(data.splitlines(), "localtags", "local")
303 readtags(data.splitlines(), "localtags", "local")
304 except IOError:
304 except IOError:
305 pass
305 pass
306
306
307 self.tagscache = {}
307 self.tagscache = {}
308 self._tagstypecache = {}
308 self._tagstypecache = {}
309 for k, nh in globaltags.iteritems():
309 for k, nh in globaltags.iteritems():
310 n = nh[0]
310 n = nh[0]
311 if n != nullid:
311 if n != nullid:
312 self.tagscache[k] = n
312 self.tagscache[k] = n
313 self._tagstypecache[k] = tagtypes[k]
313 self._tagstypecache[k] = tagtypes[k]
314 self.tagscache['tip'] = self.changelog.tip()
314 self.tagscache['tip'] = self.changelog.tip()
315 return self.tagscache
315 return self.tagscache
316
316
317 def tagtype(self, tagname):
317 def tagtype(self, tagname):
318 '''
318 '''
319 return the type of the given tag. result can be:
319 return the type of the given tag. result can be:
320
320
321 'local' : a local tag
321 'local' : a local tag
322 'global' : a global tag
322 'global' : a global tag
323 None : tag does not exist
323 None : tag does not exist
324 '''
324 '''
325
325
326 self.tags()
326 self.tags()
327
327
328 return self._tagstypecache.get(tagname)
328 return self._tagstypecache.get(tagname)
329
329
330 def _hgtagsnodes(self):
330 def _hgtagsnodes(self):
331 heads = self.heads()
331 heads = self.heads()
332 heads.reverse()
332 heads.reverse()
333 last = {}
333 last = {}
334 ret = []
334 ret = []
335 for node in heads:
335 for node in heads:
336 c = self[node]
336 c = self[node]
337 rev = c.rev()
337 rev = c.rev()
338 try:
338 try:
339 fnode = c.filenode('.hgtags')
339 fnode = c.filenode('.hgtags')
340 except error.LookupError:
340 except error.LookupError:
341 continue
341 continue
342 ret.append((rev, node, fnode))
342 ret.append((rev, node, fnode))
343 if fnode in last:
343 if fnode in last:
344 ret[last[fnode]] = None
344 ret[last[fnode]] = None
345 last[fnode] = len(ret) - 1
345 last[fnode] = len(ret) - 1
346 return [item for item in ret if item]
346 return [item for item in ret if item]
347
347
348 def tagslist(self):
348 def tagslist(self):
349 '''return a list of tags ordered by revision'''
349 '''return a list of tags ordered by revision'''
350 l = []
350 l = []
351 for t, n in self.tags().iteritems():
351 for t, n in self.tags().iteritems():
352 try:
352 try:
353 r = self.changelog.rev(n)
353 r = self.changelog.rev(n)
354 except:
354 except:
355 r = -2 # sort to the beginning of the list if unknown
355 r = -2 # sort to the beginning of the list if unknown
356 l.append((r, t, n))
356 l.append((r, t, n))
357 return [(t, n) for r, t, n in util.sort(l)]
357 return [(t, n) for r, t, n in util.sort(l)]
358
358
359 def nodetags(self, node):
359 def nodetags(self, node):
360 '''return the tags associated with a node'''
360 '''return the tags associated with a node'''
361 if not self.nodetagscache:
361 if not self.nodetagscache:
362 self.nodetagscache = {}
362 self.nodetagscache = {}
363 for t, n in self.tags().iteritems():
363 for t, n in self.tags().iteritems():
364 self.nodetagscache.setdefault(n, []).append(t)
364 self.nodetagscache.setdefault(n, []).append(t)
365 return self.nodetagscache.get(node, [])
365 return self.nodetagscache.get(node, [])
366
366
367 def _branchtags(self, partial, lrev):
367 def _branchtags(self, partial, lrev):
368 # TODO: rename this function?
368 # TODO: rename this function?
369 tiprev = len(self) - 1
369 tiprev = len(self) - 1
370 if lrev != tiprev:
370 if lrev != tiprev:
371 self._updatebranchcache(partial, lrev+1, tiprev+1)
371 self._updatebranchcache(partial, lrev+1, tiprev+1)
372 self._writebranchcache(partial, self.changelog.tip(), tiprev)
372 self._writebranchcache(partial, self.changelog.tip(), tiprev)
373
373
374 return partial
374 return partial
375
375
376 def _branchheads(self):
376 def _branchheads(self):
377 tip = self.changelog.tip()
377 tip = self.changelog.tip()
378 if self.branchcache is not None and self._branchcachetip == tip:
378 if self.branchcache is not None and self._branchcachetip == tip:
379 return self.branchcache
379 return self.branchcache
380
380
381 oldtip = self._branchcachetip
381 oldtip = self._branchcachetip
382 self._branchcachetip = tip
382 self._branchcachetip = tip
383 if self.branchcache is None:
383 if self.branchcache is None:
384 self.branchcache = {} # avoid recursion in changectx
384 self.branchcache = {} # avoid recursion in changectx
385 else:
385 else:
386 self.branchcache.clear() # keep using the same dict
386 self.branchcache.clear() # keep using the same dict
387 if oldtip is None or oldtip not in self.changelog.nodemap:
387 if oldtip is None or oldtip not in self.changelog.nodemap:
388 partial, last, lrev = self._readbranchcache()
388 partial, last, lrev = self._readbranchcache()
389 else:
389 else:
390 lrev = self.changelog.rev(oldtip)
390 lrev = self.changelog.rev(oldtip)
391 partial = self._ubranchcache
391 partial = self._ubranchcache
392
392
393 self._branchtags(partial, lrev)
393 self._branchtags(partial, lrev)
394 # this private cache holds all heads (not just tips)
394 # this private cache holds all heads (not just tips)
395 self._ubranchcache = partial
395 self._ubranchcache = partial
396
396
397 # the branch cache is stored on disk as UTF-8, but in the local
397 # the branch cache is stored on disk as UTF-8, but in the local
398 # charset internally
398 # charset internally
399 for k, v in partial.iteritems():
399 for k, v in partial.iteritems():
400 self.branchcache[util.tolocal(k)] = v
400 self.branchcache[util.tolocal(k)] = v
401 return self.branchcache
401 return self.branchcache
402
402
403
403
404 def branchtags(self):
404 def branchtags(self):
405 '''return a dict where branch names map to the tipmost head of
405 '''return a dict where branch names map to the tipmost head of
406 the branch, open heads come before closed'''
406 the branch, open heads come before closed'''
407 bt = {}
407 bt = {}
408 for bn, heads in self._branchheads().iteritems():
408 for bn, heads in self._branchheads().iteritems():
409 head = None
409 head = None
410 for i in range(len(heads)-1, -1, -1):
410 for i in range(len(heads)-1, -1, -1):
411 h = heads[i]
411 h = heads[i]
412 if 'close' not in self.changelog.read(h)[5]:
412 if 'close' not in self.changelog.read(h)[5]:
413 head = h
413 head = h
414 break
414 break
415 # no open heads were found
415 # no open heads were found
416 if head is None:
416 if head is None:
417 head = heads[-1]
417 head = heads[-1]
418 bt[bn] = head
418 bt[bn] = head
419 return bt
419 return bt
420
420
421
421
422 def _readbranchcache(self):
422 def _readbranchcache(self):
423 partial = {}
423 partial = {}
424 try:
424 try:
425 f = self.opener("branchheads.cache")
425 f = self.opener("branchheads.cache")
426 lines = f.read().split('\n')
426 lines = f.read().split('\n')
427 f.close()
427 f.close()
428 except (IOError, OSError):
428 except (IOError, OSError):
429 return {}, nullid, nullrev
429 return {}, nullid, nullrev
430
430
431 try:
431 try:
432 last, lrev = lines.pop(0).split(" ", 1)
432 last, lrev = lines.pop(0).split(" ", 1)
433 last, lrev = bin(last), int(lrev)
433 last, lrev = bin(last), int(lrev)
434 if lrev >= len(self) or self[lrev].node() != last:
434 if lrev >= len(self) or self[lrev].node() != last:
435 # invalidate the cache
435 # invalidate the cache
436 raise ValueError('invalidating branch cache (tip differs)')
436 raise ValueError('invalidating branch cache (tip differs)')
437 for l in lines:
437 for l in lines:
438 if not l: continue
438 if not l: continue
439 node, label = l.split(" ", 1)
439 node, label = l.split(" ", 1)
440 partial.setdefault(label.strip(), []).append(bin(node))
440 partial.setdefault(label.strip(), []).append(bin(node))
441 except KeyboardInterrupt:
441 except KeyboardInterrupt:
442 raise
442 raise
443 except Exception, inst:
443 except Exception, inst:
444 if self.ui.debugflag:
444 if self.ui.debugflag:
445 self.ui.warn(str(inst), '\n')
445 self.ui.warn(str(inst), '\n')
446 partial, last, lrev = {}, nullid, nullrev
446 partial, last, lrev = {}, nullid, nullrev
447 return partial, last, lrev
447 return partial, last, lrev
448
448
449 def _writebranchcache(self, branches, tip, tiprev):
449 def _writebranchcache(self, branches, tip, tiprev):
450 try:
450 try:
451 f = self.opener("branchheads.cache", "w", atomictemp=True)
451 f = self.opener("branchheads.cache", "w", atomictemp=True)
452 f.write("%s %s\n" % (hex(tip), tiprev))
452 f.write("%s %s\n" % (hex(tip), tiprev))
453 for label, nodes in branches.iteritems():
453 for label, nodes in branches.iteritems():
454 for node in nodes:
454 for node in nodes:
455 f.write("%s %s\n" % (hex(node), label))
455 f.write("%s %s\n" % (hex(node), label))
456 f.rename()
456 f.rename()
457 except (IOError, OSError):
457 except (IOError, OSError):
458 pass
458 pass
459
459
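The cache read and written by the two methods above is a small text file; its layout, with abbreviated and invented node values, looks like this:

    # first line: "<hex tip node> <tip rev>", e.g.
    #   f6c3a9e1... 3425
    # then one "<hex head node> <branch name>" line per head:
    #   9a3b77d2... default
    #   04feb0c1... stable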
460 def _updatebranchcache(self, partial, start, end):
460 def _updatebranchcache(self, partial, start, end):
461 for r in xrange(start, end):
461 for r in xrange(start, end):
462 c = self[r]
462 c = self[r]
463 b = c.branch()
463 b = c.branch()
464 bheads = partial.setdefault(b, [])
464 bheads = partial.setdefault(b, [])
465 bheads.append(c.node())
465 bheads.append(c.node())
466 for p in c.parents():
466 for p in c.parents():
467 pn = p.node()
467 pn = p.node()
468 if pn in bheads:
468 if pn in bheads:
469 bheads.remove(pn)
469 bheads.remove(pn)
470
470
471 def lookup(self, key):
471 def lookup(self, key):
472 if isinstance(key, int):
472 if isinstance(key, int):
473 return self.changelog.node(key)
473 return self.changelog.node(key)
474 elif key == '.':
474 elif key == '.':
475 return self.dirstate.parents()[0]
475 return self.dirstate.parents()[0]
476 elif key == 'null':
476 elif key == 'null':
477 return nullid
477 return nullid
478 elif key == 'tip':
478 elif key == 'tip':
479 return self.changelog.tip()
479 return self.changelog.tip()
480 n = self.changelog._match(key)
480 n = self.changelog._match(key)
481 if n:
481 if n:
482 return n
482 return n
483 if key in self.tags():
483 if key in self.tags():
484 return self.tags()[key]
484 return self.tags()[key]
485 if key in self.branchtags():
485 if key in self.branchtags():
486 return self.branchtags()[key]
486 return self.branchtags()[key]
487 n = self.changelog._partialmatch(key)
487 n = self.changelog._partialmatch(key)
488 if n:
488 if n:
489 return n
489 return n
490 try:
490 try:
491 if len(key) == 20:
491 if len(key) == 20:
492 key = hex(key)
492 key = hex(key)
493 except:
493 except:
494 pass
494 pass
495 raise error.RepoError(_("unknown revision '%s'") % key)
495 raise error.RepoError(_("unknown revision '%s'") % key)
496
496
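For orientation, lookup() above tries its candidates in a fixed order before giving up:

    # integer revision -> '.' (working dir parent) -> 'null' -> 'tip'
    # -> exact changelog node -> tag name -> branch name -> unambiguous node prefix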
497 def local(self):
497 def local(self):
498 return True
498 return True
499
499
500 def join(self, f):
500 def join(self, f):
501 return os.path.join(self.path, f)
501 return os.path.join(self.path, f)
502
502
503 def wjoin(self, f):
503 def wjoin(self, f):
504 return os.path.join(self.root, f)
504 return os.path.join(self.root, f)
505
505
506 def rjoin(self, f):
506 def rjoin(self, f):
507 return os.path.join(self.root, util.pconvert(f))
507 return os.path.join(self.root, util.pconvert(f))
508
508
509 def file(self, f):
509 def file(self, f):
510 if f[0] == '/':
510 if f[0] == '/':
511 f = f[1:]
511 f = f[1:]
512 return filelog.filelog(self.sopener, f)
512 return filelog.filelog(self.sopener, f)
513
513
514 def changectx(self, changeid):
514 def changectx(self, changeid):
515 return self[changeid]
515 return self[changeid]
516
516
517 def parents(self, changeid=None):
517 def parents(self, changeid=None):
518 '''get list of changectxs for parents of changeid'''
518 '''get list of changectxs for parents of changeid'''
519 return self[changeid].parents()
519 return self[changeid].parents()
520
520
521 def filectx(self, path, changeid=None, fileid=None):
521 def filectx(self, path, changeid=None, fileid=None):
522 """changeid can be a changeset revision, node, or tag.
522 """changeid can be a changeset revision, node, or tag.
523 fileid can be a file revision or node."""
523 fileid can be a file revision or node."""
524 return context.filectx(self, path, changeid, fileid)
524 return context.filectx(self, path, changeid, fileid)
525
525
526 def getcwd(self):
526 def getcwd(self):
527 return self.dirstate.getcwd()
527 return self.dirstate.getcwd()
528
528
529 def pathto(self, f, cwd=None):
529 def pathto(self, f, cwd=None):
530 return self.dirstate.pathto(f, cwd)
530 return self.dirstate.pathto(f, cwd)
531
531
532 def wfile(self, f, mode='r'):
532 def wfile(self, f, mode='r'):
533 return self.wopener(f, mode)
533 return self.wopener(f, mode)
534
534
535 def _link(self, f):
535 def _link(self, f):
536 return os.path.islink(self.wjoin(f))
536 return os.path.islink(self.wjoin(f))
537
537
538 def _filter(self, filter, filename, data):
538 def _filter(self, filter, filename, data):
539 if filter not in self.filterpats:
539 if filter not in self.filterpats:
540 l = []
540 l = []
541 for pat, cmd in self.ui.configitems(filter):
541 for pat, cmd in self.ui.configitems(filter):
542 if cmd == '!':
542 if cmd == '!':
543 continue
543 continue
544 mf = util.matcher(self.root, "", [pat], [], [])[1]
544 mf = util.matcher(self.root, "", [pat], [], [])[1]
545 fn = None
545 fn = None
546 params = cmd
546 params = cmd
547 for name, filterfn in self._datafilters.iteritems():
547 for name, filterfn in self._datafilters.iteritems():
548 if cmd.startswith(name):
548 if cmd.startswith(name):
549 fn = filterfn
549 fn = filterfn
550 params = cmd[len(name):].lstrip()
550 params = cmd[len(name):].lstrip()
551 break
551 break
552 if not fn:
552 if not fn:
553 fn = lambda s, c, **kwargs: util.filter(s, c)
553 fn = lambda s, c, **kwargs: util.filter(s, c)
554 # Wrap old filters not supporting keyword arguments
554 # Wrap old filters not supporting keyword arguments
555 if not inspect.getargspec(fn)[2]:
555 if not inspect.getargspec(fn)[2]:
556 oldfn = fn
556 oldfn = fn
557 fn = lambda s, c, **kwargs: oldfn(s, c)
557 fn = lambda s, c, **kwargs: oldfn(s, c)
558 l.append((mf, fn, params))
558 l.append((mf, fn, params))
559 self.filterpats[filter] = l
559 self.filterpats[filter] = l
560
560
561 for mf, fn, cmd in self.filterpats[filter]:
561 for mf, fn, cmd in self.filterpats[filter]:
562 if mf(filename):
562 if mf(filename):
563 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
563 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
564 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
564 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
565 break
565 break
566
566
567 return data
567 return data
568
568
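The pattern/command pairs consumed by _filter() come from the [encode] and [decode] sections of the repository's hgrc; unless the command matches a filter registered through adddatafilter() below, the data is simply piped through it as a shell command. A typical configuration, given here only as an illustration along the lines of the stock hgrc examples:

    [encode]
    # uncompress gzip files on checkin so deltas compress better
    *.gz = pipe: gunzip

    [decode]
    # recompress them when writing to the working directory
    *.gz = pipe: gzip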
569 def adddatafilter(self, name, filter):
569 def adddatafilter(self, name, filter):
570 self._datafilters[name] = filter
570 self._datafilters[name] = filter
571
571
572 def wread(self, filename):
572 def wread(self, filename):
573 if self._link(filename):
573 if self._link(filename):
574 data = os.readlink(self.wjoin(filename))
574 data = os.readlink(self.wjoin(filename))
575 else:
575 else:
576 data = self.wopener(filename, 'r').read()
576 data = self.wopener(filename, 'r').read()
577 return self._filter("encode", filename, data)
577 return self._filter("encode", filename, data)
578
578
579 def wwrite(self, filename, data, flags):
579 def wwrite(self, filename, data, flags):
580 data = self._filter("decode", filename, data)
580 data = self._filter("decode", filename, data)
581 try:
581 try:
582 os.unlink(self.wjoin(filename))
582 os.unlink(self.wjoin(filename))
583 except OSError:
583 except OSError:
584 pass
584 pass
585 if 'l' in flags:
585 if 'l' in flags:
586 self.wopener.symlink(data, filename)
586 self.wopener.symlink(data, filename)
587 else:
587 else:
588 self.wopener(filename, 'w').write(data)
588 self.wopener(filename, 'w').write(data)
589 if 'x' in flags:
589 if 'x' in flags:
590 util.set_flags(self.wjoin(filename), False, True)
590 util.set_flags(self.wjoin(filename), False, True)
591
591
592 def wwritedata(self, filename, data):
592 def wwritedata(self, filename, data):
593 return self._filter("decode", filename, data)
593 return self._filter("decode", filename, data)
594
594
595 def transaction(self):
595 def transaction(self):
596 if self._transref and self._transref():
596 if self._transref and self._transref():
597 return self._transref().nest()
597 return self._transref().nest()
598
598
599 # abort here if the journal already exists
599 # abort here if the journal already exists
600 if os.path.exists(self.sjoin("journal")):
600 if os.path.exists(self.sjoin("journal")):
601 raise error.RepoError(_("journal already exists - run hg recover"))
601 raise error.RepoError(_("journal already exists - run hg recover"))
602
602
603 # save dirstate for rollback
603 # save dirstate for rollback
604 try:
604 try:
605 ds = self.opener("dirstate").read()
605 ds = self.opener("dirstate").read()
606 except IOError:
606 except IOError:
607 ds = ""
607 ds = ""
608 self.opener("journal.dirstate", "w").write(ds)
608 self.opener("journal.dirstate", "w").write(ds)
609 self.opener("journal.branch", "w").write(self.dirstate.branch())
609 self.opener("journal.branch", "w").write(self.dirstate.branch())
610
610
611 renames = [(self.sjoin("journal"), self.sjoin("undo")),
611 renames = [(self.sjoin("journal"), self.sjoin("undo")),
612 (self.join("journal.dirstate"), self.join("undo.dirstate")),
612 (self.join("journal.dirstate"), self.join("undo.dirstate")),
613 (self.join("journal.branch"), self.join("undo.branch"))]
613 (self.join("journal.branch"), self.join("undo.branch"))]
614 tr = transaction.transaction(self.ui.warn, self.sopener,
614 tr = transaction.transaction(self.ui.warn, self.sopener,
615 self.sjoin("journal"),
615 self.sjoin("journal"),
616 aftertrans(renames),
616 aftertrans(renames),
617 self.store.createmode)
617 self.store.createmode)
618 self._transref = weakref.ref(tr)
618 self._transref = weakref.ref(tr)
619 return tr
619 return tr
620
620
621 def recover(self):
621 def recover(self):
622 l = self.lock()
622 l = self.lock()
623 try:
623 try:
624 if os.path.exists(self.sjoin("journal")):
624 if os.path.exists(self.sjoin("journal")):
625 self.ui.status(_("rolling back interrupted transaction\n"))
625 self.ui.status(_("rolling back interrupted transaction\n"))
626 transaction.rollback(self.sopener, self.sjoin("journal"))
626 transaction.rollback(self.sopener, self.sjoin("journal"))
627 self.invalidate()
627 self.invalidate()
628 return True
628 return True
629 else:
629 else:
630 self.ui.warn(_("no interrupted transaction available\n"))
630 self.ui.warn(_("no interrupted transaction available\n"))
631 return False
631 return False
632 finally:
632 finally:
633 del l
633 del l
634
634
635 def rollback(self):
635 def rollback(self):
636 wlock = lock = None
636 wlock = lock = None
637 try:
637 try:
638 wlock = self.wlock()
638 wlock = self.wlock()
639 lock = self.lock()
639 lock = self.lock()
640 if os.path.exists(self.sjoin("undo")):
640 if os.path.exists(self.sjoin("undo")):
641 self.ui.status(_("rolling back last transaction\n"))
641 self.ui.status(_("rolling back last transaction\n"))
642 transaction.rollback(self.sopener, self.sjoin("undo"))
642 transaction.rollback(self.sopener, self.sjoin("undo"))
643 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
643 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
644 try:
644 try:
645 branch = self.opener("undo.branch").read()
645 branch = self.opener("undo.branch").read()
646 self.dirstate.setbranch(branch)
646 self.dirstate.setbranch(branch)
647 except IOError:
647 except IOError:
648 self.ui.warn(_("Named branch could not be reset, "
648 self.ui.warn(_("Named branch could not be reset, "
649 "current branch still is: %s\n")
649 "current branch still is: %s\n")
650 % util.tolocal(self.dirstate.branch()))
650 % util.tolocal(self.dirstate.branch()))
651 self.invalidate()
651 self.invalidate()
652 self.dirstate.invalidate()
652 self.dirstate.invalidate()
653 else:
653 else:
654 self.ui.warn(_("no rollback information available\n"))
654 self.ui.warn(_("no rollback information available\n"))
655 finally:
655 finally:
656 del lock, wlock
656 del lock, wlock
657
657
658 def invalidate(self):
658 def invalidate(self):
659 for a in "changelog manifest".split():
659 for a in "changelog manifest".split():
660 if a in self.__dict__:
660 if a in self.__dict__:
661 delattr(self, a)
661 delattr(self, a)
662 self.tagscache = None
662 self.tagscache = None
663 self._tagstypecache = None
663 self._tagstypecache = None
664 self.nodetagscache = None
664 self.nodetagscache = None
665 self.branchcache = None
665 self.branchcache = None
666 self._ubranchcache = None
666 self._ubranchcache = None
667 self._branchcachetip = None
667 self._branchcachetip = None
668
668
669 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
669 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
670 try:
670 try:
671 l = lock.lock(lockname, 0, releasefn, desc=desc)
671 l = lock.lock(lockname, 0, releasefn, desc=desc)
672 except error.LockHeld, inst:
672 except error.LockHeld, inst:
673 if not wait:
673 if not wait:
674 raise
674 raise
675 self.ui.warn(_("waiting for lock on %s held by %r\n") %
675 self.ui.warn(_("waiting for lock on %s held by %r\n") %
676 (desc, inst.locker))
676 (desc, inst.locker))
677 # default to 600 seconds timeout
677 # default to 600 seconds timeout
678 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
678 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
679 releasefn, desc=desc)
679 releasefn, desc=desc)
680 if acquirefn:
680 if acquirefn:
681 acquirefn()
681 acquirefn()
682 return l
682 return l
683
683
684 def lock(self, wait=True):
684 def lock(self, wait=True):
685 if self._lockref and self._lockref():
685 if self._lockref and self._lockref():
686 return self._lockref()
686 return self._lockref()
687
687
688 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
688 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
689 _('repository %s') % self.origroot)
689 _('repository %s') % self.origroot)
690 self._lockref = weakref.ref(l)
690 self._lockref = weakref.ref(l)
691 return l
691 return l
692
692
693 def wlock(self, wait=True):
693 def wlock(self, wait=True):
694 if self._wlockref and self._wlockref():
694 if self._wlockref and self._wlockref():
695 return self._wlockref()
695 return self._wlockref()
696
696
697 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
697 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
698 self.dirstate.invalidate, _('working directory of %s') %
698 self.dirstate.invalidate, _('working directory of %s') %
699 self.origroot)
699 self.origroot)
700 self._wlockref = weakref.ref(l)
700 self._wlockref = weakref.ref(l)
701 return l
701 return l
702
702
703 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
703 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
704 """
704 """
705 commit an individual file as part of a larger transaction
705 commit an individual file as part of a larger transaction
706 """
706 """
707
707
708 fn = fctx.path()
708 fn = fctx.path()
709 t = fctx.data()
709 t = fctx.data()
710 fl = self.file(fn)
710 fl = self.file(fn)
711 fp1 = manifest1.get(fn, nullid)
711 fp1 = manifest1.get(fn, nullid)
712 fp2 = manifest2.get(fn, nullid)
712 fp2 = manifest2.get(fn, nullid)
713
713
714 meta = {}
714 meta = {}
715 cp = fctx.renamed()
715 cp = fctx.renamed()
716 if cp and cp[0] != fn:
716 if cp and cp[0] != fn:
717 # Mark the new revision of this file as a copy of another
717 # Mark the new revision of this file as a copy of another
718 # file. This copy data will effectively act as a parent
718 # file. This copy data will effectively act as a parent
719 # of this new revision. If this is a merge, the first
719 # of this new revision. If this is a merge, the first
720 # parent will be the nullid (meaning "look up the copy data")
720 # parent will be the nullid (meaning "look up the copy data")
721 # and the second one will be the other parent. For example:
721 # and the second one will be the other parent. For example:
722 #
722 #
723 # 0 --- 1 --- 3 rev1 changes file foo
723 # 0 --- 1 --- 3 rev1 changes file foo
724 # \ / rev2 renames foo to bar and changes it
724 # \ / rev2 renames foo to bar and changes it
725 # \- 2 -/ rev3 should have bar with all changes and
725 # \- 2 -/ rev3 should have bar with all changes and
726 # should record that bar descends from
726 # should record that bar descends from
727 # bar in rev2 and foo in rev1
727 # bar in rev2 and foo in rev1
728 #
728 #
729 # this allows this merge to succeed:
729 # this allows this merge to succeed:
730 #
730 #
731 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
731 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
732 # \ / merging rev3 and rev4 should use bar@rev2
732 # \ / merging rev3 and rev4 should use bar@rev2
733 # \- 2 --- 4 as the merge base
733 # \- 2 --- 4 as the merge base
734 #
734 #
735
735
736 cf = cp[0]
736 cf = cp[0]
737 cr = manifest1.get(cf)
737 cr = manifest1.get(cf)
738 nfp = fp2
738 nfp = fp2
739
739
740 if manifest2: # branch merge
740 if manifest2: # branch merge
741 if fp2 == nullid or cr is None: # copied on remote side
741 if fp2 == nullid or cr is None: # copied on remote side
742 if cf in manifest2:
742 if cf in manifest2:
743 cr = manifest2[cf]
743 cr = manifest2[cf]
744 nfp = fp1
744 nfp = fp1
745
745
746 # find source in nearest ancestor if we've lost track
746 # find source in nearest ancestor if we've lost track
747 if not cr:
747 if not cr:
748 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
748 self.ui.debug(_(" %s: searching for copy revision for %s\n") %
749 (fn, cf))
749 (fn, cf))
750 for a in self['.'].ancestors():
750 for a in self['.'].ancestors():
751 if cf in a:
751 if cf in a:
752 cr = a[cf].filenode()
752 cr = a[cf].filenode()
753 break
753 break
754
754
755 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
755 self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
756 meta["copy"] = cf
756 meta["copy"] = cf
757 meta["copyrev"] = hex(cr)
757 meta["copyrev"] = hex(cr)
758 fp1, fp2 = nullid, nfp
758 fp1, fp2 = nullid, nfp
759 elif fp2 != nullid:
759 elif fp2 != nullid:
760 # is one parent an ancestor of the other?
760 # is one parent an ancestor of the other?
761 fpa = fl.ancestor(fp1, fp2)
761 fpa = fl.ancestor(fp1, fp2)
762 if fpa == fp1:
762 if fpa == fp1:
763 fp1, fp2 = fp2, nullid
763 fp1, fp2 = fp2, nullid
764 elif fpa == fp2:
764 elif fpa == fp2:
765 fp2 = nullid
765 fp2 = nullid
766
766
767 # is the file unmodified from the parent? report existing entry
767 # is the file unmodified from the parent? report existing entry
768 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
768 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
769 return fp1
769 return fp1
770
770
771 changelist.append(fn)
771 changelist.append(fn)
772 return fl.add(t, meta, tr, linkrev, fp1, fp2)
772 return fl.add(t, meta, tr, linkrev, fp1, fp2)
773
773
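When filecommit() above detects a copy or rename, the provenance travels in the meta dictionary handed to fl.add(), and fp1 is forced to nullid so readers know to follow the copy data instead. A sketch of what gets recorded (filenode value invented):

    # meta for a file copied from "foo":
    meta = {'copy': 'foo',
            'copyrev': '0123456789abcdef0123456789abcdef01234567'}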
774 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
774 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
775 if p1 is None:
775 if p1 is None:
776 p1, p2 = self.dirstate.parents()
776 p1, p2 = self.dirstate.parents()
777 return self.commit(files=files, text=text, user=user, date=date,
777 return self.commit(files=files, text=text, user=user, date=date,
778 p1=p1, p2=p2, extra=extra, empty_ok=True)
778 p1=p1, p2=p2, extra=extra, empty_ok=True)
779
779
780 def commit(self, files=None, text="", user=None, date=None,
780 def commit(self, files=None, text="", user=None, date=None,
781 match=None, force=False, force_editor=False,
781 match=None, force=False, force_editor=False,
782 p1=None, p2=None, extra={}, empty_ok=False):
782 p1=None, p2=None, extra={}, empty_ok=False):
783 wlock = lock = None
783 wlock = lock = None
784 if extra.get("close"):
784 if extra.get("close"):
785 force = True
785 force = True
786 if files:
786 if files:
787 files = util.unique(files)
787 files = util.unique(files)
788 try:
788 try:
789 wlock = self.wlock()
789 wlock = self.wlock()
790 lock = self.lock()
790 lock = self.lock()
791 use_dirstate = (p1 is None) # not rawcommit
791 use_dirstate = (p1 is None) # not rawcommit
792
792
793 if use_dirstate:
793 if use_dirstate:
794 p1, p2 = self.dirstate.parents()
794 p1, p2 = self.dirstate.parents()
795 update_dirstate = True
795 update_dirstate = True
796
796
797 if (not force and p2 != nullid and
797 if (not force and p2 != nullid and
798 (match and (match.files() or match.anypats()))):
798 (match and (match.files() or match.anypats()))):
799 raise util.Abort(_('cannot partially commit a merge '
799 raise util.Abort(_('cannot partially commit a merge '
800 '(do not specify files or patterns)'))
800 '(do not specify files or patterns)'))
801
801
802 if files:
802 if files:
803 modified, removed = [], []
803 modified, removed = [], []
804 for f in files:
804 for f in files:
805 s = self.dirstate[f]
805 s = self.dirstate[f]
806 if s in 'nma':
806 if s in 'nma':
807 modified.append(f)
807 modified.append(f)
808 elif s == 'r':
808 elif s == 'r':
809 removed.append(f)
809 removed.append(f)
810 else:
810 else:
811 self.ui.warn(_("%s not tracked!\n") % f)
811 self.ui.warn(_("%s not tracked!\n") % f)
812 changes = [modified, [], removed, [], []]
812 changes = [modified, [], removed, [], []]
813 else:
813 else:
814 changes = self.status(match=match)
814 changes = self.status(match=match)
815 else:
815 else:
816 p1, p2 = p1, p2 or nullid
816 p1, p2 = p1, p2 or nullid
817 update_dirstate = (self.dirstate.parents()[0] == p1)
817 update_dirstate = (self.dirstate.parents()[0] == p1)
818 changes = [files, [], [], [], []]
818 changes = [files, [], [], [], []]
819
819
820 ms = merge_.mergestate(self)
820 ms = merge_.mergestate(self)
821 for f in changes[0]:
821 for f in changes[0]:
822 if f in ms and ms[f] == 'u':
822 if f in ms and ms[f] == 'u':
823 raise util.Abort(_("unresolved merge conflicts "
823 raise util.Abort(_("unresolved merge conflicts "
824 "(see hg resolve)"))
824 "(see hg resolve)"))
825 wctx = context.workingctx(self, (p1, p2), text, user, date,
825 wctx = context.workingctx(self, (p1, p2), text, user, date,
826 extra, changes)
826 extra, changes)
827 return self._commitctx(wctx, force, force_editor, empty_ok,
827 r = self._commitctx(wctx, force, force_editor, empty_ok,
828 use_dirstate, update_dirstate)
828 use_dirstate, update_dirstate)
829 ms.reset()
830 return r
831
829 finally:
832 finally:
830 del lock, wlock
833 del lock, wlock
831
834
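The ms.reset() call added above is the substance of this changeset ("resolve: move reset to localrepo.commit"): once _commitctx() has succeeded, commit() itself clears the per-file merge state that hg resolve recorded, rather than leaving that to the resolve command. The resulting flow, reduced to a sketch:

    r = self._commitctx(wctx, force, force_editor, empty_ok,
                        use_dirstate, update_dirstate)
    ms.reset()        # forget the resolve/merge state now that the commit exists
    return r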
832 def commitctx(self, ctx):
835 def commitctx(self, ctx):
833 """Add a new revision to current repository.
836 """Add a new revision to current repository.
834
837
835 Revision information is passed in the context.memctx argument.
838 Revision information is passed in the context.memctx argument.
836 commitctx() does not touch the working directory.
839 commitctx() does not touch the working directory.
837 """
840 """
838 wlock = lock = None
841 wlock = lock = None
839 try:
842 try:
840 wlock = self.wlock()
843 wlock = self.wlock()
841 lock = self.lock()
844 lock = self.lock()
842 return self._commitctx(ctx, force=True, force_editor=False,
845 return self._commitctx(ctx, force=True, force_editor=False,
843 empty_ok=True, use_dirstate=False,
846 empty_ok=True, use_dirstate=False,
844 update_dirstate=False)
847 update_dirstate=False)
845 finally:
848 finally:
846 del lock, wlock
849 del lock, wlock
847
850
848 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
851 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
849 use_dirstate=True, update_dirstate=True):
852 use_dirstate=True, update_dirstate=True):
850 tr = None
853 tr = None
851 valid = 0 # don't save the dirstate if this isn't set
854 valid = 0 # don't save the dirstate if this isn't set
852 try:
855 try:
853 commit = util.sort(wctx.modified() + wctx.added())
856 commit = util.sort(wctx.modified() + wctx.added())
854 remove = wctx.removed()
857 remove = wctx.removed()
855 extra = wctx.extra().copy()
858 extra = wctx.extra().copy()
856 branchname = extra['branch']
859 branchname = extra['branch']
857 user = wctx.user()
860 user = wctx.user()
858 text = wctx.description()
861 text = wctx.description()
859
862
860 p1, p2 = [p.node() for p in wctx.parents()]
863 p1, p2 = [p.node() for p in wctx.parents()]
861 c1 = self.changelog.read(p1)
864 c1 = self.changelog.read(p1)
862 c2 = self.changelog.read(p2)
865 c2 = self.changelog.read(p2)
863 m1 = self.manifest.read(c1[0]).copy()
866 m1 = self.manifest.read(c1[0]).copy()
864 m2 = self.manifest.read(c2[0])
867 m2 = self.manifest.read(c2[0])
865
868
866 if use_dirstate:
869 if use_dirstate:
867 oldname = c1[5].get("branch") # stored in UTF-8
870 oldname = c1[5].get("branch") # stored in UTF-8
868 if (not commit and not remove and not force and p2 == nullid
871 if (not commit and not remove and not force and p2 == nullid
869 and branchname == oldname):
872 and branchname == oldname):
870 self.ui.status(_("nothing changed\n"))
873 self.ui.status(_("nothing changed\n"))
871 return None
874 return None
872
875
873 xp1 = hex(p1)
876 xp1 = hex(p1)
874 if p2 == nullid: xp2 = ''
877 if p2 == nullid: xp2 = ''
875 else: xp2 = hex(p2)
878 else: xp2 = hex(p2)
876
879
877 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
880 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
878
881
879 tr = self.transaction()
882 tr = self.transaction()
880 trp = weakref.proxy(tr)
883 trp = weakref.proxy(tr)
881
884
882 # check in files
885 # check in files
883 new = {}
886 new = {}
884 changed = []
887 changed = []
885 linkrev = len(self)
888 linkrev = len(self)
886 for f in commit:
889 for f in commit:
887 self.ui.note(f + "\n")
890 self.ui.note(f + "\n")
888 try:
891 try:
889 fctx = wctx.filectx(f)
892 fctx = wctx.filectx(f)
890 newflags = fctx.flags()
893 newflags = fctx.flags()
891 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
894 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
892 if ((not changed or changed[-1] != f) and
895 if ((not changed or changed[-1] != f) and
893 m2.get(f) != new[f]):
896 m2.get(f) != new[f]):
894 # mention the file in the changelog if some
897 # mention the file in the changelog if some
895 # flag changed, even if there was no content
898 # flag changed, even if there was no content
896 # change.
899 # change.
897 if m1.flags(f) != newflags:
900 if m1.flags(f) != newflags:
898 changed.append(f)
901 changed.append(f)
899 m1.set(f, newflags)
902 m1.set(f, newflags)
900 if use_dirstate:
903 if use_dirstate:
901 self.dirstate.normal(f)
904 self.dirstate.normal(f)
902
905
903 except (OSError, IOError):
906 except (OSError, IOError):
904 if use_dirstate:
907 if use_dirstate:
905 self.ui.warn(_("trouble committing %s!\n") % f)
908 self.ui.warn(_("trouble committing %s!\n") % f)
906 raise
909 raise
907 else:
910 else:
908 remove.append(f)
911 remove.append(f)
909
912
910 updated, added = [], []
913 updated, added = [], []
911 for f in util.sort(changed):
914 for f in util.sort(changed):
912 if f in m1 or f in m2:
915 if f in m1 or f in m2:
913 updated.append(f)
916 updated.append(f)
914 else:
917 else:
915 added.append(f)
918 added.append(f)
916
919
917 # update manifest
920 # update manifest
918 m1.update(new)
921 m1.update(new)
919 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
922 removed = [f for f in util.sort(remove) if f in m1 or f in m2]
920 removed1 = []
923 removed1 = []
921
924
922 for f in removed:
925 for f in removed:
923 if f in m1:
926 if f in m1:
924 del m1[f]
927 del m1[f]
925 removed1.append(f)
928 removed1.append(f)
926 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
929 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
927 (new, removed1))
930 (new, removed1))
928
931
929 # add changeset
932 # add changeset
930 if (not empty_ok and not text) or force_editor:
933 if (not empty_ok and not text) or force_editor:
931 edittext = []
934 edittext = []
932 if text:
935 if text:
933 edittext.append(text)
936 edittext.append(text)
934 edittext.append("")
937 edittext.append("")
935 edittext.append("") # Empty line between message and comments.
938 edittext.append("") # Empty line between message and comments.
936 edittext.append(_("HG: Enter commit message."
939 edittext.append(_("HG: Enter commit message."
937 " Lines beginning with 'HG:' are removed."))
940 " Lines beginning with 'HG:' are removed."))
938 edittext.append("HG: --")
941 edittext.append("HG: --")
939 edittext.append("HG: user: %s" % user)
942 edittext.append("HG: user: %s" % user)
940 if p2 != nullid:
943 if p2 != nullid:
941 edittext.append("HG: branch merge")
944 edittext.append("HG: branch merge")
942 if branchname:
945 if branchname:
943 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
946 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
944 edittext.extend(["HG: added %s" % f for f in added])
947 edittext.extend(["HG: added %s" % f for f in added])
945 edittext.extend(["HG: changed %s" % f for f in updated])
948 edittext.extend(["HG: changed %s" % f for f in updated])
946 edittext.extend(["HG: removed %s" % f for f in removed])
949 edittext.extend(["HG: removed %s" % f for f in removed])
947 if not added and not updated and not removed:
950 if not added and not updated and not removed:
948 edittext.append("HG: no files changed")
951 edittext.append("HG: no files changed")
949 edittext.append("")
952 edittext.append("")
950 # run editor in the repository root
953 # run editor in the repository root
951 olddir = os.getcwd()
954 olddir = os.getcwd()
952 os.chdir(self.root)
955 os.chdir(self.root)
953 text = self.ui.edit("\n".join(edittext), user)
956 text = self.ui.edit("\n".join(edittext), user)
954 os.chdir(olddir)
957 os.chdir(olddir)
955
958
956 lines = [line.rstrip() for line in text.rstrip().splitlines()]
959 lines = [line.rstrip() for line in text.rstrip().splitlines()]
957 while lines and not lines[0]:
960 while lines and not lines[0]:
958 del lines[0]
961 del lines[0]
959 if not lines and use_dirstate:
962 if not lines and use_dirstate:
960 raise util.Abort(_("empty commit message"))
963 raise util.Abort(_("empty commit message"))
961 text = '\n'.join(lines)
964 text = '\n'.join(lines)
962
965
963 self.changelog.delayupdate()
966 self.changelog.delayupdate()
964 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
967 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
965 user, wctx.date(), extra)
968 user, wctx.date(), extra)
966 p = lambda: self.changelog.writepending() and self.root or ""
969 p = lambda: self.changelog.writepending() and self.root or ""
967 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
970 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
968 parent2=xp2, pending=p)
971 parent2=xp2, pending=p)
969 self.changelog.finalize(trp)
972 self.changelog.finalize(trp)
970 tr.close()
973 tr.close()
971
974
972 if self.branchcache:
975 if self.branchcache:
973 self.branchtags()
976 self.branchtags()
974
977
975 if use_dirstate or update_dirstate:
978 if use_dirstate or update_dirstate:
976 self.dirstate.setparents(n)
979 self.dirstate.setparents(n)
977 if use_dirstate:
980 if use_dirstate:
978 for f in removed:
981 for f in removed:
979 self.dirstate.forget(f)
982 self.dirstate.forget(f)
980 valid = 1 # our dirstate updates are complete
983 valid = 1 # our dirstate updates are complete
981
984
982 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
985 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
983 return n
986 return n
984 finally:
987 finally:
985 if not valid: # don't save our updated dirstate
988 if not valid: # don't save our updated dirstate
986 self.dirstate.invalidate()
989 self.dirstate.invalidate()
987 del tr
990 del tr
988
991
989 def walk(self, match, node=None):
992 def walk(self, match, node=None):
990 '''
993 '''
991 walk recursively through the directory tree or a given
994 walk recursively through the directory tree or a given
992 changeset, finding all files matched by the match
995 changeset, finding all files matched by the match
993 function
996 function
994 '''
997 '''
995 return self[node].walk(match)
998 return self[node].walk(match)
996
999
997 def status(self, node1='.', node2=None, match=None,
1000 def status(self, node1='.', node2=None, match=None,
998 ignored=False, clean=False, unknown=False):
1001 ignored=False, clean=False, unknown=False):
999 """return status of files between two nodes or node and working directory
1002 """return status of files between two nodes or node and working directory
1000
1003
1001 If node1 is None, use the first dirstate parent instead.
1004 If node1 is None, use the first dirstate parent instead.
1002 If node2 is None, compare node1 with working directory.
1005 If node2 is None, compare node1 with working directory.
1003 """
1006 """
1004
1007
1005 def mfmatches(ctx):
1008 def mfmatches(ctx):
1006 mf = ctx.manifest().copy()
1009 mf = ctx.manifest().copy()
1007 for fn in mf.keys():
1010 for fn in mf.keys():
1008 if not match(fn):
1011 if not match(fn):
1009 del mf[fn]
1012 del mf[fn]
1010 return mf
1013 return mf
1011
1014
1012 if isinstance(node1, context.changectx):
1015 if isinstance(node1, context.changectx):
1013 ctx1 = node1
1016 ctx1 = node1
1014 else:
1017 else:
1015 ctx1 = self[node1]
1018 ctx1 = self[node1]
1016 if isinstance(node2, context.changectx):
1019 if isinstance(node2, context.changectx):
1017 ctx2 = node2
1020 ctx2 = node2
1018 else:
1021 else:
1019 ctx2 = self[node2]
1022 ctx2 = self[node2]
1020
1023
1021 working = ctx2.rev() is None
1024 working = ctx2.rev() is None
1022 parentworking = working and ctx1 == self['.']
1025 parentworking = working and ctx1 == self['.']
1023 match = match or match_.always(self.root, self.getcwd())
1026 match = match or match_.always(self.root, self.getcwd())
1024 listignored, listclean, listunknown = ignored, clean, unknown
1027 listignored, listclean, listunknown = ignored, clean, unknown
1025
1028
1026 # load earliest manifest first for caching reasons
1029 # load earliest manifest first for caching reasons
1027 if not working and ctx2.rev() < ctx1.rev():
1030 if not working and ctx2.rev() < ctx1.rev():
1028 ctx2.manifest()
1031 ctx2.manifest()
1029
1032
1030 if not parentworking:
1033 if not parentworking:
1031 def bad(f, msg):
1034 def bad(f, msg):
1032 if f not in ctx1:
1035 if f not in ctx1:
1033 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1036 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1034 return False
1037 return False
1035 match.bad = bad
1038 match.bad = bad
1036
1039
1037 if working: # we need to scan the working dir
1040 if working: # we need to scan the working dir
1038 s = self.dirstate.status(match, listignored, listclean, listunknown)
1041 s = self.dirstate.status(match, listignored, listclean, listunknown)
1039 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1042 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1040
1043
1041 # check for any possibly clean files
1044 # check for any possibly clean files
1042 if parentworking and cmp:
1045 if parentworking and cmp:
1043 fixup = []
1046 fixup = []
1044 # do a full compare of any files that might have changed
1047 # do a full compare of any files that might have changed
1045 for f in cmp:
1048 for f in cmp:
1046 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1049 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1047 or ctx1[f].cmp(ctx2[f].data())):
1050 or ctx1[f].cmp(ctx2[f].data())):
1048 modified.append(f)
1051 modified.append(f)
1049 else:
1052 else:
1050 fixup.append(f)
1053 fixup.append(f)
1051
1054
1052 if listclean:
1055 if listclean:
1053 clean += fixup
1056 clean += fixup
1054
1057
1055 # update dirstate for files that are actually clean
1058 # update dirstate for files that are actually clean
1056 if fixup:
1059 if fixup:
1057 wlock = None
1060 wlock = None
1058 try:
1061 try:
1059 try:
1062 try:
1060 wlock = self.wlock(False)
1063 wlock = self.wlock(False)
1061 for f in fixup:
1064 for f in fixup:
1062 self.dirstate.normal(f)
1065 self.dirstate.normal(f)
1063 except lock.LockError:
1066 except lock.LockError:
1064 pass
1067 pass
1065 finally:
1068 finally:
1066 del wlock
1069 del wlock
1067
1070
1068 if not parentworking:
1071 if not parentworking:
1069 mf1 = mfmatches(ctx1)
1072 mf1 = mfmatches(ctx1)
1070 if working:
1073 if working:
1071 # we are comparing working dir against non-parent
1074 # we are comparing working dir against non-parent
1072 # generate a pseudo-manifest for the working dir
1075 # generate a pseudo-manifest for the working dir
1073 mf2 = mfmatches(self['.'])
1076 mf2 = mfmatches(self['.'])
1074 for f in cmp + modified + added:
1077 for f in cmp + modified + added:
1075 mf2[f] = None
1078 mf2[f] = None
1076 mf2.set(f, ctx2.flags(f))
1079 mf2.set(f, ctx2.flags(f))
1077 for f in removed:
1080 for f in removed:
1078 if f in mf2:
1081 if f in mf2:
1079 del mf2[f]
1082 del mf2[f]
1080 else:
1083 else:
1081 # we are comparing two revisions
1084 # we are comparing two revisions
1082 deleted, unknown, ignored = [], [], []
1085 deleted, unknown, ignored = [], [], []
1083 mf2 = mfmatches(ctx2)
1086 mf2 = mfmatches(ctx2)
1084
1087
1085 modified, added, clean = [], [], []
1088 modified, added, clean = [], [], []
1086 for fn in mf2:
1089 for fn in mf2:
1087 if fn in mf1:
1090 if fn in mf1:
1088 if (mf1.flags(fn) != mf2.flags(fn) or
1091 if (mf1.flags(fn) != mf2.flags(fn) or
1089 (mf1[fn] != mf2[fn] and
1092 (mf1[fn] != mf2[fn] and
1090 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1093 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1091 modified.append(fn)
1094 modified.append(fn)
1092 elif listclean:
1095 elif listclean:
1093 clean.append(fn)
1096 clean.append(fn)
1094 del mf1[fn]
1097 del mf1[fn]
1095 else:
1098 else:
1096 added.append(fn)
1099 added.append(fn)
1097 removed = mf1.keys()
1100 removed = mf1.keys()
1098
1101
1099 r = modified, added, removed, deleted, unknown, ignored, clean
1102 r = modified, added, removed, deleted, unknown, ignored, clean
1100 [l.sort() for l in r]
1103 [l.sort() for l in r]
1101 return r
1104 return r
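# Usage sketch (editor's illustration; the repository path is hypothetical):
#
#     from mercurial import ui as uimod, hg
#     repo = hg.repository(uimod.ui(), '/path/to/repo')
#     modified, added, removed, deleted, unknown, ignored, clean = \
#         repo.status(node1='.', node2=None, clean=True)
#
# The seven lists come back in the order assembled above; the ignored,
# clean and unknown lists are only populated when the corresponding
# keyword argument is True.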
1102
1105
1103 def add(self, list):
1106 def add(self, list):
1104 wlock = self.wlock()
1107 wlock = self.wlock()
1105 try:
1108 try:
1106 rejected = []
1109 rejected = []
1107 for f in list:
1110 for f in list:
1108 p = self.wjoin(f)
1111 p = self.wjoin(f)
1109 try:
1112 try:
1110 st = os.lstat(p)
1113 st = os.lstat(p)
1111 except:
1114 except:
1112 self.ui.warn(_("%s does not exist!\n") % f)
1115 self.ui.warn(_("%s does not exist!\n") % f)
1113 rejected.append(f)
1116 rejected.append(f)
1114 continue
1117 continue
1115 if st.st_size > 10000000:
1118 if st.st_size > 10000000:
1116 self.ui.warn(_("%s: files over 10MB may cause memory and"
1119 self.ui.warn(_("%s: files over 10MB may cause memory and"
1117 " performance problems\n"
1120 " performance problems\n"
1118 "(use 'hg revert %s' to unadd the file)\n")
1121 "(use 'hg revert %s' to unadd the file)\n")
1119 % (f, f))
1122 % (f, f))
1120 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1123 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1121 self.ui.warn(_("%s not added: only files and symlinks "
1124 self.ui.warn(_("%s not added: only files and symlinks "
1122 "supported currently\n") % f)
1125 "supported currently\n") % f)
1123 rejected.append(p)
1126 rejected.append(p)
1124 elif self.dirstate[f] in 'amn':
1127 elif self.dirstate[f] in 'amn':
1125 self.ui.warn(_("%s already tracked!\n") % f)
1128 self.ui.warn(_("%s already tracked!\n") % f)
1126 elif self.dirstate[f] == 'r':
1129 elif self.dirstate[f] == 'r':
1127 self.dirstate.normallookup(f)
1130 self.dirstate.normallookup(f)
1128 else:
1131 else:
1129 self.dirstate.add(f)
1132 self.dirstate.add(f)
1130 return rejected
1133 return rejected
1131 finally:
1134 finally:
1132 del wlock
1135 del wlock
1133
1136
1134 def forget(self, list):
1137 def forget(self, list):
1135 wlock = self.wlock()
1138 wlock = self.wlock()
1136 try:
1139 try:
1137 for f in list:
1140 for f in list:
1138 if self.dirstate[f] != 'a':
1141 if self.dirstate[f] != 'a':
1139 self.ui.warn(_("%s not added!\n") % f)
1142 self.ui.warn(_("%s not added!\n") % f)
1140 else:
1143 else:
1141 self.dirstate.forget(f)
1144 self.dirstate.forget(f)
1142 finally:
1145 finally:
1143 del wlock
1146 del wlock
1144
1147
1145 def remove(self, list, unlink=False):
1148 def remove(self, list, unlink=False):
1146 wlock = None
1149 wlock = None
1147 try:
1150 try:
1148 if unlink:
1151 if unlink:
1149 for f in list:
1152 for f in list:
1150 try:
1153 try:
1151 util.unlink(self.wjoin(f))
1154 util.unlink(self.wjoin(f))
1152 except OSError, inst:
1155 except OSError, inst:
1153 if inst.errno != errno.ENOENT:
1156 if inst.errno != errno.ENOENT:
1154 raise
1157 raise
1155 wlock = self.wlock()
1158 wlock = self.wlock()
1156 for f in list:
1159 for f in list:
1157 if unlink and os.path.exists(self.wjoin(f)):
1160 if unlink and os.path.exists(self.wjoin(f)):
1158 self.ui.warn(_("%s still exists!\n") % f)
1161 self.ui.warn(_("%s still exists!\n") % f)
1159 elif self.dirstate[f] == 'a':
1162 elif self.dirstate[f] == 'a':
1160 self.dirstate.forget(f)
1163 self.dirstate.forget(f)
1161 elif f not in self.dirstate:
1164 elif f not in self.dirstate:
1162 self.ui.warn(_("%s not tracked!\n") % f)
1165 self.ui.warn(_("%s not tracked!\n") % f)
1163 else:
1166 else:
1164 self.dirstate.remove(f)
1167 self.dirstate.remove(f)
1165 finally:
1168 finally:
1166 del wlock
1169 del wlock
1167
1170
1168 def undelete(self, list):
1171 def undelete(self, list):
1169 wlock = None
1172 wlock = None
1170 try:
1173 try:
1171 manifests = [self.manifest.read(self.changelog.read(p)[0])
1174 manifests = [self.manifest.read(self.changelog.read(p)[0])
1172 for p in self.dirstate.parents() if p != nullid]
1175 for p in self.dirstate.parents() if p != nullid]
1173 wlock = self.wlock()
1176 wlock = self.wlock()
1174 for f in list:
1177 for f in list:
1175 if self.dirstate[f] != 'r':
1178 if self.dirstate[f] != 'r':
1176 self.ui.warn(_("%s not removed!\n") % f)
1179 self.ui.warn(_("%s not removed!\n") % f)
1177 else:
1180 else:
1178 m = f in manifests[0] and manifests[0] or manifests[1]
1181 m = f in manifests[0] and manifests[0] or manifests[1]
1179 t = self.file(f).read(m[f])
1182 t = self.file(f).read(m[f])
1180 self.wwrite(f, t, m.flags(f))
1183 self.wwrite(f, t, m.flags(f))
1181 self.dirstate.normal(f)
1184 self.dirstate.normal(f)
1182 finally:
1185 finally:
1183 del wlock
1186 del wlock
1184
1187
1185 def copy(self, source, dest):
1188 def copy(self, source, dest):
1186 wlock = None
1189 wlock = None
1187 try:
1190 try:
1188 p = self.wjoin(dest)
1191 p = self.wjoin(dest)
1189 if not (os.path.exists(p) or os.path.islink(p)):
1192 if not (os.path.exists(p) or os.path.islink(p)):
1190 self.ui.warn(_("%s does not exist!\n") % dest)
1193 self.ui.warn(_("%s does not exist!\n") % dest)
1191 elif not (os.path.isfile(p) or os.path.islink(p)):
1194 elif not (os.path.isfile(p) or os.path.islink(p)):
1192 self.ui.warn(_("copy failed: %s is not a file or a "
1195 self.ui.warn(_("copy failed: %s is not a file or a "
1193 "symbolic link\n") % dest)
1196 "symbolic link\n") % dest)
1194 else:
1197 else:
1195 wlock = self.wlock()
1198 wlock = self.wlock()
1196 if self.dirstate[dest] in '?r':
1199 if self.dirstate[dest] in '?r':
1197 self.dirstate.add(dest)
1200 self.dirstate.add(dest)
1198 self.dirstate.copy(source, dest)
1201 self.dirstate.copy(source, dest)
1199 finally:
1202 finally:
1200 del wlock
1203 del wlock
1201
1204
1202 def heads(self, start=None, closed=True):
1205 def heads(self, start=None, closed=True):
1203 heads = self.changelog.heads(start)
1206 heads = self.changelog.heads(start)
1204 def display(head):
1207 def display(head):
1205 if closed:
1208 if closed:
1206 return True
1209 return True
1207 extras = self.changelog.read(head)[5]
1210 extras = self.changelog.read(head)[5]
1208 return ('close' not in extras)
1211 return ('close' not in extras)
1209 # sort the output in rev descending order
1212 # sort the output in rev descending order
1210 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1213 heads = [(-self.changelog.rev(h), h) for h in heads if display(h)]
1211 return [n for (r, n) in util.sort(heads)]
1214 return [n for (r, n) in util.sort(heads)]
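# Editor's note: pairing each head with its negated revision number and
# sorting means util.sort() yields the newest head first; the comprehension
# then drops the sort key and keeps only the nodes.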
1212
1215
1213 def branchheads(self, branch=None, start=None, closed=True):
1216 def branchheads(self, branch=None, start=None, closed=True):
1214 if branch is None:
1217 if branch is None:
1215 branch = self[None].branch()
1218 branch = self[None].branch()
1216 branches = self._branchheads()
1219 branches = self._branchheads()
1217 if branch not in branches:
1220 if branch not in branches:
1218 return []
1221 return []
1219 bheads = branches[branch]
1222 bheads = branches[branch]
1220 # the cache returns heads ordered lowest to highest
1223 # the cache returns heads ordered lowest to highest
1221 bheads.reverse()
1224 bheads.reverse()
1222 if start is not None:
1225 if start is not None:
1223 # filter out the heads that cannot be reached from startrev
1226 # filter out the heads that cannot be reached from startrev
1224 bheads = self.changelog.nodesbetween([start], bheads)[2]
1227 bheads = self.changelog.nodesbetween([start], bheads)[2]
1225 if not closed:
1228 if not closed:
1226 bheads = [h for h in bheads if
1229 bheads = [h for h in bheads if
1227 ('close' not in self.changelog.read(h)[5])]
1230 ('close' not in self.changelog.read(h)[5])]
1228 return bheads
1231 return bheads
1229
1232
1230 def branches(self, nodes):
1233 def branches(self, nodes):
1231 if not nodes:
1234 if not nodes:
1232 nodes = [self.changelog.tip()]
1235 nodes = [self.changelog.tip()]
1233 b = []
1236 b = []
1234 for n in nodes:
1237 for n in nodes:
1235 t = n
1238 t = n
1236 while 1:
1239 while 1:
1237 p = self.changelog.parents(n)
1240 p = self.changelog.parents(n)
1238 if p[1] != nullid or p[0] == nullid:
1241 if p[1] != nullid or p[0] == nullid:
1239 b.append((t, n, p[0], p[1]))
1242 b.append((t, n, p[0], p[1]))
1240 break
1243 break
1241 n = p[0]
1244 n = p[0]
1242 return b
1245 return b
1243
1246
1244 def between(self, pairs):
1247 def between(self, pairs):
1245 r = []
1248 r = []
1246
1249
1247 for top, bottom in pairs:
1250 for top, bottom in pairs:
1248 n, l, i = top, [], 0
1251 n, l, i = top, [], 0
1249 f = 1
1252 f = 1
1250
1253
1251 while n != bottom and n != nullid:
1254 while n != bottom and n != nullid:
1252 p = self.changelog.parents(n)[0]
1255 p = self.changelog.parents(n)[0]
1253 if i == f:
1256 if i == f:
1254 l.append(n)
1257 l.append(n)
1255 f = f * 2
1258 f = f * 2
1256 n = p
1259 n = p
1257 i += 1
1260 i += 1
1258
1261
1259 r.append(l)
1262 r.append(l)
1260
1263
1261 return r
1264 return r
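# Editor's note: for each (top, bottom) pair the loop above walks the
# first-parent chain from top towards bottom and records nodes at
# exponentially growing distances (1, 2, 4, ...); the discovery code in
# findcommonincoming uses these samples via remote.between() to
# binary-search for the point where the histories diverge.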
1262
1265
1263 def findincoming(self, remote, base=None, heads=None, force=False):
1266 def findincoming(self, remote, base=None, heads=None, force=False):
1264 """Return list of roots of the subsets of missing nodes from remote
1267 """Return list of roots of the subsets of missing nodes from remote
1265
1268
1266 If base dict is specified, assume that these nodes and their parents
1269 If base dict is specified, assume that these nodes and their parents
1267 exist on the remote side and that no child of a node of base exists
1270 exist on the remote side and that no child of a node of base exists
1268 in both remote and self.
1271 in both remote and self.
1269 Furthermore base will be updated to include the nodes that exist
1272 Furthermore base will be updated to include the nodes that exist
1270 in self and remote but whose children do not exist in self and remote.
1273 in self and remote but whose children do not exist in self and remote.
1271 If a list of heads is specified, return only nodes which are heads
1274 If a list of heads is specified, return only nodes which are heads
1272 or ancestors of these heads.
1275 or ancestors of these heads.
1273
1276
1274 All the ancestors of base are in self and in remote.
1277 All the ancestors of base are in self and in remote.
1275 All the descendants of the list returned are missing in self.
1278 All the descendants of the list returned are missing in self.
1276 (and so we know that the rest of the nodes are missing in remote, see
1279 (and so we know that the rest of the nodes are missing in remote, see
1277 outgoing)
1280 outgoing)
1278 """
1281 """
1279 return self.findcommonincoming(remote, base, heads, force)[1]
1282 return self.findcommonincoming(remote, base, heads, force)[1]
1280
1283
1281 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1284 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1282 """Return a tuple (common, missing roots, heads) used to identify
1285 """Return a tuple (common, missing roots, heads) used to identify
1283 missing nodes from remote.
1286 missing nodes from remote.
1284
1287
1285 If base dict is specified, assume that these nodes and their parents
1288 If base dict is specified, assume that these nodes and their parents
1286 exist on the remote side and that no child of a node of base exists
1289 exist on the remote side and that no child of a node of base exists
1287 in both remote and self.
1290 in both remote and self.
1288 Furthermore base will be updated to include the nodes that exist
1291 Furthermore base will be updated to include the nodes that exist
1289 in self and remote but whose children do not exist in self and remote.
1292 in self and remote but whose children do not exist in self and remote.
1290 If a list of heads is specified, return only nodes which are heads
1293 If a list of heads is specified, return only nodes which are heads
1291 or ancestors of these heads.
1294 or ancestors of these heads.
1292
1295
1293 All the ancestors of base are in self and in remote.
1296 All the ancestors of base are in self and in remote.
1294 """
1297 """
1295 m = self.changelog.nodemap
1298 m = self.changelog.nodemap
1296 search = []
1299 search = []
1297 fetch = {}
1300 fetch = {}
1298 seen = {}
1301 seen = {}
1299 seenbranch = {}
1302 seenbranch = {}
1300 if base == None:
1303 if base == None:
1301 base = {}
1304 base = {}
1302
1305
1303 if not heads:
1306 if not heads:
1304 heads = remote.heads()
1307 heads = remote.heads()
1305
1308
1306 if self.changelog.tip() == nullid:
1309 if self.changelog.tip() == nullid:
1307 base[nullid] = 1
1310 base[nullid] = 1
1308 if heads != [nullid]:
1311 if heads != [nullid]:
1309 return [nullid], [nullid], list(heads)
1312 return [nullid], [nullid], list(heads)
1310 return [nullid], [], []
1313 return [nullid], [], []
1311
1314
1312 # assume we're closer to the tip than the root
1315 # assume we're closer to the tip than the root
1313 # and start by examining the heads
1316 # and start by examining the heads
1314 self.ui.status(_("searching for changes\n"))
1317 self.ui.status(_("searching for changes\n"))
1315
1318
1316 unknown = []
1319 unknown = []
1317 for h in heads:
1320 for h in heads:
1318 if h not in m:
1321 if h not in m:
1319 unknown.append(h)
1322 unknown.append(h)
1320 else:
1323 else:
1321 base[h] = 1
1324 base[h] = 1
1322
1325
1323 heads = unknown
1326 heads = unknown
1324 if not unknown:
1327 if not unknown:
1325 return base.keys(), [], []
1328 return base.keys(), [], []
1326
1329
1327 req = dict.fromkeys(unknown)
1330 req = dict.fromkeys(unknown)
1328 reqcnt = 0
1331 reqcnt = 0
1329
1332
1330 # search through remote branches
1333 # search through remote branches
1331 # a 'branch' here is a linear segment of history, with four parts:
1334 # a 'branch' here is a linear segment of history, with four parts:
1332 # head, root, first parent, second parent
1335 # head, root, first parent, second parent
1333 # (a branch always has two parents (or none) by definition)
1336 # (a branch always has two parents (or none) by definition)
1334 unknown = remote.branches(unknown)
1337 unknown = remote.branches(unknown)
1335 while unknown:
1338 while unknown:
1336 r = []
1339 r = []
1337 while unknown:
1340 while unknown:
1338 n = unknown.pop(0)
1341 n = unknown.pop(0)
1339 if n[0] in seen:
1342 if n[0] in seen:
1340 continue
1343 continue
1341
1344
1342 self.ui.debug(_("examining %s:%s\n")
1345 self.ui.debug(_("examining %s:%s\n")
1343 % (short(n[0]), short(n[1])))
1346 % (short(n[0]), short(n[1])))
1344 if n[0] == nullid: # found the end of the branch
1347 if n[0] == nullid: # found the end of the branch
1345 pass
1348 pass
1346 elif n in seenbranch:
1349 elif n in seenbranch:
1347 self.ui.debug(_("branch already found\n"))
1350 self.ui.debug(_("branch already found\n"))
1348 continue
1351 continue
1349 elif n[1] and n[1] in m: # do we know the base?
1352 elif n[1] and n[1] in m: # do we know the base?
1350 self.ui.debug(_("found incomplete branch %s:%s\n")
1353 self.ui.debug(_("found incomplete branch %s:%s\n")
1351 % (short(n[0]), short(n[1])))
1354 % (short(n[0]), short(n[1])))
1352 search.append(n[0:2]) # schedule branch range for scanning
1355 search.append(n[0:2]) # schedule branch range for scanning
1353 seenbranch[n] = 1
1356 seenbranch[n] = 1
1354 else:
1357 else:
1355 if n[1] not in seen and n[1] not in fetch:
1358 if n[1] not in seen and n[1] not in fetch:
1356 if n[2] in m and n[3] in m:
1359 if n[2] in m and n[3] in m:
1357 self.ui.debug(_("found new changeset %s\n") %
1360 self.ui.debug(_("found new changeset %s\n") %
1358 short(n[1]))
1361 short(n[1]))
1359 fetch[n[1]] = 1 # earliest unknown
1362 fetch[n[1]] = 1 # earliest unknown
1360 for p in n[2:4]:
1363 for p in n[2:4]:
1361 if p in m:
1364 if p in m:
1362 base[p] = 1 # latest known
1365 base[p] = 1 # latest known
1363
1366
1364 for p in n[2:4]:
1367 for p in n[2:4]:
1365 if p not in req and p not in m:
1368 if p not in req and p not in m:
1366 r.append(p)
1369 r.append(p)
1367 req[p] = 1
1370 req[p] = 1
1368 seen[n[0]] = 1
1371 seen[n[0]] = 1
1369
1372
1370 if r:
1373 if r:
1371 reqcnt += 1
1374 reqcnt += 1
1372 self.ui.debug(_("request %d: %s\n") %
1375 self.ui.debug(_("request %d: %s\n") %
1373 (reqcnt, " ".join(map(short, r))))
1376 (reqcnt, " ".join(map(short, r))))
1374 for p in xrange(0, len(r), 10):
1377 for p in xrange(0, len(r), 10):
1375 for b in remote.branches(r[p:p+10]):
1378 for b in remote.branches(r[p:p+10]):
1376 self.ui.debug(_("received %s:%s\n") %
1379 self.ui.debug(_("received %s:%s\n") %
1377 (short(b[0]), short(b[1])))
1380 (short(b[0]), short(b[1])))
1378 unknown.append(b)
1381 unknown.append(b)
1379
1382
1380 # do binary search on the branches we found
1383 # do binary search on the branches we found
1381 while search:
1384 while search:
1382 newsearch = []
1385 newsearch = []
1383 reqcnt += 1
1386 reqcnt += 1
1384 for n, l in zip(search, remote.between(search)):
1387 for n, l in zip(search, remote.between(search)):
1385 l.append(n[1])
1388 l.append(n[1])
1386 p = n[0]
1389 p = n[0]
1387 f = 1
1390 f = 1
1388 for i in l:
1391 for i in l:
1389 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1392 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1390 if i in m:
1393 if i in m:
1391 if f <= 2:
1394 if f <= 2:
1392 self.ui.debug(_("found new branch changeset %s\n") %
1395 self.ui.debug(_("found new branch changeset %s\n") %
1393 short(p))
1396 short(p))
1394 fetch[p] = 1
1397 fetch[p] = 1
1395 base[i] = 1
1398 base[i] = 1
1396 else:
1399 else:
1397 self.ui.debug(_("narrowed branch search to %s:%s\n")
1400 self.ui.debug(_("narrowed branch search to %s:%s\n")
1398 % (short(p), short(i)))
1401 % (short(p), short(i)))
1399 newsearch.append((p, i))
1402 newsearch.append((p, i))
1400 break
1403 break
1401 p, f = i, f * 2
1404 p, f = i, f * 2
1402 search = newsearch
1405 search = newsearch
1403
1406
1404 # sanity check our fetch list
1407 # sanity check our fetch list
1405 for f in fetch.keys():
1408 for f in fetch.keys():
1406 if f in m:
1409 if f in m:
1407 raise error.RepoError(_("already have changeset ")
1410 raise error.RepoError(_("already have changeset ")
1408 + short(f[:4]))
1411 + short(f[:4]))
1409
1412
1410 if base.keys() == [nullid]:
1413 if base.keys() == [nullid]:
1411 if force:
1414 if force:
1412 self.ui.warn(_("warning: repository is unrelated\n"))
1415 self.ui.warn(_("warning: repository is unrelated\n"))
1413 else:
1416 else:
1414 raise util.Abort(_("repository is unrelated"))
1417 raise util.Abort(_("repository is unrelated"))
1415
1418
1416 self.ui.debug(_("found new changesets starting at ") +
1419 self.ui.debug(_("found new changesets starting at ") +
1417 " ".join([short(f) for f in fetch]) + "\n")
1420 " ".join([short(f) for f in fetch]) + "\n")
1418
1421
1419 self.ui.debug(_("%d total queries\n") % reqcnt)
1422 self.ui.debug(_("%d total queries\n") % reqcnt)
1420
1423
1421 return base.keys(), fetch.keys(), heads
1424 return base.keys(), fetch.keys(), heads
1422
1425
1423 def findoutgoing(self, remote, base=None, heads=None, force=False):
1426 def findoutgoing(self, remote, base=None, heads=None, force=False):
1424 """Return list of nodes that are roots of subsets not in remote
1427 """Return list of nodes that are roots of subsets not in remote
1425
1428
1426 If base dict is specified, assume that these nodes and their parents
1429 If base dict is specified, assume that these nodes and their parents
1427 exist on the remote side.
1430 exist on the remote side.
1428 If a list of heads is specified, return only nodes which are heads
1431 If a list of heads is specified, return only nodes which are heads
1429 or ancestors of these heads, and return a second element which
1432 or ancestors of these heads, and return a second element which
1430 contains all remote heads which get new children.
1433 contains all remote heads which get new children.
1431 """
1434 """
1432 if base == None:
1435 if base == None:
1433 base = {}
1436 base = {}
1434 self.findincoming(remote, base, heads, force=force)
1437 self.findincoming(remote, base, heads, force=force)
1435
1438
1436 self.ui.debug(_("common changesets up to ")
1439 self.ui.debug(_("common changesets up to ")
1437 + " ".join(map(short, base.keys())) + "\n")
1440 + " ".join(map(short, base.keys())) + "\n")
1438
1441
1439 remain = dict.fromkeys(self.changelog.nodemap)
1442 remain = dict.fromkeys(self.changelog.nodemap)
1440
1443
1441 # prune everything remote has from the tree
1444 # prune everything remote has from the tree
1442 del remain[nullid]
1445 del remain[nullid]
1443 remove = base.keys()
1446 remove = base.keys()
1444 while remove:
1447 while remove:
1445 n = remove.pop(0)
1448 n = remove.pop(0)
1446 if n in remain:
1449 if n in remain:
1447 del remain[n]
1450 del remain[n]
1448 for p in self.changelog.parents(n):
1451 for p in self.changelog.parents(n):
1449 remove.append(p)
1452 remove.append(p)
1450
1453
1451 # find every node whose parents have been pruned
1454 # find every node whose parents have been pruned
1452 subset = []
1455 subset = []
1453 # find every remote head that will get new children
1456 # find every remote head that will get new children
1454 updated_heads = {}
1457 updated_heads = {}
1455 for n in remain:
1458 for n in remain:
1456 p1, p2 = self.changelog.parents(n)
1459 p1, p2 = self.changelog.parents(n)
1457 if p1 not in remain and p2 not in remain:
1460 if p1 not in remain and p2 not in remain:
1458 subset.append(n)
1461 subset.append(n)
1459 if heads:
1462 if heads:
1460 if p1 in heads:
1463 if p1 in heads:
1461 updated_heads[p1] = True
1464 updated_heads[p1] = True
1462 if p2 in heads:
1465 if p2 in heads:
1463 updated_heads[p2] = True
1466 updated_heads[p2] = True
1464
1467
1465 # this is the set of all roots we have to push
1468 # this is the set of all roots we have to push
1466 if heads:
1469 if heads:
1467 return subset, updated_heads.keys()
1470 return subset, updated_heads.keys()
1468 else:
1471 else:
1469 return subset
1472 return subset
1470
1473
1471 def pull(self, remote, heads=None, force=False):
1474 def pull(self, remote, heads=None, force=False):
1472 lock = self.lock()
1475 lock = self.lock()
1473 try:
1476 try:
1474 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1477 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1475 force=force)
1478 force=force)
1476 if fetch == [nullid]:
1479 if fetch == [nullid]:
1477 self.ui.status(_("requesting all changes\n"))
1480 self.ui.status(_("requesting all changes\n"))
1478
1481
1479 if not fetch:
1482 if not fetch:
1480 self.ui.status(_("no changes found\n"))
1483 self.ui.status(_("no changes found\n"))
1481 return 0
1484 return 0
1482
1485
1483 if heads is None and remote.capable('changegroupsubset'):
1486 if heads is None and remote.capable('changegroupsubset'):
1484 heads = rheads
1487 heads = rheads
1485
1488
1486 if heads is None:
1489 if heads is None:
1487 cg = remote.changegroup(fetch, 'pull')
1490 cg = remote.changegroup(fetch, 'pull')
1488 else:
1491 else:
1489 if not remote.capable('changegroupsubset'):
1492 if not remote.capable('changegroupsubset'):
1490 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1493 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1491 cg = remote.changegroupsubset(fetch, heads, 'pull')
1494 cg = remote.changegroupsubset(fetch, heads, 'pull')
1492 return self.addchangegroup(cg, 'pull', remote.url())
1495 return self.addchangegroup(cg, 'pull', remote.url())
1493 finally:
1496 finally:
1494 del lock
1497 del lock
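# Usage sketch (editor's illustration; URL and node are hypothetical):
#
#     from mercurial import ui as uimod, hg
#     other = hg.repository(uimod.ui(), 'http://hg.example.com/repo')
#     repo.pull(other)                    # pull everything that is missing
#     repo.pull(other, heads=[somenode])  # or only ancestors of given heads
#
# pull() returns 0 when there is nothing to fetch, otherwise the result of
# addchangegroup().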
1495
1498
1496 def push(self, remote, force=False, revs=None):
1499 def push(self, remote, force=False, revs=None):
1497 # there are two ways to push to remote repo:
1500 # there are two ways to push to remote repo:
1498 #
1501 #
1499 # addchangegroup assumes local user can lock remote
1502 # addchangegroup assumes local user can lock remote
1500 # repo (local filesystem, old ssh servers).
1503 # repo (local filesystem, old ssh servers).
1501 #
1504 #
1502 # unbundle assumes local user cannot lock remote repo (new ssh
1505 # unbundle assumes local user cannot lock remote repo (new ssh
1503 # servers, http servers).
1506 # servers, http servers).
1504
1507
1505 if remote.capable('unbundle'):
1508 if remote.capable('unbundle'):
1506 return self.push_unbundle(remote, force, revs)
1509 return self.push_unbundle(remote, force, revs)
1507 return self.push_addchangegroup(remote, force, revs)
1510 return self.push_addchangegroup(remote, force, revs)
1508
1511
1509 def prepush(self, remote, force, revs):
1512 def prepush(self, remote, force, revs):
1510 common = {}
1513 common = {}
1511 remote_heads = remote.heads()
1514 remote_heads = remote.heads()
1512 inc = self.findincoming(remote, common, remote_heads, force=force)
1515 inc = self.findincoming(remote, common, remote_heads, force=force)
1513
1516
1514 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1517 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1515 if revs is not None:
1518 if revs is not None:
1516 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1519 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1517 else:
1520 else:
1518 bases, heads = update, self.changelog.heads()
1521 bases, heads = update, self.changelog.heads()
1519
1522
1520 if not bases:
1523 if not bases:
1521 self.ui.status(_("no changes found\n"))
1524 self.ui.status(_("no changes found\n"))
1522 return None, 1
1525 return None, 1
1523 elif not force:
1526 elif not force:
1524 # check if we're creating new remote heads
1527 # check if we're creating new remote heads
1525 # to be a remote head after push, node must be either
1528 # to be a remote head after push, node must be either
1526 # - unknown locally
1529 # - unknown locally
1527 # - a local outgoing head descended from update
1530 # - a local outgoing head descended from update
1528 # - a remote head that's known locally and not
1531 # - a remote head that's known locally and not
1529 # ancestral to an outgoing head
1532 # ancestral to an outgoing head
1530
1533
1531 warn = 0
1534 warn = 0
1532
1535
1533 if remote_heads == [nullid]:
1536 if remote_heads == [nullid]:
1534 warn = 0
1537 warn = 0
1535 elif not revs and len(heads) > len(remote_heads):
1538 elif not revs and len(heads) > len(remote_heads):
1536 warn = 1
1539 warn = 1
1537 else:
1540 else:
1538 newheads = list(heads)
1541 newheads = list(heads)
1539 for r in remote_heads:
1542 for r in remote_heads:
1540 if r in self.changelog.nodemap:
1543 if r in self.changelog.nodemap:
1541 desc = self.changelog.heads(r, heads)
1544 desc = self.changelog.heads(r, heads)
1542 l = [h for h in heads if h in desc]
1545 l = [h for h in heads if h in desc]
1543 if not l:
1546 if not l:
1544 newheads.append(r)
1547 newheads.append(r)
1545 else:
1548 else:
1546 newheads.append(r)
1549 newheads.append(r)
1547 if len(newheads) > len(remote_heads):
1550 if len(newheads) > len(remote_heads):
1548 warn = 1
1551 warn = 1
1549
1552
1550 if warn:
1553 if warn:
1551 self.ui.warn(_("abort: push creates new remote heads!\n"))
1554 self.ui.warn(_("abort: push creates new remote heads!\n"))
1552 self.ui.status(_("(did you forget to merge?"
1555 self.ui.status(_("(did you forget to merge?"
1553 " use push -f to force)\n"))
1556 " use push -f to force)\n"))
1554 return None, 0
1557 return None, 0
1555 elif inc:
1558 elif inc:
1556 self.ui.warn(_("note: unsynced remote changes!\n"))
1559 self.ui.warn(_("note: unsynced remote changes!\n"))
1557
1560
1558
1561
1559 if revs is None:
1562 if revs is None:
1560 # use the fast path, no race possible on push
1563 # use the fast path, no race possible on push
1561 cg = self._changegroup(common.keys(), 'push')
1564 cg = self._changegroup(common.keys(), 'push')
1562 else:
1565 else:
1563 cg = self.changegroupsubset(update, revs, 'push')
1566 cg = self.changegroupsubset(update, revs, 'push')
1564 return cg, remote_heads
1567 return cg, remote_heads
1565
1568
1566 def push_addchangegroup(self, remote, force, revs):
1569 def push_addchangegroup(self, remote, force, revs):
1567 lock = remote.lock()
1570 lock = remote.lock()
1568 try:
1571 try:
1569 ret = self.prepush(remote, force, revs)
1572 ret = self.prepush(remote, force, revs)
1570 if ret[0] is not None:
1573 if ret[0] is not None:
1571 cg, remote_heads = ret
1574 cg, remote_heads = ret
1572 return remote.addchangegroup(cg, 'push', self.url())
1575 return remote.addchangegroup(cg, 'push', self.url())
1573 return ret[1]
1576 return ret[1]
1574 finally:
1577 finally:
1575 del lock
1578 del lock
1576
1579
1577 def push_unbundle(self, remote, force, revs):
1580 def push_unbundle(self, remote, force, revs):
1578 # local repo finds heads on server, finds out what revs it
1581 # local repo finds heads on server, finds out what revs it
1579 # must push. once revs transferred, if server finds it has
1582 # must push. once revs transferred, if server finds it has
1580 # different heads (someone else won commit/push race), server
1583 # different heads (someone else won commit/push race), server
1581 # aborts.
1584 # aborts.
1582
1585
1583 ret = self.prepush(remote, force, revs)
1586 ret = self.prepush(remote, force, revs)
1584 if ret[0] is not None:
1587 if ret[0] is not None:
1585 cg, remote_heads = ret
1588 cg, remote_heads = ret
1586 if force: remote_heads = ['force']
1589 if force: remote_heads = ['force']
1587 return remote.unbundle(cg, remote_heads, 'push')
1590 return remote.unbundle(cg, remote_heads, 'push')
1588 return ret[1]
1591 return ret[1]
1589
1592
1590 def changegroupinfo(self, nodes, source):
1593 def changegroupinfo(self, nodes, source):
1591 if self.ui.verbose or source == 'bundle':
1594 if self.ui.verbose or source == 'bundle':
1592 self.ui.status(_("%d changesets found\n") % len(nodes))
1595 self.ui.status(_("%d changesets found\n") % len(nodes))
1593 if self.ui.debugflag:
1596 if self.ui.debugflag:
1594 self.ui.debug(_("list of changesets:\n"))
1597 self.ui.debug(_("list of changesets:\n"))
1595 for node in nodes:
1598 for node in nodes:
1596 self.ui.debug("%s\n" % hex(node))
1599 self.ui.debug("%s\n" % hex(node))
1597
1600
1598 def changegroupsubset(self, bases, heads, source, extranodes=None):
1601 def changegroupsubset(self, bases, heads, source, extranodes=None):
1599 """This function generates a changegroup consisting of all the nodes
1602 """This function generates a changegroup consisting of all the nodes
1600 that are descendants of any of the bases, and ancestors of any of
1603 that are descendants of any of the bases, and ancestors of any of
1601 the heads.
1604 the heads.
1602
1605
1603 It is fairly complex as determining which filenodes and which
1606 It is fairly complex as determining which filenodes and which
1604 manifest nodes need to be included for the changeset to be complete
1607 manifest nodes need to be included for the changeset to be complete
1605 is non-trivial.
1608 is non-trivial.
1606
1609
1607 Another wrinkle is doing the reverse, figuring out which changeset in
1610 Another wrinkle is doing the reverse, figuring out which changeset in
1608 the changegroup a particular filenode or manifestnode belongs to.
1611 the changegroup a particular filenode or manifestnode belongs to.
1609
1612
1610 The caller can specify some nodes that must be included in the
1613 The caller can specify some nodes that must be included in the
1611 changegroup using the extranodes argument. It should be a dict
1614 changegroup using the extranodes argument. It should be a dict
1612 where the keys are the filenames (or 1 for the manifest), and the
1615 where the keys are the filenames (or 1 for the manifest), and the
1613 values are lists of (node, linknode) tuples, where node is a wanted
1616 values are lists of (node, linknode) tuples, where node is a wanted
1614 node and linknode is the changelog node that should be transmitted as
1617 node and linknode is the changelog node that should be transmitted as
1615 the linkrev.
1618 the linkrev.
1616 """
1619 """
1617
1620
1618 if extranodes is None:
1621 if extranodes is None:
1619 # can we go through the fast path ?
1622 # can we go through the fast path ?
1620 heads.sort()
1623 heads.sort()
1621 allheads = self.heads()
1624 allheads = self.heads()
1622 allheads.sort()
1625 allheads.sort()
1623 if heads == allheads:
1626 if heads == allheads:
1624 common = []
1627 common = []
1625 # parents of bases are known from both sides
1628 # parents of bases are known from both sides
1626 for n in bases:
1629 for n in bases:
1627 for p in self.changelog.parents(n):
1630 for p in self.changelog.parents(n):
1628 if p != nullid:
1631 if p != nullid:
1629 common.append(p)
1632 common.append(p)
1630 return self._changegroup(common, source)
1633 return self._changegroup(common, source)
1631
1634
1632 self.hook('preoutgoing', throw=True, source=source)
1635 self.hook('preoutgoing', throw=True, source=source)
1633
1636
1634 # Set up some initial variables
1637 # Set up some initial variables
1635 # Make it easy to refer to self.changelog
1638 # Make it easy to refer to self.changelog
1636 cl = self.changelog
1639 cl = self.changelog
1637 # msng is short for missing - compute the list of changesets in this
1640 # msng is short for missing - compute the list of changesets in this
1638 # changegroup.
1641 # changegroup.
1639 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1642 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1640 self.changegroupinfo(msng_cl_lst, source)
1643 self.changegroupinfo(msng_cl_lst, source)
1641 # Some bases may turn out to be superfluous, and some heads may be
1644 # Some bases may turn out to be superfluous, and some heads may be
1642 # too. nodesbetween will return the minimal set of bases and heads
1645 # too. nodesbetween will return the minimal set of bases and heads
1643 # necessary to re-create the changegroup.
1646 # necessary to re-create the changegroup.
1644
1647
1645 # Known heads are the list of heads that it is assumed the recipient
1648 # Known heads are the list of heads that it is assumed the recipient
1646 # of this changegroup will know about.
1649 # of this changegroup will know about.
1647 knownheads = {}
1650 knownheads = {}
1648 # We assume that all parents of bases are known heads.
1651 # We assume that all parents of bases are known heads.
1649 for n in bases:
1652 for n in bases:
1650 for p in cl.parents(n):
1653 for p in cl.parents(n):
1651 if p != nullid:
1654 if p != nullid:
1652 knownheads[p] = 1
1655 knownheads[p] = 1
1653 knownheads = knownheads.keys()
1656 knownheads = knownheads.keys()
1654 if knownheads:
1657 if knownheads:
1655 # Now that we know what heads are known, we can compute which
1658 # Now that we know what heads are known, we can compute which
1656 # changesets are known. The recipient must know about all
1659 # changesets are known. The recipient must know about all
1657 # changesets required to reach the known heads from the null
1660 # changesets required to reach the known heads from the null
1658 # changeset.
1661 # changeset.
1659 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1662 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1660 junk = None
1663 junk = None
1661 # Transform the list into an ersatz set.
1664 # Transform the list into an ersatz set.
1662 has_cl_set = dict.fromkeys(has_cl_set)
1665 has_cl_set = dict.fromkeys(has_cl_set)
1663 else:
1666 else:
1664 # If there were no known heads, the recipient cannot be assumed to
1667 # If there were no known heads, the recipient cannot be assumed to
1665 # know about any changesets.
1668 # know about any changesets.
1666 has_cl_set = {}
1669 has_cl_set = {}
1667
1670
1668 # Make it easy to refer to self.manifest
1671 # Make it easy to refer to self.manifest
1669 mnfst = self.manifest
1672 mnfst = self.manifest
1670 # We don't know which manifests are missing yet
1673 # We don't know which manifests are missing yet
1671 msng_mnfst_set = {}
1674 msng_mnfst_set = {}
1672 # Nor do we know which filenodes are missing.
1675 # Nor do we know which filenodes are missing.
1673 msng_filenode_set = {}
1676 msng_filenode_set = {}
1674
1677
1675 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1678 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1676 junk = None
1679 junk = None
1677
1680
1678 # A changeset always belongs to itself, so the changenode lookup
1681 # A changeset always belongs to itself, so the changenode lookup
1679 # function for a changenode is identity.
1682 # function for a changenode is identity.
1680 def identity(x):
1683 def identity(x):
1681 return x
1684 return x
1682
1685
1683 # A function generating function. Sets up an environment for the
1686 # A function generating function. Sets up an environment for the
1684 # inner function.
1687 # inner function.
1685 def cmp_by_rev_func(revlog):
1688 def cmp_by_rev_func(revlog):
1686 # Compare two nodes by their revision number in the environment's
1689 # Compare two nodes by their revision number in the environment's
1687 # revision history. Since the revision number both represents the
1690 # revision history. Since the revision number both represents the
1688 # most efficient order to read the nodes in, and represents a
1691 # most efficient order to read the nodes in, and represents a
1689 # topological sorting of the nodes, this function is often useful.
1692 # topological sorting of the nodes, this function is often useful.
1690 def cmp_by_rev(a, b):
1693 def cmp_by_rev(a, b):
1691 return cmp(revlog.rev(a), revlog.rev(b))
1694 return cmp(revlog.rev(a), revlog.rev(b))
1692 return cmp_by_rev
1695 return cmp_by_rev
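# Editor's note: prune_parents() below calls
#     haslst.sort(cmp_by_rev_func(revlog))
# to visit nodes in ascending revision (and therefore topological) order.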
1693
1696
1694 # If we determine that a particular file or manifest node must be a
1697 # If we determine that a particular file or manifest node must be a
1695 # node that the recipient of the changegroup will already have, we can
1698 # node that the recipient of the changegroup will already have, we can
1696 # also assume the recipient will have all the parents. This function
1699 # also assume the recipient will have all the parents. This function
1697 # prunes them from the set of missing nodes.
1700 # prunes them from the set of missing nodes.
1698 def prune_parents(revlog, hasset, msngset):
1701 def prune_parents(revlog, hasset, msngset):
1699 haslst = hasset.keys()
1702 haslst = hasset.keys()
1700 haslst.sort(cmp_by_rev_func(revlog))
1703 haslst.sort(cmp_by_rev_func(revlog))
1701 for node in haslst:
1704 for node in haslst:
1702 parentlst = [p for p in revlog.parents(node) if p != nullid]
1705 parentlst = [p for p in revlog.parents(node) if p != nullid]
1703 while parentlst:
1706 while parentlst:
1704 n = parentlst.pop()
1707 n = parentlst.pop()
1705 if n not in hasset:
1708 if n not in hasset:
1706 hasset[n] = 1
1709 hasset[n] = 1
1707 p = [p for p in revlog.parents(n) if p != nullid]
1710 p = [p for p in revlog.parents(n) if p != nullid]
1708 parentlst.extend(p)
1711 parentlst.extend(p)
1709 for n in hasset:
1712 for n in hasset:
1710 msngset.pop(n, None)
1713 msngset.pop(n, None)
1711
1714
1712 # This is a function generating function used to set up an environment
1715 # This is a function generating function used to set up an environment
1713 # for the inner function to execute in.
1716 # for the inner function to execute in.
1714 def manifest_and_file_collector(changedfileset):
1717 def manifest_and_file_collector(changedfileset):
1715 # This is an information gathering function that gathers
1718 # This is an information gathering function that gathers
1716 # information from each changeset node that goes out as part of
1719 # information from each changeset node that goes out as part of
1717 # the changegroup. The information gathered is a list of which
1720 # the changegroup. The information gathered is a list of which
1718 # manifest nodes are potentially required (the recipient may
1721 # manifest nodes are potentially required (the recipient may
1719 # already have them) and total list of all files which were
1722 # already have them) and total list of all files which were
1720 # changed in any changeset in the changegroup.
1723 # changed in any changeset in the changegroup.
1721 #
1724 #
1722 # We also remember the first changenode we saw any manifest
1725 # We also remember the first changenode we saw any manifest
1723 # referenced by so we can later determine which changenode 'owns'
1726 # referenced by so we can later determine which changenode 'owns'
1724 # the manifest.
1727 # the manifest.
1725 def collect_manifests_and_files(clnode):
1728 def collect_manifests_and_files(clnode):
1726 c = cl.read(clnode)
1729 c = cl.read(clnode)
1727 for f in c[3]:
1730 for f in c[3]:
1728 # This is to make sure we only have one instance of each
1731 # This is to make sure we only have one instance of each
1729 # filename string for each filename.
1732 # filename string for each filename.
1730 changedfileset.setdefault(f, f)
1733 changedfileset.setdefault(f, f)
1731 msng_mnfst_set.setdefault(c[0], clnode)
1734 msng_mnfst_set.setdefault(c[0], clnode)
1732 return collect_manifests_and_files
1735 return collect_manifests_and_files
1733
1736
1734 # Figure out which manifest nodes (of the ones we think might be part
1737 # Figure out which manifest nodes (of the ones we think might be part
1735 # of the changegroup) the recipient must know about and remove them
1738 # of the changegroup) the recipient must know about and remove them
1736 # from the changegroup.
1739 # from the changegroup.
1737 def prune_manifests():
1740 def prune_manifests():
1738 has_mnfst_set = {}
1741 has_mnfst_set = {}
1739 for n in msng_mnfst_set:
1742 for n in msng_mnfst_set:
1740 # If a 'missing' manifest thinks it belongs to a changenode
1743 # If a 'missing' manifest thinks it belongs to a changenode
1741 # the recipient is assumed to have, obviously the recipient
1744 # the recipient is assumed to have, obviously the recipient
1742 # must have that manifest.
1745 # must have that manifest.
1743 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1746 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1744 if linknode in has_cl_set:
1747 if linknode in has_cl_set:
1745 has_mnfst_set[n] = 1
1748 has_mnfst_set[n] = 1
1746 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1749 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1747
1750
1748 # Use the information collected in collect_manifests_and_files to say
1751 # Use the information collected in collect_manifests_and_files to say
1749 # which changenode any manifestnode belongs to.
1752 # which changenode any manifestnode belongs to.
1750 def lookup_manifest_link(mnfstnode):
1753 def lookup_manifest_link(mnfstnode):
1751 return msng_mnfst_set[mnfstnode]
1754 return msng_mnfst_set[mnfstnode]
1752
1755
1753 # A function generating function that sets up the initial environment
1756 # A function generating function that sets up the initial environment
1754 # for the inner function.
1757 # for the inner function.
1755 def filenode_collector(changedfiles):
1758 def filenode_collector(changedfiles):
1756 next_rev = [0]
1759 next_rev = [0]
1757 # This gathers information from each manifestnode included in the
1760 # This gathers information from each manifestnode included in the
1758 # changegroup about which filenodes the manifest node references
1761 # changegroup about which filenodes the manifest node references
1759 # so we can include those in the changegroup too.
1762 # so we can include those in the changegroup too.
1760 #
1763 #
1761 # It also remembers which changenode each filenode belongs to. It
1764 # It also remembers which changenode each filenode belongs to. It
1762 # does this by assuming that a filenode belongs to the changenode
1765 # does this by assuming that a filenode belongs to the changenode
1763 # that the first manifest referencing it belongs to.
1766 # that the first manifest referencing it belongs to.
1764 def collect_msng_filenodes(mnfstnode):
1767 def collect_msng_filenodes(mnfstnode):
1765 r = mnfst.rev(mnfstnode)
1768 r = mnfst.rev(mnfstnode)
1766 if r == next_rev[0]:
1769 if r == next_rev[0]:
1767 # If the last rev we looked at was the one just previous,
1770 # If the last rev we looked at was the one just previous,
1768 # we only need to see a diff.
1771 # we only need to see a diff.
1769 deltamf = mnfst.readdelta(mnfstnode)
1772 deltamf = mnfst.readdelta(mnfstnode)
1770 # For each line in the delta
1773 # For each line in the delta
1771 for f, fnode in deltamf.iteritems():
1774 for f, fnode in deltamf.iteritems():
1772 f = changedfiles.get(f, None)
1775 f = changedfiles.get(f, None)
1773 # And if the file is in the list of files we care
1776 # And if the file is in the list of files we care
1774 # about.
1777 # about.
1775 if f is not None:
1778 if f is not None:
1776 # Get the changenode this manifest belongs to
1779 # Get the changenode this manifest belongs to
1777 clnode = msng_mnfst_set[mnfstnode]
1780 clnode = msng_mnfst_set[mnfstnode]
1778 # Create the set of filenodes for the file if
1781 # Create the set of filenodes for the file if
1779 # there isn't one already.
1782 # there isn't one already.
1780 ndset = msng_filenode_set.setdefault(f, {})
1783 ndset = msng_filenode_set.setdefault(f, {})
1781 # And set the filenode's changelog node to the
1784 # And set the filenode's changelog node to the
1782 # manifest's if it hasn't been set already.
1785 # manifest's if it hasn't been set already.
1783 ndset.setdefault(fnode, clnode)
1786 ndset.setdefault(fnode, clnode)
1784 else:
1787 else:
1785 # Otherwise we need a full manifest.
1788 # Otherwise we need a full manifest.
1786 m = mnfst.read(mnfstnode)
1789 m = mnfst.read(mnfstnode)
1787 # For every file we care about.
1790 # For every file we care about.
1788 for f in changedfiles:
1791 for f in changedfiles:
1789 fnode = m.get(f, None)
1792 fnode = m.get(f, None)
1790 # If it's in the manifest
1793 # If it's in the manifest
1791 if fnode is not None:
1794 if fnode is not None:
1792 # See comments above.
1795 # See comments above.
1793 clnode = msng_mnfst_set[mnfstnode]
1796 clnode = msng_mnfst_set[mnfstnode]
1794 ndset = msng_filenode_set.setdefault(f, {})
1797 ndset = msng_filenode_set.setdefault(f, {})
1795 ndset.setdefault(fnode, clnode)
1798 ndset.setdefault(fnode, clnode)
1796 # Remember the revision we hope to see next.
1799 # Remember the revision we hope to see next.
1797 next_rev[0] = r + 1
1800 next_rev[0] = r + 1
1798 return collect_msng_filenodes
1801 return collect_msng_filenodes
1799
1802
1800 # We have a list of filenodes we think we need for a file; let's remove
1803 # We have a list of filenodes we think we need for a file; let's remove
1801 # all those we know the recipient must have.
1804 # all those we know the recipient must have.
1802 def prune_filenodes(f, filerevlog):
1805 def prune_filenodes(f, filerevlog):
1803 msngset = msng_filenode_set[f]
1806 msngset = msng_filenode_set[f]
1804 hasset = {}
1807 hasset = {}
1805 # If a 'missing' filenode thinks it belongs to a changenode we
1808 # If a 'missing' filenode thinks it belongs to a changenode we
1806 # assume the recipient must have, then the recipient must have
1809 # assume the recipient must have, then the recipient must have
1807 # that filenode.
1810 # that filenode.
1808 for n in msngset:
1811 for n in msngset:
1809 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1812 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1810 if clnode in has_cl_set:
1813 if clnode in has_cl_set:
1811 hasset[n] = 1
1814 hasset[n] = 1
1812 prune_parents(filerevlog, hasset, msngset)
1815 prune_parents(filerevlog, hasset, msngset)
1813
1816
1814 # A function generator function that sets up a context for the
1817 # A function generator function that sets up a context for the
1815 # inner function.
1818 # inner function.
1816 def lookup_filenode_link_func(fname):
1819 def lookup_filenode_link_func(fname):
1817 msngset = msng_filenode_set[fname]
1820 msngset = msng_filenode_set[fname]
1818 # Lookup the changenode the filenode belongs to.
1821 # Lookup the changenode the filenode belongs to.
1819 def lookup_filenode_link(fnode):
1822 def lookup_filenode_link(fnode):
1820 return msngset[fnode]
1823 return msngset[fnode]
1821 return lookup_filenode_link
1824 return lookup_filenode_link
1822
1825
1823 # Add the nodes that were explicitly requested.
1826 # Add the nodes that were explicitly requested.
1824 def add_extra_nodes(name, nodes):
1827 def add_extra_nodes(name, nodes):
1825 if not extranodes or name not in extranodes:
1828 if not extranodes or name not in extranodes:
1826 return
1829 return
1827
1830
1828 for node, linknode in extranodes[name]:
1831 for node, linknode in extranodes[name]:
1829 if node not in nodes:
1832 if node not in nodes:
1830 nodes[node] = linknode
1833 nodes[node] = linknode
1831
1834
1832 # Now that we have all these utility functions to help out and
1835 # Now that we have all these utility functions to help out and
1833 # logically divide up the task, generate the group.
1836 # logically divide up the task, generate the group.
1834 def gengroup():
1837 def gengroup():
1835 # The set of changed files starts empty.
1838 # The set of changed files starts empty.
1836 changedfiles = {}
1839 changedfiles = {}
1837 # Create a changenode group generator that will call our functions
1840 # Create a changenode group generator that will call our functions
1838 # back to lookup the owning changenode and collect information.
1841 # back to lookup the owning changenode and collect information.
1839 group = cl.group(msng_cl_lst, identity,
1842 group = cl.group(msng_cl_lst, identity,
1840 manifest_and_file_collector(changedfiles))
1843 manifest_and_file_collector(changedfiles))
1841 for chnk in group:
1844 for chnk in group:
1842 yield chnk
1845 yield chnk
1843
1846
1844 # The list of manifests has been collected by the generator
1847 # The list of manifests has been collected by the generator
1845 # calling our functions back.
1848 # calling our functions back.
1846 prune_manifests()
1849 prune_manifests()
1847 add_extra_nodes(1, msng_mnfst_set)
1850 add_extra_nodes(1, msng_mnfst_set)
1848 msng_mnfst_lst = msng_mnfst_set.keys()
1851 msng_mnfst_lst = msng_mnfst_set.keys()
1849 # Sort the manifestnodes by revision number.
1852 # Sort the manifestnodes by revision number.
1850 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1853 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1851 # Create a generator for the manifestnodes that calls our lookup
1854 # Create a generator for the manifestnodes that calls our lookup
1852 # and data collection functions back.
1855 # and data collection functions back.
1853 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1856 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1854 filenode_collector(changedfiles))
1857 filenode_collector(changedfiles))
1855 for chnk in group:
1858 for chnk in group:
1856 yield chnk
1859 yield chnk
1857
1860
1858 # These are no longer needed, dereference and toss the memory for
1861 # These are no longer needed, dereference and toss the memory for
1859 # them.
1862 # them.
1860 msng_mnfst_lst = None
1863 msng_mnfst_lst = None
1861 msng_mnfst_set.clear()
1864 msng_mnfst_set.clear()
1862
1865
1863 if extranodes:
1866 if extranodes:
1864 for fname in extranodes:
1867 for fname in extranodes:
1865 if isinstance(fname, int):
1868 if isinstance(fname, int):
1866 continue
1869 continue
1867 msng_filenode_set.setdefault(fname, {})
1870 msng_filenode_set.setdefault(fname, {})
1868 changedfiles[fname] = 1
1871 changedfiles[fname] = 1
1869 # Go through all our files in order sorted by name.
1872 # Go through all our files in order sorted by name.
1870 for fname in util.sort(changedfiles):
1873 for fname in util.sort(changedfiles):
1871 filerevlog = self.file(fname)
1874 filerevlog = self.file(fname)
1872 if not len(filerevlog):
1875 if not len(filerevlog):
1873 raise util.Abort(_("empty or missing revlog for %s") % fname)
1876 raise util.Abort(_("empty or missing revlog for %s") % fname)
1874 # Toss out the filenodes that the recipient isn't really
1877 # Toss out the filenodes that the recipient isn't really
1875 # missing.
1878 # missing.
1876 if fname in msng_filenode_set:
1879 if fname in msng_filenode_set:
1877 prune_filenodes(fname, filerevlog)
1880 prune_filenodes(fname, filerevlog)
1878 add_extra_nodes(fname, msng_filenode_set[fname])
1881 add_extra_nodes(fname, msng_filenode_set[fname])
1879 msng_filenode_lst = msng_filenode_set[fname].keys()
1882 msng_filenode_lst = msng_filenode_set[fname].keys()
1880 else:
1883 else:
1881 msng_filenode_lst = []
1884 msng_filenode_lst = []
1882 # If any filenodes are left, generate the group for them,
1885 # If any filenodes are left, generate the group for them,
1883 # otherwise don't bother.
1886 # otherwise don't bother.
1884 if len(msng_filenode_lst) > 0:
1887 if len(msng_filenode_lst) > 0:
1885 yield changegroup.chunkheader(len(fname))
1888 yield changegroup.chunkheader(len(fname))
1886 yield fname
1889 yield fname
1887 # Sort the filenodes by their revision number.
1890 # Sort the filenodes by their revision number.
1888 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1891 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1889 # Create a group generator and only pass in a changenode
1892 # Create a group generator and only pass in a changenode
1890 # lookup function as we need to collect no information
1893 # lookup function as we need to collect no information
1891 # from filenodes.
1894 # from filenodes.
1892 group = filerevlog.group(msng_filenode_lst,
1895 group = filerevlog.group(msng_filenode_lst,
1893 lookup_filenode_link_func(fname))
1896 lookup_filenode_link_func(fname))
1894 for chnk in group:
1897 for chnk in group:
1895 yield chnk
1898 yield chnk
1896 if fname in msng_filenode_set:
1899 if fname in msng_filenode_set:
1897 # Don't need this anymore, toss it to free memory.
1900 # Don't need this anymore, toss it to free memory.
1898 del msng_filenode_set[fname]
1901 del msng_filenode_set[fname]
1899 # Signal that no more groups are left.
1902 # Signal that no more groups are left.
1900 yield changegroup.closechunk()
1903 yield changegroup.closechunk()
1901
1904
1902 if msng_cl_lst:
1905 if msng_cl_lst:
1903 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1906 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1904
1907
1905 return util.chunkbuffer(gengroup())
1908 return util.chunkbuffer(gengroup())
1906
1909
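The gengroup() generator above emits the whole changegroup as a single stream: one group of delta chunks for the changelog, one for the manifest, then a named group per changed file (a chunkheader carrying the filename length, the filename itself, and that file's delta chunks), terminated by a closing chunk. A minimal, hypothetical consumer is sketched below; it assumes only the changegroup.chunkiter() and changegroup.getchunk() helpers that addchangegroup() further down uses, and a `source` stream produced by this code.

    import changegroup

    def countgroups(source):
        # One group of delta chunks for the changelog, one for the manifest.
        nclchunks = len(list(changegroup.chunkiter(source)))
        nmfchunks = len(list(changegroup.chunkiter(source)))
        # Then, per changed file: its name, followed by its filelog group.
        files = []
        while 1:
            fname = changegroup.getchunk(source)
            if not fname:           # an empty chunk ends the stream
                break
            files.append((fname, len(list(changegroup.chunkiter(source)))))
        return nclchunks, nmfchunks, files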
1907 def changegroup(self, basenodes, source):
1910 def changegroup(self, basenodes, source):
1908 # to avoid a race we use changegroupsubset() (issue1320)
1911 # to avoid a race we use changegroupsubset() (issue1320)
1909 return self.changegroupsubset(basenodes, self.heads(), source)
1912 return self.changegroupsubset(basenodes, self.heads(), source)
1910
1913
1911 def _changegroup(self, common, source):
1914 def _changegroup(self, common, source):
1912 """Generate a changegroup of all nodes that we have that a recipient
1915 """Generate a changegroup of all nodes that we have that a recipient
1913 doesn't.
1916 doesn't.
1914
1917
1915 This is much easier than the previous function as we can assume that
1918 This is much easier than the previous function as we can assume that
1916 the recipient has any changenode we aren't sending them.
1919 the recipient has any changenode we aren't sending them.
1917
1920
1918 common is the set of common nodes between remote and self"""
1921 common is the set of common nodes between remote and self"""
1919
1922
1920 self.hook('preoutgoing', throw=True, source=source)
1923 self.hook('preoutgoing', throw=True, source=source)
1921
1924
1922 cl = self.changelog
1925 cl = self.changelog
1923 nodes = cl.findmissing(common)
1926 nodes = cl.findmissing(common)
1924 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1927 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1925 self.changegroupinfo(nodes, source)
1928 self.changegroupinfo(nodes, source)
1926
1929
1927 def identity(x):
1930 def identity(x):
1928 return x
1931 return x
1929
1932
1930 def gennodelst(log):
1933 def gennodelst(log):
1931 for r in log:
1934 for r in log:
1932 if log.linkrev(r) in revset:
1935 if log.linkrev(r) in revset:
1933 yield log.node(r)
1936 yield log.node(r)
1934
1937
1935 def changed_file_collector(changedfileset):
1938 def changed_file_collector(changedfileset):
1936 def collect_changed_files(clnode):
1939 def collect_changed_files(clnode):
1937 c = cl.read(clnode)
1940 c = cl.read(clnode)
1938 for fname in c[3]:
1941 for fname in c[3]:
1939 changedfileset[fname] = 1
1942 changedfileset[fname] = 1
1940 return collect_changed_files
1943 return collect_changed_files
1941
1944
1942 def lookuprevlink_func(revlog):
1945 def lookuprevlink_func(revlog):
1943 def lookuprevlink(n):
1946 def lookuprevlink(n):
1944 return cl.node(revlog.linkrev(revlog.rev(n)))
1947 return cl.node(revlog.linkrev(revlog.rev(n)))
1945 return lookuprevlink
1948 return lookuprevlink
1946
1949
1947 def gengroup():
1950 def gengroup():
1948 # construct a list of all changed files
1951 # construct a list of all changed files
1949 changedfiles = {}
1952 changedfiles = {}
1950
1953
1951 for chnk in cl.group(nodes, identity,
1954 for chnk in cl.group(nodes, identity,
1952 changed_file_collector(changedfiles)):
1955 changed_file_collector(changedfiles)):
1953 yield chnk
1956 yield chnk
1954
1957
1955 mnfst = self.manifest
1958 mnfst = self.manifest
1956 nodeiter = gennodelst(mnfst)
1959 nodeiter = gennodelst(mnfst)
1957 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1960 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1958 yield chnk
1961 yield chnk
1959
1962
1960 for fname in util.sort(changedfiles):
1963 for fname in util.sort(changedfiles):
1961 filerevlog = self.file(fname)
1964 filerevlog = self.file(fname)
1962 if not len(filerevlog):
1965 if not len(filerevlog):
1963 raise util.Abort(_("empty or missing revlog for %s") % fname)
1966 raise util.Abort(_("empty or missing revlog for %s") % fname)
1964 nodeiter = gennodelst(filerevlog)
1967 nodeiter = gennodelst(filerevlog)
1965 nodeiter = list(nodeiter)
1968 nodeiter = list(nodeiter)
1966 if nodeiter:
1969 if nodeiter:
1967 yield changegroup.chunkheader(len(fname))
1970 yield changegroup.chunkheader(len(fname))
1968 yield fname
1971 yield fname
1969 lookup = lookuprevlink_func(filerevlog)
1972 lookup = lookuprevlink_func(filerevlog)
1970 for chnk in filerevlog.group(nodeiter, lookup):
1973 for chnk in filerevlog.group(nodeiter, lookup):
1971 yield chnk
1974 yield chnk
1972
1975
1973 yield changegroup.closechunk()
1976 yield changegroup.closechunk()
1974
1977
1975 if nodes:
1978 if nodes:
1976 self.hook('outgoing', node=hex(nodes[0]), source=source)
1979 self.hook('outgoing', node=hex(nodes[0]), source=source)
1977
1980
1978 return util.chunkbuffer(gengroup())
1981 return util.chunkbuffer(gengroup())
1979
1982
1980 def addchangegroup(self, source, srctype, url, emptyok=False):
1983 def addchangegroup(self, source, srctype, url, emptyok=False):
1981 """add changegroup to repo.
1984 """add changegroup to repo.
1982
1985
1983 return values:
1986 return values:
1984 - nothing changed or no source: 0
1987 - nothing changed or no source: 0
1985 - more heads than before: 1+added heads (2..n)
1988 - more heads than before: 1+added heads (2..n)
1986 - fewer heads than before: -1-removed heads (-2..-n)
1989 - fewer heads than before: -1-removed heads (-2..-n)
1987 - number of heads stays the same: 1
1990 - number of heads stays the same: 1
1988 """
1991 """
1989 def csmap(x):
1992 def csmap(x):
1990 self.ui.debug(_("add changeset %s\n") % short(x))
1993 self.ui.debug(_("add changeset %s\n") % short(x))
1991 return len(cl)
1994 return len(cl)
1992
1995
1993 def revmap(x):
1996 def revmap(x):
1994 return cl.rev(x)
1997 return cl.rev(x)
1995
1998
1996 if not source:
1999 if not source:
1997 return 0
2000 return 0
1998
2001
1999 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2002 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2000
2003
2001 changesets = files = revisions = 0
2004 changesets = files = revisions = 0
2002
2005
2003 # write changelog data to temp files so concurrent readers will not see
2006 # write changelog data to temp files so concurrent readers will not see
2004 # inconsistent view
2007 # inconsistent view
2005 cl = self.changelog
2008 cl = self.changelog
2006 cl.delayupdate()
2009 cl.delayupdate()
2007 oldheads = len(cl.heads())
2010 oldheads = len(cl.heads())
2008
2011
2009 tr = self.transaction()
2012 tr = self.transaction()
2010 try:
2013 try:
2011 trp = weakref.proxy(tr)
2014 trp = weakref.proxy(tr)
2012 # pull off the changeset group
2015 # pull off the changeset group
2013 self.ui.status(_("adding changesets\n"))
2016 self.ui.status(_("adding changesets\n"))
2014 cor = len(cl) - 1
2017 cor = len(cl) - 1
2015 chunkiter = changegroup.chunkiter(source)
2018 chunkiter = changegroup.chunkiter(source)
2016 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2019 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2017 raise util.Abort(_("received changelog group is empty"))
2020 raise util.Abort(_("received changelog group is empty"))
2018 cnr = len(cl) - 1
2021 cnr = len(cl) - 1
2019 changesets = cnr - cor
2022 changesets = cnr - cor
2020
2023
2021 # pull off the manifest group
2024 # pull off the manifest group
2022 self.ui.status(_("adding manifests\n"))
2025 self.ui.status(_("adding manifests\n"))
2023 chunkiter = changegroup.chunkiter(source)
2026 chunkiter = changegroup.chunkiter(source)
2024 # no need to check for empty manifest group here:
2027 # no need to check for empty manifest group here:
2025 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2028 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2026 # no new manifest will be created and the manifest group will
2029 # no new manifest will be created and the manifest group will
2027 # be empty during the pull
2030 # be empty during the pull
2028 self.manifest.addgroup(chunkiter, revmap, trp)
2031 self.manifest.addgroup(chunkiter, revmap, trp)
2029
2032
2030 # process the files
2033 # process the files
2031 self.ui.status(_("adding file changes\n"))
2034 self.ui.status(_("adding file changes\n"))
2032 while 1:
2035 while 1:
2033 f = changegroup.getchunk(source)
2036 f = changegroup.getchunk(source)
2034 if not f:
2037 if not f:
2035 break
2038 break
2036 self.ui.debug(_("adding %s revisions\n") % f)
2039 self.ui.debug(_("adding %s revisions\n") % f)
2037 fl = self.file(f)
2040 fl = self.file(f)
2038 o = len(fl)
2041 o = len(fl)
2039 chunkiter = changegroup.chunkiter(source)
2042 chunkiter = changegroup.chunkiter(source)
2040 if fl.addgroup(chunkiter, revmap, trp) is None:
2043 if fl.addgroup(chunkiter, revmap, trp) is None:
2041 raise util.Abort(_("received file revlog group is empty"))
2044 raise util.Abort(_("received file revlog group is empty"))
2042 revisions += len(fl) - o
2045 revisions += len(fl) - o
2043 files += 1
2046 files += 1
2044
2047
2045 newheads = len(self.changelog.heads())
2048 newheads = len(self.changelog.heads())
2046 heads = ""
2049 heads = ""
2047 if oldheads and newheads != oldheads:
2050 if oldheads and newheads != oldheads:
2048 heads = _(" (%+d heads)") % (newheads - oldheads)
2051 heads = _(" (%+d heads)") % (newheads - oldheads)
2049
2052
2050 self.ui.status(_("added %d changesets"
2053 self.ui.status(_("added %d changesets"
2051 " with %d changes to %d files%s\n")
2054 " with %d changes to %d files%s\n")
2052 % (changesets, revisions, files, heads))
2055 % (changesets, revisions, files, heads))
2053
2056
2054 if changesets > 0:
2057 if changesets > 0:
2055 p = lambda: self.changelog.writepending() and self.root or ""
2058 p = lambda: self.changelog.writepending() and self.root or ""
2056 self.hook('pretxnchangegroup', throw=True,
2059 self.hook('pretxnchangegroup', throw=True,
2057 node=hex(self.changelog.node(cor+1)), source=srctype,
2060 node=hex(self.changelog.node(cor+1)), source=srctype,
2058 url=url, pending=p)
2061 url=url, pending=p)
2059
2062
2060 # make changelog see real files again
2063 # make changelog see real files again
2061 cl.finalize(trp)
2064 cl.finalize(trp)
2062
2065
2063 tr.close()
2066 tr.close()
2064 finally:
2067 finally:
2065 del tr
2068 del tr
2066
2069
2067 if changesets > 0:
2070 if changesets > 0:
2068 # forcefully update the on-disk branch cache
2071 # forcefully update the on-disk branch cache
2069 self.ui.debug(_("updating the branch cache\n"))
2072 self.ui.debug(_("updating the branch cache\n"))
2070 self.branchtags()
2073 self.branchtags()
2071 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2074 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2072 source=srctype, url=url)
2075 source=srctype, url=url)
2073
2076
2074 for i in xrange(cor + 1, cnr + 1):
2077 for i in xrange(cor + 1, cnr + 1):
2075 self.hook("incoming", node=hex(self.changelog.node(i)),
2078 self.hook("incoming", node=hex(self.changelog.node(i)),
2076 source=srctype, url=url)
2079 source=srctype, url=url)
2077
2080
2078 # never return 0 here:
2081 # never return 0 here:
2079 if newheads < oldheads:
2082 if newheads < oldheads:
2080 return newheads - oldheads - 1
2083 return newheads - oldheads - 1
2081 else:
2084 else:
2082 return newheads - oldheads + 1
2085 return newheads - oldheads + 1
2083
2086
2084
2087
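The head-count delta that addchangegroup() returns is compact but easy to misread. A small hypothetical helper, following only the docstring above (the name is illustrative, not part of Mercurial's API), shows how a caller could decode it:

    def describeheads(ret):
        # 0: nothing changed; 1: same number of heads;
        # 2..n: (ret - 1) heads added; -2..-n: (-ret - 1) heads removed.
        if ret == 0:
            return "nothing changed"
        if ret == 1:
            return "number of heads unchanged"
        if ret > 1:
            return "%d new heads" % (ret - 1)
        return "%d heads removed" % (-ret - 1)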
2085 def stream_in(self, remote):
2088 def stream_in(self, remote):
2086 fp = remote.stream_out()
2089 fp = remote.stream_out()
2087 l = fp.readline()
2090 l = fp.readline()
2088 try:
2091 try:
2089 resp = int(l)
2092 resp = int(l)
2090 except ValueError:
2093 except ValueError:
2091 raise error.ResponseError(
2094 raise error.ResponseError(
2092 _('Unexpected response from remote server:'), l)
2095 _('Unexpected response from remote server:'), l)
2093 if resp == 1:
2096 if resp == 1:
2094 raise util.Abort(_('operation forbidden by server'))
2097 raise util.Abort(_('operation forbidden by server'))
2095 elif resp == 2:
2098 elif resp == 2:
2096 raise util.Abort(_('locking the remote repository failed'))
2099 raise util.Abort(_('locking the remote repository failed'))
2097 elif resp != 0:
2100 elif resp != 0:
2098 raise util.Abort(_('the server sent an unknown error code'))
2101 raise util.Abort(_('the server sent an unknown error code'))
2099 self.ui.status(_('streaming all changes\n'))
2102 self.ui.status(_('streaming all changes\n'))
2100 l = fp.readline()
2103 l = fp.readline()
2101 try:
2104 try:
2102 total_files, total_bytes = map(int, l.split(' ', 1))
2105 total_files, total_bytes = map(int, l.split(' ', 1))
2103 except (ValueError, TypeError):
2106 except (ValueError, TypeError):
2104 raise error.ResponseError(
2107 raise error.ResponseError(
2105 _('Unexpected response from remote server:'), l)
2108 _('Unexpected response from remote server:'), l)
2106 self.ui.status(_('%d files to transfer, %s of data\n') %
2109 self.ui.status(_('%d files to transfer, %s of data\n') %
2107 (total_files, util.bytecount(total_bytes)))
2110 (total_files, util.bytecount(total_bytes)))
2108 start = time.time()
2111 start = time.time()
2109 for i in xrange(total_files):
2112 for i in xrange(total_files):
2110 # XXX doesn't support '\n' or '\r' in filenames
2113 # XXX doesn't support '\n' or '\r' in filenames
2111 l = fp.readline()
2114 l = fp.readline()
2112 try:
2115 try:
2113 name, size = l.split('\0', 1)
2116 name, size = l.split('\0', 1)
2114 size = int(size)
2117 size = int(size)
2115 except (ValueError, TypeError):
2118 except (ValueError, TypeError):
2116 raise error.ResponseError(
2119 raise error.ResponseError(
2117 _('Unexpected response from remote server:'), l)
2120 _('Unexpected response from remote server:'), l)
2118 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2121 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2119 ofp = self.sopener(name, 'w')
2122 ofp = self.sopener(name, 'w')
2120 for chunk in util.filechunkiter(fp, limit=size):
2123 for chunk in util.filechunkiter(fp, limit=size):
2121 ofp.write(chunk)
2124 ofp.write(chunk)
2122 ofp.close()
2125 ofp.close()
2123 elapsed = time.time() - start
2126 elapsed = time.time() - start
2124 if elapsed <= 0:
2127 if elapsed <= 0:
2125 elapsed = 0.001
2128 elapsed = 0.001
2126 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2129 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2127 (util.bytecount(total_bytes), elapsed,
2130 (util.bytecount(total_bytes), elapsed,
2128 util.bytecount(total_bytes / elapsed)))
2131 util.bytecount(total_bytes / elapsed)))
2129 self.invalidate()
2132 self.invalidate()
2130 return len(self.heads()) + 1
2133 return len(self.heads()) + 1
2131
2134
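For reference, stream_in() above reads a simple line-oriented wire format: a numeric status line, a "<file count> <byte count>" line, and then, for every file, a "<name>\0<size>" line followed by exactly <size> raw bytes. A standalone sketch of a parser for that format, assuming only a file-like `fp` positioned at the status line:

    def readstreamclone(fp):
        resp = int(fp.readline())
        if resp != 0:
            raise ValueError("streaming refused by server (code %d)" % resp)
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        entries = []
        for i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            entries.append((name, fp.read(size)))
        return total_bytes, entries

Unlike the real code, which streams each file in bounded chunks via util.filechunkiter(), this sketch reads whole files into memory for brevity.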
2132 def clone(self, remote, heads=[], stream=False):
2135 def clone(self, remote, heads=[], stream=False):
2133 '''clone remote repository.
2136 '''clone remote repository.
2134
2137
2135 keyword arguments:
2138 keyword arguments:
2136 heads: list of revs to clone (forces use of pull)
2139 heads: list of revs to clone (forces use of pull)
2137 stream: use streaming clone if possible'''
2140 stream: use streaming clone if possible'''
2138
2141
2139 # now, all clients that can request uncompressed clones can
2142 # now, all clients that can request uncompressed clones can
2140 # read repo formats supported by all servers that can serve
2143 # read repo formats supported by all servers that can serve
2141 # them.
2144 # them.
2142
2145
2143 # if revlog format changes, client will have to check version
2146 # if revlog format changes, client will have to check version
2144 # and format flags on "stream" capability, and use
2147 # and format flags on "stream" capability, and use
2145 # uncompressed only if compatible.
2148 # uncompressed only if compatible.
2146
2149
2147 if stream and not heads and remote.capable('stream'):
2150 if stream and not heads and remote.capable('stream'):
2148 return self.stream_in(remote)
2151 return self.stream_in(remote)
2149 return self.pull(remote, heads)
2152 return self.pull(remote, heads)
2150
2153
2151 # used to avoid circular references so destructors work
2154 # used to avoid circular references so destructors work
2152 def aftertrans(files):
2155 def aftertrans(files):
2153 renamefiles = [tuple(t) for t in files]
2156 renamefiles = [tuple(t) for t in files]
2154 def a():
2157 def a():
2155 for src, dest in renamefiles:
2158 for src, dest in renamefiles:
2156 util.rename(src, dest)
2159 util.rename(src, dest)
2157 return a
2160 return a
2158
2161
2159 def instance(ui, path, create):
2162 def instance(ui, path, create):
2160 return localrepository(ui, util.drop_scheme('file', path), create)
2163 return localrepository(ui, util.drop_scheme('file', path), create)
2161
2164
2162 def islocal(path):
2165 def islocal(path):
2163 return True
2166 return True
@@ -1,506 +1,507 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev, hex, bin
8 from node import nullid, nullrev, hex, bin
9 from i18n import _
9 from i18n import _
10 import errno, util, os, filemerge, copies, shutil
10 import errno, util, os, filemerge, copies, shutil
11
11
12 class mergestate(object):
12 class mergestate(object):
13 '''track 3-way merge state of individual files'''
13 '''track 3-way merge state of individual files'''
14 def __init__(self, repo):
14 def __init__(self, repo):
15 self._repo = repo
15 self._repo = repo
16 self._read()
16 self._read()
17 def reset(self, node):
17 def reset(self, node=None):
18 self._state = {}
18 self._state = {}
19 self._local = node
19 if node:
20 self._local = node
20 shutil.rmtree(self._repo.join("merge"), True)
21 shutil.rmtree(self._repo.join("merge"), True)
21 def _read(self):
22 def _read(self):
22 self._state = {}
23 self._state = {}
23 try:
24 try:
24 localnode = None
25 localnode = None
25 f = self._repo.opener("merge/state")
26 f = self._repo.opener("merge/state")
26 for i, l in enumerate(f):
27 for i, l in enumerate(f):
27 if i == 0:
28 if i == 0:
28 localnode = l[:-1]
29 localnode = l[:-1]
29 else:
30 else:
30 bits = l[:-1].split("\0")
31 bits = l[:-1].split("\0")
31 self._state[bits[0]] = bits[1:]
32 self._state[bits[0]] = bits[1:]
32 self._local = bin(localnode)
33 self._local = bin(localnode)
33 except IOError, err:
34 except IOError, err:
34 if err.errno != errno.ENOENT:
35 if err.errno != errno.ENOENT:
35 raise
36 raise
36 def _write(self):
37 def _write(self):
37 f = self._repo.opener("merge/state", "w")
38 f = self._repo.opener("merge/state", "w")
38 f.write(hex(self._local) + "\n")
39 f.write(hex(self._local) + "\n")
39 for d, v in self._state.iteritems():
40 for d, v in self._state.iteritems():
40 f.write("\0".join([d] + v) + "\n")
41 f.write("\0".join([d] + v) + "\n")
41 def add(self, fcl, fco, fca, fd, flags):
42 def add(self, fcl, fco, fca, fd, flags):
42 hash = util.sha1(fcl.path()).hexdigest()
43 hash = util.sha1(fcl.path()).hexdigest()
43 self._repo.opener("merge/" + hash, "w").write(fcl.data())
44 self._repo.opener("merge/" + hash, "w").write(fcl.data())
44 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
45 self._state[fd] = ['u', hash, fcl.path(), fca.path(),
45 hex(fca.filenode()), fco.path(), flags]
46 hex(fca.filenode()), fco.path(), flags]
46 self._write()
47 self._write()
47 def __contains__(self, dfile):
48 def __contains__(self, dfile):
48 return dfile in self._state
49 return dfile in self._state
49 def __getitem__(self, dfile):
50 def __getitem__(self, dfile):
50 return self._state[dfile][0]
51 return self._state[dfile][0]
51 def __iter__(self):
52 def __iter__(self):
52 l = self._state.keys()
53 l = self._state.keys()
53 l.sort()
54 l.sort()
54 for f in l:
55 for f in l:
55 yield f
56 yield f
56 def mark(self, dfile, state):
57 def mark(self, dfile, state):
57 self._state[dfile][0] = state
58 self._state[dfile][0] = state
58 self._write()
59 self._write()
59 def resolve(self, dfile, wctx, octx):
60 def resolve(self, dfile, wctx, octx):
60 if self[dfile] == 'r':
61 if self[dfile] == 'r':
61 return 0
62 return 0
62 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
63 state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
63 f = self._repo.opener("merge/" + hash)
64 f = self._repo.opener("merge/" + hash)
64 self._repo.wwrite(dfile, f.read(), flags)
65 self._repo.wwrite(dfile, f.read(), flags)
65 fcd = wctx[dfile]
66 fcd = wctx[dfile]
66 fco = octx[ofile]
67 fco = octx[ofile]
67 fca = self._repo.filectx(afile, fileid=anode)
68 fca = self._repo.filectx(afile, fileid=anode)
68 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
69 r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
69 if not r:
70 if not r:
70 self.mark(dfile, 'r')
71 self.mark(dfile, 'r')
71 return r
72 return r
72
73
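The mergestate class above persists its data in the repository's "merge/state" file: _write() emits one header line with the hex node of the local parent, then one "\0"-separated record per unresolved file, holding the destination path, state ('u' unresolved or 'r' resolved), the hash of the preserved local copy, local path, ancestor path, ancestor filenode (hex), other path, and flags. A minimal standalone reader, written only to illustrate that on-disk format:

    def readmergestate(path):
        local, state = None, {}
        f = open(path)
        for i, line in enumerate(f):
            if i == 0:
                local = line[:-1]          # hex of the local parent node
            else:
                fields = line[:-1].split("\0")
                state[fields[0]] = fields[1:]
        f.close()
        return local, state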
73 def _checkunknown(wctx, mctx):
74 def _checkunknown(wctx, mctx):
74 "check for collisions between unknown files and files in mctx"
75 "check for collisions between unknown files and files in mctx"
75 for f in wctx.unknown():
76 for f in wctx.unknown():
76 if f in mctx and mctx[f].cmp(wctx[f].data()):
77 if f in mctx and mctx[f].cmp(wctx[f].data()):
77 raise util.Abort(_("untracked file in working directory differs"
78 raise util.Abort(_("untracked file in working directory differs"
78 " from file in requested revision: '%s'") % f)
79 " from file in requested revision: '%s'") % f)
79
80
80 def _checkcollision(mctx):
81 def _checkcollision(mctx):
81 "check for case folding collisions in the destination context"
82 "check for case folding collisions in the destination context"
82 folded = {}
83 folded = {}
83 for fn in mctx:
84 for fn in mctx:
84 fold = fn.lower()
85 fold = fn.lower()
85 if fold in folded:
86 if fold in folded:
86 raise util.Abort(_("case-folding collision between %s and %s")
87 raise util.Abort(_("case-folding collision between %s and %s")
87 % (fn, folded[fold]))
88 % (fn, folded[fold]))
88 folded[fold] = fn
89 folded[fold] = fn
89
90
90 def _forgetremoved(wctx, mctx, branchmerge):
91 def _forgetremoved(wctx, mctx, branchmerge):
91 """
92 """
92 Forget removed files
93 Forget removed files
93
94
94 If we're jumping between revisions (as opposed to merging), and if
95 If we're jumping between revisions (as opposed to merging), and if
95 neither the working directory nor the target rev has the file,
96 neither the working directory nor the target rev has the file,
96 then we need to remove it from the dirstate, to prevent the
97 then we need to remove it from the dirstate, to prevent the
97 dirstate from listing the file when it is no longer in the
98 dirstate from listing the file when it is no longer in the
98 manifest.
99 manifest.
99
100
100 If we're merging, and the other revision has removed a file
101 If we're merging, and the other revision has removed a file
101 that is not present in the working directory, we need to mark it
102 that is not present in the working directory, we need to mark it
102 as removed.
103 as removed.
103 """
104 """
104
105
105 action = []
106 action = []
106 state = branchmerge and 'r' or 'f'
107 state = branchmerge and 'r' or 'f'
107 for f in wctx.deleted():
108 for f in wctx.deleted():
108 if f not in mctx:
109 if f not in mctx:
109 action.append((f, state))
110 action.append((f, state))
110
111
111 if not branchmerge:
112 if not branchmerge:
112 for f in wctx.removed():
113 for f in wctx.removed():
113 if f not in mctx:
114 if f not in mctx:
114 action.append((f, "f"))
115 action.append((f, "f"))
115
116
116 return action
117 return action
117
118
118 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
119 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
119 """
120 """
120 Merge p1 and p2 with ancestor ma and generate merge action list
121 Merge p1 and p2 with ancestor ma and generate merge action list
121
122
122 overwrite = whether we clobber working files
123 overwrite = whether we clobber working files
123 partial = function to filter file lists
124 partial = function to filter file lists
124 """
125 """
125
126
126 repo.ui.note(_("resolving manifests\n"))
127 repo.ui.note(_("resolving manifests\n"))
127 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
128 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
128 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
129 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
129
130
130 m1 = p1.manifest()
131 m1 = p1.manifest()
131 m2 = p2.manifest()
132 m2 = p2.manifest()
132 ma = pa.manifest()
133 ma = pa.manifest()
133 backwards = (pa == p2)
134 backwards = (pa == p2)
134 action = []
135 action = []
135 copy, copied, diverge = {}, {}, {}
136 copy, copied, diverge = {}, {}, {}
136
137
137 def fmerge(f, f2=None, fa=None):
138 def fmerge(f, f2=None, fa=None):
138 """merge flags"""
139 """merge flags"""
139 if not f2:
140 if not f2:
140 f2 = f
141 f2 = f
141 fa = f
142 fa = f
142 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
143 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
143 if m == n: # flags agree
144 if m == n: # flags agree
144 return m # unchanged
145 return m # unchanged
145 if m and n: # flags are set but don't agree
146 if m and n: # flags are set but don't agree
146 if not a: # both differ from parent
147 if not a: # both differ from parent
147 r = repo.ui.prompt(
148 r = repo.ui.prompt(
148 _(" conflicting flags for %s\n"
149 _(" conflicting flags for %s\n"
149 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
150 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
150 return r != "n" and r or ''
151 return r != "n" and r or ''
151 if m == a:
152 if m == a:
152 return n # changed from m to n
153 return n # changed from m to n
153 return m # changed from n to m
154 return m # changed from n to m
154 if m and m != a: # changed from a to m
155 if m and m != a: # changed from a to m
155 return m
156 return m
156 if n and n != a: # changed from a to n
157 if n and n != a: # changed from a to n
157 return n
158 return n
158 return '' # flag was cleared
159 return '' # flag was cleared
159
160
160 def act(msg, m, f, *args):
161 def act(msg, m, f, *args):
161 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
162 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
162 action.append((f, m) + args)
163 action.append((f, m) + args)
163
164
164 if pa and not (backwards or overwrite):
165 if pa and not (backwards or overwrite):
165 if repo.ui.configbool("merge", "followcopies", True):
166 if repo.ui.configbool("merge", "followcopies", True):
166 dirs = repo.ui.configbool("merge", "followdirs", True)
167 dirs = repo.ui.configbool("merge", "followdirs", True)
167 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
168 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
168 copied = dict.fromkeys(copy.values())
169 copied = dict.fromkeys(copy.values())
169 for of, fl in diverge.iteritems():
170 for of, fl in diverge.iteritems():
170 act("divergent renames", "dr", of, fl)
171 act("divergent renames", "dr", of, fl)
171
172
172 # Compare manifests
173 # Compare manifests
173 for f, n in m1.iteritems():
174 for f, n in m1.iteritems():
174 if partial and not partial(f):
175 if partial and not partial(f):
175 continue
176 continue
176 if f in m2:
177 if f in m2:
177 if overwrite or backwards:
178 if overwrite or backwards:
178 rflags = m2.flags(f)
179 rflags = m2.flags(f)
179 else:
180 else:
180 rflags = fmerge(f)
181 rflags = fmerge(f)
181 # are files different?
182 # are files different?
182 if n != m2[f]:
183 if n != m2[f]:
183 a = ma.get(f, nullid)
184 a = ma.get(f, nullid)
184 # are we clobbering?
185 # are we clobbering?
185 if overwrite:
186 if overwrite:
186 act("clobbering", "g", f, rflags)
187 act("clobbering", "g", f, rflags)
187 # or are we going back in time and clean?
188 # or are we going back in time and clean?
188 elif backwards:
189 elif backwards:
189 if not n[20:] or not p2[f].cmp(p1[f].data()):
190 if not n[20:] or not p2[f].cmp(p1[f].data()):
190 act("reverting", "g", f, rflags)
191 act("reverting", "g", f, rflags)
191 # are both different from the ancestor?
192 # are both different from the ancestor?
192 elif n != a and m2[f] != a:
193 elif n != a and m2[f] != a:
193 act("versions differ", "m", f, f, f, rflags, False)
194 act("versions differ", "m", f, f, f, rflags, False)
194 # is remote's version newer?
195 # is remote's version newer?
195 elif m2[f] != a:
196 elif m2[f] != a:
196 act("remote is newer", "g", f, rflags)
197 act("remote is newer", "g", f, rflags)
197 # local is newer, not overwrite, check mode bits
198 # local is newer, not overwrite, check mode bits
198 elif m1.flags(f) != rflags:
199 elif m1.flags(f) != rflags:
199 act("update permissions", "e", f, rflags)
200 act("update permissions", "e", f, rflags)
200 # contents same, check mode bits
201 # contents same, check mode bits
201 elif m1.flags(f) != rflags:
202 elif m1.flags(f) != rflags:
202 act("update permissions", "e", f, rflags)
203 act("update permissions", "e", f, rflags)
203 elif f in copied:
204 elif f in copied:
204 continue
205 continue
205 elif f in copy:
206 elif f in copy:
206 f2 = copy[f]
207 f2 = copy[f]
207 if f2 not in m2: # directory rename
208 if f2 not in m2: # directory rename
208 act("remote renamed directory to " + f2, "d",
209 act("remote renamed directory to " + f2, "d",
209 f, None, f2, m1.flags(f))
210 f, None, f2, m1.flags(f))
210 elif f2 in m1: # case 2 A,B/B/B
211 elif f2 in m1: # case 2 A,B/B/B
211 act("local copied to " + f2, "m",
212 act("local copied to " + f2, "m",
212 f, f2, f, fmerge(f, f2, f2), False)
213 f, f2, f, fmerge(f, f2, f2), False)
213 else: # case 4,21 A/B/B
214 else: # case 4,21 A/B/B
214 act("local moved to " + f2, "m",
215 act("local moved to " + f2, "m",
215 f, f2, f, fmerge(f, f2, f2), False)
216 f, f2, f, fmerge(f, f2, f2), False)
216 elif f in ma:
217 elif f in ma:
217 if n != ma[f] and not overwrite:
218 if n != ma[f] and not overwrite:
218 if repo.ui.prompt(
219 if repo.ui.prompt(
219 _(" local changed %s which remote deleted\n"
220 _(" local changed %s which remote deleted\n"
220 "use (c)hanged version or (d)elete?") % f,
221 "use (c)hanged version or (d)elete?") % f,
221 _("[cd]"), _("c")) == _("d"):
222 _("[cd]"), _("c")) == _("d"):
222 act("prompt delete", "r", f)
223 act("prompt delete", "r", f)
223 act("prompt keep", "a", f)
224 act("prompt keep", "a", f)
224 else:
225 else:
225 act("other deleted", "r", f)
226 act("other deleted", "r", f)
226 else:
227 else:
227 # file is created on branch or in working directory
228 # file is created on branch or in working directory
228 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
229 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
229 act("remote deleted", "r", f)
230 act("remote deleted", "r", f)
230
231
231 for f, n in m2.iteritems():
232 for f, n in m2.iteritems():
232 if partial and not partial(f):
233 if partial and not partial(f):
233 continue
234 continue
234 if f in m1:
235 if f in m1:
235 continue
236 continue
236 if f in copied:
237 if f in copied:
237 continue
238 continue
238 if f in copy:
239 if f in copy:
239 f2 = copy[f]
240 f2 = copy[f]
240 if f2 not in m1: # directory rename
241 if f2 not in m1: # directory rename
241 act("local renamed directory to " + f2, "d",
242 act("local renamed directory to " + f2, "d",
242 None, f, f2, m2.flags(f))
243 None, f, f2, m2.flags(f))
243 elif f2 in m2: # rename case 1, A/A,B/A
244 elif f2 in m2: # rename case 1, A/A,B/A
244 act("remote copied to " + f, "m",
245 act("remote copied to " + f, "m",
245 f2, f, f, fmerge(f2, f, f2), False)
246 f2, f, f, fmerge(f2, f, f2), False)
246 else: # case 3,20 A/B/A
247 else: # case 3,20 A/B/A
247 act("remote moved to " + f, "m",
248 act("remote moved to " + f, "m",
248 f2, f, f, fmerge(f2, f, f2), True)
249 f2, f, f, fmerge(f2, f, f2), True)
249 elif f in ma:
250 elif f in ma:
250 if overwrite or backwards:
251 if overwrite or backwards:
251 act("recreating", "g", f, m2.flags(f))
252 act("recreating", "g", f, m2.flags(f))
252 elif n != ma[f]:
253 elif n != ma[f]:
253 if repo.ui.prompt(
254 if repo.ui.prompt(
254 _("remote changed %s which local deleted\n"
255 _("remote changed %s which local deleted\n"
255 "use (c)hanged version or leave (d)eleted?") % f,
256 "use (c)hanged version or leave (d)eleted?") % f,
256 _("[cd]"), _("c")) == _("c"):
257 _("[cd]"), _("c")) == _("c"):
257 act("prompt recreating", "g", f, m2.flags(f))
258 act("prompt recreating", "g", f, m2.flags(f))
258 else:
259 else:
259 act("remote created", "g", f, m2.flags(f))
260 act("remote created", "g", f, m2.flags(f))
260
261
261 return action
262 return action
262
263
263 def actioncmp(a1, a2):
264 def actioncmp(a1, a2):
264 m1 = a1[1]
265 m1 = a1[1]
265 m2 = a2[1]
266 m2 = a2[1]
266 if m1 == m2:
267 if m1 == m2:
267 return cmp(a1, a2)
268 return cmp(a1, a2)
268 if m1 == 'r':
269 if m1 == 'r':
269 return -1
270 return -1
270 if m2 == 'r':
271 if m2 == 'r':
271 return 1
272 return 1
272 return cmp(a1, a2)
273 return cmp(a1, a2)
273
274
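actioncmp() above exists only to push removals to the front of the action list before applyupdates() walks it, so a file is deleted before anything else is written in its place; ties fall back to plain tuple comparison. An illustrative Python 2 snippet (file names invented):

    actions = [
        ('new.txt', 'g', ''),                                  # get
        ('old.txt', 'r'),                                      # remove
        ('both.txt', 'm', 'both.txt', 'both.txt', '', False),  # merge
    ]
    actions.sort(actioncmp)
    # actions[0] is now ('old.txt', 'r'); the remaining entries follow
    # ordinary tuple ordering.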
274 def applyupdates(repo, action, wctx, mctx):
275 def applyupdates(repo, action, wctx, mctx):
275 "apply the merge action list to the working directory"
276 "apply the merge action list to the working directory"
276
277
277 updated, merged, removed, unresolved = 0, 0, 0, 0
278 updated, merged, removed, unresolved = 0, 0, 0, 0
278 ms = mergestate(repo)
279 ms = mergestate(repo)
279 ms.reset(wctx.parents()[0].node())
280 ms.reset(wctx.parents()[0].node())
280 moves = []
281 moves = []
281 action.sort(actioncmp)
282 action.sort(actioncmp)
282
283
283 # prescan for merges
284 # prescan for merges
284 for a in action:
285 for a in action:
285 f, m = a[:2]
286 f, m = a[:2]
286 if m == 'm': # merge
287 if m == 'm': # merge
287 f2, fd, flags, move = a[2:]
288 f2, fd, flags, move = a[2:]
288 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
289 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
289 fcl = wctx[f]
290 fcl = wctx[f]
290 fco = mctx[f2]
291 fco = mctx[f2]
291 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
292 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
292 ms.add(fcl, fco, fca, fd, flags)
293 ms.add(fcl, fco, fca, fd, flags)
293 if f != fd and move:
294 if f != fd and move:
294 moves.append(f)
295 moves.append(f)
295
296
296 # remove renamed files after safely stored
297 # remove renamed files after safely stored
297 for f in moves:
298 for f in moves:
298 if util.lexists(repo.wjoin(f)):
299 if util.lexists(repo.wjoin(f)):
299 repo.ui.debug(_("removing %s\n") % f)
300 repo.ui.debug(_("removing %s\n") % f)
300 os.unlink(repo.wjoin(f))
301 os.unlink(repo.wjoin(f))
301
302
302 audit_path = util.path_auditor(repo.root)
303 audit_path = util.path_auditor(repo.root)
303
304
304 for a in action:
305 for a in action:
305 f, m = a[:2]
306 f, m = a[:2]
306 if f and f[0] == "/":
307 if f and f[0] == "/":
307 continue
308 continue
308 if m == "r": # remove
309 if m == "r": # remove
309 repo.ui.note(_("removing %s\n") % f)
310 repo.ui.note(_("removing %s\n") % f)
310 audit_path(f)
311 audit_path(f)
311 try:
312 try:
312 util.unlink(repo.wjoin(f))
313 util.unlink(repo.wjoin(f))
313 except OSError, inst:
314 except OSError, inst:
314 if inst.errno != errno.ENOENT:
315 if inst.errno != errno.ENOENT:
315 repo.ui.warn(_("update failed to remove %s: %s!\n") %
316 repo.ui.warn(_("update failed to remove %s: %s!\n") %
316 (f, inst.strerror))
317 (f, inst.strerror))
317 removed += 1
318 removed += 1
318 elif m == "m": # merge
319 elif m == "m": # merge
319 f2, fd, flags, move = a[2:]
320 f2, fd, flags, move = a[2:]
320 r = ms.resolve(fd, wctx, mctx)
321 r = ms.resolve(fd, wctx, mctx)
321 if r > 0:
322 if r > 0:
322 unresolved += 1
323 unresolved += 1
323 else:
324 else:
324 if r is None:
325 if r is None:
325 updated += 1
326 updated += 1
326 else:
327 else:
327 merged += 1
328 merged += 1
328 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
329 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
329 if f != fd and move and util.lexists(repo.wjoin(f)):
330 if f != fd and move and util.lexists(repo.wjoin(f)):
330 repo.ui.debug(_("removing %s\n") % f)
331 repo.ui.debug(_("removing %s\n") % f)
331 os.unlink(repo.wjoin(f))
332 os.unlink(repo.wjoin(f))
332 elif m == "g": # get
333 elif m == "g": # get
333 flags = a[2]
334 flags = a[2]
334 repo.ui.note(_("getting %s\n") % f)
335 repo.ui.note(_("getting %s\n") % f)
335 t = mctx.filectx(f).data()
336 t = mctx.filectx(f).data()
336 repo.wwrite(f, t, flags)
337 repo.wwrite(f, t, flags)
337 updated += 1
338 updated += 1
338 elif m == "d": # directory rename
339 elif m == "d": # directory rename
339 f2, fd, flags = a[2:]
340 f2, fd, flags = a[2:]
340 if f:
341 if f:
341 repo.ui.note(_("moving %s to %s\n") % (f, fd))
342 repo.ui.note(_("moving %s to %s\n") % (f, fd))
342 t = wctx.filectx(f).data()
343 t = wctx.filectx(f).data()
343 repo.wwrite(fd, t, flags)
344 repo.wwrite(fd, t, flags)
344 util.unlink(repo.wjoin(f))
345 util.unlink(repo.wjoin(f))
345 if f2:
346 if f2:
346 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
347 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
347 t = mctx.filectx(f2).data()
348 t = mctx.filectx(f2).data()
348 repo.wwrite(fd, t, flags)
349 repo.wwrite(fd, t, flags)
349 updated += 1
350 updated += 1
350 elif m == "dr": # divergent renames
351 elif m == "dr": # divergent renames
351 fl = a[2]
352 fl = a[2]
352 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
353 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
353 for nf in fl:
354 for nf in fl:
354 repo.ui.warn(" %s\n" % nf)
355 repo.ui.warn(" %s\n" % nf)
355 elif m == "e": # exec
356 elif m == "e": # exec
356 flags = a[2]
357 flags = a[2]
357 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
358 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
358
359
359 return updated, merged, removed, unresolved
360 return updated, merged, removed, unresolved
360
361
361 def recordupdates(repo, action, branchmerge):
362 def recordupdates(repo, action, branchmerge):
362 "record merge actions to the dirstate"
363 "record merge actions to the dirstate"
363
364
364 for a in action:
365 for a in action:
365 f, m = a[:2]
366 f, m = a[:2]
366 if m == "r": # remove
367 if m == "r": # remove
367 if branchmerge:
368 if branchmerge:
368 repo.dirstate.remove(f)
369 repo.dirstate.remove(f)
369 else:
370 else:
370 repo.dirstate.forget(f)
371 repo.dirstate.forget(f)
371 elif m == "a": # re-add
372 elif m == "a": # re-add
372 if not branchmerge:
373 if not branchmerge:
373 repo.dirstate.add(f)
374 repo.dirstate.add(f)
374 elif m == "f": # forget
375 elif m == "f": # forget
375 repo.dirstate.forget(f)
376 repo.dirstate.forget(f)
376 elif m == "e": # exec change
377 elif m == "e": # exec change
377 repo.dirstate.normallookup(f)
378 repo.dirstate.normallookup(f)
378 elif m == "g": # get
379 elif m == "g": # get
379 if branchmerge:
380 if branchmerge:
380 repo.dirstate.normaldirty(f)
381 repo.dirstate.normaldirty(f)
381 else:
382 else:
382 repo.dirstate.normal(f)
383 repo.dirstate.normal(f)
383 elif m == "m": # merge
384 elif m == "m": # merge
384 f2, fd, flag, move = a[2:]
385 f2, fd, flag, move = a[2:]
385 if branchmerge:
386 if branchmerge:
386 # We've done a branch merge, mark this file as merged
387 # We've done a branch merge, mark this file as merged
387 # so that we properly record the merger later
388 # so that we properly record the merger later
388 repo.dirstate.merge(fd)
389 repo.dirstate.merge(fd)
389 if f != f2: # copy/rename
390 if f != f2: # copy/rename
390 if move:
391 if move:
391 repo.dirstate.remove(f)
392 repo.dirstate.remove(f)
392 if f != fd:
393 if f != fd:
393 repo.dirstate.copy(f, fd)
394 repo.dirstate.copy(f, fd)
394 else:
395 else:
395 repo.dirstate.copy(f2, fd)
396 repo.dirstate.copy(f2, fd)
396 else:
397 else:
397 # We've update-merged a locally modified file, so
398 # We've update-merged a locally modified file, so
398 # we set the dirstate to emulate a normal checkout
399 # we set the dirstate to emulate a normal checkout
399 # of that file some time in the past. Thus our
400 # of that file some time in the past. Thus our
400 # merge will appear as a normal local file
401 # merge will appear as a normal local file
401 # modification.
402 # modification.
402 repo.dirstate.normallookup(fd)
403 repo.dirstate.normallookup(fd)
403 if move:
404 if move:
404 repo.dirstate.forget(f)
405 repo.dirstate.forget(f)
405 elif m == "d": # directory rename
406 elif m == "d": # directory rename
406 f2, fd, flag = a[2:]
407 f2, fd, flag = a[2:]
407 if not f2 and f not in repo.dirstate:
408 if not f2 and f not in repo.dirstate:
408 # untracked file moved
409 # untracked file moved
409 continue
410 continue
410 if branchmerge:
411 if branchmerge:
411 repo.dirstate.add(fd)
412 repo.dirstate.add(fd)
412 if f:
413 if f:
413 repo.dirstate.remove(f)
414 repo.dirstate.remove(f)
414 repo.dirstate.copy(f, fd)
415 repo.dirstate.copy(f, fd)
415 if f2:
416 if f2:
416 repo.dirstate.copy(f2, fd)
417 repo.dirstate.copy(f2, fd)
417 else:
418 else:
418 repo.dirstate.normal(fd)
419 repo.dirstate.normal(fd)
419 if f:
420 if f:
420 repo.dirstate.forget(f)
421 repo.dirstate.forget(f)
421
422
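As a summary (not code used by Mercurial itself), these are the single-letter action codes that the calculate phase emits and that applyupdates() and recordupdates() above dispatch on:

    ACTIONS = {
        'r':  'remove file',
        'a':  're-add file kept despite a remote delete',
        'f':  'forget file in the dirstate',
        'e':  'update exec/symlink flags only',
        'g':  'get file from the other revision',
        'm':  'three-way merge file',
        'd':  'directory rename (move or get under the new name)',
        'dr': 'warn about divergent renames',
    }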
422 def update(repo, node, branchmerge, force, partial):
423 def update(repo, node, branchmerge, force, partial):
423 """
424 """
424 Perform a merge between the working directory and the given node
425 Perform a merge between the working directory and the given node
425
426
426 branchmerge = whether to merge between branches
427 branchmerge = whether to merge between branches
427 force = whether to force branch merging or file overwriting
428 force = whether to force branch merging or file overwriting
428 partial = a function to filter file lists (dirstate not updated)
429 partial = a function to filter file lists (dirstate not updated)
429 """
430 """
430
431
431 wlock = repo.wlock()
432 wlock = repo.wlock()
432 try:
433 try:
433 wc = repo[None]
434 wc = repo[None]
434 if node is None:
435 if node is None:
435 # tip of current branch
436 # tip of current branch
436 try:
437 try:
437 node = repo.branchtags()[wc.branch()]
438 node = repo.branchtags()[wc.branch()]
438 except KeyError:
439 except KeyError:
439 if wc.branch() == "default": # no default branch!
440 if wc.branch() == "default": # no default branch!
440 node = repo.lookup("tip") # update to tip
441 node = repo.lookup("tip") # update to tip
441 else:
442 else:
442 raise util.Abort(_("branch %s not found") % wc.branch())
443 raise util.Abort(_("branch %s not found") % wc.branch())
443 overwrite = force and not branchmerge
444 overwrite = force and not branchmerge
444 pl = wc.parents()
445 pl = wc.parents()
445 p1, p2 = pl[0], repo[node]
446 p1, p2 = pl[0], repo[node]
446 pa = p1.ancestor(p2)
447 pa = p1.ancestor(p2)
447 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
448 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
448 fastforward = False
449 fastforward = False
449
450
450 ### check phase
451 ### check phase
451 if not overwrite and len(pl) > 1:
452 if not overwrite and len(pl) > 1:
452 raise util.Abort(_("outstanding uncommitted merges"))
453 raise util.Abort(_("outstanding uncommitted merges"))
453 if branchmerge:
454 if branchmerge:
454 if pa == p2:
455 if pa == p2:
455 raise util.Abort(_("can't merge with ancestor"))
456 raise util.Abort(_("can't merge with ancestor"))
456 elif pa == p1:
457 elif pa == p1:
457 if p1.branch() != p2.branch():
458 if p1.branch() != p2.branch():
458 fastforward = True
459 fastforward = True
459 else:
460 else:
460 raise util.Abort(_("nothing to merge (use 'hg update'"
461 raise util.Abort(_("nothing to merge (use 'hg update'"
461 " or check 'hg heads')"))
462 " or check 'hg heads')"))
462 if not force and (wc.files() or wc.deleted()):
463 if not force and (wc.files() or wc.deleted()):
463 raise util.Abort(_("outstanding uncommitted changes"))
464 raise util.Abort(_("outstanding uncommitted changes"))
464 elif not overwrite:
465 elif not overwrite:
465 if pa == p1 or pa == p2: # linear
466 if pa == p1 or pa == p2: # linear
466 pass # all good
467 pass # all good
467 elif p1.branch() == p2.branch():
468 elif p1.branch() == p2.branch():
468 if wc.files() or wc.deleted():
469 if wc.files() or wc.deleted():
469 raise util.Abort(_("crosses branches (use 'hg merge' or "
470 raise util.Abort(_("crosses branches (use 'hg merge' or "
470 "'hg update -C' to discard changes)"))
471 "'hg update -C' to discard changes)"))
471 raise util.Abort(_("crosses branches (use 'hg merge' "
472 raise util.Abort(_("crosses branches (use 'hg merge' "
472 "or 'hg update -C')"))
473 "or 'hg update -C')"))
473 elif wc.files() or wc.deleted():
474 elif wc.files() or wc.deleted():
474 raise util.Abort(_("crosses named branches (use "
475 raise util.Abort(_("crosses named branches (use "
475 "'hg update -C' to discard changes)"))
476 "'hg update -C' to discard changes)"))
476 else:
477 else:
477 # Allow jumping branches if there are no changes
478 # Allow jumping branches if there are no changes
478 overwrite = True
479 overwrite = True
479
480
480 ### calculate phase
481 ### calculate phase
481 action = []
482 action = []
482 if not force:
483 if not force:
483 _checkunknown(wc, p2)
484 _checkunknown(wc, p2)
484 if not util.checkcase(repo.path):
485 if not util.checkcase(repo.path):
485 _checkcollision(p2)
486 _checkcollision(p2)
486 action += _forgetremoved(wc, p2, branchmerge)
487 action += _forgetremoved(wc, p2, branchmerge)
487 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
488 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
488
489
489 ### apply phase
490 ### apply phase
490 if not branchmerge: # just jump to the new rev
491 if not branchmerge: # just jump to the new rev
491 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
492 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
492 if not partial:
493 if not partial:
493 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
494 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
494
495
495 stats = applyupdates(repo, action, wc, p2)
496 stats = applyupdates(repo, action, wc, p2)
496
497
497 if not partial:
498 if not partial:
498 recordupdates(repo, action, branchmerge)
499 recordupdates(repo, action, branchmerge)
499 repo.dirstate.setparents(fp1, fp2)
500 repo.dirstate.setparents(fp1, fp2)
500 if not branchmerge and not fastforward:
501 if not branchmerge and not fastforward:
501 repo.dirstate.setbranch(p2.branch())
502 repo.dirstate.setbranch(p2.branch())
502 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
503 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
503
504
504 return stats
505 return stats
505 finally:
506 finally:
506 del wlock
507 del wlock
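A hedged sketch of how callers typically drive update() above, matching the rules enforced in its check phase; `repo` and `node` are assumed to be a local repository and a target changeset, and the actual call sites live in hg.py rather than in this module:

    # plain 'hg update' to a linearly related revision
    stats = update(repo, node, False, False, None)
    # 'hg merge' with another head
    stats = update(repo, node, True, False, None)
    # 'hg update -C': clobber local changes
    stats = update(repo, node, False, True, None)
    updated, merged, removed, unresolved = stats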