Use UTF-8 in .hg/branch
Alexis S. L. Carvalho
changeset r3862:46abbed0 (branch: default)
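
This changeset makes hg branch store the branch name in .hg/branch encoded as UTF-8 (with a trailing newline) by passing it through util.fromlocal(), and converts it back to the local encoding with util.tolocal() before displaying it. Previously the label was written raw, so the bytes on disk depended on the committer's locale. Below is a minimal standalone sketch of the round-trip, assuming a Latin-1 locale and using plain codec calls in place of Mercurial's util.fromlocal/util.tolocal helpers (which additionally look up the configured encoding and report conversion failures):

    # Illustrative sketch only -- not Mercurial's real helpers.  It mimics the
    # conversion that util.fromlocal()/util.tolocal() perform around .hg/branch,
    # assuming the local encoding is Latin-1 for the sake of the example.
    local_encoding = 'latin-1'

    def fromlocal(s):
        # local encoding -> UTF-8: what the new code writes to .hg/branch
        return s.decode(local_encoding).encode('utf-8')

    def tolocal(s):
        # UTF-8 as stored in .hg/branch -> local encoding for display
        return s.decode('utf-8').encode(local_encoding)

    label = '\xe9t\xe9'                      # "été" as Latin-1 bytes
    stored = fromlocal(label) + '\n'         # contents written by the new code
    assert stored == '\xc3\xa9t\xc3\xa9\n'   # UTF-8 bytes plus trailing newline
    assert tolocal(stored.rstrip('\n')) == label

The point of storing UTF-8 on disk is that clones of the same repository agree on the branch name even when their users run under different locales; conversion to and from the local encoding happens only at the command-line boundary.
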
@@ -1,3314 +1,3314 @@
1 # commands.py - command processing for mercurial
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from demandload import demandload
9 from node import *
10 from i18n import gettext as _
11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
13 demandload(globals(), "difflib patch time help mdiff tempfile")
14 demandload(globals(), "traceback errno version atexit")
15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
16
17 class UnknownCommand(Exception):
18     """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
20     """Exception raised if command shortcut matches more than one command."""
21
22 def bail_if_changed(repo):
23     modified, added, removed, deleted = repo.status()[:4]
24     if modified or added or removed or deleted:
25         raise util.Abort(_("outstanding uncommitted changes"))
26
27 def logmessage(opts):
28     """ get the log message according to -m and -l option """
29     message = opts['message']
30     logfile = opts['logfile']
31
32     if message and logfile:
33         raise util.Abort(_('options --message and --logfile are mutually '
34                            'exclusive'))
35     if not message and logfile:
36         try:
37             if logfile == '-':
38                 message = sys.stdin.read()
39             else:
40                 message = open(logfile).read()
41         except IOError, inst:
42             raise util.Abort(_("can't read commit message '%s': %s") %
43                              (logfile, inst.strerror))
44     return message
45
46 def setremoteconfig(ui, opts):
47     "copy remote options to ui tree"
48     if opts.get('ssh'):
49         ui.setconfig("ui", "ssh", opts['ssh'])
50     if opts.get('remotecmd'):
51         ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
53 # Commands start here, listed alphabetically
54
55 def add(ui, repo, *pats, **opts):
56     """add the specified files on the next commit
57
58     Schedule files to be version controlled and added to the repository.
59
60     The files will be added to the repository at the next commit. To
61     undo an add before that, see hg revert.
62
63     If no names are given, add all files in the repository.
64     """
65
66     names = []
67     for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68         if exact:
69             if ui.verbose:
70                 ui.status(_('adding %s\n') % rel)
71             names.append(abs)
72         elif repo.dirstate.state(abs) == '?':
73             ui.status(_('adding %s\n') % rel)
74             names.append(abs)
75     if not opts.get('dry_run'):
76         repo.add(names)
77
78 def addremove(ui, repo, *pats, **opts):
79     """add all new files, delete all missing files
80
81     Add all new files and remove all missing files from the repository.
82
83     New files are ignored if they match any of the patterns in .hgignore. As
84     with add, these changes take effect at the next commit.
85
86     Use the -s option to detect renamed files. With a parameter > 0,
87     this compares every removed file with every added file and records
88     those similar enough as renames. This option takes a percentage
89     between 0 (disabled) and 100 (files must be identical) as its
90     parameter. Detecting renamed files this way can be expensive.
91     """
92     sim = float(opts.get('similarity') or 0)
93     if sim < 0 or sim > 100:
94         raise util.Abort(_('similarity must be between 0 and 100'))
95     return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
97 def annotate(ui, repo, *pats, **opts):
98     """show changeset information per file line
99
100     List changes in files, showing the revision id responsible for each line
101
102     This command is useful to discover who did a change or when a change took
103     place.
104
105     Without the -a option, annotate will avoid processing files it
106     detects as binary. With -a, annotate will generate an annotation
107     anyway, probably with undesirable results.
108     """
109     getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
111     if not pats:
112         raise util.Abort(_('at least one file name or pattern required'))
113
114     opmap = [['user', lambda x: ui.shortuser(x.user())],
115              ['number', lambda x: str(x.rev())],
116              ['changeset', lambda x: short(x.node())],
117              ['date', getdate], ['follow', lambda x: x.path()]]
118     if (not opts['user'] and not opts['changeset'] and not opts['date']
119         and not opts['follow']):
120         opts['number'] = 1
121
122     ctx = repo.changectx(opts['rev'])
123
124     for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125                                              node=ctx.node()):
126         fctx = ctx.filectx(abs)
127         if not opts['text'] and util.binary(fctx.data()):
128             ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129             continue
130
131         lines = fctx.annotate(follow=opts.get('follow'))
132         pieces = []
133
134         for o, f in opmap:
135             if opts[o]:
136                 l = [f(n) for n, dummy in lines]
137                 if l:
138                     m = max(map(len, l))
139                     pieces.append(["%*s" % (m, x) for x in l])
140
141         if pieces:
142             for p, l in zip(zip(*pieces), lines):
143                 ui.write("%s: %s" % (" ".join(p), l[1]))
144
145 def archive(ui, repo, dest, **opts):
146     '''create unversioned archive of a repository revision
147
148     By default, the revision used is the parent of the working
149     directory; use "-r" to specify a different revision.
150
151     To specify the type of archive to create, use "-t". Valid
152     types are:
153
154     "files" (default): a directory full of files
155     "tar": tar archive, uncompressed
156     "tbz2": tar archive, compressed using bzip2
157     "tgz": tar archive, compressed using gzip
158     "uzip": zip archive, uncompressed
159     "zip": zip archive, compressed using deflate
160
161     The exact name of the destination archive or directory is given
162     using a format string; see "hg help export" for details.
163
164     Each member added to an archive file has a directory prefix
165     prepended. Use "-p" to specify a format string for the prefix.
166     The default is the basename of the archive, with suffixes removed.
167     '''
168
169     node = repo.changectx(opts['rev']).node()
170     dest = cmdutil.make_filename(repo, dest, node)
171     if os.path.realpath(dest) == repo.root:
172         raise util.Abort(_('repository root cannot be destination'))
173     dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174     kind = opts.get('type') or 'files'
175     prefix = opts['prefix']
176     if dest == '-':
177         if kind == 'files':
178             raise util.Abort(_('cannot archive plain files to stdout'))
179         dest = sys.stdout
180         if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181     prefix = cmdutil.make_filename(repo, prefix, node)
182     archival.archive(repo, dest, node, kind, not opts['no_decode'],
183                      matchfn, prefix)
184
185 def backout(ui, repo, rev, **opts):
186     '''reverse effect of earlier changeset
187
188     Commit the backed out changes as a new changeset. The new
189     changeset is a child of the backed out changeset.
190
191     If you back out a changeset other than the tip, a new head is
192     created. This head is the parent of the working directory. If
193     you back out an old changeset, your working directory will appear
194     old after the backout. You should merge the backout changeset
195     with another head.
196
197     The --merge option remembers the parent of the working directory
198     before starting the backout, then merges the new head with that
199     changeset afterwards. This saves you from doing the merge by
200     hand. The result of this merge is not committed, as for a normal
201     merge.'''
202
203     bail_if_changed(repo)
204     op1, op2 = repo.dirstate.parents()
205     if op2 != nullid:
206         raise util.Abort(_('outstanding uncommitted merge'))
207     node = repo.lookup(rev)
208     p1, p2 = repo.changelog.parents(node)
209     if p1 == nullid:
210         raise util.Abort(_('cannot back out a change with no parents'))
211     if p2 != nullid:
212         if not opts['parent']:
213             raise util.Abort(_('cannot back out a merge changeset without '
214                                '--parent'))
215         p = repo.lookup(opts['parent'])
216         if p not in (p1, p2):
217             raise util.Abort(_('%s is not a parent of %s') %
218                              (short(p), short(node)))
219         parent = p
220     else:
221         if opts['parent']:
222             raise util.Abort(_('cannot use --parent on non-merge changeset'))
223         parent = p1
224     hg.clean(repo, node, show_stats=False)
225     revert_opts = opts.copy()
226     revert_opts['date'] = None
227     revert_opts['all'] = True
228     revert_opts['rev'] = hex(parent)
229     revert(ui, repo, **revert_opts)
230     commit_opts = opts.copy()
231     commit_opts['addremove'] = False
232     if not commit_opts['message'] and not commit_opts['logfile']:
233         commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234         commit_opts['force_editor'] = True
235     commit(ui, repo, **commit_opts)
236     def nice(node):
237         return '%d:%s' % (repo.changelog.rev(node), short(node))
238     ui.status(_('changeset %s backs out changeset %s\n') %
239               (nice(repo.changelog.tip()), nice(node)))
240     if op1 != node:
241         if opts['merge']:
242             ui.status(_('merging with changeset %s\n') % nice(op1))
243             n = _lookup(repo, hex(op1))
244             hg.merge(repo, n)
245         else:
246             ui.status(_('the backout changeset is a new head - '
247                         'do not forget to merge\n'))
248             ui.status(_('(use "backout --merge" '
249                         'if you want to auto-merge)\n'))
250
251 def branch(ui, repo, label=None):
252     """set or show the current branch name
253
254     With <name>, set the current branch name. Otherwise, show the
255     current branch name.
256     """
257
258     if label is not None:
259 -        repo.opener("branch", "w").write(label)
259 +        repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
260     else:
261 -        b = repo.workingctx().branch()
261 +        b = util.tolocal(repo.workingctx().branch())
262         if b:
263             ui.write("%s\n" % b)
264
265 def branches(ui, repo):
266     """list repository named branches
267
268     List the repository's named branches.
269     """
270     b = repo.branchtags()
271     l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
272     l.sort()
273     for r, n, t in l:
274         hexfunc = ui.debugflag and hex or short
275         if ui.quiet:
276             ui.write("%s\n" % t)
277         else:
278             t = util.localsub(t, 30)
279             t += " " * (30 - util.locallen(t))
280             ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))
281
282 def bundle(ui, repo, fname, dest=None, **opts):
283     """create a changegroup file
284
285     Generate a compressed changegroup file collecting changesets not
286     found in the other repository.
287
288     If no destination repository is specified the destination is assumed
289     to have all the nodes specified by one or more --base parameters.
290
291     The bundle file can then be transferred using conventional means and
292     applied to another repository with the unbundle or pull command.
293     This is useful when direct push and pull are not available or when
294     exporting an entire repository is undesirable.
295
296     Applying bundles preserves all changeset contents including
297     permissions, copy/rename information, and revision history.
298     """
299     revs = opts.get('rev') or None
300     if revs:
301         revs = [repo.lookup(rev) for rev in revs]
302     base = opts.get('base')
303     if base:
304         if dest:
305             raise util.Abort(_("--base is incompatible with specifiying "
306                                "a destination"))
307         base = [repo.lookup(rev) for rev in base]
308         # create the right base
309         # XXX: nodesbetween / changegroup* should be "fixed" instead
310         o = []
311         has = {nullid: None}
312         for n in base:
313             has.update(repo.changelog.reachable(n))
314         if revs:
315             visit = list(revs)
316         else:
317             visit = repo.changelog.heads()
318         seen = {}
319         while visit:
320             n = visit.pop(0)
321             parents = [p for p in repo.changelog.parents(n) if p not in has]
322             if len(parents) == 0:
323                 o.insert(0, n)
324             else:
325                 for p in parents:
326                     if p not in seen:
327                         seen[p] = 1
328                         visit.append(p)
329     else:
330         setremoteconfig(ui, opts)
331         dest = ui.expandpath(dest or 'default-push', dest or 'default')
332         other = hg.repository(ui, dest)
333         o = repo.findoutgoing(other, force=opts['force'])
334
335     if revs:
336         cg = repo.changegroupsubset(o, revs, 'bundle')
337     else:
338         cg = repo.changegroup(o, 'bundle')
339     changegroup.writebundle(cg, fname, "HG10BZ")
340
341 def cat(ui, repo, file1, *pats, **opts):
342     """output the latest or given revisions of files
343
344     Print the specified files as they were at the given revision.
345     If no revision is given then working dir parent is used, or tip
346     if no revision is checked out.
347
348     Output may be to a file, in which case the name of the file is
349     given using a format string. The formatting rules are the same as
350     for the export command, with the following additions:
351
352     %s basename of file being printed
353     %d dirname of file being printed, or '.' if in repo root
354     %p root-relative path name of file being printed
355     """
356     ctx = repo.changectx(opts['rev'])
357     for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
358                                              ctx.node()):
359         fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
360         fp.write(ctx.filectx(abs).data())
361
362 def clone(ui, source, dest=None, **opts):
363     """make a copy of an existing repository
364
365     Create a copy of an existing repository in a new directory.
366
367     If no destination directory name is specified, it defaults to the
368     basename of the source.
369
370     The location of the source is added to the new repository's
371     .hg/hgrc file, as the default to be used for future pulls.
372
373     For efficiency, hardlinks are used for cloning whenever the source
374     and destination are on the same filesystem (note this applies only
375     to the repository data, not to the checked out files). Some
376     filesystems, such as AFS, implement hardlinking incorrectly, but
377     do not report errors. In these cases, use the --pull option to
378     avoid hardlinking.
379
380     You can safely clone repositories and checked out files using full
381     hardlinks with
382
383     $ cp -al REPO REPOCLONE
384
385     which is the fastest way to clone. However, the operation is not
386     atomic (making sure REPO is not modified during the operation is
387     up to you) and you have to make sure your editor breaks hardlinks
388     (Emacs and most Linux Kernel tools do so).
389
390     If you use the -r option to clone up to a specific revision, no
391     subsequent revisions will be present in the cloned repository.
392     This option implies --pull, even on local repositories.
393
394     See pull for valid source format details.
395
396     It is possible to specify an ssh:// URL as the destination, but no
397     .hg/hgrc and working directory will be created on the remote side.
398     Look at the help text for the pull command for important details
399     about ssh:// URLs.
400     """
401     setremoteconfig(ui, opts)
402     hg.clone(ui, ui.expandpath(source), dest,
403              pull=opts['pull'],
404              stream=opts['uncompressed'],
405              rev=opts['rev'],
406              update=not opts['noupdate'])
407
408 def commit(ui, repo, *pats, **opts):
409     """commit the specified files or all outstanding changes
410
411     Commit changes to the given files into the repository.
412
413     If a list of files is omitted, all changes reported by "hg status"
414     will be committed.
415
416     If no commit message is specified, the editor configured in your hgrc
417     or in the EDITOR environment variable is started to enter a message.
418     """
419     message = logmessage(opts)
420
421     if opts['addremove']:
422         cmdutil.addremove(repo, pats, opts)
423     fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
424     if pats:
425         status = repo.status(files=fns, match=match)
426         modified, added, removed, deleted, unknown = status[:5]
427         files = modified + added + removed
428         slist = None
429         for f in fns:
430             if f not in files:
431                 rf = repo.wjoin(f)
432                 if f in unknown:
433                     raise util.Abort(_("file %s not tracked!") % rf)
434                 try:
435                     mode = os.lstat(rf)[stat.ST_MODE]
436                 except OSError:
437                     raise util.Abort(_("file %s not found!") % rf)
438                 if stat.S_ISDIR(mode):
439                     name = f + '/'
440                     if slist is None:
441                         slist = list(files)
442                         slist.sort()
443                     i = bisect.bisect(slist, name)
444                     if i >= len(slist) or not slist[i].startswith(name):
445                         raise util.Abort(_("no match under directory %s!")
446                                          % rf)
447                 elif not stat.S_ISREG(mode):
448                     raise util.Abort(_("can't commit %s: "
449                                        "unsupported file type!") % rf)
450     else:
451         files = []
452     try:
453         repo.commit(files, message, opts['user'], opts['date'], match,
454                     force_editor=opts.get('force_editor'))
455     except ValueError, inst:
456         raise util.Abort(str(inst))
457
458 def docopy(ui, repo, pats, opts, wlock):
459     # called with the repo lock held
460     #
461     # hgsep => pathname that uses "/" to separate directories
462     # ossep => pathname that uses os.sep to separate directories
463     cwd = repo.getcwd()
464     errors = 0
465     copied = []
466     targets = {}
467
468     # abs: hgsep
469     # rel: ossep
470     # return: hgsep
471     def okaytocopy(abs, rel, exact):
472         reasons = {'?': _('is not managed'),
473                    'a': _('has been marked for add'),
474                    'r': _('has been marked for remove')}
475         state = repo.dirstate.state(abs)
476         reason = reasons.get(state)
477         if reason:
478             if state == 'a':
479                 origsrc = repo.dirstate.copied(abs)
480                 if origsrc is not None:
481                     return origsrc
482             if exact:
483                 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
484         else:
485             return abs
486
487     # origsrc: hgsep
488     # abssrc: hgsep
489     # relsrc: ossep
490     # target: ossep
491     def copy(origsrc, abssrc, relsrc, target, exact):
492         abstarget = util.canonpath(repo.root, cwd, target)
493         reltarget = util.pathto(cwd, abstarget)
494         prevsrc = targets.get(abstarget)
495         if prevsrc is not None:
496             ui.warn(_('%s: not overwriting - %s collides with %s\n') %
497                     (reltarget, util.localpath(abssrc),
498                      util.localpath(prevsrc)))
499             return
500         if (not opts['after'] and os.path.exists(reltarget) or
501             opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
502             if not opts['force']:
503                 ui.warn(_('%s: not overwriting - file exists\n') %
504                         reltarget)
505                 return
506             if not opts['after'] and not opts.get('dry_run'):
507                 os.unlink(reltarget)
508         if opts['after']:
509             if not os.path.exists(reltarget):
510                 return
511         else:
512             targetdir = os.path.dirname(reltarget) or '.'
513             if not os.path.isdir(targetdir) and not opts.get('dry_run'):
514                 os.makedirs(targetdir)
515         try:
516             restore = repo.dirstate.state(abstarget) == 'r'
517             if restore and not opts.get('dry_run'):
518                 repo.undelete([abstarget], wlock)
519             try:
520                 if not opts.get('dry_run'):
521                     util.copyfile(relsrc, reltarget)
522                 restore = False
523             finally:
524                 if restore:
525                     repo.remove([abstarget], wlock)
526         except IOError, inst:
527             if inst.errno == errno.ENOENT:
528                 ui.warn(_('%s: deleted in working copy\n') % relsrc)
529             else:
530                 ui.warn(_('%s: cannot copy - %s\n') %
531                         (relsrc, inst.strerror))
532                 errors += 1
533                 return
534         if ui.verbose or not exact:
535             ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
536         targets[abstarget] = abssrc
537         if abstarget != origsrc and not opts.get('dry_run'):
538             repo.copy(origsrc, abstarget, wlock)
539         copied.append((abssrc, relsrc, exact))
540
541     # pat: ossep
542     # dest ossep
543     # srcs: list of (hgsep, hgsep, ossep, bool)
544     # return: function that takes hgsep and returns ossep
545     def targetpathfn(pat, dest, srcs):
546         if os.path.isdir(pat):
547             abspfx = util.canonpath(repo.root, cwd, pat)
548             abspfx = util.localpath(abspfx)
549             if destdirexists:
550                 striplen = len(os.path.split(abspfx)[0])
551             else:
552                 striplen = len(abspfx)
553             if striplen:
554                 striplen += len(os.sep)
555             res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
556         elif destdirexists:
557             res = lambda p: os.path.join(dest,
558                                          os.path.basename(util.localpath(p)))
559         else:
560             res = lambda p: dest
561         return res
562
563     # pat: ossep
564     # dest ossep
565     # srcs: list of (hgsep, hgsep, ossep, bool)
566     # return: function that takes hgsep and returns ossep
567     def targetpathafterfn(pat, dest, srcs):
568         if util.patkind(pat, None)[0]:
569             # a mercurial pattern
570             res = lambda p: os.path.join(dest,
571                                          os.path.basename(util.localpath(p)))
572         else:
573             abspfx = util.canonpath(repo.root, cwd, pat)
574             if len(abspfx) < len(srcs[0][0]):
575                 # A directory. Either the target path contains the last
576                 # component of the source path or it does not.
577                 def evalpath(striplen):
578                     score = 0
579                     for s in srcs:
580                         t = os.path.join(dest, util.localpath(s[0])[striplen:])
581                         if os.path.exists(t):
582                             score += 1
583                     return score
584
585                 abspfx = util.localpath(abspfx)
586                 striplen = len(abspfx)
587                 if striplen:
588                     striplen += len(os.sep)
589                 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
590                     score = evalpath(striplen)
591                     striplen1 = len(os.path.split(abspfx)[0])
592                     if striplen1:
593                         striplen1 += len(os.sep)
594                     if evalpath(striplen1) > score:
595                         striplen = striplen1
596                 res = lambda p: os.path.join(dest,
597                                              util.localpath(p)[striplen:])
598             else:
599                 # a file
600                 if destdirexists:
601                     res = lambda p: os.path.join(dest,
602                                         os.path.basename(util.localpath(p)))
603                 else:
604                     res = lambda p: dest
605         return res
606
607
608     pats = list(pats)
609     if not pats:
610         raise util.Abort(_('no source or destination specified'))
611     if len(pats) == 1:
612         raise util.Abort(_('no destination specified'))
613     dest = pats.pop()
614     destdirexists = os.path.isdir(dest)
615     if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
616         raise util.Abort(_('with multiple sources, destination must be an '
617                            'existing directory'))
618     if opts['after']:
619         tfn = targetpathafterfn
620     else:
621         tfn = targetpathfn
622     copylist = []
623     for pat in pats:
624         srcs = []
625         for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
626             origsrc = okaytocopy(abssrc, relsrc, exact)
627             if origsrc:
628                 srcs.append((origsrc, abssrc, relsrc, exact))
629         if not srcs:
630             continue
631         copylist.append((tfn(pat, dest, srcs), srcs))
632     if not copylist:
633         raise util.Abort(_('no files to copy'))
634
635     for targetpath, srcs in copylist:
636         for origsrc, abssrc, relsrc, exact in srcs:
637             copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
638
639     if errors:
640         ui.warn(_('(consider using --after)\n'))
641     return errors, copied
642
643 def copy(ui, repo, *pats, **opts):
644     """mark files as copied for the next commit
645
646     Mark dest as having copies of source files. If dest is a
647     directory, copies are put in that directory. If dest is a file,
648     there can only be one source.
649
650     By default, this command copies the contents of files as they
651     stand in the working directory. If invoked with --after, the
652     operation is recorded, but no copying is performed.
653
654     This command takes effect in the next commit. To undo a copy
655     before that, see hg revert.
656     """
657     wlock = repo.wlock(0)
658     errs, copied = docopy(ui, repo, pats, opts, wlock)
659     return errs
660
661 def debugancestor(ui, index, rev1, rev2):
662     """find the ancestor revision of two revisions in a given index"""
663     r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
664     a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
665     ui.write("%d:%s\n" % (r.rev(a), hex(a)))
666
667 def debugcomplete(ui, cmd='', **opts):
668     """returns the completion list associated with the given command"""
669
670     if opts['options']:
671         options = []
672         otables = [globalopts]
673         if cmd:
674             aliases, entry = findcmd(ui, cmd)
675             otables.append(entry[1])
676         for t in otables:
677             for o in t:
678                 if o[0]:
679                     options.append('-%s' % o[0])
680                 options.append('--%s' % o[1])
681         ui.write("%s\n" % "\n".join(options))
682         return
683
684     clist = findpossible(ui, cmd).keys()
685     clist.sort()
686     ui.write("%s\n" % "\n".join(clist))
687
688 def debugrebuildstate(ui, repo, rev=None):
689     """rebuild the dirstate as it would look like for the given revision"""
690     if not rev:
691         rev = repo.changelog.tip()
692     else:
693         rev = repo.lookup(rev)
694     change = repo.changelog.read(rev)
695     n = change[0]
696     files = repo.manifest.read(n)
697     wlock = repo.wlock()
698     repo.dirstate.rebuild(rev, files)
699
700 def debugcheckstate(ui, repo):
701     """validate the correctness of the current dirstate"""
702     parent1, parent2 = repo.dirstate.parents()
703     repo.dirstate.read()
704     dc = repo.dirstate.map
705     keys = dc.keys()
706     keys.sort()
707     m1n = repo.changelog.read(parent1)[0]
708     m2n = repo.changelog.read(parent2)[0]
709     m1 = repo.manifest.read(m1n)
710     m2 = repo.manifest.read(m2n)
711     errors = 0
712     for f in dc:
713         state = repo.dirstate.state(f)
714         if state in "nr" and f not in m1:
715             ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716             errors += 1
717         if state in "a" and f in m1:
718             ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719             errors += 1
720         if state in "m" and f not in m1 and f not in m2:
721             ui.warn(_("%s in state %s, but not in either manifest\n") %
722                     (f, state))
723             errors += 1
724     for f in m1:
725         state = repo.dirstate.state(f)
726         if state not in "nrm":
727             ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728             errors += 1
729     if errors:
730         error = _(".hg/dirstate inconsistent with current parent's manifest")
731         raise util.Abort(error)
732
733 def showconfig(ui, repo, *values, **opts):
734     """show combined config settings from all hgrc files
735
736     With no args, print names and values of all config items.
737
738     With one arg of the form section.name, print just the value of
739     that config item.
740
741     With multiple args, print names and values of all config items
742     with matching section names."""
743
744     untrusted = bool(opts.get('untrusted'))
745     if values:
746         if len([v for v in values if '.' in v]) > 1:
747             raise util.Abort(_('only one config item permitted'))
748     for section, name, value in ui.walkconfig(untrusted=untrusted):
749         sectname = section + '.' + name
750         if values:
751             for v in values:
752                 if v == section:
753                     ui.write('%s=%s\n' % (sectname, value))
754                 elif v == sectname:
755                     ui.write(value, '\n')
756         else:
757             ui.write('%s=%s\n' % (sectname, value))
758
759 def debugsetparents(ui, repo, rev1, rev2=None):
760     """manually set the parents of the current working directory
761
762     This is useful for writing repository conversion tools, but should
763     be used with care.
764     """
765
766     if not rev2:
767         rev2 = hex(nullid)
768
769     repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
770
771 def debugstate(ui, repo):
772     """show the contents of the current dirstate"""
773     repo.dirstate.read()
774     dc = repo.dirstate.map
775     keys = dc.keys()
776     keys.sort()
777     for file_ in keys:
778         ui.write("%c %3o %10d %s %s\n"
779                  % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
780                     time.strftime("%x %X",
781                                   time.localtime(dc[file_][3])), file_))
782     for f in repo.dirstate.copies():
783         ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
784
785 def debugdata(ui, file_, rev):
786     """dump the contents of an data file revision"""
787     r = revlog.revlog(util.opener(os.getcwd(), audit=False),
788                       file_[:-2] + ".i", file_, 0)
789     try:
790         ui.write(r.revision(r.lookup(rev)))
791     except KeyError:
792         raise util.Abort(_('invalid revision identifier %s') % rev)
793
794 def debugdate(ui, date, range=None, **opts):
795     """parse and display a date"""
796     if opts["extended"]:
797         d = util.parsedate(date, util.extendeddateformats)
798     else:
799         d = util.parsedate(date)
800     ui.write("internal: %s %s\n" % d)
801     ui.write("standard: %s\n" % util.datestr(d))
802     if range:
803         m = util.matchdate(range)
804         ui.write("match: %s\n" % m(d[0]))
805
806 def debugindex(ui, file_):
807     """dump the contents of an index file"""
808     r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
809     ui.write(" rev offset length base linkrev" +
810              " nodeid p1 p2\n")
811     for i in xrange(r.count()):
812         node = r.node(i)
813         pp = r.parents(node)
814         ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
815             i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
816             short(node), short(pp[0]), short(pp[1])))
817
818 def debugindexdot(ui, file_):
819     """dump an index DAG as a .dot file"""
820     r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
821     ui.write("digraph G {\n")
822     for i in xrange(r.count()):
823         node = r.node(i)
824         pp = r.parents(node)
825         ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
826         if pp[1] != nullid:
827             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
828     ui.write("}\n")
829
830 def debuginstall(ui):
831     '''test Mercurial installation'''
832
833     def writetemp(contents):
834         (fd, name) = tempfile.mkstemp()
835         f = os.fdopen(fd, "wb")
836         f.write(contents)
837         f.close()
838         return name
839
840     problems = 0
841
842     # encoding
843     ui.status(_("Checking encoding (%s)...\n") % util._encoding)
844     try:
845         util.fromlocal("test")
846     except util.Abort, inst:
847         ui.write(" %s\n" % inst)
848         ui.write(_(" (check that your locale is properly set)\n"))
849         problems += 1
850
851     # compiled modules
852     ui.status(_("Checking extensions...\n"))
853     try:
854         import bdiff, mpatch, base85
855     except Exception, inst:
856         ui.write(" %s\n" % inst)
857         ui.write(_(" One or more extensions could not be found"))
858         ui.write(_(" (check that you compiled the extensions)\n"))
859         problems += 1
860
861     # templates
862     ui.status(_("Checking templates...\n"))
863     try:
864         import templater
865         t = templater.templater(templater.templatepath("map-cmdline.default"))
866     except Exception, inst:
867         ui.write(" %s\n" % inst)
868         ui.write(_(" (templates seem to have been installed incorrectly)\n"))
869         problems += 1
870
871     # patch
872     ui.status(_("Checking patch...\n"))
873     path = os.environ.get('PATH', '')
874     patcher = util.find_in_path('gpatch', path,
875                                 util.find_in_path('patch', path, None))
876     if not patcher:
877         ui.write(_(" Can't find patch or gpatch in PATH\n"))
878         ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
879         problems += 1
880     else:
881         # actually attempt a patch here
882         a = "1\n2\n3\n4\n"
883         b = "1\n2\n3\ninsert\n4\n"
884         d = mdiff.unidiff(a, None, b, None, "a")
885         fa = writetemp(a)
886         fd = writetemp(d)
887         fp = os.popen('%s %s %s' % (patcher, fa, fd))
888         files = []
889         output = ""
890         for line in fp:
891             output += line
892             if line.startswith('patching file '):
893                 pf = util.parse_patch_output(line.rstrip())
894                 files.append(pf)
895         if files != [fa]:
896             ui.write(_(" unexpected patch output!"))
897             ui.write(_(" (you may have an incompatible version of patch)\n"))
898 ui.write(output)
898 ui.write(output)
899 problems += 1
899 problems += 1
900 a = file(fa).read()
900 a = file(fa).read()
901 if a != b:
901 if a != b:
902 ui.write(_(" patch test failed!"))
902 ui.write(_(" patch test failed!"))
903 ui.write(_(" (you may have an incompatible version of patch)\n"))
903 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 problems += 1
904 problems += 1
905 os.unlink(fa)
905 os.unlink(fa)
906 os.unlink(fd)
906 os.unlink(fd)
907
907
908 # merge helper
908 # merge helper
909 ui.status(_("Checking merge helper...\n"))
909 ui.status(_("Checking merge helper...\n"))
910 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
910 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
911 or "hgmerge")
911 or "hgmerge")
912 cmdpath = util.find_in_path(cmd, path)
912 cmdpath = util.find_in_path(cmd, path)
913 if not cmdpath:
913 if not cmdpath:
914 cmdpath = util.find_in_path(cmd.split()[0], path)
914 cmdpath = util.find_in_path(cmd.split()[0], path)
915 if not cmdpath:
915 if not cmdpath:
916 if cmd == 'hgmerge':
916 if cmd == 'hgmerge':
917 ui.write(_(" No merge helper set and can't find default"
917 ui.write(_(" No merge helper set and can't find default"
918 " hgmerge script in PATH\n"))
918 " hgmerge script in PATH\n"))
919 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
919 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
920 else:
920 else:
921 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
921 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
922 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
922 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
923 problems += 1
923 problems += 1
924 else:
924 else:
925 # actually attempt a merge here
925 # actually attempt a merge here
926 fa = writetemp("1\n2\n3\n4\n")
926 fa = writetemp("1\n2\n3\n4\n")
927 fl = writetemp("1\n2\n3\ninsert\n4\n")
927 fl = writetemp("1\n2\n3\ninsert\n4\n")
928 fr = writetemp("begin\n1\n2\n3\n4\n")
928 fr = writetemp("begin\n1\n2\n3\n4\n")
929 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
929 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
930 if r:
930 if r:
931 ui.write(_(" got unexpected merge error %d!") % r)
931 ui.write(_(" got unexpected merge error %d!") % r)
932 problems += 1
932 problems += 1
933 m = file(fl).read()
933 m = file(fl).read()
934 if m != "begin\n1\n2\n3\ninsert\n4\n":
934 if m != "begin\n1\n2\n3\ninsert\n4\n":
935 ui.write(_(" got unexpected merge results!"))
935 ui.write(_(" got unexpected merge results!"))
936 ui.write(_(" (your merge helper may have the"
936 ui.write(_(" (your merge helper may have the"
937 " wrong argument order)\n"))
937 " wrong argument order)\n"))
938 ui.write(m)
938 ui.write(m)
939 os.unlink(fa)
939 os.unlink(fa)
940 os.unlink(fl)
940 os.unlink(fl)
941 os.unlink(fr)
941 os.unlink(fr)
942
942
943 # editor
943 # editor
944 ui.status(_("Checking commit editor...\n"))
944 ui.status(_("Checking commit editor...\n"))
945 editor = (os.environ.get("HGEDITOR") or
945 editor = (os.environ.get("HGEDITOR") or
946 ui.config("ui", "editor") or
946 ui.config("ui", "editor") or
947 os.environ.get("EDITOR", "vi"))
947 os.environ.get("EDITOR", "vi"))
948 cmdpath = util.find_in_path(editor, path)
948 cmdpath = util.find_in_path(editor, path)
949 if not cmdpath:
949 if not cmdpath:
950 cmdpath = util.find_in_path(editor.split()[0], path)
950 cmdpath = util.find_in_path(editor.split()[0], path)
951 if not cmdpath:
951 if not cmdpath:
952 if editor == 'vi':
952 if editor == 'vi':
953 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
953 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
954 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
954 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
955 else:
955 else:
956 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
956 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
957 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
957 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
958 problems += 1
958 problems += 1
959
959
960 # check username
960 # check username
961 ui.status(_("Checking username...\n"))
961 ui.status(_("Checking username...\n"))
962 user = os.environ.get("HGUSER")
962 user = os.environ.get("HGUSER")
963 if user is None:
963 if user is None:
964 user = ui.config("ui", "username")
964 user = ui.config("ui", "username")
965 if user is None:
965 if user is None:
966 user = os.environ.get("EMAIL")
966 user = os.environ.get("EMAIL")
967 if not user:
967 if not user:
968 ui.warn(" ")
968 ui.warn(" ")
969 ui.username()
969 ui.username()
970 ui.write(_(" (specify a username in your .hgrc file)\n"))
970 ui.write(_(" (specify a username in your .hgrc file)\n"))
971
971
972 if not problems:
972 if not problems:
973 ui.status(_("No problems detected\n"))
973 ui.status(_("No problems detected\n"))
974 else:
974 else:
975 ui.write(_("%s problems detected,"
975 ui.write(_("%s problems detected,"
976 " please check your install!\n") % problems)
976 " please check your install!\n") % problems)
977
977
978 return problems
978 return problems
979
979
980 def debugrename(ui, repo, file1, *pats, **opts):
980 def debugrename(ui, repo, file1, *pats, **opts):
981 """dump rename information"""
981 """dump rename information"""
982
982
983 ctx = repo.changectx(opts.get('rev', 'tip'))
983 ctx = repo.changectx(opts.get('rev', 'tip'))
984 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
984 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
985 ctx.node()):
985 ctx.node()):
986 m = ctx.filectx(abs).renamed()
986 m = ctx.filectx(abs).renamed()
987 if m:
987 if m:
988 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
988 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
989 else:
989 else:
990 ui.write(_("%s not renamed\n") % rel)
990 ui.write(_("%s not renamed\n") % rel)
991
991
992 def debugwalk(ui, repo, *pats, **opts):
992 def debugwalk(ui, repo, *pats, **opts):
993 """show how files match on given patterns"""
993 """show how files match on given patterns"""
994 items = list(cmdutil.walk(repo, pats, opts))
994 items = list(cmdutil.walk(repo, pats, opts))
995 if not items:
995 if not items:
996 return
996 return
997 fmt = '%%s %%-%ds %%-%ds %%s' % (
997 fmt = '%%s %%-%ds %%-%ds %%s' % (
998 max([len(abs) for (src, abs, rel, exact) in items]),
998 max([len(abs) for (src, abs, rel, exact) in items]),
999 max([len(rel) for (src, abs, rel, exact) in items]))
999 max([len(rel) for (src, abs, rel, exact) in items]))
1000 for src, abs, rel, exact in items:
1000 for src, abs, rel, exact in items:
1001 line = fmt % (src, abs, rel, exact and 'exact' or '')
1001 line = fmt % (src, abs, rel, exact and 'exact' or '')
1002 ui.write("%s\n" % line.rstrip())
1002 ui.write("%s\n" % line.rstrip())
1003
1003
1004 def diff(ui, repo, *pats, **opts):
1004 def diff(ui, repo, *pats, **opts):
1005 """diff repository (or selected files)
1005 """diff repository (or selected files)
1006
1006
1007 Show differences between revisions for the specified files.
1007 Show differences between revisions for the specified files.
1008
1008
1009 Differences between files are shown using the unified diff format.
1009 Differences between files are shown using the unified diff format.
1010
1010
1011 NOTE: diff may generate unexpected results for merges, as it will
1011 NOTE: diff may generate unexpected results for merges, as it will
1012 default to comparing against the working directory's first parent
1012 default to comparing against the working directory's first parent
1013 changeset if no revisions are specified.
1013 changeset if no revisions are specified.
1014
1014
1015 When two revision arguments are given, then changes are shown
1015 When two revision arguments are given, then changes are shown
1016 between those revisions. If only one revision is specified then
1016 between those revisions. If only one revision is specified then
1017 that revision is compared to the working directory, and, when no
1017 that revision is compared to the working directory, and, when no
1018 revisions are specified, the working directory files are compared
1018 revisions are specified, the working directory files are compared
1019 to its parent.
1019 to its parent.
1020
1020
1021 Without the -a option, diff will avoid generating diffs of files
1021 Without the -a option, diff will avoid generating diffs of files
1022 it detects as binary. With -a, diff will generate a diff anyway,
1022 it detects as binary. With -a, diff will generate a diff anyway,
1023 probably with undesirable results.
1023 probably with undesirable results.
1024 """
1024 """
1025 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1025 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1026
1026
1027 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1027 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1028
1028
1029 patch.diff(repo, node1, node2, fns, match=matchfn,
1029 patch.diff(repo, node1, node2, fns, match=matchfn,
1030 opts=patch.diffopts(ui, opts))
1030 opts=patch.diffopts(ui, opts))
1031
1031
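# Hypothetical usage sketch (editor's illustration of the docstring above;
# file name and revision numbers are made up):
#
#   $ hg diff -r 10 -r 20 somefile.py    # changes between revisions 10 and 20
#   $ hg diff somefile.py                # working directory vs. its parent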
1032 def export(ui, repo, *changesets, **opts):
1032 def export(ui, repo, *changesets, **opts):
1033 """dump the header and diffs for one or more changesets
1033 """dump the header and diffs for one or more changesets
1034
1034
1035 Print the changeset header and diffs for one or more revisions.
1035 Print the changeset header and diffs for one or more revisions.
1036
1036
1037 The information shown in the changeset header is: author,
1037 The information shown in the changeset header is: author,
1038 changeset hash, parent(s) and commit comment.
1038 changeset hash, parent(s) and commit comment.
1039
1039
1040 NOTE: export may generate unexpected diff output for merge changesets,
1040 NOTE: export may generate unexpected diff output for merge changesets,
1041 as it will compare the merge changeset against its first parent only.
1041 as it will compare the merge changeset against its first parent only.
1042
1042
1043 Output may be to a file, in which case the name of the file is
1043 Output may be to a file, in which case the name of the file is
1044 given using a format string. The formatting rules are as follows:
1044 given using a format string. The formatting rules are as follows:
1045
1045
1046 %% literal "%" character
1046 %% literal "%" character
1047 %H changeset hash (40 bytes of hexadecimal)
1047 %H changeset hash (40 bytes of hexadecimal)
1048 %N number of patches being generated
1048 %N number of patches being generated
1049 %R changeset revision number
1049 %R changeset revision number
1050 %b basename of the exporting repository
1050 %b basename of the exporting repository
1051 %h short-form changeset hash (12 bytes of hexadecimal)
1051 %h short-form changeset hash (12 bytes of hexadecimal)
1052 %n zero-padded sequence number, starting at 1
1052 %n zero-padded sequence number, starting at 1
1053 %r zero-padded changeset revision number
1053 %r zero-padded changeset revision number
1054
1054
1055 Without the -a option, export will avoid generating diffs of files
1055 Without the -a option, export will avoid generating diffs of files
1056 it detects as binary. With -a, export will generate a diff anyway,
1056 it detects as binary. With -a, export will generate a diff anyway,
1057 probably with undesirable results.
1057 probably with undesirable results.
1058
1058
1059 With the --switch-parent option, the diff will be against the second
1059 With the --switch-parent option, the diff will be against the second
1060 parent. This can be useful for reviewing a merge.
1060 parent. This can be useful for reviewing a merge.
1061 """
1061 """
1062 if not changesets:
1062 if not changesets:
1063 raise util.Abort(_("export requires at least one changeset"))
1063 raise util.Abort(_("export requires at least one changeset"))
1064 revs = cmdutil.revrange(repo, changesets)
1064 revs = cmdutil.revrange(repo, changesets)
1065 if len(revs) > 1:
1065 if len(revs) > 1:
1066 ui.note(_('exporting patches:\n'))
1066 ui.note(_('exporting patches:\n'))
1067 else:
1067 else:
1068 ui.note(_('exporting patch:\n'))
1068 ui.note(_('exporting patch:\n'))
1069 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1069 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1070 switch_parent=opts['switch_parent'],
1070 switch_parent=opts['switch_parent'],
1071 opts=patch.diffopts(ui, opts))
1071 opts=patch.diffopts(ui, opts))
1072
1072
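# Hypothetical usage sketch (editor's illustration of the format rules listed
# above; revision numbers are made up):
#
#   $ hg export -o "%b-r%R-%h.patch" 100 101
#
# would write one file per revision, named like
# "<repo-basename>-r100-<short-hash>.patch".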
1073 def grep(ui, repo, pattern, *pats, **opts):
1073 def grep(ui, repo, pattern, *pats, **opts):
1074 """search for a pattern in specified files and revisions
1074 """search for a pattern in specified files and revisions
1075
1075
1076 Search revisions of files for a regular expression.
1076 Search revisions of files for a regular expression.
1077
1077
1078 This command behaves differently than Unix grep. It only accepts
1078 This command behaves differently than Unix grep. It only accepts
1079 Python/Perl regexps. It searches repository history, not the
1079 Python/Perl regexps. It searches repository history, not the
1080 working directory. It always prints the revision number in which
1080 working directory. It always prints the revision number in which
1081 a match appears.
1081 a match appears.
1082
1082
1083 By default, grep only prints output for the first revision of a
1083 By default, grep only prints output for the first revision of a
1084 file in which it finds a match. To get it to print every revision
1084 file in which it finds a match. To get it to print every revision
1085 that contains a change in match status ("-" for a match that
1085 that contains a change in match status ("-" for a match that
1086 becomes a non-match, or "+" for a non-match that becomes a match),
1086 becomes a non-match, or "+" for a non-match that becomes a match),
1087 use the --all flag.
1087 use the --all flag.
1088 """
1088 """
1089 reflags = 0
1089 reflags = 0
1090 if opts['ignore_case']:
1090 if opts['ignore_case']:
1091 reflags |= re.I
1091 reflags |= re.I
1092 regexp = re.compile(pattern, reflags)
1092 regexp = re.compile(pattern, reflags)
1093 sep, eol = ':', '\n'
1093 sep, eol = ':', '\n'
1094 if opts['print0']:
1094 if opts['print0']:
1095 sep = eol = '\0'
1095 sep = eol = '\0'
1096
1096
1097 fcache = {}
1097 fcache = {}
1098 def getfile(fn):
1098 def getfile(fn):
1099 if fn not in fcache:
1099 if fn not in fcache:
1100 fcache[fn] = repo.file(fn)
1100 fcache[fn] = repo.file(fn)
1101 return fcache[fn]
1101 return fcache[fn]
1102
1102
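# matchlines scans a file body and yields one tuple per regexp match:
# (line number, start column, end column, text of the matching line);
# linestate below stores exactly these four values for later comparison.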
1103 def matchlines(body):
1103 def matchlines(body):
1104 begin = 0
1104 begin = 0
1105 linenum = 0
1105 linenum = 0
1106 while True:
1106 while True:
1107 match = regexp.search(body, begin)
1107 match = regexp.search(body, begin)
1108 if not match:
1108 if not match:
1109 break
1109 break
1110 mstart, mend = match.span()
1110 mstart, mend = match.span()
1111 linenum += body.count('\n', begin, mstart) + 1
1111 linenum += body.count('\n', begin, mstart) + 1
1112 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1112 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1113 lend = body.find('\n', mend)
1113 lend = body.find('\n', mend)
1114 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1114 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1115 begin = lend + 1
1115 begin = lend + 1
1116
1116
1117 class linestate(object):
1117 class linestate(object):
1118 def __init__(self, line, linenum, colstart, colend):
1118 def __init__(self, line, linenum, colstart, colend):
1119 self.line = line
1119 self.line = line
1120 self.linenum = linenum
1120 self.linenum = linenum
1121 self.colstart = colstart
1121 self.colstart = colstart
1122 self.colend = colend
1122 self.colend = colend
1123
1123
1124 def __eq__(self, other):
1124 def __eq__(self, other):
1125 return self.line == other.line
1125 return self.line == other.line
1126
1126
1127 matches = {}
1127 matches = {}
1128 copies = {}
1128 copies = {}
1129 def grepbody(fn, rev, body):
1129 def grepbody(fn, rev, body):
1130 matches[rev].setdefault(fn, [])
1130 matches[rev].setdefault(fn, [])
1131 m = matches[rev][fn]
1131 m = matches[rev][fn]
1132 for lnum, cstart, cend, line in matchlines(body):
1132 for lnum, cstart, cend, line in matchlines(body):
1133 s = linestate(line, lnum, cstart, cend)
1133 s = linestate(line, lnum, cstart, cend)
1134 m.append(s)
1134 m.append(s)
1135
1135
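# difflinestates diffs two lists of linestate objects and yields ('-', state)
# for matches that disappeared and ('+', state) for matches that appeared;
# a 'replace' opcode produces both kinds.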
1136 def difflinestates(a, b):
1136 def difflinestates(a, b):
1137 sm = difflib.SequenceMatcher(None, a, b)
1137 sm = difflib.SequenceMatcher(None, a, b)
1138 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1138 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1139 if tag == 'insert':
1139 if tag == 'insert':
1140 for i in xrange(blo, bhi):
1140 for i in xrange(blo, bhi):
1141 yield ('+', b[i])
1141 yield ('+', b[i])
1142 elif tag == 'delete':
1142 elif tag == 'delete':
1143 for i in xrange(alo, ahi):
1143 for i in xrange(alo, ahi):
1144 yield ('-', a[i])
1144 yield ('-', a[i])
1145 elif tag == 'replace':
1145 elif tag == 'replace':
1146 for i in xrange(alo, ahi):
1146 for i in xrange(alo, ahi):
1147 yield ('-', a[i])
1147 yield ('-', a[i])
1148 for i in xrange(blo, bhi):
1148 for i in xrange(blo, bhi):
1149 yield ('+', b[i])
1149 yield ('+', b[i])
1150
1150
1151 prev = {}
1151 prev = {}
1152 def display(fn, rev, states, prevstates):
1152 def display(fn, rev, states, prevstates):
1153 counts = {'-': 0, '+': 0}
1153 counts = {'-': 0, '+': 0}
1154 filerevmatches = {}
1154 filerevmatches = {}
1155 if incrementing or not opts['all']:
1155 if incrementing or not opts['all']:
1156 a, b, r = prevstates, states, rev
1156 a, b, r = prevstates, states, rev
1157 else:
1157 else:
1158 a, b, r = states, prevstates, prev.get(fn, -1)
1158 a, b, r = states, prevstates, prev.get(fn, -1)
1159 for change, l in difflinestates(a, b):
1159 for change, l in difflinestates(a, b):
1160 cols = [fn, str(r)]
1160 cols = [fn, str(r)]
1161 if opts['line_number']:
1161 if opts['line_number']:
1162 cols.append(str(l.linenum))
1162 cols.append(str(l.linenum))
1163 if opts['all']:
1163 if opts['all']:
1164 cols.append(change)
1164 cols.append(change)
1165 if opts['user']:
1165 if opts['user']:
1166 cols.append(ui.shortuser(get(r)[1]))
1166 cols.append(ui.shortuser(get(r)[1]))
1167 if opts['files_with_matches']:
1167 if opts['files_with_matches']:
1168 c = (fn, r)
1168 c = (fn, r)
1169 if c in filerevmatches:
1169 if c in filerevmatches:
1170 continue
1170 continue
1171 filerevmatches[c] = 1
1171 filerevmatches[c] = 1
1172 else:
1172 else:
1173 cols.append(l.line)
1173 cols.append(l.line)
1174 ui.write(sep.join(cols), eol)
1174 ui.write(sep.join(cols), eol)
1175 counts[change] += 1
1175 counts[change] += 1
1176 return counts['+'], counts['-']
1176 return counts['+'], counts['-']
1177
1177
1178 fstate = {}
1178 fstate = {}
1179 skip = {}
1179 skip = {}
1180 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1180 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1181 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1181 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1182 count = 0
1182 count = 0
1183 incrementing = False
1183 incrementing = False
1184 follow = opts.get('follow')
1184 follow = opts.get('follow')
1185 for st, rev, fns in changeiter:
1185 for st, rev, fns in changeiter:
1186 if st == 'window':
1186 if st == 'window':
1187 incrementing = rev
1187 incrementing = rev
1188 matches.clear()
1188 matches.clear()
1189 elif st == 'add':
1189 elif st == 'add':
1190 mf = repo.changectx(rev).manifest()
1190 mf = repo.changectx(rev).manifest()
1191 matches[rev] = {}
1191 matches[rev] = {}
1192 for fn in fns:
1192 for fn in fns:
1193 if fn in skip:
1193 if fn in skip:
1194 continue
1194 continue
1195 fstate.setdefault(fn, {})
1195 fstate.setdefault(fn, {})
1196 try:
1196 try:
1197 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1197 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1198 if follow:
1198 if follow:
1199 copied = getfile(fn).renamed(mf[fn])
1199 copied = getfile(fn).renamed(mf[fn])
1200 if copied:
1200 if copied:
1201 copies.setdefault(rev, {})[fn] = copied[0]
1201 copies.setdefault(rev, {})[fn] = copied[0]
1202 except KeyError:
1202 except KeyError:
1203 pass
1203 pass
1204 elif st == 'iter':
1204 elif st == 'iter':
1205 states = matches[rev].items()
1205 states = matches[rev].items()
1206 states.sort()
1206 states.sort()
1207 for fn, m in states:
1207 for fn, m in states:
1208 copy = copies.get(rev, {}).get(fn)
1208 copy = copies.get(rev, {}).get(fn)
1209 if fn in skip:
1209 if fn in skip:
1210 if copy:
1210 if copy:
1211 skip[copy] = True
1211 skip[copy] = True
1212 continue
1212 continue
1213 if incrementing or not opts['all'] or fstate[fn]:
1213 if incrementing or not opts['all'] or fstate[fn]:
1214 pos, neg = display(fn, rev, m, fstate[fn])
1214 pos, neg = display(fn, rev, m, fstate[fn])
1215 count += pos + neg
1215 count += pos + neg
1216 if pos and not opts['all']:
1216 if pos and not opts['all']:
1217 skip[fn] = True
1217 skip[fn] = True
1218 if copy:
1218 if copy:
1219 skip[copy] = True
1219 skip[copy] = True
1220 fstate[fn] = m
1220 fstate[fn] = m
1221 if copy:
1221 if copy:
1222 fstate[copy] = m
1222 fstate[copy] = m
1223 prev[fn] = rev
1223 prev[fn] = rev
1224
1224
1225 if not incrementing:
1225 if not incrementing:
1226 fstate = fstate.items()
1226 fstate = fstate.items()
1227 fstate.sort()
1227 fstate.sort()
1228 for fn, state in fstate:
1228 for fn, state in fstate:
1229 if fn in skip:
1229 if fn in skip:
1230 continue
1230 continue
1231 if fn not in copies.get(prev[fn], {}):
1231 if fn not in copies.get(prev[fn], {}):
1232 display(fn, rev, {}, state)
1232 display(fn, rev, {}, state)
1233 return (count == 0 and 1) or 0
1233 return (count == 0 and 1) or 0
1234
1234
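# Hypothetical usage sketch (editor's illustration; pattern and output values
# are made up): per the docstring and the display() helper above, each hit is
# printed as colon-separated fields -- filename, revision, then the optional
# line number (-n), change marker (--all), user (-u) and the matching line.
#
#   $ hg grep --all -n some_pattern
#   somefile.py:42:17:+:line that started matching some_pattern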
1235 def heads(ui, repo, **opts):
1235 def heads(ui, repo, **opts):
1236 """show current repository heads
1236 """show current repository heads
1237
1237
1238 Show all repository head changesets.
1238 Show all repository head changesets.
1239
1239
1240 Repository "heads" are changesets that don't have child
1240 Repository "heads" are changesets that don't have child
1241 changesets. They are where development generally takes place and
1241 changesets. They are where development generally takes place and
1242 are the usual targets for update and merge operations.
1242 are the usual targets for update and merge operations.
1243 """
1243 """
1244 if opts['rev']:
1244 if opts['rev']:
1245 heads = repo.heads(repo.lookup(opts['rev']))
1245 heads = repo.heads(repo.lookup(opts['rev']))
1246 else:
1246 else:
1247 heads = repo.heads()
1247 heads = repo.heads()
1248 displayer = cmdutil.show_changeset(ui, repo, opts)
1248 displayer = cmdutil.show_changeset(ui, repo, opts)
1249 for n in heads:
1249 for n in heads:
1250 displayer.show(changenode=n)
1250 displayer.show(changenode=n)
1251
1251
1252 def help_(ui, name=None, with_version=False):
1252 def help_(ui, name=None, with_version=False):
1253 """show help for a command, extension, or list of commands
1253 """show help for a command, extension, or list of commands
1254
1254
1255 With no arguments, print a list of commands and short help.
1255 With no arguments, print a list of commands and short help.
1256
1256
1257 Given a command name, print help for that command.
1257 Given a command name, print help for that command.
1258
1258
1259 Given an extension name, print help for that extension, and the
1259 Given an extension name, print help for that extension, and the
1260 commands it provides."""
1260 commands it provides."""
1261 option_lists = []
1261 option_lists = []
1262
1262
1263 def helpcmd(name):
1263 def helpcmd(name):
1264 if with_version:
1264 if with_version:
1265 version_(ui)
1265 version_(ui)
1266 ui.write('\n')
1266 ui.write('\n')
1267 aliases, i = findcmd(ui, name)
1267 aliases, i = findcmd(ui, name)
1268 # synopsis
1268 # synopsis
1269 ui.write("%s\n\n" % i[2])
1269 ui.write("%s\n\n" % i[2])
1270
1270
1271 # description
1271 # description
1272 doc = i[0].__doc__
1272 doc = i[0].__doc__
1273 if not doc:
1273 if not doc:
1274 doc = _("(No help text available)")
1274 doc = _("(No help text available)")
1275 if ui.quiet:
1275 if ui.quiet:
1276 doc = doc.splitlines(0)[0]
1276 doc = doc.splitlines(0)[0]
1277 ui.write("%s\n" % doc.rstrip())
1277 ui.write("%s\n" % doc.rstrip())
1278
1278
1279 if not ui.quiet:
1279 if not ui.quiet:
1280 # aliases
1280 # aliases
1281 if len(aliases) > 1:
1281 if len(aliases) > 1:
1282 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1282 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1283
1283
1284 # options
1284 # options
1285 if i[1]:
1285 if i[1]:
1286 option_lists.append(("options", i[1]))
1286 option_lists.append(("options", i[1]))
1287
1287
1288 def helplist(select=None):
1288 def helplist(select=None):
1289 h = {}
1289 h = {}
1290 cmds = {}
1290 cmds = {}
1291 for c, e in table.items():
1291 for c, e in table.items():
1292 f = c.split("|", 1)[0]
1292 f = c.split("|", 1)[0]
1293 if select and not select(f):
1293 if select and not select(f):
1294 continue
1294 continue
1295 if name == "shortlist" and not f.startswith("^"):
1295 if name == "shortlist" and not f.startswith("^"):
1296 continue
1296 continue
1297 f = f.lstrip("^")
1297 f = f.lstrip("^")
1298 if not ui.debugflag and f.startswith("debug"):
1298 if not ui.debugflag and f.startswith("debug"):
1299 continue
1299 continue
1300 doc = e[0].__doc__
1300 doc = e[0].__doc__
1301 if not doc:
1301 if not doc:
1302 doc = _("(No help text available)")
1302 doc = _("(No help text available)")
1303 h[f] = doc.splitlines(0)[0].rstrip()
1303 h[f] = doc.splitlines(0)[0].rstrip()
1304 cmds[f] = c.lstrip("^")
1304 cmds[f] = c.lstrip("^")
1305
1305
1306 fns = h.keys()
1306 fns = h.keys()
1307 fns.sort()
1307 fns.sort()
1308 m = max(map(len, fns))
1308 m = max(map(len, fns))
1309 for f in fns:
1309 for f in fns:
1310 if ui.verbose:
1310 if ui.verbose:
1311 commands = cmds[f].replace("|",", ")
1311 commands = cmds[f].replace("|",", ")
1312 ui.write(" %s:\n %s\n"%(commands, h[f]))
1312 ui.write(" %s:\n %s\n"%(commands, h[f]))
1313 else:
1313 else:
1314 ui.write(' %-*s %s\n' % (m, f, h[f]))
1314 ui.write(' %-*s %s\n' % (m, f, h[f]))
1315
1315
1316 def helptopic(name):
1316 def helptopic(name):
1317 v = None
1317 v = None
1318 for i in help.helptable:
1318 for i in help.helptable:
1319 l = i.split('|')
1319 l = i.split('|')
1320 if name in l:
1320 if name in l:
1321 v = i
1321 v = i
1322 header = l[-1]
1322 header = l[-1]
1323 if not v:
1323 if not v:
1324 raise UnknownCommand(name)
1324 raise UnknownCommand(name)
1325
1325
1326 # description
1326 # description
1327 doc = help.helptable[v]
1327 doc = help.helptable[v]
1328 if not doc:
1328 if not doc:
1329 doc = _("(No help text available)")
1329 doc = _("(No help text available)")
1330 if callable(doc):
1330 if callable(doc):
1331 doc = doc()
1331 doc = doc()
1332
1332
1333 ui.write("%s\n" % header)
1333 ui.write("%s\n" % header)
1334 ui.write("%s\n" % doc.rstrip())
1334 ui.write("%s\n" % doc.rstrip())
1335
1335
1336 def helpext(name):
1336 def helpext(name):
1337 try:
1337 try:
1338 mod = findext(name)
1338 mod = findext(name)
1339 except KeyError:
1339 except KeyError:
1340 raise UnknownCommand(name)
1340 raise UnknownCommand(name)
1341
1341
1342 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1342 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1343 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1343 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1344 for d in doc[1:]:
1344 for d in doc[1:]:
1345 ui.write(d, '\n')
1345 ui.write(d, '\n')
1346
1346
1347 ui.status('\n')
1347 ui.status('\n')
1348 if ui.verbose:
1348 if ui.verbose:
1349 ui.status(_('list of commands:\n\n'))
1349 ui.status(_('list of commands:\n\n'))
1350 else:
1350 else:
1351 ui.status(_('list of commands (use "hg help -v %s" '
1351 ui.status(_('list of commands (use "hg help -v %s" '
1352 'to show aliases and global options):\n\n') % name)
1352 'to show aliases and global options):\n\n') % name)
1353
1353
1354 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1354 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1355 helplist(modcmds.has_key)
1355 helplist(modcmds.has_key)
1356
1356
1357 if name and name != 'shortlist':
1357 if name and name != 'shortlist':
1358 i = None
1358 i = None
1359 for f in (helpcmd, helptopic, helpext):
1359 for f in (helpcmd, helptopic, helpext):
1360 try:
1360 try:
1361 f(name)
1361 f(name)
1362 i = None
1362 i = None
1363 break
1363 break
1364 except UnknownCommand, inst:
1364 except UnknownCommand, inst:
1365 i = inst
1365 i = inst
1366 if i:
1366 if i:
1367 raise i
1367 raise i
1368
1368
1369 else:
1369 else:
1370 # program name
1370 # program name
1371 if ui.verbose or with_version:
1371 if ui.verbose or with_version:
1372 version_(ui)
1372 version_(ui)
1373 else:
1373 else:
1374 ui.status(_("Mercurial Distributed SCM\n"))
1374 ui.status(_("Mercurial Distributed SCM\n"))
1375 ui.status('\n')
1375 ui.status('\n')
1376
1376
1377 # list of commands
1377 # list of commands
1378 if name == "shortlist":
1378 if name == "shortlist":
1379 ui.status(_('basic commands (use "hg help" '
1379 ui.status(_('basic commands (use "hg help" '
1380 'for the full list or option "-v" for details):\n\n'))
1380 'for the full list or option "-v" for details):\n\n'))
1381 elif ui.verbose:
1381 elif ui.verbose:
1382 ui.status(_('list of commands:\n\n'))
1382 ui.status(_('list of commands:\n\n'))
1383 else:
1383 else:
1384 ui.status(_('list of commands (use "hg help -v" '
1384 ui.status(_('list of commands (use "hg help -v" '
1385 'to show aliases and global options):\n\n'))
1385 'to show aliases and global options):\n\n'))
1386
1386
1387 helplist()
1387 helplist()
1388
1388
1389 # global options
1389 # global options
1390 if ui.verbose:
1390 if ui.verbose:
1391 option_lists.append(("global options", globalopts))
1391 option_lists.append(("global options", globalopts))
1392
1392
1393 # list all option lists
1393 # list all option lists
1394 opt_output = []
1394 opt_output = []
1395 for title, options in option_lists:
1395 for title, options in option_lists:
1396 opt_output.append(("\n%s:\n" % title, None))
1396 opt_output.append(("\n%s:\n" % title, None))
1397 for shortopt, longopt, default, desc in options:
1397 for shortopt, longopt, default, desc in options:
1398 if "DEPRECATED" in desc and not ui.verbose: continue
1398 if "DEPRECATED" in desc and not ui.verbose: continue
1399 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1399 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1400 longopt and " --%s" % longopt),
1400 longopt and " --%s" % longopt),
1401 "%s%s" % (desc,
1401 "%s%s" % (desc,
1402 default
1402 default
1403 and _(" (default: %s)") % default
1403 and _(" (default: %s)") % default
1404 or "")))
1404 or "")))
1405
1405
1406 if opt_output:
1406 if opt_output:
1407 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1407 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1408 for first, second in opt_output:
1408 for first, second in opt_output:
1409 if second:
1409 if second:
1410 ui.write(" %-*s %s\n" % (opts_len, first, second))
1410 ui.write(" %-*s %s\n" % (opts_len, first, second))
1411 else:
1411 else:
1412 ui.write("%s\n" % first)
1412 ui.write("%s\n" % first)
1413
1413
1414 def identify(ui, repo):
1414 def identify(ui, repo):
1415 """print information about the working copy
1415 """print information about the working copy
1416
1416
1417 Print a short summary of the current state of the repo.
1417 Print a short summary of the current state of the repo.
1418
1418
1419 This summary identifies the repository state using one or two parent
1419 This summary identifies the repository state using one or two parent
1420 hash identifiers, followed by a "+" if there are uncommitted changes
1420 hash identifiers, followed by a "+" if there are uncommitted changes
1421 in the working directory, followed by a list of tags for this revision.
1421 in the working directory, followed by a list of tags for this revision.
1422 """
1422 """
1423 parents = [p for p in repo.dirstate.parents() if p != nullid]
1423 parents = [p for p in repo.dirstate.parents() if p != nullid]
1424 if not parents:
1424 if not parents:
1425 ui.write(_("unknown\n"))
1425 ui.write(_("unknown\n"))
1426 return
1426 return
1427
1427
1428 hexfunc = ui.debugflag and hex or short
1428 hexfunc = ui.debugflag and hex or short
1429 modified, added, removed, deleted = repo.status()[:4]
1429 modified, added, removed, deleted = repo.status()[:4]
1430 output = ["%s%s" %
1430 output = ["%s%s" %
1431 ('+'.join([hexfunc(parent) for parent in parents]),
1431 ('+'.join([hexfunc(parent) for parent in parents]),
1432 (modified or added or removed or deleted) and "+" or "")]
1432 (modified or added or removed or deleted) and "+" or "")]
1433
1433
1434 if not ui.quiet:
1434 if not ui.quiet:
1435
1435
1436 branch = repo.workingctx().branch()
1436 branch = util.tolocal(repo.workingctx().branch())
1437 if branch:
1437 if branch:
1438 output.append("(%s)" % branch)
1438 output.append("(%s)" % branch)
1439
1439
1440 # multiple tags for a single parent separated by '/'
1440 # multiple tags for a single parent separated by '/'
1441 parenttags = ['/'.join(tags)
1441 parenttags = ['/'.join(tags)
1442 for tags in map(repo.nodetags, parents) if tags]
1442 for tags in map(repo.nodetags, parents) if tags]
1443 # tags for multiple parents separated by ' + '
1443 # tags for multiple parents separated by ' + '
1444 if parenttags:
1444 if parenttags:
1445 output.append(' + '.join(parenttags))
1445 output.append(' + '.join(parenttags))
1446
1446
1447 ui.write("%s\n" % ' '.join(output))
1447 ui.write("%s\n" % ' '.join(output))
1448
1448
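# Hypothetical output sketch (editor's illustration; the hash is made up):
# for a working directory with one parent, uncommitted changes, the "default"
# branch and the "tip" tag, identify would print something like
#
#   a1b2c3d4e5f6+ (default) tip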
1449 def import_(ui, repo, patch1, *patches, **opts):
1449 def import_(ui, repo, patch1, *patches, **opts):
1450 """import an ordered set of patches
1450 """import an ordered set of patches
1451
1451
1452 Import a list of patches and commit them individually.
1452 Import a list of patches and commit them individually.
1453
1453
1454 If there are outstanding changes in the working directory, import
1454 If there are outstanding changes in the working directory, import
1455 will abort unless given the -f flag.
1455 will abort unless given the -f flag.
1456
1456
1457 You can import a patch straight from a mail message. Even patches
1457 You can import a patch straight from a mail message. Even patches
1458 as attachments work (body part must be type text/plain or
1458 as attachments work (body part must be type text/plain or
1459 text/x-patch to be used). The From and Subject headers of the email
1459 text/x-patch to be used). The From and Subject headers of the email
1460 message are used as the default committer and commit message. All
1460 message are used as the default committer and commit message. All
1461 text/plain body parts before the first diff are added to the commit
1461 text/plain body parts before the first diff are added to the commit
1462 message.
1462 message.
1463
1463
1464 If the imported patch was generated by hg export, the user and
1464 If the imported patch was generated by hg export, the user and
1465 description from the patch override the values from the message headers
1465 description from the patch override the values from the message headers
1466 and body. Values given on the command line with -m and -u override these.
1466 and body. Values given on the command line with -m and -u override these.
1467
1467
1468 To read a patch from standard input, use patch name "-".
1468 To read a patch from standard input, use patch name "-".
1469 """
1469 """
1470 patches = (patch1,) + patches
1470 patches = (patch1,) + patches
1471
1471
1472 if not opts['force']:
1472 if not opts['force']:
1473 bail_if_changed(repo)
1473 bail_if_changed(repo)
1474
1474
1475 d = opts["base"]
1475 d = opts["base"]
1476 strip = opts["strip"]
1476 strip = opts["strip"]
1477
1477
1478 wlock = repo.wlock()
1478 wlock = repo.wlock()
1479 lock = repo.lock()
1479 lock = repo.lock()
1480
1480
1481 for p in patches:
1481 for p in patches:
1482 pf = os.path.join(d, p)
1482 pf = os.path.join(d, p)
1483
1483
1484 if pf == '-':
1484 if pf == '-':
1485 ui.status(_("applying patch from stdin\n"))
1485 ui.status(_("applying patch from stdin\n"))
1486 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1486 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1487 else:
1487 else:
1488 ui.status(_("applying %s\n") % p)
1488 ui.status(_("applying %s\n") % p)
1489 tmpname, message, user, date = patch.extract(ui, file(pf))
1489 tmpname, message, user, date = patch.extract(ui, file(pf))
1490
1490
1491 if tmpname is None:
1491 if tmpname is None:
1492 raise util.Abort(_('no diffs found'))
1492 raise util.Abort(_('no diffs found'))
1493
1493
1494 try:
1494 try:
1495 cmdline_message = logmessage(opts)
1495 cmdline_message = logmessage(opts)
1496 if cmdline_message:
1496 if cmdline_message:
1497 # pickup the cmdline msg
1497 # pickup the cmdline msg
1498 message = cmdline_message
1498 message = cmdline_message
1499 elif message:
1499 elif message:
1500 # pickup the patch msg
1500 # pickup the patch msg
1501 message = message.strip()
1501 message = message.strip()
1502 else:
1502 else:
1503 # launch the editor
1503 # launch the editor
1504 message = None
1504 message = None
1505 ui.debug(_('message:\n%s\n') % message)
1505 ui.debug(_('message:\n%s\n') % message)
1506
1506
1507 files = {}
1507 files = {}
1508 try:
1508 try:
1509 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1509 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1510 files=files)
1510 files=files)
1511 finally:
1511 finally:
1512 files = patch.updatedir(ui, repo, files, wlock=wlock)
1512 files = patch.updatedir(ui, repo, files, wlock=wlock)
1513 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1513 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1514 finally:
1514 finally:
1515 os.unlink(tmpname)
1515 os.unlink(tmpname)
1516
1516
1517 def incoming(ui, repo, source="default", **opts):
1517 def incoming(ui, repo, source="default", **opts):
1518 """show new changesets found in source
1518 """show new changesets found in source
1519
1519
1520 Show new changesets found in the specified path/URL or the default
1520 Show new changesets found in the specified path/URL or the default
1521 pull location. These are the changesets that would be pulled if a pull
1521 pull location. These are the changesets that would be pulled if a pull
1522 was requested.
1522 was requested.
1523
1523
1524 For a remote repository, using --bundle avoids downloading the changesets
1524 For a remote repository, using --bundle avoids downloading the changesets
1525 twice if the incoming command is followed by a pull.
1525 twice if the incoming command is followed by a pull.
1526
1526
1527 See pull for valid source format details.
1527 See pull for valid source format details.
1528 """
1528 """
1529 source = ui.expandpath(source)
1529 source = ui.expandpath(source)
1530 setremoteconfig(ui, opts)
1530 setremoteconfig(ui, opts)
1531
1531
1532 other = hg.repository(ui, source)
1532 other = hg.repository(ui, source)
1533 incoming = repo.findincoming(other, force=opts["force"])
1533 incoming = repo.findincoming(other, force=opts["force"])
1534 if not incoming:
1534 if not incoming:
1535 ui.status(_("no changes found\n"))
1535 ui.status(_("no changes found\n"))
1536 return
1536 return
1537
1537
1538 cleanup = None
1538 cleanup = None
1539 try:
1539 try:
1540 fname = opts["bundle"]
1540 fname = opts["bundle"]
1541 if fname or not other.local():
1541 if fname or not other.local():
1542 # create a bundle (uncompressed if other repo is not local)
1542 # create a bundle (uncompressed if other repo is not local)
1543 cg = other.changegroup(incoming, "incoming")
1543 cg = other.changegroup(incoming, "incoming")
1544 bundletype = other.local() and "HG10BZ" or "HG10UN"
1544 bundletype = other.local() and "HG10BZ" or "HG10UN"
1545 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1545 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1546 # keep written bundle?
1546 # keep written bundle?
1547 if opts["bundle"]:
1547 if opts["bundle"]:
1548 cleanup = None
1548 cleanup = None
1549 if not other.local():
1549 if not other.local():
1550 # use the created uncompressed bundlerepo
1550 # use the created uncompressed bundlerepo
1551 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1551 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1552
1552
1553 revs = None
1553 revs = None
1554 if opts['rev']:
1554 if opts['rev']:
1555 revs = [other.lookup(rev) for rev in opts['rev']]
1555 revs = [other.lookup(rev) for rev in opts['rev']]
1556 o = other.changelog.nodesbetween(incoming, revs)[0]
1556 o = other.changelog.nodesbetween(incoming, revs)[0]
1557 if opts['newest_first']:
1557 if opts['newest_first']:
1558 o.reverse()
1558 o.reverse()
1559 displayer = cmdutil.show_changeset(ui, other, opts)
1559 displayer = cmdutil.show_changeset(ui, other, opts)
1560 for n in o:
1560 for n in o:
1561 parents = [p for p in other.changelog.parents(n) if p != nullid]
1561 parents = [p for p in other.changelog.parents(n) if p != nullid]
1562 if opts['no_merges'] and len(parents) == 2:
1562 if opts['no_merges'] and len(parents) == 2:
1563 continue
1563 continue
1564 displayer.show(changenode=n)
1564 displayer.show(changenode=n)
1565 finally:
1565 finally:
1566 if hasattr(other, 'close'):
1566 if hasattr(other, 'close'):
1567 other.close()
1567 other.close()
1568 if cleanup:
1568 if cleanup:
1569 os.unlink(cleanup)
1569 os.unlink(cleanup)
1570
1570
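# Hypothetical usage sketch (editor's illustration of the --bundle note above;
# file name and URL are made up):
#
#   $ hg incoming --bundle incoming.hg http://example.com/repo
#   $ hg pull incoming.hg    # reuses the changesets already downloaded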
1571 def init(ui, dest=".", **opts):
1571 def init(ui, dest=".", **opts):
1572 """create a new repository in the given directory
1572 """create a new repository in the given directory
1573
1573
1574 Initialize a new repository in the given directory. If the given
1574 Initialize a new repository in the given directory. If the given
1575 directory does not exist, it is created.
1575 directory does not exist, it is created.
1576
1576
1577 If no directory is given, the current directory is used.
1577 If no directory is given, the current directory is used.
1578
1578
1579 It is possible to specify an ssh:// URL as the destination.
1579 It is possible to specify an ssh:// URL as the destination.
1580 Look at the help text for the pull command for important details
1580 Look at the help text for the pull command for important details
1581 about ssh:// URLs.
1581 about ssh:// URLs.
1582 """
1582 """
1583 setremoteconfig(ui, opts)
1583 setremoteconfig(ui, opts)
1584 hg.repository(ui, dest, create=1)
1584 hg.repository(ui, dest, create=1)
1585
1585
1586 def locate(ui, repo, *pats, **opts):
1586 def locate(ui, repo, *pats, **opts):
1587 """locate files matching specific patterns
1587 """locate files matching specific patterns
1588
1588
1589 Print all files under Mercurial control whose names match the
1589 Print all files under Mercurial control whose names match the
1590 given patterns.
1590 given patterns.
1591
1591
1592 This command searches the current directory and its
1592 This command searches the current directory and its
1593 subdirectories. To search an entire repository, move to the root
1593 subdirectories. To search an entire repository, move to the root
1594 of the repository.
1594 of the repository.
1595
1595
1596 If no patterns are given to match, this command prints all file
1596 If no patterns are given to match, this command prints all file
1597 names.
1597 names.
1598
1598
1599 If you want to feed the output of this command into the "xargs"
1599 If you want to feed the output of this command into the "xargs"
1600 command, use the "-0" option to both this command and "xargs".
1600 command, use the "-0" option to both this command and "xargs".
1601 This will avoid the problem of "xargs" treating single filenames
1601 This will avoid the problem of "xargs" treating single filenames
1602 that contain white space as multiple filenames.
1602 that contain white space as multiple filenames.
1603 """
1603 """
1604 end = opts['print0'] and '\0' or '\n'
1604 end = opts['print0'] and '\0' or '\n'
1605 rev = opts['rev']
1605 rev = opts['rev']
1606 if rev:
1606 if rev:
1607 node = repo.lookup(rev)
1607 node = repo.lookup(rev)
1608 else:
1608 else:
1609 node = None
1609 node = None
1610
1610
1611 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1611 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1612 head='(?:.*/|)'):
1612 head='(?:.*/|)'):
1613 if not node and repo.dirstate.state(abs) == '?':
1613 if not node and repo.dirstate.state(abs) == '?':
1614 continue
1614 continue
1615 if opts['fullpath']:
1615 if opts['fullpath']:
1616 ui.write(os.path.join(repo.root, abs), end)
1616 ui.write(os.path.join(repo.root, abs), end)
1617 else:
1617 else:
1618 ui.write(((pats and rel) or abs), end)
1618 ui.write(((pats and rel) or abs), end)
1619
1619
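# Hypothetical usage sketch (editor's illustration of the -0/xargs advice in
# the docstring above; the pattern is made up):
#
#   $ hg locate -0 '*.py' | xargs -0 grep -l "TODO"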
1620 def log(ui, repo, *pats, **opts):
1620 def log(ui, repo, *pats, **opts):
1621 """show revision history of entire repository or files
1621 """show revision history of entire repository or files
1622
1622
1623 Print the revision history of the specified files or the entire
1623 Print the revision history of the specified files or the entire
1624 project.
1624 project.
1625
1625
1626 File history is shown without following rename or copy history of
1626 File history is shown without following rename or copy history of
1627 files. Use -f/--follow with a file name to follow history across
1627 files. Use -f/--follow with a file name to follow history across
1628 renames and copies. --follow without a file name will only show
1628 renames and copies. --follow without a file name will only show
1629 ancestors or descendants of the starting revision. --follow-first
1629 ancestors or descendants of the starting revision. --follow-first
1630 only follows the first parent of merge revisions.
1630 only follows the first parent of merge revisions.
1631
1631
1632 If no revision range is specified, the default is tip:0 unless
1632 If no revision range is specified, the default is tip:0 unless
1633 --follow is set, in which case the working directory parent is
1633 --follow is set, in which case the working directory parent is
1634 used as the starting revision.
1634 used as the starting revision.
1635
1635
1636 By default this command outputs: changeset id and hash, tags,
1636 By default this command outputs: changeset id and hash, tags,
1637 non-trivial parents, user, date and time, and a summary for each
1637 non-trivial parents, user, date and time, and a summary for each
1638 commit. When the -v/--verbose switch is used, the list of changed
1638 commit. When the -v/--verbose switch is used, the list of changed
1639 files and full commit message is shown.
1639 files and full commit message is shown.
1640
1640
1641 NOTE: log -p may generate unexpected diff output for merge
1641 NOTE: log -p may generate unexpected diff output for merge
1642 changesets, as it will compare the merge changeset against its
1642 changesets, as it will compare the merge changeset against its
1643 first parent only. Also, the files: list will only reflect files
1643 first parent only. Also, the files: list will only reflect files
1644 that are different from BOTH parents.
1644 that are different from BOTH parents.
1645
1645
1646 """
1646 """
1647
1647
1648 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1648 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1649 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1649 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1650
1650
1651 if opts['limit']:
1651 if opts['limit']:
1652 try:
1652 try:
1653 limit = int(opts['limit'])
1653 limit = int(opts['limit'])
1654 except ValueError:
1654 except ValueError:
1655 raise util.Abort(_('limit must be a positive integer'))
1655 raise util.Abort(_('limit must be a positive integer'))
1656 if limit <= 0: raise util.Abort(_('limit must be positive'))
1656 if limit <= 0: raise util.Abort(_('limit must be positive'))
1657 else:
1657 else:
1658 limit = sys.maxint
1658 limit = sys.maxint
1659 count = 0
1659 count = 0
1660
1660
1661 if opts['copies'] and opts['rev']:
1661 if opts['copies'] and opts['rev']:
1662 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1662 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1663 else:
1663 else:
1664 endrev = repo.changelog.count()
1664 endrev = repo.changelog.count()
1665 rcache = {}
1665 rcache = {}
1666 ncache = {}
1666 ncache = {}
1667 dcache = []
1667 dcache = []
1668 def getrenamed(fn, rev, man):
1668 def getrenamed(fn, rev, man):
1669 '''looks up all renames for a file (up to endrev) the first
1669 '''looks up all renames for a file (up to endrev) the first
1670 time the file is given. It indexes on the changerev and only
1670 time the file is given. It indexes on the changerev and only
1671 parses the manifest if linkrev != changerev.
1671 parses the manifest if linkrev != changerev.
1672 Returns rename info for fn at changerev rev.'''
1672 Returns rename info for fn at changerev rev.'''
1673 if fn not in rcache:
1673 if fn not in rcache:
1674 rcache[fn] = {}
1674 rcache[fn] = {}
1675 ncache[fn] = {}
1675 ncache[fn] = {}
1676 fl = repo.file(fn)
1676 fl = repo.file(fn)
1677 for i in xrange(fl.count()):
1677 for i in xrange(fl.count()):
1678 node = fl.node(i)
1678 node = fl.node(i)
1679 lr = fl.linkrev(node)
1679 lr = fl.linkrev(node)
1680 renamed = fl.renamed(node)
1680 renamed = fl.renamed(node)
1681 rcache[fn][lr] = renamed
1681 rcache[fn][lr] = renamed
1682 if renamed:
1682 if renamed:
1683 ncache[fn][node] = renamed
1683 ncache[fn][node] = renamed
1684 if lr >= endrev:
1684 if lr >= endrev:
1685 break
1685 break
1686 if rev in rcache[fn]:
1686 if rev in rcache[fn]:
1687 return rcache[fn][rev]
1687 return rcache[fn][rev]
1688 mr = repo.manifest.rev(man)
1688 mr = repo.manifest.rev(man)
1689 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1689 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1690 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1690 return ncache[fn].get(repo.manifest.find(man, fn)[0])
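# dcache caches the readdelta() of the most recently seen manifest, so
# consecutive files from the same changeset reuse a single delta lookup.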
1691 if not dcache or dcache[0] != man:
1691 if not dcache or dcache[0] != man:
1692 dcache[:] = [man, repo.manifest.readdelta(man)]
1692 dcache[:] = [man, repo.manifest.readdelta(man)]
1693 if fn in dcache[1]:
1693 if fn in dcache[1]:
1694 return ncache[fn].get(dcache[1][fn])
1694 return ncache[fn].get(dcache[1][fn])
1695 return None
1695 return None
1696
1696
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3][:20]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1

def manifest(ui, repo, rev=None):
    """output the latest or given revision of the project manifest

    Print a list of version controlled files for the given revision.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)

def merge(ui, repo, node=None, force=None, branch=None):
    """Merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if node or branch:
        node = _lookup(repo, node, branch)
    else:
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)

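# Note on merge()'s head selection: when the repository has exactly two heads
# and the working directory parent is one of them, the expression
# 'parent == heads[0] and heads[-1] or heads[0]' simply picks the other head
# as the merge target.
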
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if rev:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))

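# Illustrative only -- a [paths] section such as the following (names and
# URLs are made up) is what paths() reads back via ui.configitems("paths"):
#
#   [paths]
#   default      = http://hg.example.com/project
#   default-push = ssh://user@hg.example.com//srv/hg/project
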
def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with the --remotecmd
      option.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

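# Rough sketch, kept as a comment on purpose: the core of pull() above boils
# down to the following calls, all of which appear in this module ('ui' and
# 'repo' are assumed to be an existing ui object and local repository, and
# the URL is made up):
#
#   other = hg.repository(ui, "http://hg.example.com/project")
#   modheads = repo.pull(other, heads=None, force=False)
#   postincoming(ui, repo, modheads, optupdate=True)
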
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release, please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    node = repo.changectx(opts['rev']).node()
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )
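    # Reading one row of the table above: a file in 'modified' is reverted
    # (with a .orig backup) if it still exists in the target manifest, and
    # removed (also backed up) if it does not; a 'removed' file is undeleted
    # only when the target manifest still contains it, and so on.
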
    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    httpd = hgweb.server.create_server(ui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

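# How the --daemon plumbing in serve() fits together: the parent process
# creates a pipe, re-executes itself with --daemon-pipefds, and blocks in
# os.read(rfd, 1) until the detached child has created its listening server
# and written 'y' to the pipe; only then does the parent _exit(0), so
# "hg serve -d" does not return before the server is ready.
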
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(node1=node1, node2=node2, files=files,
                             match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write('  %s%s' % (copied, end))

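# Illustrative sample of status() output (file names made up); with
# --no-status the leading status column is omitted, and with --print0 the
# newlines become NUL separators:
#
#   M hgext/example.py
#   A tests/test-example
#   R old-module.py
#   ? scratch.txt
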
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    message = opts['message']
    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])

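# For example (illustrative), 'hg tag -r 42 v1.0' reaches tag() with
# name='v1.0' and opts['rev']='42', and records the tag in .hgtags with the
# default message "Added tag v1.0 for changeset <short hash>" unless -m
# supplies one.
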
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except KeyError:
            r = "    ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            t = util.localsub(t, 30)
            t += " " * (30 - util.locallen(t))
            ui.write("%s %s\n" % (t, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    gen = changegroup.readbundle(urllib.urlopen(fname), fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

def update(ui, repo, node=None, clean=False, branch=None, date=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    if date:
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    node = _lookup(repo, node, branch)
    if clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

def _lookup(repo, node, branch=None):
    if branch:
        repo.ui.warn(_("the --branch option is deprecated, "
                       "please use 'hg branch' instead\n"))
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                cmdutil.show_changeset(repo.ui, repo, {}).show(changenode=x)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found") % branch)
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return node

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

2577 dryrunopts = [('n', 'dry-run', None,
2577 dryrunopts = [('n', 'dry-run', None,
2578 _('do not perform actions, just print output'))]
2578 _('do not perform actions, just print output'))]
2579
2579
2580 remoteopts = [
2580 remoteopts = [
2581 ('e', 'ssh', '', _('specify ssh command to use')),
2581 ('e', 'ssh', '', _('specify ssh command to use')),
2582 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2582 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2583 ]
2583 ]
2584
2584
2585 walkopts = [
2585 walkopts = [
2586 ('I', 'include', [], _('include names matching the given patterns')),
2586 ('I', 'include', [], _('include names matching the given patterns')),
2587 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2587 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2588 ]
2588 ]
2589
2589
2590 commitopts = [
2590 commitopts = [
2591 ('m', 'message', '', _('use <text> as commit message')),
2591 ('m', 'message', '', _('use <text> as commit message')),
2592 ('l', 'logfile', '', _('read commit message from <file>')),
2592 ('l', 'logfile', '', _('read commit message from <file>')),
2593 ]
2593 ]
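# The option lists above all follow the same fancyopts tuple convention:
# (short name, long name, default value, help text).  A default of None
# gives a boolean flag, a string default takes a single value, and a list
# default may be given repeatedly on the command line.  A hypothetical
# composed list, reusing the shared groups defined above, would look like:
#
#   exampleopts = [
#       ('s', 'summary', None, _('hypothetical boolean flag')),
#       ('o', 'output', '', _('hypothetical option taking one value')),
#   ] + walkopts + dryrunopts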
2594
2595 table = {
2596 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2597 "addremove":
2598 (addremove,
2599 [('s', 'similarity', '',
2600 _('guess renamed files by similarity (0<=s<=100)')),
2601 ] + walkopts + dryrunopts,
2602 _('hg addremove [OPTION]... [FILE]...')),
2603 "^annotate":
2604 (annotate,
2605 [('r', 'rev', '', _('annotate the specified revision')),
2606 ('f', 'follow', None, _('follow file copies and renames')),
2607 ('a', 'text', None, _('treat all files as text')),
2608 ('u', 'user', None, _('list the author')),
2609 ('d', 'date', None, _('list the date')),
2610 ('n', 'number', None, _('list the revision number (default)')),
2611 ('c', 'changeset', None, _('list the changeset')),
2612 ] + walkopts,
2613 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2614 "archive":
2615 (archive,
2616 [('', 'no-decode', None, _('do not pass files through decoders')),
2617 ('p', 'prefix', '', _('directory prefix for files in archive')),
2618 ('r', 'rev', '', _('revision to distribute')),
2619 ('t', 'type', '', _('type of distribution to create')),
2620 ] + walkopts,
2621 _('hg archive [OPTION]... DEST')),
2622 "backout":
2623 (backout,
2624 [('', 'merge', None,
2625 _('merge with old dirstate parent after backout')),
2626 ('d', 'date', '', _('record datecode as commit date')),
2627 ('', 'parent', '', _('parent to choose when backing out merge')),
2628 ('u', 'user', '', _('record user as committer')),
2629 ] + walkopts + commitopts,
2630 _('hg backout [OPTION]... REV')),
2631 "branch": (branch, [], _('hg branch [NAME]')),
2632 "branches": (branches, [], _('hg branches')),
2633 "bundle":
2634 (bundle,
2635 [('f', 'force', None,
2636 _('run even when remote repository is unrelated')),
2637 ('r', 'rev', [],
2638 _('a changeset you would like to bundle')),
2639 ('', 'base', [],
2640 _('a base changeset to specify instead of a destination')),
2641 ] + remoteopts,
2642 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2643 "cat":
2644 (cat,
2645 [('o', 'output', '', _('print output to file with formatted name')),
2646 ('r', 'rev', '', _('print the given revision')),
2647 ] + walkopts,
2648 _('hg cat [OPTION]... FILE...')),
2649 "^clone":
2650 (clone,
2651 [('U', 'noupdate', None, _('do not update the new working directory')),
2652 ('r', 'rev', [],
2653 _('a changeset you would like to have after cloning')),
2654 ('', 'pull', None, _('use pull protocol to copy metadata')),
2655 ('', 'uncompressed', None,
2656 _('use uncompressed transfer (fast over LAN)')),
2657 ] + remoteopts,
2658 _('hg clone [OPTION]... SOURCE [DEST]')),
2659 "^commit|ci":
2660 (commit,
2661 [('A', 'addremove', None,
2662 _('mark new/missing files as added/removed before committing')),
2663 ('d', 'date', '', _('record datecode as commit date')),
2664 ('u', 'user', '', _('record user as committer')),
2665 ] + walkopts + commitopts,
2666 _('hg commit [OPTION]... [FILE]...')),
2667 "copy|cp":
2668 (copy,
2669 [('A', 'after', None, _('record a copy that has already occurred')),
2670 ('f', 'force', None,
2671 _('forcibly copy over an existing managed file')),
2672 ] + walkopts + dryrunopts,
2673 _('hg copy [OPTION]... [SOURCE]... DEST')),
2674 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2675 "debugcomplete":
2676 (debugcomplete,
2677 [('o', 'options', None, _('show the command options'))],
2678 _('debugcomplete [-o] CMD')),
2679 "debuginstall": (debuginstall, [], _('debuginstall')),
2680 "debugrebuildstate":
2681 (debugrebuildstate,
2682 [('r', 'rev', '', _('revision to rebuild to'))],
2683 _('debugrebuildstate [-r REV] [REV]')),
2684 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2685 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2686 "debugstate": (debugstate, [], _('debugstate')),
2687 "debugdate":
2688 (debugdate,
2689 [('e', 'extended', None, _('try extended date formats'))],
2690 _('debugdate [-e] DATE [RANGE]')),
2691 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2692 "debugindex": (debugindex, [], _('debugindex FILE')),
2693 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2694 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2695 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2696 "^diff":
2697 (diff,
2698 [('r', 'rev', [], _('revision')),
2699 ('a', 'text', None, _('treat all files as text')),
2700 ('p', 'show-function', None,
2701 _('show which function each change is in')),
2702 ('g', 'git', None, _('use git extended diff format')),
2703 ('', 'nodates', None, _("don't include dates in diff headers")),
2704 ('w', 'ignore-all-space', None,
2705 _('ignore white space when comparing lines')),
2706 ('b', 'ignore-space-change', None,
2707 _('ignore changes in the amount of white space')),
2708 ('B', 'ignore-blank-lines', None,
2709 _('ignore changes whose lines are all blank')),
2710 ] + walkopts,
2711 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2712 "^export":
2713 (export,
2714 [('o', 'output', '', _('print output to file with formatted name')),
2715 ('a', 'text', None, _('treat all files as text')),
2716 ('g', 'git', None, _('use git extended diff format')),
2717 ('', 'nodates', None, _("don't include dates in diff headers")),
2718 ('', 'switch-parent', None, _('diff against the second parent'))],
2719 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2720 "grep":
2721 (grep,
2722 [('0', 'print0', None, _('end fields with NUL')),
2723 ('', 'all', None, _('print all revisions that match')),
2724 ('f', 'follow', None,
2725 _('follow changeset history, or file history across copies and renames')),
2726 ('i', 'ignore-case', None, _('ignore case when matching')),
2727 ('l', 'files-with-matches', None,
2728 _('print only filenames and revs that match')),
2729 ('n', 'line-number', None, _('print matching line numbers')),
2730 ('r', 'rev', [], _('search in given revision range')),
2731 ('u', 'user', None, _('print user who committed change')),
2732 ] + walkopts,
2733 _('hg grep [OPTION]... PATTERN [FILE]...')),
2734 "heads":
2735 (heads,
2736 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2737 ('', 'style', '', _('display using template map file')),
2738 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2739 ('', 'template', '', _('display with template'))],
2740 _('hg heads [-r REV]')),
2741 "help": (help_, [], _('hg help [COMMAND]')),
2742 "identify|id": (identify, [], _('hg identify')),
2743 "import|patch":
2744 (import_,
2745 [('p', 'strip', 1,
2746 _('directory strip option for patch. This has the same\n'
2747 'meaning as the corresponding patch option')),
2748 ('b', 'base', '', _('base path (DEPRECATED)')),
2749 ('f', 'force', None,
2750 _('skip check for outstanding uncommitted changes'))] + commitopts,
2751 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2752 "incoming|in": (incoming,
2753 [('M', 'no-merges', None, _('do not show merges')),
2754 ('f', 'force', None,
2755 _('run even when remote repository is unrelated')),
2756 ('', 'style', '', _('display using template map file')),
2757 ('n', 'newest-first', None, _('show newest record first')),
2758 ('', 'bundle', '', _('file to store the bundles into')),
2759 ('p', 'patch', None, _('show patch')),
2760 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2761 ('', 'template', '', _('display with template')),
2762 ] + remoteopts,
2763 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2764 ' [--bundle FILENAME] [SOURCE]')),
2765 "^init":
2766 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2767 "locate":
2768 (locate,
2769 [('r', 'rev', '', _('search the repository as it stood at rev')),
2770 ('0', 'print0', None,
2771 _('end filenames with NUL, for use with xargs')),
2772 ('f', 'fullpath', None,
2773 _('print complete paths from the filesystem root')),
2774 ] + walkopts,
2775 _('hg locate [OPTION]... [PATTERN]...')),
2776 "^log|history":
2777 (log,
2778 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2779 ('f', 'follow', None,
2780 _('follow changeset history, or file history across copies and renames')),
2781 ('', 'follow-first', None,
2782 _('only follow the first parent of merge changesets')),
2783 ('d', 'date', '', _('show revs matching date spec')),
2784 ('C', 'copies', None, _('show copied files')),
2785 ('k', 'keyword', [], _('search for a keyword')),
2786 ('l', 'limit', '', _('limit number of changes displayed')),
2787 ('r', 'rev', [], _('show the specified revision or range')),
2788 ('', 'removed', None, _('include revs where files were removed')),
2789 ('M', 'no-merges', None, _('do not show merges')),
2790 ('', 'style', '', _('display using template map file')),
2791 ('m', 'only-merges', None, _('show only merges')),
2792 ('p', 'patch', None, _('show patch')),
2793 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2794 ('', 'template', '', _('display with template')),
2795 ] + walkopts,
2796 _('hg log [OPTION]... [FILE]')),
2797 "manifest": (manifest, [], _('hg manifest [REV]')),
2798 "merge":
2799 (merge,
2800 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2801 ('f', 'force', None, _('force a merge with outstanding changes'))],
2802 _('hg merge [-f] [REV]')),
2803 "outgoing|out": (outgoing,
2804 [('M', 'no-merges', None, _('do not show merges')),
2805 ('f', 'force', None,
2806 _('run even when remote repository is unrelated')),
2807 ('p', 'patch', None, _('show patch')),
2808 ('', 'style', '', _('display using template map file')),
2809 ('r', 'rev', [], _('a specific revision you would like to push')),
2810 ('n', 'newest-first', None, _('show newest record first')),
2811 ('', 'template', '', _('display with template')),
2812 ] + remoteopts,
2813 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2814 "^parents":
2815 (parents,
2816 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2817 ('r', 'rev', '', _('show parents from the specified rev')),
2818 ('', 'style', '', _('display using template map file')),
2819 ('', 'template', '', _('display with template'))],
2820 _('hg parents [-r REV] [FILE]')),
2821 "paths": (paths, [], _('hg paths [NAME]')),
2822 "^pull":
2823 (pull,
2824 [('u', 'update', None,
2825 _('update to new tip if changesets were pulled')),
2826 ('f', 'force', None,
2827 _('run even when remote repository is unrelated')),
2828 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2829 ] + remoteopts,
2830 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2831 "^push":
2832 (push,
2833 [('f', 'force', None, _('force push')),
2834 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 ] + remoteopts,
2836 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2837 "debugrawcommit|rawcommit":
2838 (rawcommit,
2839 [('p', 'parent', [], _('parent')),
2840 ('d', 'date', '', _('date code')),
2841 ('u', 'user', '', _('user')),
2842 ('F', 'files', '', _('file list'))
2843 ] + commitopts,
2844 _('hg debugrawcommit [OPTION]... [FILE]...')),
2845 "recover": (recover, [], _('hg recover')),
2846 "^remove|rm":
2847 (remove,
2848 [('A', 'after', None, _('record remove that has already occurred')),
2849 ('f', 'force', None, _('remove file even if modified')),
2850 ] + walkopts,
2851 _('hg remove [OPTION]... FILE...')),
2852 "rename|mv":
2853 (rename,
2854 [('A', 'after', None, _('record a rename that has already occurred')),
2855 ('f', 'force', None,
2856 _('forcibly copy over an existing managed file')),
2857 ] + walkopts + dryrunopts,
2858 _('hg rename [OPTION]... SOURCE... DEST')),
2859 "^revert":
2860 (revert,
2861 [('a', 'all', None, _('revert all changes when no arguments given')),
2862 ('d', 'date', '', _('tipmost revision matching date')),
2863 ('r', 'rev', '', _('revision to revert to')),
2864 ('', 'no-backup', None, _('do not save backup copies of files')),
2865 ] + walkopts + dryrunopts,
2866 _('hg revert [-r REV] [NAME]...')),
2867 "rollback": (rollback, [], _('hg rollback')),
2868 "root": (root, [], _('hg root')),
2869 "showconfig|debugconfig":
2870 (showconfig,
2871 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2872 _('showconfig [-u] [NAME]...')),
2873 "^serve":
2874 (serve,
2875 [('A', 'accesslog', '', _('name of access log file to write to')),
2876 ('d', 'daemon', None, _('run server in background')),
2877 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2878 ('E', 'errorlog', '', _('name of error log file to write to')),
2879 ('p', 'port', 0, _('port to use (default: 8000)')),
2880 ('a', 'address', '', _('address to use')),
2881 ('n', 'name', '',
2882 _('name to show in web pages (default: working dir)')),
2883 ('', 'webdir-conf', '', _('name of the webdir config file'
2884 ' (serve more than one repo)')),
2885 ('', 'pid-file', '', _('name of file to write process ID to')),
2886 ('', 'stdio', None, _('for remote clients')),
2887 ('t', 'templates', '', _('web templates to use')),
2888 ('', 'style', '', _('template style to use')),
2889 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2890 _('hg serve [OPTION]...')),
2891 "^status|st":
2892 (status,
2893 [('A', 'all', None, _('show status of all files')),
2894 ('m', 'modified', None, _('show only modified files')),
2895 ('a', 'added', None, _('show only added files')),
2896 ('r', 'removed', None, _('show only removed files')),
2897 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2898 ('c', 'clean', None, _('show only files without changes')),
2899 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2900 ('i', 'ignored', None, _('show ignored files')),
2901 ('n', 'no-status', None, _('hide status prefix')),
2902 ('C', 'copies', None, _('show source of copied files')),
2903 ('0', 'print0', None,
2904 _('end filenames with NUL, for use with xargs')),
2905 ('', 'rev', [], _('show difference from revision')),
2906 ] + walkopts,
2907 _('hg status [OPTION]... [FILE]...')),
2908 "tag":
2909 (tag,
2910 [('l', 'local', None, _('make the tag local')),
2911 ('m', 'message', '', _('message for tag commit log entry')),
2912 ('d', 'date', '', _('record datecode as commit date')),
2913 ('u', 'user', '', _('record user as committer')),
2914 ('r', 'rev', '', _('revision to tag'))],
2915 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2916 "tags": (tags, [], _('hg tags')),
2917 "tip":
2918 (tip,
2919 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2920 ('', 'style', '', _('display using template map file')),
2921 ('p', 'patch', None, _('show patch')),
2922 ('', 'template', '', _('display with template'))],
2923 _('hg tip [-p]')),
2924 "unbundle":
2925 (unbundle,
2926 [('u', 'update', None,
2927 _('update to new tip if changesets were unbundled'))],
2928 _('hg unbundle [-u] FILE')),
2929 "^update|up|checkout|co":
2930 (update,
2931 [('b', 'branch', '',
2932 _('checkout the head of a specific branch (DEPRECATED)')),
2933 ('C', 'clean', None, _('overwrite locally modified files')),
2934 ('d', 'date', '', _('tipmost revision matching date'))],
2935 _('hg update [-C] [REV]')),
2936 "verify": (verify, [], _('hg verify')),
2937 "version": (version_, [], _('hg version')),
2938 }
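# Shape of the command table above: each key is a command name plus its
# "|"-separated aliases, a leading "^" apparently flags the command for the
# short help list, and each value is a (function, option list, synopsis)
# tuple.  A hypothetical entry, for illustration only:
#
#   "^example|ex":
#       (example,                                    # example(ui, repo, *pats, **opts)
#        [('s', 'summary', None, _('hypothetical flag'))] + walkopts,
#        _('hg example [OPTION]... [FILE]...')),
#
# findpossible() below strips the marker and splits out the aliases:
#   "^example|ex".lstrip("^").split("|")  ->  ['example', 'ex']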
2939
2940 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2941 " debugindex debugindexdot debugdate debuginstall")
2942 optionalrepo = ("paths serve showconfig")
2943
2944 def findpossible(ui, cmd):
2945 """
2946 Return cmd -> (aliases, command table entry)
2947 for each matching command.
2948 Return debug commands (or their aliases) only if no normal command matches.
2949 """
2950 choice = {}
2951 debugchoice = {}
2952 for e in table.keys():
2953 aliases = e.lstrip("^").split("|")
2954 found = None
2955 if cmd in aliases:
2956 found = cmd
2957 elif not ui.config("ui", "strict"):
2958 for a in aliases:
2959 if a.startswith(cmd):
2960 found = a
2961 break
2962 if found is not None:
2963 if aliases[0].startswith("debug") or found.startswith("debug"):
2964 debugchoice[found] = (aliases, table[e])
2965 else:
2966 choice[found] = (aliases, table[e])
2967
2968 if not choice and debugchoice:
2969 choice = debugchoice
2970
2971 return choice
2972
2973 def findcmd(ui, cmd):
2974 """Return (aliases, command table entry) for command string."""
2975 choice = findpossible(ui, cmd)
2976
2977 if choice.has_key(cmd):
2978 return choice[cmd]
2979
2980 if len(choice) > 1:
2981 clist = choice.keys()
2982 clist.sort()
2983 raise AmbiguousCommand(cmd, clist)
2984
2985 if choice:
2986 return choice.values()[0]
2987
2988 raise UnknownCommand(cmd)
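# How findpossible()/findcmd() resolve what the user typed, illustrated
# with a hypothetical session against the table above:
#
#   findcmd(u, "status")  ->  the status entry (exact match)
#   findcmd(u, "st")      ->  the same entry, via the "st" alias
#   findcmd(u, "co")      ->  the update entry: an exact alias ("co") wins
#                             even though "commit", "copy", and "checkout"
#                             also match the prefix
#   findcmd(u, "c")       ->  raises AmbiguousCommand("c", ['cat', 'checkout',
#                             'clone', 'commit', 'copy'])
#   findcmd(u, "debugst") ->  the debugstate entry; debug commands are only
#                             considered when no normal command matches
#   findcmd(u, "bogus")   ->  raises UnknownCommand("bogus")
#
# Setting the ui.strict config option disables the prefix matching entirely.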
2989
2990 def catchterm(*args):
2991 raise util.SignalInterrupt
2992
2993 def run():
2994 sys.exit(dispatch(sys.argv[1:]))
2995
2996 class ParseError(Exception):
2997 """Exception raised on errors in parsing the command line."""
2998
2999 def parse(ui, args):
3000 options = {}
3001 cmdoptions = {}
3002
3003 try:
3004 args = fancyopts.fancyopts(args, globalopts, options)
3005 except fancyopts.getopt.GetoptError, inst:
3006 raise ParseError(None, inst)
3007
3008 if args:
3009 cmd, args = args[0], args[1:]
3010 aliases, i = findcmd(ui, cmd)
3011 cmd = aliases[0]
3012 defaults = ui.config("defaults", cmd)
3013 if defaults:
3014 args = shlex.split(defaults) + args
3015 c = list(i[1])
3016 else:
3017 cmd = None
3018 c = []
3019
3020 # combine global options into local
3021 for o in globalopts:
3022 c.append((o[0], o[1], options[o[1]], o[3]))
3023
3024 try:
3025 args = fancyopts.fancyopts(args, c, cmdoptions)
3026 except fancyopts.getopt.GetoptError, inst:
3027 raise ParseError(cmd, inst)
3028
3029 # separate global options back out
3030 for o in globalopts:
3031 n = o[1]
3032 options[n] = cmdoptions[n]
3033 del cmdoptions[n]
3034
3035 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
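# Rough illustration of what parse() returns, assuming a hypothetical
# configuration and command line:
#
#   [defaults]
#   log = -l 10
#
# "hg -v log -p" is then parsed roughly as
#
#   cmd        = 'log'                      # canonical alias from findcmd()
#   func       = the log function           # i[0] from the table entry
#   args       = []                         # shlex.split('-l 10') was prepended first
#   options    = {'verbose': <set>, ...}    # global options, split back out
#   cmdoptions = {'limit': '10', 'patch': <set>, ...}
#
# Because the global options are folded into the second fancyopts pass and
# then separated back out, they may appear before or after the command name.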
3036
3037 external = {}
3038
3039 def findext(name):
3040 '''return module with given extension name'''
3041 try:
3042 return sys.modules[external[name]]
3043 except KeyError:
3044 for k, v in external.iteritems():
3045 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3046 return sys.modules[v]
3047 raise KeyError(name)
3048
3049 def load_extensions(ui):
3050 added = []
3051 for ext_name, load_from_name in ui.extensions():
3052 if ext_name in external:
3053 continue
3054 try:
3055 if load_from_name:
3056 # the module will be loaded in sys.modules
3057 # choose a unique name so that it doesn't
3058 # conflict with other modules
3059 module_name = "hgext_%s" % ext_name.replace('.', '_')
3060 mod = imp.load_source(module_name, load_from_name)
3061 else:
3062 def importh(name):
3063 mod = __import__(name)
3064 components = name.split('.')
3065 for comp in components[1:]:
3066 mod = getattr(mod, comp)
3067 return mod
3068 try:
3069 mod = importh("hgext.%s" % ext_name)
3070 except ImportError:
3071 mod = importh(ext_name)
3072 external[ext_name] = mod.__name__
3073 added.append((mod, ext_name))
3074 except (util.SignalInterrupt, KeyboardInterrupt):
3075 raise
3076 except Exception, inst:
3077 ui.warn(_("*** failed to import extension %s: %s\n") %
3078 (ext_name, inst))
3079 if ui.print_exc():
3080 return 1
3081
3082 for mod, name in added:
3083 uisetup = getattr(mod, 'uisetup', None)
3084 if uisetup:
3085 uisetup(ui)
3086 cmdtable = getattr(mod, 'cmdtable', {})
3087 for t in cmdtable:
3088 if t in table:
3089 ui.warn(_("module %s overrides %s\n") % (name, t))
3090 table.update(cmdtable)
3091
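# A minimal sketch of the extension interface that load_extensions() and
# dispatch() expect, as a hypothetical module (for example enabled from the
# [extensions] section of hgrc so that ui.extensions() yields it): an
# optional uisetup(ui) hook, an optional reposetup(ui, repo) hook, and a
# cmdtable that is merged into the main command table.
#
#   # hello.py -- hypothetical extension, not part of this change
#   from mercurial.i18n import gettext as _
#
#   def uisetup(ui):
#       # called once per ui, before any command runs
#       ui.debug(_("hello extension loaded\n"))
#
#   def reposetup(ui, repo):
#       # called for each local repository the command operates on
#       pass
#
#   def hello(ui, repo, **opts):
#       """print the repository root (demo command)"""
#       ui.write(_("hello from %s\n") % repo.root)
#
#   cmdtable = {
#       "hello": (hello, [], _('hg hello')),
#   }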
3092 def parseconfig(config):
3093 """parse the --config options from the command line"""
3094 parsed = []
3095 for cfg in config:
3096 try:
3097 name, value = cfg.split('=', 1)
3098 section, name = name.split('.', 1)
3099 if not section or not name:
3100 raise IndexError
3101 parsed.append((section, name, value))
3102 except (IndexError, ValueError):
3103 raise util.Abort(_('malformed --config option: %s') % cfg)
3104 return parsed
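# Behaviour of parseconfig() for a few illustrative --config values:
#
#   parseconfig(['ui.username=alice', 'extensions.mq='])
#       -> [('ui', 'username', 'alice'), ('extensions', 'mq', '')]
#
#   parseconfig(['ui.username'])      # no '='  -> ValueError from the split
#   parseconfig(['username=alice'])   # no '.'  -> ValueError from the split
#       -> both abort with "malformed --config option: ..."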
3105
3106 def dispatch(args):
3107 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3108 num = getattr(signal, name, None)
3109 if num: signal.signal(num, catchterm)
3110
3111 try:
3112 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3113 except util.Abort, inst:
3114 sys.stderr.write(_("abort: %s\n") % inst)
3115 return -1
3116
3117 load_extensions(u)
3118 u.addreadhook(load_extensions)
3119
3120 try:
3121 cmd, func, args, options, cmdoptions = parse(u, args)
3122 if options["encoding"]:
3123 util._encoding = options["encoding"]
3124 if options["encodingmode"]:
3125 util._encodingmode = options["encodingmode"]
3126 if options["time"]:
3127 def get_times():
3128 t = os.times()
3129 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3130 t = (t[0], t[1], t[2], t[3], time.clock())
3131 return t
3132 s = get_times()
3133 def print_time():
3134 t = get_times()
3135 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3136 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3137 atexit.register(print_time)
3138
3139 # enter the debugger before command execution
3140 if options['debugger']:
3141 pdb.set_trace()
3142
3143 try:
3144 if options['cwd']:
3145 os.chdir(options['cwd'])
3146
3147 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3148 not options["noninteractive"], options["traceback"],
3149 parseconfig(options["config"]))
3150
3151 path = u.expandpath(options["repository"]) or ""
3152 repo = path and hg.repository(u, path=path) or None
3153 if repo and not repo.local():
3154 raise util.Abort(_("repository '%s' is not local") % path)
3155
3156 if options['help']:
3157 return help_(u, cmd, options['version'])
3158 elif options['version']:
3159 return version_(u)
3160 elif not cmd:
3161 return help_(u, 'shortlist')
3162
3163 if cmd not in norepo.split():
3164 try:
3165 if not repo:
3166 repo = hg.repository(u, path=path)
3167 u = repo.ui
3168 for name in external.itervalues():
3169 mod = sys.modules[name]
3170 if hasattr(mod, 'reposetup'):
3171 mod.reposetup(u, repo)
3172 hg.repo_setup_hooks.append(mod.reposetup)
3173 except hg.RepoError:
3174 if cmd not in optionalrepo.split():
3175 raise
3176 d = lambda: func(u, repo, *args, **cmdoptions)
3177 else:
3178 d = lambda: func(u, *args, **cmdoptions)
3179
3180 try:
3181 if options['profile']:
3182 import hotshot, hotshot.stats
3183 prof = hotshot.Profile("hg.prof")
3184 try:
3185 try:
3186 return prof.runcall(d)
3187 except:
3188 try:
3189 u.warn(_('exception raised - generating '
3190 'profile anyway\n'))
3191 except:
3192 pass
3193 raise
3194 finally:
3195 prof.close()
3196 stats = hotshot.stats.load("hg.prof")
3197 stats.strip_dirs()
3198 stats.sort_stats('time', 'calls')
3199 stats.print_stats(40)
3200 elif options['lsprof']:
3201 try:
3202 from mercurial import lsprof
3203 except ImportError:
3204 raise util.Abort(_(
3205 'lsprof not available - install from '
3206 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3207 p = lsprof.Profiler()
3208 p.enable(subcalls=True)
3209 try:
3210 return d()
3211 finally:
3212 p.disable()
3213 stats = lsprof.Stats(p.getstats())
3214 stats.sort()
3215 stats.pprint(top=10, file=sys.stderr, climit=5)
3216 else:
3217 return d()
3218 finally:
3219 u.flush()
3220 except:
3221 # enter the debugger when we hit an exception
3222 if options['debugger']:
3223 pdb.post_mortem(sys.exc_info()[2])
3224 u.print_exc()
3225 raise
3226 except ParseError, inst:
3227 if inst.args[0]:
3228 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3229 help_(u, inst.args[0])
3230 else:
3231 u.warn(_("hg: %s\n") % inst.args[1])
3232 help_(u, 'shortlist')
3233 except AmbiguousCommand, inst:
3234 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3235 (inst.args[0], " ".join(inst.args[1])))
3236 except UnknownCommand, inst:
3237 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3238 help_(u, 'shortlist')
3239 except hg.RepoError, inst:
3240 u.warn(_("abort: %s!\n") % inst)
3241 except lock.LockHeld, inst:
3242 if inst.errno == errno.ETIMEDOUT:
3243 reason = _('timed out waiting for lock held by %s') % inst.locker
3244 else:
3245 reason = _('lock held by %s') % inst.locker
3246 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3247 except lock.LockUnavailable, inst:
3248 u.warn(_("abort: could not lock %s: %s\n") %
3249 (inst.desc or inst.filename, inst.strerror))
3250 except revlog.RevlogError, inst:
3251 u.warn(_("abort: %s!\n") % inst)
3252 except util.SignalInterrupt:
3253 u.warn(_("killed!\n"))
3254 except KeyboardInterrupt:
3255 try:
3256 u.warn(_("interrupted!\n"))
3257 except IOError, inst:
3258 if inst.errno == errno.EPIPE:
3259 if u.debugflag:
3260 u.warn(_("\nbroken pipe\n"))
3261 else:
3262 raise
3263 except IOError, inst:
3264 if hasattr(inst, "code"):
3265 u.warn(_("abort: %s\n") % inst)
3266 elif hasattr(inst, "reason"):
3267 u.warn(_("abort: error: %s\n") % inst.reason[1])
3268 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3269 if u.debugflag:
3270 u.warn(_("broken pipe\n"))
3271 elif getattr(inst, "strerror", None):
3272 if getattr(inst, "filename", None):
3273 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3274 else:
3275 u.warn(_("abort: %s\n") % inst.strerror)
3276 else:
3277 raise
3278 except OSError, inst:
3279 if getattr(inst, "filename", None):
3280 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3281 else:
3282 u.warn(_("abort: %s\n") % inst.strerror)
3283 except util.UnexpectedOutput, inst:
3284 u.warn(_("abort: %s") % inst[0])
3285 if not isinstance(inst[1], basestring):
3286 u.warn(" %r\n" % (inst[1],))
3287 elif not inst[1]:
3288 u.warn(_(" empty string\n"))
3289 else:
3290 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3291 except util.Abort, inst:
3292 u.warn(_("abort: %s\n") % inst)
3293 except TypeError, inst:
3294 # was this an argument error?
3295 tb = traceback.extract_tb(sys.exc_info()[2])
3296 if len(tb) > 2: # no
3297 raise
3298 u.debug(inst, "\n")
3299 u.warn(_("%s: invalid arguments\n") % cmd)
3300 help_(u, cmd)
3301 except SystemExit, inst:
3302 # Commands shouldn't sys.exit directly, but give a return code.
3303 # Just in case, catch this and pass the exit code to the caller.
3304 return inst.code
3305 except:
3306 u.warn(_("** unknown exception encountered, details follow\n"))
3307 u.warn(_("** report bug details to "
3308 "http://www.selenic.com/mercurial/bts\n"))
3309 u.warn(_("** or mercurial@selenic.com\n"))
3310 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3311 % version.get_version())
3312 raise
3313
3314 return -1
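# Rough summary of dispatch()'s exit behaviour as implemented above: the
# command's own return value is passed through on success, SystemExit raised
# inside a command is converted to its exit code, and every handled error
# (parse errors, unknown or ambiguous commands, repo/lock/revlog errors,
# aborts, broken pipes, and so on) prints a message via u.warn() and falls
# through to the final "return -1", which run() then hands to sys.exit().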
@@ -1,1967 +1,1971 b''
1 # localrepo.py - read/write repository class for mercurial
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from node import *
9 from i18n import gettext as _
10 from demandload import *
11 import repo
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
16
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
19 supported = ('revlogv1', 'store')
20
21 def __del__(self):
22 self.transhandle = None
23 def __init__(self, parentui, path=None, create=0):
24 repo.repository.__init__(self)
25 if not path:
26 p = os.getcwd()
27 while not os.path.isdir(os.path.join(p, ".hg")):
28 oldp = p
29 p = os.path.dirname(p)
30 if p == oldp:
31 raise repo.RepoError(_("There is no Mercurial repository"
32 " here (.hg not found)"))
33 path = p
34
35 self.path = os.path.join(path, ".hg")
36 self.root = os.path.realpath(path)
37 self.origroot = path
38 self.opener = util.opener(self.path)
39 self.wopener = util.opener(self.root)
40
41 if not os.path.isdir(self.path):
42 if create:
43 if not os.path.exists(path):
44 os.mkdir(path)
45 os.mkdir(self.path)
46 os.mkdir(os.path.join(self.path, "store"))
47 requirements = ("revlogv1", "store")
48 reqfile = self.opener("requires", "w")
49 for r in requirements:
50 reqfile.write("%s\n" % r)
51 reqfile.close()
52 # create an invalid changelog
53 self.opener("00changelog.i", "a").write(
54 '\0\0\0\2' # represents revlogv2
55 ' dummy changelog to prevent using the old repo layout'
56 )
57 else:
58 raise repo.RepoError(_("repository %s not found") % path)
59 elif create:
60 raise repo.RepoError(_("repository %s already exists") % path)
61 else:
62 # find requirements
63 try:
64 requirements = self.opener("requires").read().splitlines()
64 requirements = self.opener("requires").read().splitlines()
65 except IOError, inst:
65 except IOError, inst:
66 if inst.errno != errno.ENOENT:
66 if inst.errno != errno.ENOENT:
67 raise
67 raise
68 requirements = []
68 requirements = []
69 # check them
69 # check them
70 for r in requirements:
70 for r in requirements:
71 if r not in self.supported:
71 if r not in self.supported:
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73
73
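# Editor's note: illustrative sketch, not part of this changeset. The
# constructor above reads .hg/requires, one requirement per line, and refuses
# to open the repository if any entry is missing from self.supported. A
# self-contained version of that check (the path argument is hypothetical):
import errno

def check_requirements(requires_path, supported=('revlogv1', 'store')):
    try:
        lines = open(requires_path).read().splitlines()
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise
        lines = []                      # no requires file: old-style repo
    for r in lines:
        if r and r not in supported:
            raise RuntimeError("requirement '%s' not supported" % r)
    return lines

# e.g. check_requirements('/path/to/repo/.hg/requires')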
74 # setup store
74 # setup store
75 if "store" in requirements:
75 if "store" in requirements:
76 self.encodefn = util.encodefilename
76 self.encodefn = util.encodefilename
77 self.decodefn = util.decodefilename
77 self.decodefn = util.decodefilename
78 self.spath = os.path.join(self.path, "store")
78 self.spath = os.path.join(self.path, "store")
79 else:
79 else:
80 self.encodefn = lambda x: x
80 self.encodefn = lambda x: x
81 self.decodefn = lambda x: x
81 self.decodefn = lambda x: x
82 self.spath = self.path
82 self.spath = self.path
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84
84
85 self.ui = ui.ui(parentui=parentui)
85 self.ui = ui.ui(parentui=parentui)
86 try:
86 try:
87 self.ui.readconfig(self.join("hgrc"), self.root)
87 self.ui.readconfig(self.join("hgrc"), self.root)
88 except IOError:
88 except IOError:
89 pass
89 pass
90
90
91 v = self.ui.configrevlog()
91 v = self.ui.configrevlog()
92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
92 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
93 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
94 fl = v.get('flags', None)
94 fl = v.get('flags', None)
95 flags = 0
95 flags = 0
96 if fl != None:
96 if fl != None:
97 for x in fl.split():
97 for x in fl.split():
98 flags |= revlog.flagstr(x)
98 flags |= revlog.flagstr(x)
99 elif self.revlogv1:
99 elif self.revlogv1:
100 flags = revlog.REVLOG_DEFAULT_FLAGS
100 flags = revlog.REVLOG_DEFAULT_FLAGS
101
101
102 v = self.revlogversion | flags
102 v = self.revlogversion | flags
103 self.manifest = manifest.manifest(self.sopener, v)
103 self.manifest = manifest.manifest(self.sopener, v)
104 self.changelog = changelog.changelog(self.sopener, v)
104 self.changelog = changelog.changelog(self.sopener, v)
105
105
106 fallback = self.ui.config('ui', 'fallbackencoding')
106 fallback = self.ui.config('ui', 'fallbackencoding')
107 if fallback:
107 if fallback:
108 util._fallbackencoding = fallback
108 util._fallbackencoding = fallback
109
109
110 # the changelog might not have the inline index flag
110 # the changelog might not have the inline index flag
111 # on. If the format of the changelog is the same as found in
111 # on. If the format of the changelog is the same as found in
112 # .hgrc, apply any flags found in the .hgrc as well.
112 # .hgrc, apply any flags found in the .hgrc as well.
113 # Otherwise, just use the version from the changelog
113 # Otherwise, just use the version from the changelog
114 v = self.changelog.version
114 v = self.changelog.version
115 if v == self.revlogversion:
115 if v == self.revlogversion:
116 v |= flags
116 v |= flags
117 self.revlogversion = v
117 self.revlogversion = v
118
118
119 self.tagscache = None
119 self.tagscache = None
120 self.branchcache = None
120 self.branchcache = None
121 self.nodetagscache = None
121 self.nodetagscache = None
122 self.encodepats = None
122 self.encodepats = None
123 self.decodepats = None
123 self.decodepats = None
124 self.transhandle = None
124 self.transhandle = None
125
125
126 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
126 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
127
127
128 def url(self):
128 def url(self):
129 return 'file:' + self.root
129 return 'file:' + self.root
130
130
131 def hook(self, name, throw=False, **args):
131 def hook(self, name, throw=False, **args):
132 def callhook(hname, funcname):
132 def callhook(hname, funcname):
133 '''call python hook. hook is callable object, looked up as
133 '''call python hook. hook is callable object, looked up as
134 name in python module. if callable returns "true", hook
134 name in python module. if callable returns "true", hook
135 fails, else passes. if hook raises exception, treated as
135 fails, else passes. if hook raises exception, treated as
136 hook failure. exception propagates if throw is "true".
136 hook failure. exception propagates if throw is "true".
137
137
138 reason for "true" meaning "hook failed" is so that
138 reason for "true" meaning "hook failed" is so that
139 unmodified commands (e.g. mercurial.commands.update) can
139 unmodified commands (e.g. mercurial.commands.update) can
140 be run as hooks without wrappers to convert return values.'''
140 be run as hooks without wrappers to convert return values.'''
141
141
142 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
142 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
143 d = funcname.rfind('.')
143 d = funcname.rfind('.')
144 if d == -1:
144 if d == -1:
145 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
145 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
146 % (hname, funcname))
146 % (hname, funcname))
147 modname = funcname[:d]
147 modname = funcname[:d]
148 try:
148 try:
149 obj = __import__(modname)
149 obj = __import__(modname)
150 except ImportError:
150 except ImportError:
151 try:
151 try:
152 # extensions are loaded with hgext_ prefix
152 # extensions are loaded with hgext_ prefix
153 obj = __import__("hgext_%s" % modname)
153 obj = __import__("hgext_%s" % modname)
154 except ImportError:
154 except ImportError:
155 raise util.Abort(_('%s hook is invalid '
155 raise util.Abort(_('%s hook is invalid '
156 '(import of "%s" failed)') %
156 '(import of "%s" failed)') %
157 (hname, modname))
157 (hname, modname))
158 try:
158 try:
159 for p in funcname.split('.')[1:]:
159 for p in funcname.split('.')[1:]:
160 obj = getattr(obj, p)
160 obj = getattr(obj, p)
161 except AttributeError, err:
161 except AttributeError, err:
162 raise util.Abort(_('%s hook is invalid '
162 raise util.Abort(_('%s hook is invalid '
163 '("%s" is not defined)') %
163 '("%s" is not defined)') %
164 (hname, funcname))
164 (hname, funcname))
165 if not callable(obj):
165 if not callable(obj):
166 raise util.Abort(_('%s hook is invalid '
166 raise util.Abort(_('%s hook is invalid '
167 '("%s" is not callable)') %
167 '("%s" is not callable)') %
168 (hname, funcname))
168 (hname, funcname))
169 try:
169 try:
170 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
170 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
171 except (KeyboardInterrupt, util.SignalInterrupt):
171 except (KeyboardInterrupt, util.SignalInterrupt):
172 raise
172 raise
173 except Exception, exc:
173 except Exception, exc:
174 if isinstance(exc, util.Abort):
174 if isinstance(exc, util.Abort):
175 self.ui.warn(_('error: %s hook failed: %s\n') %
175 self.ui.warn(_('error: %s hook failed: %s\n') %
176 (hname, exc.args[0]))
176 (hname, exc.args[0]))
177 else:
177 else:
178 self.ui.warn(_('error: %s hook raised an exception: '
178 self.ui.warn(_('error: %s hook raised an exception: '
179 '%s\n') % (hname, exc))
179 '%s\n') % (hname, exc))
180 if throw:
180 if throw:
181 raise
181 raise
182 self.ui.print_exc()
182 self.ui.print_exc()
183 return True
183 return True
184 if r:
184 if r:
185 if throw:
185 if throw:
186 raise util.Abort(_('%s hook failed') % hname)
186 raise util.Abort(_('%s hook failed') % hname)
187 self.ui.warn(_('warning: %s hook failed\n') % hname)
187 self.ui.warn(_('warning: %s hook failed\n') % hname)
188 return r
188 return r
189
189
190 def runhook(name, cmd):
190 def runhook(name, cmd):
191 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
191 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
192 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
192 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
193 r = util.system(cmd, environ=env, cwd=self.root)
193 r = util.system(cmd, environ=env, cwd=self.root)
194 if r:
194 if r:
195 desc, r = util.explain_exit(r)
195 desc, r = util.explain_exit(r)
196 if throw:
196 if throw:
197 raise util.Abort(_('%s hook %s') % (name, desc))
197 raise util.Abort(_('%s hook %s') % (name, desc))
198 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
198 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
199 return r
199 return r
200
200
201 r = False
201 r = False
202 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
202 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
203 if hname.split(".", 1)[0] == name and cmd]
203 if hname.split(".", 1)[0] == name and cmd]
204 hooks.sort()
204 hooks.sort()
205 for hname, cmd in hooks:
205 for hname, cmd in hooks:
206 if cmd.startswith('python:'):
206 if cmd.startswith('python:'):
207 r = callhook(hname, cmd[7:].strip()) or r
207 r = callhook(hname, cmd[7:].strip()) or r
208 else:
208 else:
209 r = runhook(hname, cmd) or r
209 r = runhook(hname, cmd) or r
210 return r
210 return r
211
211
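# Editor's note: illustrative sketch, not part of this changeset. hook() above
# collects every [hooks] entry whose name (up to the first '.') matches the
# event, runs "python:mod.func" entries in-process and everything else through
# the shell, and exposes the keyword arguments to shell hooks as HG_* environment
# variables. A trimmed-down, standalone version of that selection logic:
def select_hooks(configitems, event):
    """configitems: list of (name, command) pairs from the [hooks] section."""
    hooks = [(hname, cmd) for hname, cmd in configitems
             if hname.split(".", 1)[0] == event and cmd]
    hooks.sort()
    return hooks

def hook_env(**args):
    # shell hooks see the hook arguments as HG_<NAME> environment variables
    return dict(('HG_' + k.upper(), str(v)) for k, v in args.items())

items = [('precommit.lint', 'python:lint.check'), ('update', 'echo updated')]
assert select_hooks(items, 'precommit') == [('precommit.lint', 'python:lint.check')]
assert hook_env(node='abc123')['HG_NODE'] == 'abc123'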
212 tag_disallowed = ':\r\n'
212 tag_disallowed = ':\r\n'
213
213
214 def tag(self, name, node, message, local, user, date):
214 def tag(self, name, node, message, local, user, date):
215 '''tag a revision with a symbolic name.
215 '''tag a revision with a symbolic name.
216
216
217 if local is True, the tag is stored in a per-repository file.
217 if local is True, the tag is stored in a per-repository file.
218 otherwise, it is stored in the .hgtags file, and a new
218 otherwise, it is stored in the .hgtags file, and a new
219 changeset is committed with the change.
219 changeset is committed with the change.
220
220
221 keyword arguments:
221 keyword arguments:
222
222
223 local: whether to store tag in non-version-controlled file
223 local: whether to store tag in non-version-controlled file
224 (default False)
224 (default False)
225
225
226 message: commit message to use if committing
226 message: commit message to use if committing
227
227
228 user: name of user to use if committing
228 user: name of user to use if committing
229
229
230 date: date tuple to use if committing'''
230 date: date tuple to use if committing'''
231
231
232 for c in self.tag_disallowed:
232 for c in self.tag_disallowed:
233 if c in name:
233 if c in name:
234 raise util.Abort(_('%r cannot be used in a tag name') % c)
234 raise util.Abort(_('%r cannot be used in a tag name') % c)
235
235
236 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
236 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
237
237
238 if local:
238 if local:
239 # local tags are stored in the current charset
239 # local tags are stored in the current charset
240 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
240 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
241 self.hook('tag', node=hex(node), tag=name, local=local)
241 self.hook('tag', node=hex(node), tag=name, local=local)
242 return
242 return
243
243
244 for x in self.status()[:5]:
244 for x in self.status()[:5]:
245 if '.hgtags' in x:
245 if '.hgtags' in x:
246 raise util.Abort(_('working copy of .hgtags is changed '
246 raise util.Abort(_('working copy of .hgtags is changed '
247 '(please commit .hgtags manually)'))
247 '(please commit .hgtags manually)'))
248
248
249 # committed tags are stored in UTF-8
249 # committed tags are stored in UTF-8
250 line = '%s %s\n' % (hex(node), util.fromlocal(name))
250 line = '%s %s\n' % (hex(node), util.fromlocal(name))
251 self.wfile('.hgtags', 'ab').write(line)
251 self.wfile('.hgtags', 'ab').write(line)
252 if self.dirstate.state('.hgtags') == '?':
252 if self.dirstate.state('.hgtags') == '?':
253 self.add(['.hgtags'])
253 self.add(['.hgtags'])
254
254
255 self.commit(['.hgtags'], message, user, date)
255 self.commit(['.hgtags'], message, user, date)
256 self.hook('tag', node=hex(node), tag=name, local=local)
256 self.hook('tag', node=hex(node), tag=name, local=local)
257
257
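# Editor's note: illustrative sketch, not part of this changeset. tag() above
# rejects ':', '\r' and '\n' in tag names and appends one "<hexnode> <name>"
# line to .hgtags (committed tags are stored as UTF-8, while localtags stays
# in the local charset). A standalone version of building such a line:
def hgtags_line(hexnode, name, disallowed=':\r\n'):
    for c in disallowed:
        if c in name:
            raise ValueError('%r cannot be used in a tag name' % c)
    # a committed tag entry: 40-char hex node, one space, the UTF-8 tag name
    return '%s %s\n' % (hexnode, name)

line = hgtags_line('a' * 40, 'release-1.0')
assert line == 'a' * 40 + ' release-1.0\n'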
258 def tags(self):
258 def tags(self):
259 '''return a mapping of tag to node'''
259 '''return a mapping of tag to node'''
260 if not self.tagscache:
260 if not self.tagscache:
261 self.tagscache = {}
261 self.tagscache = {}
262
262
263 def parsetag(line, context):
263 def parsetag(line, context):
264 if not line:
264 if not line:
265 return
265 return
266 s = l.split(" ", 1)
266 s = l.split(" ", 1)
267 if len(s) != 2:
267 if len(s) != 2:
268 self.ui.warn(_("%s: cannot parse entry\n") % context)
268 self.ui.warn(_("%s: cannot parse entry\n") % context)
269 return
269 return
270 node, key = s
270 node, key = s
271 key = util.tolocal(key.strip()) # stored in UTF-8
271 key = util.tolocal(key.strip()) # stored in UTF-8
272 try:
272 try:
273 bin_n = bin(node)
273 bin_n = bin(node)
274 except TypeError:
274 except TypeError:
275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
276 (context, node))
276 (context, node))
277 return
277 return
278 if bin_n not in self.changelog.nodemap:
278 if bin_n not in self.changelog.nodemap:
279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
280 (context, key))
280 (context, key))
281 return
281 return
282 self.tagscache[key] = bin_n
282 self.tagscache[key] = bin_n
283
283
284 # read the tags file from each head, ending with the tip,
284 # read the tags file from each head, ending with the tip,
285 # and add each tag found to the map, with "newer" ones
285 # and add each tag found to the map, with "newer" ones
286 # taking precedence
286 # taking precedence
287 f = None
287 f = None
288 for rev, node, fnode in self._hgtagsnodes():
288 for rev, node, fnode in self._hgtagsnodes():
289 f = (f and f.filectx(fnode) or
289 f = (f and f.filectx(fnode) or
290 self.filectx('.hgtags', fileid=fnode))
290 self.filectx('.hgtags', fileid=fnode))
291 count = 0
291 count = 0
292 for l in f.data().splitlines():
292 for l in f.data().splitlines():
293 count += 1
293 count += 1
294 parsetag(l, _("%s, line %d") % (str(f), count))
294 parsetag(l, _("%s, line %d") % (str(f), count))
295
295
296 try:
296 try:
297 f = self.opener("localtags")
297 f = self.opener("localtags")
298 count = 0
298 count = 0
299 for l in f:
299 for l in f:
300 # localtags are stored in the local character set
300 # localtags are stored in the local character set
301 # while the internal tag table is stored in UTF-8
301 # while the internal tag table is stored in UTF-8
302 l = util.fromlocal(l)
302 l = util.fromlocal(l)
303 count += 1
303 count += 1
304 parsetag(l, _("localtags, line %d") % count)
304 parsetag(l, _("localtags, line %d") % count)
305 except IOError:
305 except IOError:
306 pass
306 pass
307
307
308 self.tagscache['tip'] = self.changelog.tip()
308 self.tagscache['tip'] = self.changelog.tip()
309
309
310 return self.tagscache
310 return self.tagscache
311
311
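# Editor's note: illustrative sketch, not part of this changeset. tags() above
# reads .hgtags from each head (ending with the tip) and then localtags, so a
# later definition of the same tag name simply overwrites the earlier one; the
# precedence rule reduces to dictionary assignment order:
def parse_tag_lines(*sources):
    """sources: iterables of '<hexnode> <name>' lines, lowest priority first."""
    tags = {}
    for lines in sources:
        for line in lines:
            s = line.strip().split(" ", 1)
            if len(s) != 2:
                continue                  # "cannot parse entry" in the original
            node, name = s
            tags[name.strip()] = node     # later sources win
    return tags

older = ['1111111111111111111111111111111111111111 stable']
newer = ['2222222222222222222222222222222222222222 stable']
assert parse_tag_lines(older, newer)['stable'].startswith('2')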
312 def _hgtagsnodes(self):
312 def _hgtagsnodes(self):
313 heads = self.heads()
313 heads = self.heads()
314 heads.reverse()
314 heads.reverse()
315 last = {}
315 last = {}
316 ret = []
316 ret = []
317 for node in heads:
317 for node in heads:
318 c = self.changectx(node)
318 c = self.changectx(node)
319 rev = c.rev()
319 rev = c.rev()
320 try:
320 try:
321 fnode = c.filenode('.hgtags')
321 fnode = c.filenode('.hgtags')
322 except repo.LookupError:
322 except repo.LookupError:
323 continue
323 continue
324 ret.append((rev, node, fnode))
324 ret.append((rev, node, fnode))
325 if fnode in last:
325 if fnode in last:
326 ret[last[fnode]] = None
326 ret[last[fnode]] = None
327 last[fnode] = len(ret) - 1
327 last[fnode] = len(ret) - 1
328 return [item for item in ret if item]
328 return [item for item in ret if item]
329
329
330 def tagslist(self):
330 def tagslist(self):
331 '''return a list of tags ordered by revision'''
331 '''return a list of tags ordered by revision'''
332 l = []
332 l = []
333 for t, n in self.tags().items():
333 for t, n in self.tags().items():
334 try:
334 try:
335 r = self.changelog.rev(n)
335 r = self.changelog.rev(n)
336 except:
336 except:
337 r = -2 # sort to the beginning of the list if unknown
337 r = -2 # sort to the beginning of the list if unknown
338 l.append((r, t, n))
338 l.append((r, t, n))
339 l.sort()
339 l.sort()
340 return [(t, n) for r, t, n in l]
340 return [(t, n) for r, t, n in l]
341
341
342 def nodetags(self, node):
342 def nodetags(self, node):
343 '''return the tags associated with a node'''
343 '''return the tags associated with a node'''
344 if not self.nodetagscache:
344 if not self.nodetagscache:
345 self.nodetagscache = {}
345 self.nodetagscache = {}
346 for t, n in self.tags().items():
346 for t, n in self.tags().items():
347 self.nodetagscache.setdefault(n, []).append(t)
347 self.nodetagscache.setdefault(n, []).append(t)
348 return self.nodetagscache.get(node, [])
348 return self.nodetagscache.get(node, [])
349
349
350 def _branchtags(self):
350 def _branchtags(self):
351 partial, last, lrev = self._readbranchcache()
351 partial, last, lrev = self._readbranchcache()
352
352
353 tiprev = self.changelog.count() - 1
353 tiprev = self.changelog.count() - 1
354 if lrev != tiprev:
354 if lrev != tiprev:
355 self._updatebranchcache(partial, lrev+1, tiprev+1)
355 self._updatebranchcache(partial, lrev+1, tiprev+1)
356 self._writebranchcache(partial, self.changelog.tip(), tiprev)
356 self._writebranchcache(partial, self.changelog.tip(), tiprev)
357
357
358 return partial
358 return partial
359
359
360 def branchtags(self):
360 def branchtags(self):
361 if self.branchcache is not None:
361 if self.branchcache is not None:
362 return self.branchcache
362 return self.branchcache
363
363
364 self.branchcache = {} # avoid recursion in changectx
364 self.branchcache = {} # avoid recursion in changectx
365 partial = self._branchtags()
365 partial = self._branchtags()
366
366
367 # the branch cache is stored on disk as UTF-8, but in the local
367 # the branch cache is stored on disk as UTF-8, but in the local
368 # charset internally
368 # charset internally
369 for k, v in partial.items():
369 for k, v in partial.items():
370 self.branchcache[util.tolocal(k)] = v
370 self.branchcache[util.tolocal(k)] = v
371 return self.branchcache
371 return self.branchcache
372
372
373 def _readbranchcache(self):
373 def _readbranchcache(self):
374 partial = {}
374 partial = {}
375 try:
375 try:
376 f = self.opener("branches.cache")
376 f = self.opener("branches.cache")
377 lines = f.read().split('\n')
377 lines = f.read().split('\n')
378 f.close()
378 f.close()
379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
380 last, lrev = bin(last), int(lrev)
380 last, lrev = bin(last), int(lrev)
381 if not (lrev < self.changelog.count() and
381 if not (lrev < self.changelog.count() and
382 self.changelog.node(lrev) == last): # sanity check
382 self.changelog.node(lrev) == last): # sanity check
383 # invalidate the cache
383 # invalidate the cache
384 raise ValueError('Invalid branch cache: unknown tip')
384 raise ValueError('Invalid branch cache: unknown tip')
385 for l in lines:
385 for l in lines:
386 if not l: continue
386 if not l: continue
387 node, label = l.rstrip().split(" ", 1)
387 node, label = l.rstrip().split(" ", 1)
388 partial[label] = bin(node)
388 partial[label] = bin(node)
389 except (KeyboardInterrupt, util.SignalInterrupt):
389 except (KeyboardInterrupt, util.SignalInterrupt):
390 raise
390 raise
391 except Exception, inst:
391 except Exception, inst:
392 if self.ui.debugflag:
392 if self.ui.debugflag:
393 self.ui.warn(str(inst), '\n')
393 self.ui.warn(str(inst), '\n')
394 partial, last, lrev = {}, nullid, nullrev
394 partial, last, lrev = {}, nullid, nullrev
395 return partial, last, lrev
395 return partial, last, lrev
396
396
397 def _writebranchcache(self, branches, tip, tiprev):
397 def _writebranchcache(self, branches, tip, tiprev):
398 try:
398 try:
399 f = self.opener("branches.cache", "w")
399 f = self.opener("branches.cache", "w")
400 f.write("%s %s\n" % (hex(tip), tiprev))
400 f.write("%s %s\n" % (hex(tip), tiprev))
401 for label, node in branches.iteritems():
401 for label, node in branches.iteritems():
402 f.write("%s %s\n" % (hex(node), label))
402 f.write("%s %s\n" % (hex(node), label))
403 except IOError:
403 except IOError:
404 pass
404 pass
405
405
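# Editor's note: illustrative sketch, not part of this changeset. The cache
# written above is a plain text file: the first line is "<tip-hex> <tip-rev>"
# (used as a sanity check when reading), and every following line is
# "<node-hex> <branch-label>". A standalone reader for that layout:
def read_branch_cache(text):
    lines = text.split('\n')
    tip_hex, tip_rev = lines.pop(0).rstrip().split(" ", 1)
    branches = {}
    for l in lines:
        if not l:
            continue
        node, label = l.rstrip().split(" ", 1)
        branches[label] = node
    return tip_hex, int(tip_rev), branches

sample = 'f' * 40 + ' 7\n' + 'a' * 40 + ' default\n'
tip, rev, branches = read_branch_cache(sample)
assert rev == 7 and branches['default'] == 'a' * 40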
406 def _updatebranchcache(self, partial, start, end):
406 def _updatebranchcache(self, partial, start, end):
407 for r in xrange(start, end):
407 for r in xrange(start, end):
408 c = self.changectx(r)
408 c = self.changectx(r)
409 b = c.branch()
409 b = c.branch()
410 if b:
410 if b:
411 partial[b] = c.node()
411 partial[b] = c.node()
412
412
413 def lookup(self, key):
413 def lookup(self, key):
414 if key == '.':
414 if key == '.':
415 key = self.dirstate.parents()[0]
415 key = self.dirstate.parents()[0]
416 if key == nullid:
416 if key == nullid:
417 raise repo.RepoError(_("no revision checked out"))
417 raise repo.RepoError(_("no revision checked out"))
418 elif key == 'null':
418 elif key == 'null':
419 return nullid
419 return nullid
420 n = self.changelog._match(key)
420 n = self.changelog._match(key)
421 if n:
421 if n:
422 return n
422 return n
423 if key in self.tags():
423 if key in self.tags():
424 return self.tags()[key]
424 return self.tags()[key]
425 if key in self.branchtags():
425 if key in self.branchtags():
426 return self.branchtags()[key]
426 return self.branchtags()[key]
427 n = self.changelog._partialmatch(key)
427 n = self.changelog._partialmatch(key)
428 if n:
428 if n:
429 return n
429 return n
430 raise repo.RepoError(_("unknown revision '%s'") % key)
430 raise repo.RepoError(_("unknown revision '%s'") % key)
431
431
432 def dev(self):
432 def dev(self):
433 return os.lstat(self.path).st_dev
433 return os.lstat(self.path).st_dev
434
434
435 def local(self):
435 def local(self):
436 return True
436 return True
437
437
438 def join(self, f):
438 def join(self, f):
439 return os.path.join(self.path, f)
439 return os.path.join(self.path, f)
440
440
441 def sjoin(self, f):
441 def sjoin(self, f):
442 f = self.encodefn(f)
442 f = self.encodefn(f)
443 return os.path.join(self.spath, f)
443 return os.path.join(self.spath, f)
444
444
445 def wjoin(self, f):
445 def wjoin(self, f):
446 return os.path.join(self.root, f)
446 return os.path.join(self.root, f)
447
447
448 def file(self, f):
448 def file(self, f):
449 if f[0] == '/':
449 if f[0] == '/':
450 f = f[1:]
450 f = f[1:]
451 return filelog.filelog(self.sopener, f, self.revlogversion)
451 return filelog.filelog(self.sopener, f, self.revlogversion)
452
452
453 def changectx(self, changeid=None):
453 def changectx(self, changeid=None):
454 return context.changectx(self, changeid)
454 return context.changectx(self, changeid)
455
455
456 def workingctx(self):
456 def workingctx(self):
457 return context.workingctx(self)
457 return context.workingctx(self)
458
458
459 def parents(self, changeid=None):
459 def parents(self, changeid=None):
460 '''
460 '''
461 get list of changectxs for parents of changeid or working directory
461 get list of changectxs for parents of changeid or working directory
462 '''
462 '''
463 if changeid is None:
463 if changeid is None:
464 pl = self.dirstate.parents()
464 pl = self.dirstate.parents()
465 else:
465 else:
466 n = self.changelog.lookup(changeid)
466 n = self.changelog.lookup(changeid)
467 pl = self.changelog.parents(n)
467 pl = self.changelog.parents(n)
468 if pl[1] == nullid:
468 if pl[1] == nullid:
469 return [self.changectx(pl[0])]
469 return [self.changectx(pl[0])]
470 return [self.changectx(pl[0]), self.changectx(pl[1])]
470 return [self.changectx(pl[0]), self.changectx(pl[1])]
471
471
472 def filectx(self, path, changeid=None, fileid=None):
472 def filectx(self, path, changeid=None, fileid=None):
473 """changeid can be a changeset revision, node, or tag.
473 """changeid can be a changeset revision, node, or tag.
474 fileid can be a file revision or node."""
474 fileid can be a file revision or node."""
475 return context.filectx(self, path, changeid, fileid)
475 return context.filectx(self, path, changeid, fileid)
476
476
477 def getcwd(self):
477 def getcwd(self):
478 return self.dirstate.getcwd()
478 return self.dirstate.getcwd()
479
479
480 def wfile(self, f, mode='r'):
480 def wfile(self, f, mode='r'):
481 return self.wopener(f, mode)
481 return self.wopener(f, mode)
482
482
483 def wread(self, filename):
483 def wread(self, filename):
484 if self.encodepats == None:
484 if self.encodepats == None:
485 l = []
485 l = []
486 for pat, cmd in self.ui.configitems("encode"):
486 for pat, cmd in self.ui.configitems("encode"):
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
487 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 l.append((mf, cmd))
488 l.append((mf, cmd))
489 self.encodepats = l
489 self.encodepats = l
490
490
491 data = self.wopener(filename, 'r').read()
491 data = self.wopener(filename, 'r').read()
492
492
493 for mf, cmd in self.encodepats:
493 for mf, cmd in self.encodepats:
494 if mf(filename):
494 if mf(filename):
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 data = util.filter(data, cmd)
496 data = util.filter(data, cmd)
497 break
497 break
498
498
499 return data
499 return data
500
500
501 def wwrite(self, filename, data, fd=None):
501 def wwrite(self, filename, data, fd=None):
502 if self.decodepats == None:
502 if self.decodepats == None:
503 l = []
503 l = []
504 for pat, cmd in self.ui.configitems("decode"):
504 for pat, cmd in self.ui.configitems("decode"):
505 mf = util.matcher(self.root, "", [pat], [], [])[1]
505 mf = util.matcher(self.root, "", [pat], [], [])[1]
506 l.append((mf, cmd))
506 l.append((mf, cmd))
507 self.decodepats = l
507 self.decodepats = l
508
508
509 for mf, cmd in self.decodepats:
509 for mf, cmd in self.decodepats:
510 if mf(filename):
510 if mf(filename):
511 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
511 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
512 data = util.filter(data, cmd)
512 data = util.filter(data, cmd)
513 break
513 break
514
514
515 if fd:
515 if fd:
516 return fd.write(data)
516 return fd.write(data)
517 return self.wopener(filename, 'w').write(data)
517 return self.wopener(filename, 'w').write(data)
518
518
519 def transaction(self):
519 def transaction(self):
520 tr = self.transhandle
520 tr = self.transhandle
521 if tr != None and tr.running():
521 if tr != None and tr.running():
522 return tr.nest()
522 return tr.nest()
523
523
524 # save dirstate for rollback
524 # save dirstate for rollback
525 try:
525 try:
526 ds = self.opener("dirstate").read()
526 ds = self.opener("dirstate").read()
527 except IOError:
527 except IOError:
528 ds = ""
528 ds = ""
529 self.opener("journal.dirstate", "w").write(ds)
529 self.opener("journal.dirstate", "w").write(ds)
530
530
531 renames = [(self.sjoin("journal"), self.sjoin("undo")),
531 renames = [(self.sjoin("journal"), self.sjoin("undo")),
532 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
532 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
533 tr = transaction.transaction(self.ui.warn, self.sopener,
533 tr = transaction.transaction(self.ui.warn, self.sopener,
534 self.sjoin("journal"),
534 self.sjoin("journal"),
535 aftertrans(renames))
535 aftertrans(renames))
536 self.transhandle = tr
536 self.transhandle = tr
537 return tr
537 return tr
538
538
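# Editor's note: illustrative sketch, not part of this changeset. Before a
# transaction starts, transaction() above copies the dirstate to
# journal.dirstate; when the transaction closes, the journal files are renamed
# to undo/undo.dirstate (the aftertrans(renames) argument), and rollback()
# later moves undo.dirstate back over dirstate. The same save/rename/restore
# dance on throwaway files (the real code uses util.rename, which also copes
# with platforms where rename cannot replace an existing file):
import os, shutil, tempfile

d = tempfile.mkdtemp()
state = os.path.join(d, 'dirstate')
open(state, 'w').write('original')
# transaction start: snapshot what we may need to roll back to
shutil.copyfile(state, os.path.join(d, 'journal.dirstate'))
# transaction close: keep the snapshot around as the undo copy
os.rename(os.path.join(d, 'journal.dirstate'), os.path.join(d, 'undo.dirstate'))
open(state, 'w').write('modified')
# rollback: restore the saved copy
os.remove(state)
os.rename(os.path.join(d, 'undo.dirstate'), state)
assert open(state).read() == 'original'
shutil.rmtree(d)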
539 def recover(self):
539 def recover(self):
540 l = self.lock()
540 l = self.lock()
541 if os.path.exists(self.sjoin("journal")):
541 if os.path.exists(self.sjoin("journal")):
542 self.ui.status(_("rolling back interrupted transaction\n"))
542 self.ui.status(_("rolling back interrupted transaction\n"))
543 transaction.rollback(self.sopener, self.sjoin("journal"))
543 transaction.rollback(self.sopener, self.sjoin("journal"))
544 self.reload()
544 self.reload()
545 return True
545 return True
546 else:
546 else:
547 self.ui.warn(_("no interrupted transaction available\n"))
547 self.ui.warn(_("no interrupted transaction available\n"))
548 return False
548 return False
549
549
550 def rollback(self, wlock=None):
550 def rollback(self, wlock=None):
551 if not wlock:
551 if not wlock:
552 wlock = self.wlock()
552 wlock = self.wlock()
553 l = self.lock()
553 l = self.lock()
554 if os.path.exists(self.sjoin("undo")):
554 if os.path.exists(self.sjoin("undo")):
555 self.ui.status(_("rolling back last transaction\n"))
555 self.ui.status(_("rolling back last transaction\n"))
556 transaction.rollback(self.sopener, self.sjoin("undo"))
556 transaction.rollback(self.sopener, self.sjoin("undo"))
557 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
557 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
558 self.reload()
558 self.reload()
559 self.wreload()
559 self.wreload()
560 else:
560 else:
561 self.ui.warn(_("no rollback information available\n"))
561 self.ui.warn(_("no rollback information available\n"))
562
562
563 def wreload(self):
563 def wreload(self):
564 self.dirstate.read()
564 self.dirstate.read()
565
565
566 def reload(self):
566 def reload(self):
567 self.changelog.load()
567 self.changelog.load()
568 self.manifest.load()
568 self.manifest.load()
569 self.tagscache = None
569 self.tagscache = None
570 self.nodetagscache = None
570 self.nodetagscache = None
571
571
572 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
572 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
573 desc=None):
573 desc=None):
574 try:
574 try:
575 l = lock.lock(lockname, 0, releasefn, desc=desc)
575 l = lock.lock(lockname, 0, releasefn, desc=desc)
576 except lock.LockHeld, inst:
576 except lock.LockHeld, inst:
577 if not wait:
577 if not wait:
578 raise
578 raise
579 self.ui.warn(_("waiting for lock on %s held by %r\n") %
579 self.ui.warn(_("waiting for lock on %s held by %r\n") %
580 (desc, inst.locker))
580 (desc, inst.locker))
581 # default to 600 seconds timeout
581 # default to 600 seconds timeout
582 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
582 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
583 releasefn, desc=desc)
583 releasefn, desc=desc)
584 if acquirefn:
584 if acquirefn:
585 acquirefn()
585 acquirefn()
586 return l
586 return l
587
587
588 def lock(self, wait=1):
588 def lock(self, wait=1):
589 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
589 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
590 desc=_('repository %s') % self.origroot)
590 desc=_('repository %s') % self.origroot)
591
591
592 def wlock(self, wait=1):
592 def wlock(self, wait=1):
593 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
593 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
594 self.wreload,
594 self.wreload,
595 desc=_('working directory of %s') % self.origroot)
595 desc=_('working directory of %s') % self.origroot)
596
596
597 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
597 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
598 """
598 """
599 commit an individual file as part of a larger transaction
599 commit an individual file as part of a larger transaction
600 """
600 """
601
601
602 t = self.wread(fn)
602 t = self.wread(fn)
603 fl = self.file(fn)
603 fl = self.file(fn)
604 fp1 = manifest1.get(fn, nullid)
604 fp1 = manifest1.get(fn, nullid)
605 fp2 = manifest2.get(fn, nullid)
605 fp2 = manifest2.get(fn, nullid)
606
606
607 meta = {}
607 meta = {}
608 cp = self.dirstate.copied(fn)
608 cp = self.dirstate.copied(fn)
609 if cp:
609 if cp:
610 meta["copy"] = cp
610 meta["copy"] = cp
611 if not manifest2: # not a branch merge
611 if not manifest2: # not a branch merge
612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 meta["copyrev"] = hex(manifest1.get(cp, nullid))
613 fp2 = nullid
613 fp2 = nullid
614 elif fp2 != nullid: # copied on remote side
614 elif fp2 != nullid: # copied on remote side
615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
615 meta["copyrev"] = hex(manifest1.get(cp, nullid))
616 elif fp1 != nullid: # copied on local side, reversed
616 elif fp1 != nullid: # copied on local side, reversed
617 meta["copyrev"] = hex(manifest2.get(cp))
617 meta["copyrev"] = hex(manifest2.get(cp))
618 fp2 = nullid
618 fp2 = nullid
619 else: # directory rename
619 else: # directory rename
620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
620 meta["copyrev"] = hex(manifest1.get(cp, nullid))
621 self.ui.debug(_(" %s: copy %s:%s\n") %
621 self.ui.debug(_(" %s: copy %s:%s\n") %
622 (fn, cp, meta["copyrev"]))
622 (fn, cp, meta["copyrev"]))
623 fp1 = nullid
623 fp1 = nullid
624 elif fp2 != nullid:
624 elif fp2 != nullid:
625 # is one parent an ancestor of the other?
625 # is one parent an ancestor of the other?
626 fpa = fl.ancestor(fp1, fp2)
626 fpa = fl.ancestor(fp1, fp2)
627 if fpa == fp1:
627 if fpa == fp1:
628 fp1, fp2 = fp2, nullid
628 fp1, fp2 = fp2, nullid
629 elif fpa == fp2:
629 elif fpa == fp2:
630 fp2 = nullid
630 fp2 = nullid
631
631
632 # is the file unmodified from the parent? report existing entry
632 # is the file unmodified from the parent? report existing entry
633 if fp2 == nullid and not fl.cmp(fp1, t):
633 if fp2 == nullid and not fl.cmp(fp1, t):
634 return fp1
634 return fp1
635
635
636 changelist.append(fn)
636 changelist.append(fn)
637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
637 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
638
638
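# Editor's note: illustrative sketch, not part of this changeset. filecommit()
# above records copies and renames as filelog metadata: a "copy" key naming
# the source file and a "copyrev" key holding the hex file revision the copy
# came from, with the parent pointers (fp1/fp2) adjusted around it. The
# metadata itself is just a small dict, for example:
def copy_meta(source, source_filenode_hex):
    return {"copy": source, "copyrev": source_filenode_hex}

meta = copy_meta('README', 'ab' * 20)
assert meta["copy"] == 'README' and len(meta["copyrev"]) == 40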
639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
639 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
640 if p1 is None:
640 if p1 is None:
641 p1, p2 = self.dirstate.parents()
641 p1, p2 = self.dirstate.parents()
642 return self.commit(files=files, text=text, user=user, date=date,
642 return self.commit(files=files, text=text, user=user, date=date,
643 p1=p1, p2=p2, wlock=wlock)
643 p1=p1, p2=p2, wlock=wlock)
644
644
645 def commit(self, files=None, text="", user=None, date=None,
645 def commit(self, files=None, text="", user=None, date=None,
646 match=util.always, force=False, lock=None, wlock=None,
646 match=util.always, force=False, lock=None, wlock=None,
647 force_editor=False, p1=None, p2=None, extra={}):
647 force_editor=False, p1=None, p2=None, extra={}):
648
648
649 commit = []
649 commit = []
650 remove = []
650 remove = []
651 changed = []
651 changed = []
652 use_dirstate = (p1 is None) # not rawcommit
652 use_dirstate = (p1 is None) # not rawcommit
653 extra = extra.copy()
653 extra = extra.copy()
654
654
655 if use_dirstate:
655 if use_dirstate:
656 if files:
656 if files:
657 for f in files:
657 for f in files:
658 s = self.dirstate.state(f)
658 s = self.dirstate.state(f)
659 if s in 'nmai':
659 if s in 'nmai':
660 commit.append(f)
660 commit.append(f)
661 elif s == 'r':
661 elif s == 'r':
662 remove.append(f)
662 remove.append(f)
663 else:
663 else:
664 self.ui.warn(_("%s not tracked!\n") % f)
664 self.ui.warn(_("%s not tracked!\n") % f)
665 else:
665 else:
666 changes = self.status(match=match)[:5]
666 changes = self.status(match=match)[:5]
667 modified, added, removed, deleted, unknown = changes
667 modified, added, removed, deleted, unknown = changes
668 commit = modified + added
668 commit = modified + added
669 remove = removed
669 remove = removed
670 else:
670 else:
671 commit = files
671 commit = files
672
672
673 if use_dirstate:
673 if use_dirstate:
674 p1, p2 = self.dirstate.parents()
674 p1, p2 = self.dirstate.parents()
675 update_dirstate = True
675 update_dirstate = True
676 else:
676 else:
677 p1, p2 = p1, p2 or nullid
677 p1, p2 = p1, p2 or nullid
678 update_dirstate = (self.dirstate.parents()[0] == p1)
678 update_dirstate = (self.dirstate.parents()[0] == p1)
679
679
680 c1 = self.changelog.read(p1)
680 c1 = self.changelog.read(p1)
681 c2 = self.changelog.read(p2)
681 c2 = self.changelog.read(p2)
682 m1 = self.manifest.read(c1[0]).copy()
682 m1 = self.manifest.read(c1[0]).copy()
683 m2 = self.manifest.read(c2[0])
683 m2 = self.manifest.read(c2[0])
684
684
685 if use_dirstate:
685 if use_dirstate:
686 branchname = util.fromlocal(self.workingctx().branch())
686 branchname = self.workingctx().branch()
687 try:
688 branchname = branchname.decode('UTF-8').encode('UTF-8')
689 except UnicodeDecodeError:
690 raise util.Abort(_('branch name not in UTF-8!'))
687 else:
691 else:
688 branchname = ""
692 branchname = ""
689
693
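# Editor's note: illustrative sketch of the change just above, not part of the
# diff itself. With this changeset the branch name coming from the working
# context is already expected to be UTF-8, so commit() only round-trips it
# through a UTF-8 decode to reject invalid byte sequences (the real code
# raises util.Abort; ValueError stands in for it here):
def validate_utf8_branch(branchname):
    """branchname is a byte string; raise if it is not valid UTF-8."""
    try:
        branchname.decode('UTF-8')
    except UnicodeDecodeError:
        raise ValueError('branch name not in UTF-8!')
    return branchname

assert validate_utf8_branch(b'default') == b'default'
assert validate_utf8_branch(b'caf\xc3\xa9') == b'caf\xc3\xa9'   # valid UTF-8
try:
    validate_utf8_branch(b'caf\xe9')        # latin-1 bytes: rejected
except ValueError:
    pass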
690 if use_dirstate:
694 if use_dirstate:
691 oldname = c1[5].get("branch", "") # stored in UTF-8
695 oldname = c1[5].get("branch", "") # stored in UTF-8
692 if not commit and not remove and not force and p2 == nullid and \
696 if not commit and not remove and not force and p2 == nullid and \
693 branchname == oldname:
697 branchname == oldname:
694 self.ui.status(_("nothing changed\n"))
698 self.ui.status(_("nothing changed\n"))
695 return None
699 return None
696
700
697 xp1 = hex(p1)
701 xp1 = hex(p1)
698 if p2 == nullid: xp2 = ''
702 if p2 == nullid: xp2 = ''
699 else: xp2 = hex(p2)
703 else: xp2 = hex(p2)
700
704
701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
705 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
702
706
703 if not wlock:
707 if not wlock:
704 wlock = self.wlock()
708 wlock = self.wlock()
705 if not lock:
709 if not lock:
706 lock = self.lock()
710 lock = self.lock()
707 tr = self.transaction()
711 tr = self.transaction()
708
712
709 # check in files
713 # check in files
710 new = {}
714 new = {}
711 linkrev = self.changelog.count()
715 linkrev = self.changelog.count()
712 commit.sort()
716 commit.sort()
713 for f in commit:
717 for f in commit:
714 self.ui.note(f + "\n")
718 self.ui.note(f + "\n")
715 try:
719 try:
716 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
720 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
717 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
721 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
718 except IOError:
722 except IOError:
719 if use_dirstate:
723 if use_dirstate:
720 self.ui.warn(_("trouble committing %s!\n") % f)
724 self.ui.warn(_("trouble committing %s!\n") % f)
721 raise
725 raise
722 else:
726 else:
723 remove.append(f)
727 remove.append(f)
724
728
725 # update manifest
729 # update manifest
726 m1.update(new)
730 m1.update(new)
727 remove.sort()
731 remove.sort()
728
732
729 for f in remove:
733 for f in remove:
730 if f in m1:
734 if f in m1:
731 del m1[f]
735 del m1[f]
732 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
736 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
733
737
734 # add changeset
738 # add changeset
735 new = new.keys()
739 new = new.keys()
736 new.sort()
740 new.sort()
737
741
738 user = user or self.ui.username()
742 user = user or self.ui.username()
739 if not text or force_editor:
743 if not text or force_editor:
740 edittext = []
744 edittext = []
741 if text:
745 if text:
742 edittext.append(text)
746 edittext.append(text)
743 edittext.append("")
747 edittext.append("")
744 edittext.append("HG: user: %s" % user)
748 edittext.append("HG: user: %s" % user)
745 if p2 != nullid:
749 if p2 != nullid:
746 edittext.append("HG: branch merge")
750 edittext.append("HG: branch merge")
747 edittext.extend(["HG: changed %s" % f for f in changed])
751 edittext.extend(["HG: changed %s" % f for f in changed])
748 edittext.extend(["HG: removed %s" % f for f in remove])
752 edittext.extend(["HG: removed %s" % f for f in remove])
749 if not changed and not remove:
753 if not changed and not remove:
750 edittext.append("HG: no files changed")
754 edittext.append("HG: no files changed")
751 edittext.append("")
755 edittext.append("")
752 # run editor in the repository root
756 # run editor in the repository root
753 olddir = os.getcwd()
757 olddir = os.getcwd()
754 os.chdir(self.root)
758 os.chdir(self.root)
755 text = self.ui.edit("\n".join(edittext), user)
759 text = self.ui.edit("\n".join(edittext), user)
756 os.chdir(olddir)
760 os.chdir(olddir)
757
761
758 lines = [line.rstrip() for line in text.rstrip().splitlines()]
762 lines = [line.rstrip() for line in text.rstrip().splitlines()]
759 while lines and not lines[0]:
763 while lines and not lines[0]:
760 del lines[0]
764 del lines[0]
761 if not lines:
765 if not lines:
762 return None
766 return None
763 text = '\n'.join(lines)
767 text = '\n'.join(lines)
764 if branchname:
768 if branchname:
765 extra["branch"] = branchname
769 extra["branch"] = branchname
766 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
770 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
767 user, date, extra)
771 user, date, extra)
768 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
772 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
769 parent2=xp2)
773 parent2=xp2)
770 tr.close()
774 tr.close()
771
775
772 if use_dirstate or update_dirstate:
776 if use_dirstate or update_dirstate:
773 self.dirstate.setparents(n)
777 self.dirstate.setparents(n)
774 if use_dirstate:
778 if use_dirstate:
775 self.dirstate.update(new, "n")
779 self.dirstate.update(new, "n")
776 self.dirstate.forget(remove)
780 self.dirstate.forget(remove)
777
781
778 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
782 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
779 return n
783 return n
780
784
781 def walk(self, node=None, files=[], match=util.always, badmatch=None):
785 def walk(self, node=None, files=[], match=util.always, badmatch=None):
782 '''
786 '''
783 walk recursively through the directory tree or a given
787 walk recursively through the directory tree or a given
784 changeset, finding all files matched by the match
788 changeset, finding all files matched by the match
785 function
789 function
786
790
787 results are yielded in a tuple (src, filename), where src
791 results are yielded in a tuple (src, filename), where src
788 is one of:
792 is one of:
789 'f' the file was found in the directory tree
793 'f' the file was found in the directory tree
790 'm' the file was only in the dirstate and not in the tree
794 'm' the file was only in the dirstate and not in the tree
791 'b' file was not found and matched badmatch
795 'b' file was not found and matched badmatch
792 '''
796 '''
793
797
794 if node:
798 if node:
795 fdict = dict.fromkeys(files)
799 fdict = dict.fromkeys(files)
796 for fn in self.manifest.read(self.changelog.read(node)[0]):
800 for fn in self.manifest.read(self.changelog.read(node)[0]):
797 for ffn in fdict:
801 for ffn in fdict:
798 # match if the file is the exact name or a directory
802 # match if the file is the exact name or a directory
799 if ffn == fn or fn.startswith("%s/" % ffn):
803 if ffn == fn or fn.startswith("%s/" % ffn):
800 del fdict[ffn]
804 del fdict[ffn]
801 break
805 break
802 if match(fn):
806 if match(fn):
803 yield 'm', fn
807 yield 'm', fn
804 for fn in fdict:
808 for fn in fdict:
805 if badmatch and badmatch(fn):
809 if badmatch and badmatch(fn):
806 if match(fn):
810 if match(fn):
807 yield 'b', fn
811 yield 'b', fn
808 else:
812 else:
809 self.ui.warn(_('%s: No such file in rev %s\n') % (
813 self.ui.warn(_('%s: No such file in rev %s\n') % (
810 util.pathto(self.getcwd(), fn), short(node)))
814 util.pathto(self.getcwd(), fn), short(node)))
811 else:
815 else:
812 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
816 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
813 yield src, fn
817 yield src, fn
814
818
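# Editor's note: illustrative sketch, not part of this changeset. walk() above
# yields (src, filename) pairs, where src is 'f' (found in the directory
# tree), 'm' (known only to the dirstate/changeset) or 'b' (missing but
# matched by badmatch). A typical consumer just dispatches on that first
# element:
def summarize_walk(results):
    found, missing, bad = [], [], []
    for src, fn in results:
        if src == 'f':
            found.append(fn)
        elif src == 'm':
            missing.append(fn)
        elif src == 'b':
            bad.append(fn)
    return found, missing, bad

sample = [('f', 'a.txt'), ('m', 'b.txt'), ('b', 'gone.txt')]
assert summarize_walk(sample) == (['a.txt'], ['b.txt'], ['gone.txt'])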
815 def status(self, node1=None, node2=None, files=[], match=util.always,
819 def status(self, node1=None, node2=None, files=[], match=util.always,
816 wlock=None, list_ignored=False, list_clean=False):
820 wlock=None, list_ignored=False, list_clean=False):
817 """return status of files between two nodes or node and working directory
821 """return status of files between two nodes or node and working directory
818
822
819 If node1 is None, use the first dirstate parent instead.
823 If node1 is None, use the first dirstate parent instead.
820 If node2 is None, compare node1 with working directory.
824 If node2 is None, compare node1 with working directory.
821 """
825 """
822
826
823 def fcmp(fn, mf):
827 def fcmp(fn, mf):
824 t1 = self.wread(fn)
828 t1 = self.wread(fn)
825 return self.file(fn).cmp(mf.get(fn, nullid), t1)
829 return self.file(fn).cmp(mf.get(fn, nullid), t1)
826
830
827 def mfmatches(node):
831 def mfmatches(node):
828 change = self.changelog.read(node)
832 change = self.changelog.read(node)
829 mf = self.manifest.read(change[0]).copy()
833 mf = self.manifest.read(change[0]).copy()
830 for fn in mf.keys():
834 for fn in mf.keys():
831 if not match(fn):
835 if not match(fn):
832 del mf[fn]
836 del mf[fn]
833 return mf
837 return mf
834
838
835 modified, added, removed, deleted, unknown = [], [], [], [], []
839 modified, added, removed, deleted, unknown = [], [], [], [], []
836 ignored, clean = [], []
840 ignored, clean = [], []
837
841
838 compareworking = False
842 compareworking = False
839 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
843 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
840 compareworking = True
844 compareworking = True
841
845
842 if not compareworking:
846 if not compareworking:
843 # read the manifest from node1 before the manifest from node2,
847 # read the manifest from node1 before the manifest from node2,
844 # so that we'll hit the manifest cache if we're going through
848 # so that we'll hit the manifest cache if we're going through
845 # all the revisions in parent->child order.
849 # all the revisions in parent->child order.
846 mf1 = mfmatches(node1)
850 mf1 = mfmatches(node1)
847
851
848 # are we comparing the working directory?
852 # are we comparing the working directory?
849 if not node2:
853 if not node2:
850 if not wlock:
854 if not wlock:
851 try:
855 try:
852 wlock = self.wlock(wait=0)
856 wlock = self.wlock(wait=0)
853 except lock.LockException:
857 except lock.LockException:
854 wlock = None
858 wlock = None
855 (lookup, modified, added, removed, deleted, unknown,
859 (lookup, modified, added, removed, deleted, unknown,
856 ignored, clean) = self.dirstate.status(files, match,
860 ignored, clean) = self.dirstate.status(files, match,
857 list_ignored, list_clean)
861 list_ignored, list_clean)
858
862
859 # are we comparing working dir against its parent?
863 # are we comparing working dir against its parent?
860 if compareworking:
864 if compareworking:
861 if lookup:
865 if lookup:
862 # do a full compare of any files that might have changed
866 # do a full compare of any files that might have changed
863 mf2 = mfmatches(self.dirstate.parents()[0])
867 mf2 = mfmatches(self.dirstate.parents()[0])
864 for f in lookup:
868 for f in lookup:
865 if fcmp(f, mf2):
869 if fcmp(f, mf2):
866 modified.append(f)
870 modified.append(f)
867 else:
871 else:
868 clean.append(f)
872 clean.append(f)
869 if wlock is not None:
873 if wlock is not None:
870 self.dirstate.update([f], "n")
874 self.dirstate.update([f], "n")
871 else:
875 else:
872 # we are comparing working dir against non-parent
876 # we are comparing working dir against non-parent
873 # generate a pseudo-manifest for the working dir
877 # generate a pseudo-manifest for the working dir
874 # XXX: create it in dirstate.py ?
878 # XXX: create it in dirstate.py ?
875 mf2 = mfmatches(self.dirstate.parents()[0])
879 mf2 = mfmatches(self.dirstate.parents()[0])
876 for f in lookup + modified + added:
880 for f in lookup + modified + added:
877 mf2[f] = ""
881 mf2[f] = ""
878 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
882 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
879 for f in removed:
883 for f in removed:
880 if f in mf2:
884 if f in mf2:
881 del mf2[f]
885 del mf2[f]
882 else:
886 else:
883 # we are comparing two revisions
887 # we are comparing two revisions
884 mf2 = mfmatches(node2)
888 mf2 = mfmatches(node2)
885
889
886 if not compareworking:
890 if not compareworking:
887 # flush lists from dirstate before comparing manifests
891 # flush lists from dirstate before comparing manifests
888 modified, added, clean = [], [], []
892 modified, added, clean = [], [], []
889
893
890 # make sure to sort the files so we talk to the disk in a
894 # make sure to sort the files so we talk to the disk in a
891 # reasonable order
895 # reasonable order
892 mf2keys = mf2.keys()
896 mf2keys = mf2.keys()
893 mf2keys.sort()
897 mf2keys.sort()
894 for fn in mf2keys:
898 for fn in mf2keys:
895 if mf1.has_key(fn):
899 if mf1.has_key(fn):
896 if mf1.flags(fn) != mf2.flags(fn) or \
900 if mf1.flags(fn) != mf2.flags(fn) or \
897 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
901 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
898 modified.append(fn)
902 modified.append(fn)
899 elif list_clean:
903 elif list_clean:
900 clean.append(fn)
904 clean.append(fn)
901 del mf1[fn]
905 del mf1[fn]
902 else:
906 else:
903 added.append(fn)
907 added.append(fn)
904
908
905 removed = mf1.keys()
909 removed = mf1.keys()
906
910
907 # sort and return results:
911 # sort and return results:
908 for l in modified, added, removed, deleted, unknown, ignored, clean:
912 for l in modified, added, removed, deleted, unknown, ignored, clean:
909 l.sort()
913 l.sort()
910 return (modified, added, removed, deleted, unknown, ignored, clean)
914 return (modified, added, removed, deleted, unknown, ignored, clean)
911
915
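# Editor's note: illustrative sketch, not part of this changeset. status()
# above always returns seven sorted lists in a fixed order, so callers unpack
# them positionally (tag() and commit() above slice off the first five with
# self.status()[:5]). For example, with a made-up result tuple:
result = (['changed.py'], ['new.py'], [], [], ['stray.tmp'], [], ['ok.py'])
modified, added, removed, deleted, unknown, ignored, clean = result
assert modified == ['changed.py'] and clean == ['ok.py']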
912 def add(self, list, wlock=None):
916 def add(self, list, wlock=None):
913 if not wlock:
917 if not wlock:
914 wlock = self.wlock()
918 wlock = self.wlock()
915 for f in list:
919 for f in list:
916 p = self.wjoin(f)
920 p = self.wjoin(f)
917 if not os.path.exists(p):
921 if not os.path.exists(p):
918 self.ui.warn(_("%s does not exist!\n") % f)
922 self.ui.warn(_("%s does not exist!\n") % f)
919 elif not os.path.isfile(p):
923 elif not os.path.isfile(p):
920 self.ui.warn(_("%s not added: only files supported currently\n")
924 self.ui.warn(_("%s not added: only files supported currently\n")
921 % f)
925 % f)
922 elif self.dirstate.state(f) in 'an':
926 elif self.dirstate.state(f) in 'an':
923 self.ui.warn(_("%s already tracked!\n") % f)
927 self.ui.warn(_("%s already tracked!\n") % f)
924 else:
928 else:
925 self.dirstate.update([f], "a")
929 self.dirstate.update([f], "a")
926
930
927 def forget(self, list, wlock=None):
931 def forget(self, list, wlock=None):
928 if not wlock:
932 if not wlock:
929 wlock = self.wlock()
933 wlock = self.wlock()
930 for f in list:
934 for f in list:
931 if self.dirstate.state(f) not in 'ai':
935 if self.dirstate.state(f) not in 'ai':
932 self.ui.warn(_("%s not added!\n") % f)
936 self.ui.warn(_("%s not added!\n") % f)
933 else:
937 else:
934 self.dirstate.forget([f])
938 self.dirstate.forget([f])
935
939
936 def remove(self, list, unlink=False, wlock=None):
940 def remove(self, list, unlink=False, wlock=None):
937 if unlink:
941 if unlink:
938 for f in list:
942 for f in list:
939 try:
943 try:
940 util.unlink(self.wjoin(f))
944 util.unlink(self.wjoin(f))
941 except OSError, inst:
945 except OSError, inst:
942 if inst.errno != errno.ENOENT:
946 if inst.errno != errno.ENOENT:
943 raise
947 raise
944 if not wlock:
948 if not wlock:
945 wlock = self.wlock()
949 wlock = self.wlock()
946 for f in list:
950 for f in list:
947 p = self.wjoin(f)
951 p = self.wjoin(f)
948 if os.path.exists(p):
952 if os.path.exists(p):
949 self.ui.warn(_("%s still exists!\n") % f)
953 self.ui.warn(_("%s still exists!\n") % f)
950 elif self.dirstate.state(f) == 'a':
954 elif self.dirstate.state(f) == 'a':
951 self.dirstate.forget([f])
955 self.dirstate.forget([f])
952 elif f not in self.dirstate:
956 elif f not in self.dirstate:
953 self.ui.warn(_("%s not tracked!\n") % f)
957 self.ui.warn(_("%s not tracked!\n") % f)
954 else:
958 else:
955 self.dirstate.update([f], "r")
959 self.dirstate.update([f], "r")
956
960
957 def undelete(self, list, wlock=None):
961 def undelete(self, list, wlock=None):
958 p = self.dirstate.parents()[0]
962 p = self.dirstate.parents()[0]
959 mn = self.changelog.read(p)[0]
963 mn = self.changelog.read(p)[0]
960 m = self.manifest.read(mn)
964 m = self.manifest.read(mn)
961 if not wlock:
965 if not wlock:
962 wlock = self.wlock()
966 wlock = self.wlock()
963 for f in list:
967 for f in list:
964 if self.dirstate.state(f) not in "r":
968 if self.dirstate.state(f) not in "r":
965 self.ui.warn("%s not removed!\n" % f)
969 self.ui.warn("%s not removed!\n" % f)
966 else:
970 else:
967 t = self.file(f).read(m[f])
971 t = self.file(f).read(m[f])
968 self.wwrite(f, t)
972 self.wwrite(f, t)
969 util.set_exec(self.wjoin(f), m.execf(f))
973 util.set_exec(self.wjoin(f), m.execf(f))
970 self.dirstate.update([f], "n")
974 self.dirstate.update([f], "n")
971
975
972 def copy(self, source, dest, wlock=None):
976 def copy(self, source, dest, wlock=None):
973 p = self.wjoin(dest)
977 p = self.wjoin(dest)
974 if not os.path.exists(p):
978 if not os.path.exists(p):
975 self.ui.warn(_("%s does not exist!\n") % dest)
979 self.ui.warn(_("%s does not exist!\n") % dest)
976 elif not os.path.isfile(p):
980 elif not os.path.isfile(p):
977 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
981 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
978 else:
982 else:
979 if not wlock:
983 if not wlock:
980 wlock = self.wlock()
984 wlock = self.wlock()
981 if self.dirstate.state(dest) == '?':
985 if self.dirstate.state(dest) == '?':
982 self.dirstate.update([dest], "a")
986 self.dirstate.update([dest], "a")
983 self.dirstate.copy(source, dest)
987 self.dirstate.copy(source, dest)
984
988
985 def heads(self, start=None):
989 def heads(self, start=None):
986 heads = self.changelog.heads(start)
990 heads = self.changelog.heads(start)
987 # sort the output in rev descending order
991 # sort the output in rev descending order
988 heads = [(-self.changelog.rev(h), h) for h in heads]
992 heads = [(-self.changelog.rev(h), h) for h in heads]
989 heads.sort()
993 heads.sort()
990 return [n for (r, n) in heads]
994 return [n for (r, n) in heads]
991
995
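    # Example (illustrative only): the sort trick in heads() pairs each head
    # with its negated revision number so that a plain ascending sort()
    # yields newest-first order.  With hypothetical heads a, b, c at
    # revisions 3, 10 and 7:
    #
    #   pairs built:   [(-3, a), (-10, b), (-7, c)]
    #   after sort():  [(-10, b), (-7, c), (-3, a)]
    #   result:        [b, c, a]   (highest revision first)
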
    # branchlookup returns a dict giving a list of branches for
    # each head.  A branch is defined as the tag of a node or
    # the branch of the node's parents.  If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents.  It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head.  The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out

    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

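    # Example (illustrative only): between() walks first parents from 'top'
    # towards 'bottom' and records the nodes at exponentially growing
    # distances 1, 2, 4, 8, ... from 'top'.  For a hypothetical linear
    # history with top at revision 10 and bottom at revision 0, the sampled
    # list holds the nodes at revisions 9, 8, 6 and 2: enough for the
    # discovery code in findincoming() below to narrow down the boundary
    # between known and unknown history without transferring every node.
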
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore, base will be updated to include the nodes that exist in
        both self and remote but none of whose children exist in both self
        and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()

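    # Example (illustrative only, 'repo' being a localrepository instance):
    # a caller typically seeds an empty 'base' dict and lets findincoming()
    # fill it in while returning the roots of the changesets missing locally:
    #
    #   base = {}
    #   fetch = repo.findincoming(remote, base)
    #   # 'fetch' now holds roots of changesets present on remote but not
    #   # here; 'base' holds nodes known to exist on both sides.
    #
    # This is how prepush() below seeds discovery before deciding what to
    # push.
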
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset

    def pull(self, remote, heads=None, force=False, lock=None):
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            if mylock:
                lock.release()

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
            elif inc:
                self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

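    # Example (illustrative only) of the new-head check in prepush() above,
    # with hypothetical nodes: if the remote has heads A and B, and the local
    # outgoing heads are A' (a descendant of A) and C (unrelated to B), the
    # loop keeps B because no outgoing head descends from it, giving
    # newheads = [A', C, B].  Three heads against the remote's two triggers
    # the "push creates new remote branches" abort unless -f/--force is given.
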
    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push.  once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force: remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

    def changegroupinfo(self, nodes):
        self.ui.note(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # that the first manifest referencing it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.sopener,
                                            self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.sopener,
                                             self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1889
1893
1890
1894
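A minimal sketch of how a caller might act on the return value documented above. The helper name report() and its arguments are placeholders (repo is an open localrepository, source a changegroup stream), not part of this module:

    def report(repo, source, url):
        # interpret addchangegroup()'s documented return value
        ret = repo.addchangegroup(source, 'pull', url=url)
        if ret == 0:
            print "nothing changed, or no source"
        elif ret > 1:
            print "%d new head(s)" % (ret - 1)      # 1 + added heads
        elif ret < 0:
            print "%d head(s) removed" % (-ret - 1) # -1 - removed heads
        else:                                       # ret == 1
            print "changes added, head count unchanged"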
1891 def stream_in(self, remote):
1895 def stream_in(self, remote):
1892 fp = remote.stream_out()
1896 fp = remote.stream_out()
1893 l = fp.readline()
1897 l = fp.readline()
1894 try:
1898 try:
1895 resp = int(l)
1899 resp = int(l)
1896 except ValueError:
1900 except ValueError:
1897 raise util.UnexpectedOutput(
1901 raise util.UnexpectedOutput(
1898 _('Unexpected response from remote server:'), l)
1902 _('Unexpected response from remote server:'), l)
1899 if resp == 1:
1903 if resp == 1:
1900 raise util.Abort(_('operation forbidden by server'))
1904 raise util.Abort(_('operation forbidden by server'))
1901 elif resp == 2:
1905 elif resp == 2:
1902 raise util.Abort(_('locking the remote repository failed'))
1906 raise util.Abort(_('locking the remote repository failed'))
1903 elif resp != 0:
1907 elif resp != 0:
1904 raise util.Abort(_('the server sent an unknown error code'))
1908 raise util.Abort(_('the server sent an unknown error code'))
1905 self.ui.status(_('streaming all changes\n'))
1909 self.ui.status(_('streaming all changes\n'))
1906 l = fp.readline()
1910 l = fp.readline()
1907 try:
1911 try:
1908 total_files, total_bytes = map(int, l.split(' ', 1))
1912 total_files, total_bytes = map(int, l.split(' ', 1))
1909 except (ValueError, TypeError):
1913 except (ValueError, TypeError):
1910 raise util.UnexpectedOutput(
1914 raise util.UnexpectedOutput(
1911 _('Unexpected response from remote server:'), l)
1915 _('Unexpected response from remote server:'), l)
1912 self.ui.status(_('%d files to transfer, %s of data\n') %
1916 self.ui.status(_('%d files to transfer, %s of data\n') %
1913 (total_files, util.bytecount(total_bytes)))
1917 (total_files, util.bytecount(total_bytes)))
1914 start = time.time()
1918 start = time.time()
1915 for i in xrange(total_files):
1919 for i in xrange(total_files):
1916 # XXX doesn't support '\n' or '\r' in filenames
1920 # XXX doesn't support '\n' or '\r' in filenames
1917 l = fp.readline()
1921 l = fp.readline()
1918 try:
1922 try:
1919 name, size = l.split('\0', 1)
1923 name, size = l.split('\0', 1)
1920 size = int(size)
1924 size = int(size)
1921 except (ValueError, TypeError):
1925 except (ValueError, TypeError):
1922 raise util.UnexpectedOutput(
1926 raise util.UnexpectedOutput(
1923 _('Unexpected response from remote server:'), l)
1927 _('Unexpected response from remote server:'), l)
1924 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1928 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1925 ofp = self.sopener(name, 'w')
1929 ofp = self.sopener(name, 'w')
1926 for chunk in util.filechunkiter(fp, limit=size):
1930 for chunk in util.filechunkiter(fp, limit=size):
1927 ofp.write(chunk)
1931 ofp.write(chunk)
1928 ofp.close()
1932 ofp.close()
1929 elapsed = time.time() - start
1933 elapsed = time.time() - start
1930 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1934 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1931 (util.bytecount(total_bytes), elapsed,
1935 (util.bytecount(total_bytes), elapsed,
1932 util.bytecount(total_bytes / elapsed)))
1936 util.bytecount(total_bytes / elapsed)))
1933 self.reload()
1937 self.reload()
1934 return len(self.heads()) + 1
1938 return len(self.heads()) + 1
1935
1939
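A hedged sketch of the wire format that stream_in() consumes above: a status line ('0' means success, '1' forbidden, '2' lock failure), a '<file count> <byte count>' line, then one '<name>\0<size>' header per file followed by exactly <size> bytes of revlog data. The function name read_stream and the file-like object fp are illustrative only, and the error handling is simplified compared to the real method:

    def read_stream(fp):
        if int(fp.readline()) != 0:
            raise ValueError("server refused or failed the stream request")
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        for dummy in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            # the file's revlog data follows the header verbatim
            yield name, fp.read(int(size))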
1936 def clone(self, remote, heads=[], stream=False):
1940 def clone(self, remote, heads=[], stream=False):
1937 '''clone remote repository.
1941 '''clone remote repository.
1938
1942
1939 keyword arguments:
1943 keyword arguments:
1940 heads: list of revs to clone (forces use of pull)
1944 heads: list of revs to clone (forces use of pull)
1941 stream: use streaming clone if possible'''
1945 stream: use streaming clone if possible'''
1942
1946
1943 # now, all clients that can request uncompressed clones can
1947 # now, all clients that can request uncompressed clones can
1944 # read repo formats supported by all servers that can serve
1948 # read repo formats supported by all servers that can serve
1945 # them.
1949 # them.
1946
1950
1947 # if revlog format changes, client will have to check version
1951 # if revlog format changes, client will have to check version
1948 # and format flags on "stream" capability, and use
1952 # and format flags on "stream" capability, and use
1949 # uncompressed only if compatible.
1953 # uncompressed only if compatible.
1950
1954
1951 if stream and not heads and remote.capable('stream'):
1955 if stream and not heads and remote.capable('stream'):
1952 return self.stream_in(remote)
1956 return self.stream_in(remote)
1953 return self.pull(remote, heads)
1957 return self.pull(remote, heads)
1954
1958
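A short usage sketch of the decision above; repo and remote stand in for an open local repository and any peer object. Streaming is only attempted for a full clone of a stream-capable server, and requesting specific heads forces the pull path:

    result = repo.clone(remote, stream=True)      # streams if the server allows it
    # result = repo.clone(remote, heads=somenodes)  # any head list falls back to pull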
1955 # used to avoid circular references so destructors work
1959 # used to avoid circular references so destructors work
1956 def aftertrans(files):
1960 def aftertrans(files):
1957 renamefiles = [tuple(t) for t in files]
1961 renamefiles = [tuple(t) for t in files]
1958 def a():
1962 def a():
1959 for src, dest in renamefiles:
1963 for src, dest in renamefiles:
1960 util.rename(src, dest)
1964 util.rename(src, dest)
1961 return a
1965 return a
1962
1966
1963 def instance(ui, path, create):
1967 def instance(ui, path, create):
1964 return localrepository(ui, util.drop_scheme('file', path), create)
1968 return localrepository(ui, util.drop_scheme('file', path), create)
1965
1969
1966 def islocal(path):
1970 def islocal(path):
1967 return True
1971 return True
@@ -1,496 +1,495 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "errno util os tempfile")
11 demandload(globals(), "errno util os tempfile")
12
12
13 def filemerge(repo, fw, fo, wctx, mctx):
13 def filemerge(repo, fw, fo, wctx, mctx):
14 """perform a 3-way merge in the working directory
14 """perform a 3-way merge in the working directory
15
15
16 fw = filename in the working directory
16 fw = filename in the working directory
17 fo = filename in other parent
17 fo = filename in other parent
18 wctx, mctx = working and merge changecontexts
18 wctx, mctx = working and merge changecontexts
19 """
19 """
20
20
21 def temp(prefix, ctx):
21 def temp(prefix, ctx):
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
22 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
24 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
25 repo.wwrite(ctx.path(), ctx.data(), f)
25 repo.wwrite(ctx.path(), ctx.data(), f)
26 f.close()
26 f.close()
27 return name
27 return name
28
28
29 fcm = wctx.filectx(fw)
29 fcm = wctx.filectx(fw)
30 fco = mctx.filectx(fo)
30 fco = mctx.filectx(fo)
31
31
32 if not fco.cmp(fcm.data()): # files identical?
32 if not fco.cmp(fcm.data()): # files identical?
33 return None
33 return None
34
34
35 fca = fcm.ancestor(fco)
35 fca = fcm.ancestor(fco)
36 if not fca:
36 if not fca:
37 fca = repo.filectx(fw, fileid=nullrev)
37 fca = repo.filectx(fw, fileid=nullrev)
38 a = repo.wjoin(fw)
38 a = repo.wjoin(fw)
39 b = temp("base", fca)
39 b = temp("base", fca)
40 c = temp("other", fco)
40 c = temp("other", fco)
41
41
42 if fw != fo:
42 if fw != fo:
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
43 repo.ui.status(_("merging %s and %s\n") % (fw, fo))
44 else:
44 else:
45 repo.ui.status(_("merging %s\n") % fw)
45 repo.ui.status(_("merging %s\n") % fw)
46
46
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
47 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))
48
48
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
49 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
50 or "hgmerge")
50 or "hgmerge")
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
51 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
52 environ={'HG_FILE': fw,
52 environ={'HG_FILE': fw,
53 'HG_MY_NODE': str(wctx.parents()[0]),
53 'HG_MY_NODE': str(wctx.parents()[0]),
54 'HG_OTHER_NODE': str(mctx)})
54 'HG_OTHER_NODE': str(mctx)})
55 if r:
55 if r:
56 repo.ui.warn(_("merging %s failed!\n") % fw)
56 repo.ui.warn(_("merging %s failed!\n") % fw)
57
57
58 os.unlink(b)
58 os.unlink(b)
59 os.unlink(c)
59 os.unlink(c)
60 return r
60 return r
61
61
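A small sketch of the merge-tool selection performed above: the HGMERGE environment variable wins, then the ui.merge configuration entry, then the bundled 'hgmerge' script; the chosen program is then run as "tool local base other" with HG_FILE, HG_MY_NODE and HG_OTHER_NODE in its environment. The helper name and the sample config value are illustrative only:

    import os

    def pick_merge_tool(ui):
        # e.g. "merge = mymergetool" in the [ui] section of hgrc
        return os.environ.get("HGMERGE") or ui.config("ui", "merge") or "hgmerge"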
62 def checkunknown(wctx, mctx):
62 def checkunknown(wctx, mctx):
63 "check for collisions between unknown files and files in mctx"
63 "check for collisions between unknown files and files in mctx"
64 man = mctx.manifest()
64 man = mctx.manifest()
65 for f in wctx.unknown():
65 for f in wctx.unknown():
66 if f in man:
66 if f in man:
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
67 if mctx.filectx(f).cmp(wctx.filectx(f).data()):
68 raise util.Abort(_("untracked local file '%s' differs"\
68 raise util.Abort(_("untracked local file '%s' differs"\
69 " from remote version") % f)
69 " from remote version") % f)
70
70
71 def checkcollision(mctx):
71 def checkcollision(mctx):
72 "check for case folding collisions in the destination context"
72 "check for case folding collisions in the destination context"
73 folded = {}
73 folded = {}
74 for fn in mctx.manifest():
74 for fn in mctx.manifest():
75 fold = fn.lower()
75 fold = fn.lower()
76 if fold in folded:
76 if fold in folded:
77 raise util.Abort(_("case-folding collision between %s and %s")
77 raise util.Abort(_("case-folding collision between %s and %s")
78 % (fn, folded[fold]))
78 % (fn, folded[fold]))
79 folded[fold] = fn
79 folded[fold] = fn
80
80
81 def forgetremoved(wctx, mctx):
81 def forgetremoved(wctx, mctx):
82 """
82 """
83 Forget removed files
83 Forget removed files
84
84
85 If we're jumping between revisions (as opposed to merging), and if
85 If we're jumping between revisions (as opposed to merging), and if
86 neither the working directory nor the target rev has the file,
86 neither the working directory nor the target rev has the file,
87 then we need to remove it from the dirstate, to prevent the
87 then we need to remove it from the dirstate, to prevent the
88 dirstate from listing the file when it is no longer in the
88 dirstate from listing the file when it is no longer in the
89 manifest.
89 manifest.
90 """
90 """
91
91
92 action = []
92 action = []
93 man = mctx.manifest()
93 man = mctx.manifest()
94 for f in wctx.deleted() + wctx.removed():
94 for f in wctx.deleted() + wctx.removed():
95 if f not in man:
95 if f not in man:
96 action.append((f, "f"))
96 action.append((f, "f"))
97
97
98 return action
98 return action
99
99
100 def findcopies(repo, m1, m2, ma, limit):
100 def findcopies(repo, m1, m2, ma, limit):
101 """
101 """
102 Find moves and copies between m1 and m2 back to limit linkrev
102 Find moves and copies between m1 and m2 back to limit linkrev
103 """
103 """
104
104
105 def findold(fctx):
105 def findold(fctx):
106 "find files that path was copied from, back to linkrev limit"
106 "find files that path was copied from, back to linkrev limit"
107 old = {}
107 old = {}
108 orig = fctx.path()
108 orig = fctx.path()
109 visit = [fctx]
109 visit = [fctx]
110 while visit:
110 while visit:
111 fc = visit.pop()
111 fc = visit.pop()
112 if fc.rev() < limit:
112 if fc.rev() < limit:
113 continue
113 continue
114 if fc.path() != orig and fc.path() not in old:
114 if fc.path() != orig and fc.path() not in old:
115 old[fc.path()] = 1
115 old[fc.path()] = 1
116 visit += fc.parents()
116 visit += fc.parents()
117
117
118 old = old.keys()
118 old = old.keys()
119 old.sort()
119 old.sort()
120 return old
120 return old
121
121
122 def nonoverlap(d1, d2, d3):
122 def nonoverlap(d1, d2, d3):
123 "Return list of elements in d1 not in d2 or d3"
123 "Return list of elements in d1 not in d2 or d3"
124 l = [d for d in d1 if d not in d3 and d not in d2]
124 l = [d for d in d1 if d not in d3 and d not in d2]
125 l.sort()
125 l.sort()
126 return l
126 return l
127
127
128 def checkcopies(c, man):
128 def checkcopies(c, man):
129 '''check possible copies for filectx c'''
129 '''check possible copies for filectx c'''
130 for of in findold(c):
130 for of in findold(c):
131 if of not in man:
131 if of not in man:
132 return
132 return
133 c2 = ctx(of, man[of])
133 c2 = ctx(of, man[of])
134 ca = c.ancestor(c2)
134 ca = c.ancestor(c2)
135 if not ca: # unrelated
135 if not ca: # unrelated
136 return
136 return
137 if ca.path() == c.path() or ca.path() == c2.path():
137 if ca.path() == c.path() or ca.path() == c2.path():
138 fullcopy[c.path()] = of
138 fullcopy[c.path()] = of
139 if c == ca or c2 == ca: # no merge needed, ignore copy
139 if c == ca or c2 == ca: # no merge needed, ignore copy
140 return
140 return
141 copy[c.path()] = of
141 copy[c.path()] = of
142
142
143 def dirs(files):
143 def dirs(files):
144 d = {}
144 d = {}
145 for f in files:
145 for f in files:
146 d[os.path.dirname(f)] = True
146 d[os.path.dirname(f)] = True
147 return d
147 return d
148
148
149 if not repo.ui.configbool("merge", "followcopies", True):
149 if not repo.ui.configbool("merge", "followcopies", True):
150 return {}
150 return {}
151
151
152 # avoid silly behavior for update from empty dir
152 # avoid silly behavior for update from empty dir
153 if not m1 or not m2 or not ma:
153 if not m1 or not m2 or not ma:
154 return {}
154 return {}
155
155
156 dcopies = repo.dirstate.copies()
156 dcopies = repo.dirstate.copies()
157 copy = {}
157 copy = {}
158 fullcopy = {}
158 fullcopy = {}
159 u1 = nonoverlap(m1, m2, ma)
159 u1 = nonoverlap(m1, m2, ma)
160 u2 = nonoverlap(m2, m1, ma)
160 u2 = nonoverlap(m2, m1, ma)
161 ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))
161 ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))
162
162
163 for f in u1:
163 for f in u1:
164 checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)
164 checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)
165
165
166 for f in u2:
166 for f in u2:
167 checkcopies(ctx(f, m2[f]), m1)
167 checkcopies(ctx(f, m2[f]), m1)
168
168
169 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
169 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
170 return copy
170 return copy
171
171
172 # generate a directory move map
172 # generate a directory move map
173 d1, d2 = dirs(m1), dirs(m2)
173 d1, d2 = dirs(m1), dirs(m2)
174 invalid = {}
174 invalid = {}
175 dirmove = {}
175 dirmove = {}
176
176
177 for dst, src in fullcopy.items():
177 for dst, src in fullcopy.items():
178 dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
178 dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
179 if dsrc in invalid:
179 if dsrc in invalid:
180 continue
180 continue
181 elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
181 elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
182 invalid[dsrc] = True
182 invalid[dsrc] = True
183 elif dsrc in dirmove and dirmove[dsrc] != ddst:
183 elif dsrc in dirmove and dirmove[dsrc] != ddst:
184 invalid[dsrc] = True
184 invalid[dsrc] = True
185 del dirmove[dsrc]
185 del dirmove[dsrc]
186 else:
186 else:
187 dirmove[dsrc] = ddst
187 dirmove[dsrc] = ddst
188
188
189 del d1, d2, invalid
189 del d1, d2, invalid
190
190
191 if not dirmove:
191 if not dirmove:
192 return copy
192 return copy
193
193
194 # check unaccounted nonoverlapping files
194 # check unaccounted nonoverlapping files
195 for f in u1 + u2:
195 for f in u1 + u2:
196 if f not in fullcopy:
196 if f not in fullcopy:
197 d = os.path.dirname(f)
197 d = os.path.dirname(f)
198 if d in dirmove:
198 if d in dirmove:
199 copy[f] = dirmove[d] + "/" + os.path.basename(f)
199 copy[f] = dirmove[d] + "/" + os.path.basename(f)
200
200
201 return copy
201 return copy
202
202
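A toy illustration of the directory-move map built above, with made-up file names: when every recorded copy out of a directory points into the same new directory, files that exist only on the other side of the merge are remapped into it.

    import os

    fullcopy = {'new/a.txt': 'old/a.txt',   # dst -> src pairs found by checkcopies()
                'new/b.txt': 'old/b.txt'}
    dirmove = {'old': 'new'}                # derived: everything left 'old/' for 'new/'

    f = 'old/c.txt'                         # unaccounted file from the other manifest
    d = os.path.dirname(f)
    if d in dirmove:
        print dirmove[d] + "/" + os.path.basename(f)   # prints new/c.txt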
203 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
203 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
204 """
204 """
205 Merge p1 and p2 with ancestor ma and generate merge action list
205 Merge p1 and p2 with ancestor ma and generate merge action list
206
206
207 overwrite = whether we clobber working files
207 overwrite = whether we clobber working files
208 partial = function to filter file lists
208 partial = function to filter file lists
209 """
209 """
210
210
211 repo.ui.note(_("resolving manifests\n"))
211 repo.ui.note(_("resolving manifests\n"))
212 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
212 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
213 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
213 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
214
214
215 m1 = p1.manifest()
215 m1 = p1.manifest()
216 m2 = p2.manifest()
216 m2 = p2.manifest()
217 ma = pa.manifest()
217 ma = pa.manifest()
218 backwards = (pa == p2)
218 backwards = (pa == p2)
219 action = []
219 action = []
220 copy = {}
220 copy = {}
221
221
222 def fmerge(f, f2=None, fa=None):
222 def fmerge(f, f2=None, fa=None):
223 """merge executable flags"""
223 """merge executable flags"""
224 if not f2:
224 if not f2:
225 f2 = f
225 f2 = f
226 fa = f
226 fa = f
227 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
227 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
228 return ((a^b) | (a^c)) ^ a
228 return ((a^b) | (a^c)) ^ a
229
229
230 def act(msg, m, f, *args):
230 def act(msg, m, f, *args):
231 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
231 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
232 action.append((f, m) + args)
232 action.append((f, m) + args)
233
233
234 if not (backwards or overwrite):
234 if not (backwards or overwrite):
235 copy = findcopies(repo, m1, m2, ma, pa.rev())
235 copy = findcopies(repo, m1, m2, ma, pa.rev())
236 copied = dict.fromkeys(copy.values())
236 copied = dict.fromkeys(copy.values())
237
237
238 # Compare manifests
238 # Compare manifests
239 for f, n in m1.iteritems():
239 for f, n in m1.iteritems():
240 if partial and not partial(f):
240 if partial and not partial(f):
241 continue
241 continue
242 if f in m2:
242 if f in m2:
243 # are files different?
243 # are files different?
244 if n != m2[f]:
244 if n != m2[f]:
245 a = ma.get(f, nullid)
245 a = ma.get(f, nullid)
246 # are both different from the ancestor?
246 # are both different from the ancestor?
247 if not overwrite and n != a and m2[f] != a:
247 if not overwrite and n != a and m2[f] != a:
248 act("versions differ", "m", f, f, f, fmerge(f), False)
248 act("versions differ", "m", f, f, f, fmerge(f), False)
249 # are we clobbering?
249 # are we clobbering?
250 # is remote's version newer?
250 # is remote's version newer?
251 # or are we going back in time and clean?
251 # or are we going back in time and clean?
252 elif overwrite or m2[f] != a or (backwards and not n[20:]):
252 elif overwrite or m2[f] != a or (backwards and not n[20:]):
253 act("remote is newer", "g", f, m2.execf(f))
253 act("remote is newer", "g", f, m2.execf(f))
254 # local is newer, not overwrite, check mode bits
254 # local is newer, not overwrite, check mode bits
255 elif fmerge(f) != m1.execf(f):
255 elif fmerge(f) != m1.execf(f):
256 act("update permissions", "e", f, m2.execf(f))
256 act("update permissions", "e", f, m2.execf(f))
257 # contents same, check mode bits
257 # contents same, check mode bits
258 elif m1.execf(f) != m2.execf(f):
258 elif m1.execf(f) != m2.execf(f):
259 if overwrite or fmerge(f) != m1.execf(f):
259 if overwrite or fmerge(f) != m1.execf(f):
260 act("update permissions", "e", f, m2.execf(f))
260 act("update permissions", "e", f, m2.execf(f))
261 elif f in copied:
261 elif f in copied:
262 continue
262 continue
263 elif f in copy:
263 elif f in copy:
264 f2 = copy[f]
264 f2 = copy[f]
265 if f2 not in m2: # directory rename
265 if f2 not in m2: # directory rename
266 act("remote renamed directory to " + f2, "d",
266 act("remote renamed directory to " + f2, "d",
267 f, None, f2, m1.execf(f))
267 f, None, f2, m1.execf(f))
268 elif f2 in m1: # case 2 A,B/B/B
268 elif f2 in m1: # case 2 A,B/B/B
269 act("local copied to " + f2, "m",
269 act("local copied to " + f2, "m",
270 f, f2, f, fmerge(f, f2, f2), False)
270 f, f2, f, fmerge(f, f2, f2), False)
271 else: # case 4,21 A/B/B
271 else: # case 4,21 A/B/B
272 act("local moved to " + f2, "m",
272 act("local moved to " + f2, "m",
273 f, f2, f, fmerge(f, f2, f2), False)
273 f, f2, f, fmerge(f, f2, f2), False)
274 elif f in ma:
274 elif f in ma:
275 if n != ma[f] and not overwrite:
275 if n != ma[f] and not overwrite:
276 if repo.ui.prompt(
276 if repo.ui.prompt(
277 (_(" local changed %s which remote deleted\n") % f) +
277 (_(" local changed %s which remote deleted\n") % f) +
278 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
278 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
279 act("prompt delete", "r", f)
279 act("prompt delete", "r", f)
280 else:
280 else:
281 act("other deleted", "r", f)
281 act("other deleted", "r", f)
282 else:
282 else:
283 # file is created on branch or in working directory
283 # file is created on branch or in working directory
284 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
284 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
285 act("remote deleted", "r", f)
285 act("remote deleted", "r", f)
286
286
287 for f, n in m2.iteritems():
287 for f, n in m2.iteritems():
288 if partial and not partial(f):
288 if partial and not partial(f):
289 continue
289 continue
290 if f in m1:
290 if f in m1:
291 continue
291 continue
292 if f in copied:
292 if f in copied:
293 continue
293 continue
294 if f in copy:
294 if f in copy:
295 f2 = copy[f]
295 f2 = copy[f]
296 if f2 not in m1: # directory rename
296 if f2 not in m1: # directory rename
297 act("local renamed directory to " + f2, "d",
297 act("local renamed directory to " + f2, "d",
298 None, f, f2, m2.execf(f))
298 None, f, f2, m2.execf(f))
299 elif f2 in m2: # rename case 1, A/A,B/A
299 elif f2 in m2: # rename case 1, A/A,B/A
300 act("remote copied to " + f, "m",
300 act("remote copied to " + f, "m",
301 f2, f, f, fmerge(f2, f, f2), False)
301 f2, f, f, fmerge(f2, f, f2), False)
302 else: # case 3,20 A/B/A
302 else: # case 3,20 A/B/A
303 act("remote moved to " + f, "m",
303 act("remote moved to " + f, "m",
304 f2, f, f, fmerge(f2, f, f2), True)
304 f2, f, f, fmerge(f2, f, f2), True)
305 elif f in ma:
305 elif f in ma:
306 if overwrite or backwards:
306 if overwrite or backwards:
307 act("recreating", "g", f, m2.execf(f))
307 act("recreating", "g", f, m2.execf(f))
308 elif n != ma[f]:
308 elif n != ma[f]:
309 if repo.ui.prompt(
309 if repo.ui.prompt(
310 (_("remote changed %s which local deleted\n") % f) +
310 (_("remote changed %s which local deleted\n") % f) +
311 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
311 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
312 act("prompt recreating", "g", f, m2.execf(f))
312 act("prompt recreating", "g", f, m2.execf(f))
313 else:
313 else:
314 act("remote created", "g", f, m2.execf(f))
314 act("remote created", "g", f, m2.execf(f))
315
315
316 return action
316 return action
317
317
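A worked example of the executable-flag rule used by fmerge() above, ((a^b) | (a^c)) ^ a: the result is the ancestor's flag unless one side changed it, in which case the changed value wins (if both sides changed it, they can only have flipped it the same way). The helper name below is illustrative only:

    def merged_flag(a, b, c):            # ancestor, local, remote exec bits
        return ((a ^ b) | (a ^ c)) ^ a

    print merged_flag(False, True, False)   # local turned the bit on  -> True
    print merged_flag(True, True, False)    # remote turned it off     -> False
    print merged_flag(False, False, False)  # nobody touched it        -> False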
318 def applyupdates(repo, action, wctx, mctx):
318 def applyupdates(repo, action, wctx, mctx):
319 "apply the merge action list to the working directory"
319 "apply the merge action list to the working directory"
320
320
321 updated, merged, removed, unresolved = 0, 0, 0, 0
321 updated, merged, removed, unresolved = 0, 0, 0, 0
322 action.sort()
322 action.sort()
323 for a in action:
323 for a in action:
324 f, m = a[:2]
324 f, m = a[:2]
325 if f and f[0] == "/":
325 if f and f[0] == "/":
326 continue
326 continue
327 if m == "r": # remove
327 if m == "r": # remove
328 repo.ui.note(_("removing %s\n") % f)
328 repo.ui.note(_("removing %s\n") % f)
329 util.audit_path(f)
329 util.audit_path(f)
330 try:
330 try:
331 util.unlink(repo.wjoin(f))
331 util.unlink(repo.wjoin(f))
332 except OSError, inst:
332 except OSError, inst:
333 if inst.errno != errno.ENOENT:
333 if inst.errno != errno.ENOENT:
334 repo.ui.warn(_("update failed to remove %s: %s!\n") %
334 repo.ui.warn(_("update failed to remove %s: %s!\n") %
335 (f, inst.strerror))
335 (f, inst.strerror))
336 removed += 1
336 removed += 1
337 elif m == "m": # merge
337 elif m == "m": # merge
338 f2, fd, flag, move = a[2:]
338 f2, fd, flag, move = a[2:]
339 r = filemerge(repo, f, f2, wctx, mctx)
339 r = filemerge(repo, f, f2, wctx, mctx)
340 if r > 0:
340 if r > 0:
341 unresolved += 1
341 unresolved += 1
342 else:
342 else:
343 if r is None:
343 if r is None:
344 updated += 1
344 updated += 1
345 else:
345 else:
346 merged += 1
346 merged += 1
347 if f != fd:
347 if f != fd:
348 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
348 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
349 repo.wwrite(fd, repo.wread(f))
349 repo.wwrite(fd, repo.wread(f))
350 if move:
350 if move:
351 repo.ui.debug(_("removing %s\n") % f)
351 repo.ui.debug(_("removing %s\n") % f)
352 os.unlink(repo.wjoin(f))
352 os.unlink(repo.wjoin(f))
353 util.set_exec(repo.wjoin(fd), flag)
353 util.set_exec(repo.wjoin(fd), flag)
354 elif m == "g": # get
354 elif m == "g": # get
355 flag = a[2]
355 flag = a[2]
356 repo.ui.note(_("getting %s\n") % f)
356 repo.ui.note(_("getting %s\n") % f)
357 t = mctx.filectx(f).data()
357 t = mctx.filectx(f).data()
358 repo.wwrite(f, t)
358 repo.wwrite(f, t)
359 util.set_exec(repo.wjoin(f), flag)
359 util.set_exec(repo.wjoin(f), flag)
360 updated += 1
360 updated += 1
361 elif m == "d": # directory rename
361 elif m == "d": # directory rename
362 f2, fd, flag = a[2:]
362 f2, fd, flag = a[2:]
363 if f:
363 if f:
364 repo.ui.note(_("moving %s to %s\n") % (f, fd))
364 repo.ui.note(_("moving %s to %s\n") % (f, fd))
365 t = wctx.filectx(f).data()
365 t = wctx.filectx(f).data()
366 repo.wwrite(fd, t)
366 repo.wwrite(fd, t)
367 util.set_exec(repo.wjoin(fd), flag)
367 util.set_exec(repo.wjoin(fd), flag)
368 util.unlink(repo.wjoin(f))
368 util.unlink(repo.wjoin(f))
369 if f2:
369 if f2:
370 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
370 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
371 t = mctx.filectx(f2).data()
371 t = mctx.filectx(f2).data()
372 repo.wwrite(fd, t)
372 repo.wwrite(fd, t)
373 util.set_exec(repo.wjoin(fd), flag)
373 util.set_exec(repo.wjoin(fd), flag)
374 updated += 1
374 updated += 1
375 elif m == "e": # exec
375 elif m == "e": # exec
376 flag = a[2]
376 flag = a[2]
377 util.set_exec(repo.wjoin(f), flag)
377 util.set_exec(repo.wjoin(f), flag)
378
378
379 return updated, merged, removed, unresolved
379 return updated, merged, removed, unresolved
380
380
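For reference, a hedged illustration of the action tuples that applyupdates() above and recordupdates() below both consume; the file names and flags are invented, but the tuple shapes follow the unpacking in the two functions:

    action = [
        ("a.txt", "g", False),                          # get from the other parent
        ("b.txt", "m", "b.txt", "b.txt", True, False),  # merge: f2, fd, exec flag, move
        ("c.txt", "r"),                                 # remove
        ("old/d.txt", "d", None, "new/d.txt", False),   # directory rename: f2, fd, flag
        ("e.txt", "e", True),                           # update the exec bit only
    ]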
381 def recordupdates(repo, action, branchmerge):
381 def recordupdates(repo, action, branchmerge):
382 "record merge actions to the dirstate"
382 "record merge actions to the dirstate"
383
383
384 for a in action:
384 for a in action:
385 f, m = a[:2]
385 f, m = a[:2]
386 if m == "r": # remove
386 if m == "r": # remove
387 if branchmerge:
387 if branchmerge:
388 repo.dirstate.update([f], 'r')
388 repo.dirstate.update([f], 'r')
389 else:
389 else:
390 repo.dirstate.forget([f])
390 repo.dirstate.forget([f])
391 elif m == "f": # forget
391 elif m == "f": # forget
392 repo.dirstate.forget([f])
392 repo.dirstate.forget([f])
393 elif m == "g": # get
393 elif m == "g": # get
394 if branchmerge:
394 if branchmerge:
395 repo.dirstate.update([f], 'n', st_mtime=-1)
395 repo.dirstate.update([f], 'n', st_mtime=-1)
396 else:
396 else:
397 repo.dirstate.update([f], 'n')
397 repo.dirstate.update([f], 'n')
398 elif m == "m": # merge
398 elif m == "m": # merge
399 f2, fd, flag, move = a[2:]
399 f2, fd, flag, move = a[2:]
400 if branchmerge:
400 if branchmerge:
401 # We've done a branch merge, mark this file as merged
401 # We've done a branch merge, mark this file as merged
402 # so that we properly record the merger later
402 # so that we properly record the merger later
403 repo.dirstate.update([fd], 'm')
403 repo.dirstate.update([fd], 'm')
404 if f != f2: # copy/rename
404 if f != f2: # copy/rename
405 if move:
405 if move:
406 repo.dirstate.update([f], 'r')
406 repo.dirstate.update([f], 'r')
407 if f != fd:
407 if f != fd:
408 repo.dirstate.copy(f, fd)
408 repo.dirstate.copy(f, fd)
409 else:
409 else:
410 repo.dirstate.copy(f2, fd)
410 repo.dirstate.copy(f2, fd)
411 else:
411 else:
412 # We've update-merged a locally modified file, so
412 # We've update-merged a locally modified file, so
413 # we set the dirstate to emulate a normal checkout
413 # we set the dirstate to emulate a normal checkout
414 # of that file some time in the past. Thus our
414 # of that file some time in the past. Thus our
415 # merge will appear as a normal local file
415 # merge will appear as a normal local file
416 # modification.
416 # modification.
417 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
417 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
418 if move:
418 if move:
419 repo.dirstate.forget([f])
419 repo.dirstate.forget([f])
420 elif m == "d": # directory rename
420 elif m == "d": # directory rename
421 f2, fd, flag = a[2:]
421 f2, fd, flag = a[2:]
422 if branchmerge:
422 if branchmerge:
423 repo.dirstate.update([fd], 'a')
423 repo.dirstate.update([fd], 'a')
424 if f:
424 if f:
425 repo.dirstate.update([f], 'r')
425 repo.dirstate.update([f], 'r')
426 repo.dirstate.copy(f, fd)
426 repo.dirstate.copy(f, fd)
427 if f2:
427 if f2:
428 repo.dirstate.copy(f2, fd)
428 repo.dirstate.copy(f2, fd)
429 else:
429 else:
430 repo.dirstate.update([fd], 'n')
430 repo.dirstate.update([fd], 'n')
431 if f:
431 if f:
432 repo.dirstate.forget([f])
432 repo.dirstate.forget([f])
433
433
434 def update(repo, node, branchmerge, force, partial, wlock):
434 def update(repo, node, branchmerge, force, partial, wlock):
435 """
435 """
436 Perform a merge between the working directory and the given node
436 Perform a merge between the working directory and the given node
437
437
438 branchmerge = whether to merge between branches
438 branchmerge = whether to merge between branches
439 force = whether to force branch merging or file overwriting
439 force = whether to force branch merging or file overwriting
440 partial = a function to filter file lists (dirstate not updated)
440 partial = a function to filter file lists (dirstate not updated)
441 wlock = working dir lock, if already held
441 wlock = working dir lock, if already held
442 """
442 """
443
443
444 if not wlock:
444 if not wlock:
445 wlock = repo.wlock()
445 wlock = repo.wlock()
446
446
447 overwrite = force and not branchmerge
447 overwrite = force and not branchmerge
448 forcemerge = force and branchmerge
448 forcemerge = force and branchmerge
449 wc = repo.workingctx()
449 wc = repo.workingctx()
450 pl = wc.parents()
450 pl = wc.parents()
451 p1, p2 = pl[0], repo.changectx(node)
451 p1, p2 = pl[0], repo.changectx(node)
452 pa = p1.ancestor(p2)
452 pa = p1.ancestor(p2)
453 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
453 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
454
454
455 ### check phase
455 ### check phase
456 if not overwrite and len(pl) > 1:
456 if not overwrite and len(pl) > 1:
457 raise util.Abort(_("outstanding uncommitted merges"))
457 raise util.Abort(_("outstanding uncommitted merges"))
458 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
458 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
459 if branchmerge:
459 if branchmerge:
460 raise util.Abort(_("there is nothing to merge, just use "
460 raise util.Abort(_("there is nothing to merge, just use "
461 "'hg update' or look at 'hg heads'"))
461 "'hg update' or look at 'hg heads'"))
462 elif not (overwrite or branchmerge):
462 elif not (overwrite or branchmerge):
463 raise util.Abort(_("update spans branches, use 'hg merge' "
463 raise util.Abort(_("update spans branches, use 'hg merge' "
464 "or 'hg update -C' to lose changes"))
464 "or 'hg update -C' to lose changes"))
465 if branchmerge and not forcemerge:
465 if branchmerge and not forcemerge:
466 if wc.files():
466 if wc.files():
467 raise util.Abort(_("outstanding uncommitted changes"))
467 raise util.Abort(_("outstanding uncommitted changes"))
468
468
469 ### calculate phase
469 ### calculate phase
470 action = []
470 action = []
471 if not force:
471 if not force:
472 checkunknown(wc, p2)
472 checkunknown(wc, p2)
473 if not util.checkfolding(repo.path):
473 if not util.checkfolding(repo.path):
474 checkcollision(p2)
474 checkcollision(p2)
475 if not branchmerge:
475 if not branchmerge:
476 action += forgetremoved(wc, p2)
476 action += forgetremoved(wc, p2)
477 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
477 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
478
478
479 ### apply phase
479 ### apply phase
480 if not branchmerge: # just jump to the new rev
480 if not branchmerge: # just jump to the new rev
481 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
481 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
482 if not partial:
482 if not partial:
483 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
483 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
484
484
485 stats = applyupdates(repo, action, wc, p2)
485 stats = applyupdates(repo, action, wc, p2)
486
486
487 if not partial:
487 if not partial:
488 recordupdates(repo, action, branchmerge)
488 recordupdates(repo, action, branchmerge)
489 repo.dirstate.setparents(fp1, fp2)
489 repo.dirstate.setparents(fp1, fp2)
490 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
490 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
491 if not branchmerge:
491 if not branchmerge:
492 b = util.tolocal(p2.branch())
493 repo.opener("branch", "w").write(p2.branch() + "\n")
492 repo.opener("branch", "w").write(p2.branch() + "\n")
494
493
495 return stats
494 return stats
496
495
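The change just above is the point of this changeset: per its summary, .hg/branch now stores the branch name in Mercurial's internal UTF-8 form, so the util.tolocal() call that used to happen here is dropped and conversion to the local encoding is only needed where the name is displayed. A small sketch, with p2 being whatever context update() was given:

    # UTF-8 bytes go to disk; local-encoding conversion is for output only
    repo.opener("branch", "w").write(p2.branch() + "\n")
    label = util.tolocal(p2.branch())   # e.g. for printing to the user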
@@ -1,58 +1,61 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init t
3 hg init t
4 cd t
4 cd t
5
5
6 # we need a repo with some legacy latin-1 changesets
6 # we need a repo with some legacy latin-1 changesets
7 hg unbundle $TESTDIR/legacy-encoding.hg
7 hg unbundle $TESTDIR/legacy-encoding.hg
8 hg co
8 hg co
9
9
10 python << EOF
10 python << EOF
11 f = file('latin-1', 'w'); f.write("latin-1 e' encoded: \xe9"); f.close()
11 f = file('latin-1', 'w'); f.write("latin-1 e' encoded: \xe9"); f.close()
12 f = file('utf-8', 'w'); f.write("utf-8 e' encoded: \xc3\xa9"); f.close()
12 f = file('utf-8', 'w'); f.write("utf-8 e' encoded: \xc3\xa9"); f.close()
13 f = file('latin-1-tag', 'w'); f.write("\xe9"); f.close()
13 f = file('latin-1-tag', 'w'); f.write("\xe9"); f.close()
14 EOF
14 EOF
15
15
16 echo % should fail with encoding error
16 echo % should fail with encoding error
17 echo "plain old ascii" > a
17 echo "plain old ascii" > a
18 hg st
18 hg st
19 HGENCODING=ascii hg ci -l latin-1 -d "1000000 0"
19 HGENCODING=ascii hg ci -l latin-1 -d "1000000 0"
20
20
21 echo % these should work
21 echo % these should work
22 echo "latin-1" > a
22 echo "latin-1" > a
23 HGENCODING=latin-1 hg ci -l latin-1 -d "1000000 0"
23 HGENCODING=latin-1 hg ci -l latin-1 -d "1000000 0"
24 echo "utf-8" > a
24 echo "utf-8" > a
25 HGENCODING=utf-8 hg ci -l utf-8 -d "1000000 0"
25 HGENCODING=utf-8 hg ci -l utf-8 -d "1000000 0"
26
26
27 HGENCODING=latin-1 hg tag -d "1000000 0" `cat latin-1-tag`
27 HGENCODING=latin-1 hg tag -d "1000000 0" `cat latin-1-tag`
28 cp latin-1-tag .hg/branch
28 HGENCODING=latin-1 hg branch `cat latin-1-tag`
29 HGENCODING=latin-1 hg ci -d "1000000 0" -m 'latin1 branch'
29 HGENCODING=latin-1 hg ci -d "1000000 0" -m 'latin1 branch'
30 rm .hg/branch
30 rm .hg/branch
31
31
32 echo % ascii
32 echo % ascii
33 hg --encoding ascii log
33 hg --encoding ascii log
34 echo % latin-1
34 echo % latin-1
35 hg --encoding latin-1 log
35 hg --encoding latin-1 log
36 echo % utf-8
36 echo % utf-8
37 hg --encoding utf-8 log
37 hg --encoding utf-8 log
38 echo % ascii
38 echo % ascii
39 HGENCODING=ascii hg tags
39 HGENCODING=ascii hg tags
40 echo % latin-1
40 echo % latin-1
41 HGENCODING=latin-1 hg tags
41 HGENCODING=latin-1 hg tags
42 echo % utf-8
42 echo % utf-8
43 HGENCODING=utf-8 hg tags
43 HGENCODING=utf-8 hg tags
44 echo % ascii
44 echo % ascii
45 HGENCODING=ascii hg branches
45 HGENCODING=ascii hg branches
46 echo % latin-1
46 echo % latin-1
47 HGENCODING=latin-1 hg branches
47 HGENCODING=latin-1 hg branches
48 echo % utf-8
48 echo % utf-8
49 HGENCODING=utf-8 hg branches
49 HGENCODING=utf-8 hg branches
50
50
51 echo '[ui]' >> .hg/hgrc
51 echo '[ui]' >> .hg/hgrc
52 echo 'fallbackencoding = koi8-r' >> .hg/hgrc
52 echo 'fallbackencoding = koi8-r' >> .hg/hgrc
53 echo % utf-8
53 echo % utf-8
54 HGENCODING=utf-8 hg log
54 HGENCODING=utf-8 hg log
55
55
56 HGENCODING=dolphin hg log
56 HGENCODING=dolphin hg log
57
57
58 HGENCODING=ascii hg branch `cat latin-1-tag`
59 cp latin-1-tag .hg/branch
60 HGENCODING=latin-1 hg ci -d "1000000 0" -m 'should fail'
58 exit 0
61 exit 0
@@ -1,167 +1,169 b''
1 adding changesets
1 adding changesets
2 adding manifests
2 adding manifests
3 adding file changes
3 adding file changes
4 added 2 changesets with 2 changes to 1 files
4 added 2 changesets with 2 changes to 1 files
5 (run 'hg update' to get a working copy)
5 (run 'hg update' to get a working copy)
6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 % should fail with encoding error
7 % should fail with encoding error
8 M a
8 M a
9 ? latin-1
9 ? latin-1
10 ? latin-1-tag
10 ? latin-1-tag
11 ? utf-8
11 ? utf-8
12 abort: decoding near ' encoded: �': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
12 abort: decoding near ' encoded: �': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
13 transaction abort!
13 transaction abort!
14 rollback completed
14 rollback completed
15 % these should work
15 % these should work
16 % ascii
16 % ascii
17 changeset: 5:db5520b4645f
17 changeset: 5:db5520b4645f
18 branch: ?
18 branch: ?
19 tag: tip
19 tag: tip
20 user: test
20 user: test
21 date: Mon Jan 12 13:46:40 1970 +0000
21 date: Mon Jan 12 13:46:40 1970 +0000
22 summary: latin1 branch
22 summary: latin1 branch
23
23
24 changeset: 4:9cff3c980b58
24 changeset: 4:9cff3c980b58
25 user: test
25 user: test
26 date: Mon Jan 12 13:46:40 1970 +0000
26 date: Mon Jan 12 13:46:40 1970 +0000
27 summary: Added tag ? for changeset 770b9b11621d
27 summary: Added tag ? for changeset 770b9b11621d
28
28
29 changeset: 3:770b9b11621d
29 changeset: 3:770b9b11621d
30 tag: ?
30 tag: ?
31 user: test
31 user: test
32 date: Mon Jan 12 13:46:40 1970 +0000
32 date: Mon Jan 12 13:46:40 1970 +0000
33 summary: utf-8 e' encoded: ?
33 summary: utf-8 e' encoded: ?
34
34
35 changeset: 2:0572af48b948
35 changeset: 2:0572af48b948
36 user: test
36 user: test
37 date: Mon Jan 12 13:46:40 1970 +0000
37 date: Mon Jan 12 13:46:40 1970 +0000
38 summary: latin-1 e' encoded: ?
38 summary: latin-1 e' encoded: ?
39
39
40 changeset: 1:0e5b7e3f9c4a
40 changeset: 1:0e5b7e3f9c4a
41 user: test
41 user: test
42 date: Mon Jan 12 13:46:40 1970 +0000
42 date: Mon Jan 12 13:46:40 1970 +0000
43 summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c'
43 summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c'
44
44
45 changeset: 0:1e78a93102a3
45 changeset: 0:1e78a93102a3
46 user: test
46 user: test
47 date: Mon Jan 12 13:46:40 1970 +0000
47 date: Mon Jan 12 13:46:40 1970 +0000
48 summary: latin-1 e': ? = u'\xe9'
48 summary: latin-1 e': ? = u'\xe9'
49
49
50 % latin-1
50 % latin-1
51 changeset: 5:db5520b4645f
51 changeset: 5:db5520b4645f
52 branch: �
52 branch: �
53 tag: tip
53 tag: tip
54 user: test
54 user: test
55 date: Mon Jan 12 13:46:40 1970 +0000
55 date: Mon Jan 12 13:46:40 1970 +0000
56 summary: latin1 branch
56 summary: latin1 branch
57
57
58 changeset: 4:9cff3c980b58
58 changeset: 4:9cff3c980b58
59 user: test
59 user: test
60 date: Mon Jan 12 13:46:40 1970 +0000
60 date: Mon Jan 12 13:46:40 1970 +0000
61 summary: Added tag � for changeset 770b9b11621d
61 summary: Added tag � for changeset 770b9b11621d
62
62
63 changeset: 3:770b9b11621d
63 changeset: 3:770b9b11621d
64 tag: �
64 tag: �
65 user: test
65 user: test
66 date: Mon Jan 12 13:46:40 1970 +0000
66 date: Mon Jan 12 13:46:40 1970 +0000
67 summary: utf-8 e' encoded: �
67 summary: utf-8 e' encoded: �
68
68
69 changeset: 2:0572af48b948
69 changeset: 2:0572af48b948
70 user: test
70 user: test
71 date: Mon Jan 12 13:46:40 1970 +0000
71 date: Mon Jan 12 13:46:40 1970 +0000
72 summary: latin-1 e' encoded: �
72 summary: latin-1 e' encoded: �
73
73
74 changeset: 1:0e5b7e3f9c4a
74 changeset: 1:0e5b7e3f9c4a
75 user: test
75 user: test
76 date: Mon Jan 12 13:46:40 1970 +0000
76 date: Mon Jan 12 13:46:40 1970 +0000
77 summary: koi8-r: ����� = u'\u0440\u0442\u0443\u0442\u044c'
77 summary: koi8-r: ����� = u'\u0440\u0442\u0443\u0442\u044c'
78
78
79 changeset: 0:1e78a93102a3
79 changeset: 0:1e78a93102a3
80 user: test
80 user: test
81 date: Mon Jan 12 13:46:40 1970 +0000
81 date: Mon Jan 12 13:46:40 1970 +0000
82 summary: latin-1 e': � = u'\xe9'
82 summary: latin-1 e': � = u'\xe9'
83
83
84 % utf-8
84 % utf-8
85 changeset: 5:db5520b4645f
85 changeset: 5:db5520b4645f
86 branch: é
86 branch: é
87 tag: tip
87 tag: tip
88 user: test
88 user: test
89 date: Mon Jan 12 13:46:40 1970 +0000
89 date: Mon Jan 12 13:46:40 1970 +0000
90 summary: latin1 branch
90 summary: latin1 branch
91
91
92 changeset: 4:9cff3c980b58
92 changeset: 4:9cff3c980b58
93 user: test
93 user: test
94 date: Mon Jan 12 13:46:40 1970 +0000
94 date: Mon Jan 12 13:46:40 1970 +0000
95 summary: Added tag é for changeset 770b9b11621d
95 summary: Added tag é for changeset 770b9b11621d
96
96
97 changeset: 3:770b9b11621d
97 changeset: 3:770b9b11621d
98 tag: é
98 tag: é
99 user: test
99 user: test
100 date: Mon Jan 12 13:46:40 1970 +0000
100 date: Mon Jan 12 13:46:40 1970 +0000
101 summary: utf-8 e' encoded: é
101 summary: utf-8 e' encoded: é
102
102
103 changeset: 2:0572af48b948
103 changeset: 2:0572af48b948
104 user: test
104 user: test
105 date: Mon Jan 12 13:46:40 1970 +0000
105 date: Mon Jan 12 13:46:40 1970 +0000
106 summary: latin-1 e' encoded: é
106 summary: latin-1 e' encoded: é
107
107
108 changeset: 1:0e5b7e3f9c4a
108 changeset: 1:0e5b7e3f9c4a
109 user: test
109 user: test
110 date: Mon Jan 12 13:46:40 1970 +0000
110 date: Mon Jan 12 13:46:40 1970 +0000
111 summary: koi8-r: ÒÔÕÔØ = u'\u0440\u0442\u0443\u0442\u044c'
111 summary: koi8-r: ÒÔÕÔØ = u'\u0440\u0442\u0443\u0442\u044c'
112
112
113 changeset: 0:1e78a93102a3
113 changeset: 0:1e78a93102a3
114 user: test
114 user: test
115 date: Mon Jan 12 13:46:40 1970 +0000
115 date: Mon Jan 12 13:46:40 1970 +0000
116 summary: latin-1 e': é = u'\xe9'
116 summary: latin-1 e': é = u'\xe9'
117
117
118 % ascii
118 % ascii
119 tip 5:db5520b4645f
119 tip 5:db5520b4645f
120 ? 3:770b9b11621d
120 ? 3:770b9b11621d
121 % latin-1
121 % latin-1
122 tip 5:db5520b4645f
122 tip 5:db5520b4645f
123 � 3:770b9b11621d
123 � 3:770b9b11621d
124 % utf-8
124 % utf-8
125 tip 5:db5520b4645f
125 tip 5:db5520b4645f
126 é 3:770b9b11621d
126 é 3:770b9b11621d
127 % ascii
127 % ascii
128 ? 5:db5520b4645f
128 ? 5:db5520b4645f
129 % latin-1
129 % latin-1
130 � 5:db5520b4645f
130 � 5:db5520b4645f
131 % utf-8
131 % utf-8
132 é 5:db5520b4645f
132 é 5:db5520b4645f
133 % utf-8
133 % utf-8
134 changeset: 5:db5520b4645f
134 changeset: 5:db5520b4645f
135 branch: é
135 branch: é
136 tag: tip
136 tag: tip
137 user: test
137 user: test
138 date: Mon Jan 12 13:46:40 1970 +0000
138 date: Mon Jan 12 13:46:40 1970 +0000
139 summary: latin1 branch
139 summary: latin1 branch
140
140
141 changeset: 4:9cff3c980b58
141 changeset: 4:9cff3c980b58
142 user: test
142 user: test
143 date: Mon Jan 12 13:46:40 1970 +0000
143 date: Mon Jan 12 13:46:40 1970 +0000
144 summary: Added tag é for changeset 770b9b11621d
144 summary: Added tag é for changeset 770b9b11621d
145
145
146 changeset: 3:770b9b11621d
146 changeset: 3:770b9b11621d
147 tag: é
147 tag: é
148 user: test
148 user: test
149 date: Mon Jan 12 13:46:40 1970 +0000
149 date: Mon Jan 12 13:46:40 1970 +0000
150 summary: utf-8 e' encoded: é
150 summary: utf-8 e' encoded: é
151
151
152 changeset: 2:0572af48b948
152 changeset: 2:0572af48b948
153 user: test
153 user: test
154 date: Mon Jan 12 13:46:40 1970 +0000
154 date: Mon Jan 12 13:46:40 1970 +0000
155 summary: latin-1 e' encoded: é
155 summary: latin-1 e' encoded: é
156
156
157 changeset: 1:0e5b7e3f9c4a
157 changeset: 1:0e5b7e3f9c4a
158 user: test
158 user: test
159 date: Mon Jan 12 13:46:40 1970 +0000
159 date: Mon Jan 12 13:46:40 1970 +0000
160 summary: koi8-r: ртуть = u'\u0440\u0442\u0443\u0442\u044c'
160 summary: koi8-r: ртуть = u'\u0440\u0442\u0443\u0442\u044c'
161
161
162 changeset: 0:1e78a93102a3
162 changeset: 0:1e78a93102a3
163 user: test
163 user: test
164 date: Mon Jan 12 13:46:40 1970 +0000
164 date: Mon Jan 12 13:46:40 1970 +0000
165 summary: latin-1 e': И = u'\xe9'
165 summary: latin-1 e': И = u'\xe9'
166
166
167 abort: unknown encoding: dolphin, please check your locale settings
167 abort: unknown encoding: dolphin, please check your locale settings
168 abort: decoding near '�': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)!
169 abort: branch name not in UTF-8!