Handle transcoding of tags
Matt Mackall - r3772:73860ffb default
@@ -1,3058 +1,3060 @@
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal imp urllib pdb shlex")
demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
demandload(globals(), "difflib patch time")
demandload(globals(), "traceback errno version atexit")
demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """get the log message according to the -m and -l options"""
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

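# For example, the -s option documented in addremove above takes a percentage,
# so "hg addremove -s 75" records a removed/added pair as a rename when the
# two files are at least 75% similar.
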
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

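# For example, combining the annotate columns selected above (written here with
# the long option spellings that correspond to the opts keys the code checks),
# "hg annotate --user --number somefile" prefixes every line of somefile with
# the short user name and the revision number that introduced it.
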
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

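# For example, using the -t, -r and -p options described in the archive
# docstring, "hg archive -t tgz -r REV -p project-REV project-REV.tgz" writes
# a gzipped tarball of revision REV with each member placed under the
# project-REV/ prefix.
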
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

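# For example, "hg backout --merge REV" commits a changeset reversing REV and,
# as the backout docstring notes, immediately merges the resulting new head
# with the previous working directory parent instead of leaving that merge to
# be done by hand.
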
def branch(ui, repo, label=None):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.
    """

    if label is not None:
        repo.opener("branch", "w").write(label)
    else:
        b = repo.workingctx().branch()
        if b:
            ui.write("%s\n" % b)

def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
    l.sort()
    for r, n, t in l:
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")

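# For example, "hg bundle changes.hg ../other" writes the changesets missing
# from ../other into changes.hg, while "hg bundle --base REV all.hg" assumes
# the eventual receiver already has REV and its ancestors, so no destination
# repository has to be reachable when the bundle is created.
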
def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then working dir parent is used, or tip
    if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())

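# For example, using the format characters listed in the cat docstring together
# with the output option (spelled --output on the command line, matching
# opts['output'] above), "hg cat --output %s.orig -r REV somefile" writes the
# contents of somefile as of REV into somefile.orig.
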
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])

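# For example, "hg clone -r REV source source-partial" copies only REV and its
# ancestors (implying --pull, as the clone docstring notes), while a plain
# "hg clone source source-copy" on the same filesystem hardlinks the repository
# data for speed.
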
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        for f in fns:
            if f not in modified + added + removed:
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % f)
                else:
                    raise util.Abort(_("file %s not found!") % f)
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))

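# For example, "hg commit -m 'fix parsing bug' foo.py" commits only foo.py with
# the given message, while "hg commit" with neither -m nor -l starts the
# configured editor, as described for logmessage() above.
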
def docopy(ui, repo, pats, opts, wlock):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    # abs: hgsep
    # rel: ossep
    # return: hgsep
    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    # origsrc: hgsep
    # abssrc: hgsep
    # relsrc: ossep
    # target: ossep
    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, util.localpath(abssrc),
                     util.localpath(prevsrc)))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
            try:
                restore = repo.dirstate.state(abstarget) == 'r'
                if restore and not opts.get('dry_run'):
                    repo.undelete([abstarget], wlock)
                try:
                    if not opts.get('dry_run'):
                        util.copyfile(relsrc, reltarget)
                    restore = False
                finally:
                    if restore:
                        repo.remove([abstarget], wlock)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    errors += 1
                    return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = list(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied

def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs

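# For example, "hg copy src.py dst.py" copies the file and records the copy for
# the next commit, while "hg copy --after src.py dst.py" only records a copy
# that was already made in the working directory.
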
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(ui, cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))

def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.read(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))

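# For example, "hg showconfig ui.username" prints just that value, while
# "hg showconfig ui" prints every item in the [ui] section, matching the
# section versus section.name handling in the loop above.
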
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        m = ctx.filectx(abs).renamed()
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))

845 def export(ui, repo, *changesets, **opts):
845 def export(ui, repo, *changesets, **opts):
846 """dump the header and diffs for one or more changesets
846 """dump the header and diffs for one or more changesets
847
847
848 Print the changeset header and diffs for one or more revisions.
848 Print the changeset header and diffs for one or more revisions.
849
849
850 The information shown in the changeset header is: author,
850 The information shown in the changeset header is: author,
851 changeset hash, parent and commit comment.
851 changeset hash, parent and commit comment.
852
852
853 Output may be to a file, in which case the name of the file is
853 Output may be to a file, in which case the name of the file is
854 given using a format string. The formatting rules are as follows:
854 given using a format string. The formatting rules are as follows:
855
855
856 %% literal "%" character
856 %% literal "%" character
857 %H changeset hash (40 bytes of hexadecimal)
857 %H changeset hash (40 bytes of hexadecimal)
858 %N number of patches being generated
858 %N number of patches being generated
859 %R changeset revision number
859 %R changeset revision number
860 %b basename of the exporting repository
860 %b basename of the exporting repository
861 %h short-form changeset hash (12 bytes of hexadecimal)
861 %h short-form changeset hash (12 bytes of hexadecimal)
862 %n zero-padded sequence number, starting at 1
862 %n zero-padded sequence number, starting at 1
863 %r zero-padded changeset revision number
863 %r zero-padded changeset revision number
864
864
865 Without the -a option, export will avoid generating diffs of files
865 Without the -a option, export will avoid generating diffs of files
866 it detects as binary. With -a, export will generate a diff anyway,
866 it detects as binary. With -a, export will generate a diff anyway,
867 probably with undesirable results.
867 probably with undesirable results.
868
868
869 With the --switch-parent option, the diff will be against the second
869 With the --switch-parent option, the diff will be against the second
870 parent. This can be useful for reviewing a merge.
870 parent. This can be useful for reviewing a merge.
871 """
871 """
872 if not changesets:
872 if not changesets:
873 raise util.Abort(_("export requires at least one changeset"))
873 raise util.Abort(_("export requires at least one changeset"))
874 revs = cmdutil.revrange(repo, changesets)
874 revs = cmdutil.revrange(repo, changesets)
875 if len(revs) > 1:
875 if len(revs) > 1:
876 ui.note(_('exporting patches:\n'))
876 ui.note(_('exporting patches:\n'))
877 else:
877 else:
878 ui.note(_('exporting patch:\n'))
878 ui.note(_('exporting patch:\n'))
879 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
879 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
880 switch_parent=opts['switch_parent'],
880 switch_parent=opts['switch_parent'],
881 opts=patch.diffopts(ui, opts))
881 opts=patch.diffopts(ui, opts))
882
882
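# Illustrative sketch only: one way the output-filename format keys listed in
# the export docstring could be expanded. The helper name and the padding rule
# (padding to the width of the patch count) are assumptions, not Mercurial's
# actual implementation.
def _example_exportname(template, node, rev, seqno, total, reponame):
    expansions = {
        '%': '%',                                 # literal "%"
        'H': node,                                # full changeset hash
        'h': node[:12],                           # short-form changeset hash
        'R': str(rev),                            # changeset revision number
        'r': str(rev).zfill(len(str(total))),     # zero-padded revision number
        'n': str(seqno).zfill(len(str(total))),   # zero-padded sequence number
        'N': str(total),                          # number of patches generated
        'b': reponame,                            # basename of the repository
    }
    out = []
    i = 0
    while i < len(template):
        if template[i] == '%' and i + 1 < len(template):
            out.append(expansions.get(template[i + 1], '%' + template[i + 1]))
            i += 2
        else:
            out.append(template[i])
            i += 1
    return ''.join(out)

# _example_exportname('%b-%r.patch', 'deadbeef' * 5, 7, 1, 12, 'myrepo')
# returns 'myrepo-07.patch'.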
883 def grep(ui, repo, pattern, *pats, **opts):
883 def grep(ui, repo, pattern, *pats, **opts):
884 """search for a pattern in specified files and revisions
884 """search for a pattern in specified files and revisions
885
885
886 Search revisions of files for a regular expression.
886 Search revisions of files for a regular expression.
887
887
888 This command behaves differently than Unix grep. It only accepts
888 This command behaves differently than Unix grep. It only accepts
889 Python/Perl regexps. It searches repository history, not the
889 Python/Perl regexps. It searches repository history, not the
890 working directory. It always prints the revision number in which
890 working directory. It always prints the revision number in which
891 a match appears.
891 a match appears.
892
892
893 By default, grep only prints output for the first revision of a
893 By default, grep only prints output for the first revision of a
894 file in which it finds a match. To get it to print every revision
894 file in which it finds a match. To get it to print every revision
895 that contains a change in match status ("-" for a match that
895 that contains a change in match status ("-" for a match that
896 becomes a non-match, or "+" for a non-match that becomes a match),
896 becomes a non-match, or "+" for a non-match that becomes a match),
897 use the --all flag.
897 use the --all flag.
898 """
898 """
899 reflags = 0
899 reflags = 0
900 if opts['ignore_case']:
900 if opts['ignore_case']:
901 reflags |= re.I
901 reflags |= re.I
902 regexp = re.compile(pattern, reflags)
902 regexp = re.compile(pattern, reflags)
903 sep, eol = ':', '\n'
903 sep, eol = ':', '\n'
904 if opts['print0']:
904 if opts['print0']:
905 sep = eol = '\0'
905 sep = eol = '\0'
906
906
907 fcache = {}
907 fcache = {}
908 def getfile(fn):
908 def getfile(fn):
909 if fn not in fcache:
909 if fn not in fcache:
910 fcache[fn] = repo.file(fn)
910 fcache[fn] = repo.file(fn)
911 return fcache[fn]
911 return fcache[fn]
912
912
913 def matchlines(body):
913 def matchlines(body):
914 begin = 0
914 begin = 0
915 linenum = 0
915 linenum = 0
916 while True:
916 while True:
917 match = regexp.search(body, begin)
917 match = regexp.search(body, begin)
918 if not match:
918 if not match:
919 break
919 break
920 mstart, mend = match.span()
920 mstart, mend = match.span()
921 linenum += body.count('\n', begin, mstart) + 1
921 linenum += body.count('\n', begin, mstart) + 1
922 lstart = body.rfind('\n', begin, mstart) + 1 or begin
922 lstart = body.rfind('\n', begin, mstart) + 1 or begin
923 lend = body.find('\n', mend)
923 lend = body.find('\n', mend)
924 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
924 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
925 begin = lend + 1
925 begin = lend + 1
926
926
927 class linestate(object):
927 class linestate(object):
928 def __init__(self, line, linenum, colstart, colend):
928 def __init__(self, line, linenum, colstart, colend):
929 self.line = line
929 self.line = line
930 self.linenum = linenum
930 self.linenum = linenum
931 self.colstart = colstart
931 self.colstart = colstart
932 self.colend = colend
932 self.colend = colend
933
933
934 def __eq__(self, other):
934 def __eq__(self, other):
935 return self.line == other.line
935 return self.line == other.line
936
936
937 matches = {}
937 matches = {}
938 copies = {}
938 copies = {}
939 def grepbody(fn, rev, body):
939 def grepbody(fn, rev, body):
940 matches[rev].setdefault(fn, [])
940 matches[rev].setdefault(fn, [])
941 m = matches[rev][fn]
941 m = matches[rev][fn]
942 for lnum, cstart, cend, line in matchlines(body):
942 for lnum, cstart, cend, line in matchlines(body):
943 s = linestate(line, lnum, cstart, cend)
943 s = linestate(line, lnum, cstart, cend)
944 m.append(s)
944 m.append(s)
945
945
946 def difflinestates(a, b):
946 def difflinestates(a, b):
947 sm = difflib.SequenceMatcher(None, a, b)
947 sm = difflib.SequenceMatcher(None, a, b)
948 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
948 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
949 if tag == 'insert':
949 if tag == 'insert':
950 for i in xrange(blo, bhi):
950 for i in xrange(blo, bhi):
951 yield ('+', b[i])
951 yield ('+', b[i])
952 elif tag == 'delete':
952 elif tag == 'delete':
953 for i in xrange(alo, ahi):
953 for i in xrange(alo, ahi):
954 yield ('-', a[i])
954 yield ('-', a[i])
955 elif tag == 'replace':
955 elif tag == 'replace':
956 for i in xrange(alo, ahi):
956 for i in xrange(alo, ahi):
957 yield ('-', a[i])
957 yield ('-', a[i])
958 for i in xrange(blo, bhi):
958 for i in xrange(blo, bhi):
959 yield ('+', b[i])
959 yield ('+', b[i])
960
960
961 prev = {}
961 prev = {}
962 def display(fn, rev, states, prevstates):
962 def display(fn, rev, states, prevstates):
963 counts = {'-': 0, '+': 0}
963 counts = {'-': 0, '+': 0}
964 filerevmatches = {}
964 filerevmatches = {}
965 if incrementing or not opts['all']:
965 if incrementing or not opts['all']:
966 a, b, r = prevstates, states, rev
966 a, b, r = prevstates, states, rev
967 else:
967 else:
968 a, b, r = states, prevstates, prev.get(fn, -1)
968 a, b, r = states, prevstates, prev.get(fn, -1)
969 for change, l in difflinestates(a, b):
969 for change, l in difflinestates(a, b):
970 cols = [fn, str(r)]
970 cols = [fn, str(r)]
971 if opts['line_number']:
971 if opts['line_number']:
972 cols.append(str(l.linenum))
972 cols.append(str(l.linenum))
973 if opts['all']:
973 if opts['all']:
974 cols.append(change)
974 cols.append(change)
975 if opts['user']:
975 if opts['user']:
976 cols.append(ui.shortuser(get(r)[1]))
976 cols.append(ui.shortuser(get(r)[1]))
977 if opts['files_with_matches']:
977 if opts['files_with_matches']:
978 c = (fn, r)
978 c = (fn, r)
979 if c in filerevmatches:
979 if c in filerevmatches:
980 continue
980 continue
981 filerevmatches[c] = 1
981 filerevmatches[c] = 1
982 else:
982 else:
983 cols.append(l.line)
983 cols.append(l.line)
984 ui.write(sep.join(cols), eol)
984 ui.write(sep.join(cols), eol)
985 counts[change] += 1
985 counts[change] += 1
986 return counts['+'], counts['-']
986 return counts['+'], counts['-']
987
987
988 fstate = {}
988 fstate = {}
989 skip = {}
989 skip = {}
990 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
990 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
991 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
991 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
992 count = 0
992 count = 0
993 incrementing = False
993 incrementing = False
994 follow = opts.get('follow')
994 follow = opts.get('follow')
995 for st, rev, fns in changeiter:
995 for st, rev, fns in changeiter:
996 if st == 'window':
996 if st == 'window':
997 incrementing = rev
997 incrementing = rev
998 matches.clear()
998 matches.clear()
999 elif st == 'add':
999 elif st == 'add':
1000 mf = repo.changectx(rev).manifest()
1000 mf = repo.changectx(rev).manifest()
1001 matches[rev] = {}
1001 matches[rev] = {}
1002 for fn in fns:
1002 for fn in fns:
1003 if fn in skip:
1003 if fn in skip:
1004 continue
1004 continue
1005 fstate.setdefault(fn, {})
1005 fstate.setdefault(fn, {})
1006 try:
1006 try:
1007 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1007 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1008 if follow:
1008 if follow:
1009 copied = getfile(fn).renamed(mf[fn])
1009 copied = getfile(fn).renamed(mf[fn])
1010 if copied:
1010 if copied:
1011 copies.setdefault(rev, {})[fn] = copied[0]
1011 copies.setdefault(rev, {})[fn] = copied[0]
1012 except KeyError:
1012 except KeyError:
1013 pass
1013 pass
1014 elif st == 'iter':
1014 elif st == 'iter':
1015 states = matches[rev].items()
1015 states = matches[rev].items()
1016 states.sort()
1016 states.sort()
1017 for fn, m in states:
1017 for fn, m in states:
1018 copy = copies.get(rev, {}).get(fn)
1018 copy = copies.get(rev, {}).get(fn)
1019 if fn in skip:
1019 if fn in skip:
1020 if copy:
1020 if copy:
1021 skip[copy] = True
1021 skip[copy] = True
1022 continue
1022 continue
1023 if incrementing or not opts['all'] or fstate[fn]:
1023 if incrementing or not opts['all'] or fstate[fn]:
1024 pos, neg = display(fn, rev, m, fstate[fn])
1024 pos, neg = display(fn, rev, m, fstate[fn])
1025 count += pos + neg
1025 count += pos + neg
1026 if pos and not opts['all']:
1026 if pos and not opts['all']:
1027 skip[fn] = True
1027 skip[fn] = True
1028 if copy:
1028 if copy:
1029 skip[copy] = True
1029 skip[copy] = True
1030 fstate[fn] = m
1030 fstate[fn] = m
1031 if copy:
1031 if copy:
1032 fstate[copy] = m
1032 fstate[copy] = m
1033 prev[fn] = rev
1033 prev[fn] = rev
1034
1034
1035 if not incrementing:
1035 if not incrementing:
1036 fstate = fstate.items()
1036 fstate = fstate.items()
1037 fstate.sort()
1037 fstate.sort()
1038 for fn, state in fstate:
1038 for fn, state in fstate:
1039 if fn in skip:
1039 if fn in skip:
1040 continue
1040 continue
1041 if fn not in copies.get(prev[fn], {}):
1041 if fn not in copies.get(prev[fn], {}):
1042 display(fn, rev, {}, state)
1042 display(fn, rev, {}, state)
1043 return (count == 0 and 1) or 0
1043 return (count == 0 and 1) or 0
1044
1044
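# Illustrative sketch only: the same idea as grep's nested difflinestates
# helper, shown standalone on plain strings. Given the matching lines of a
# file in two revisions, it yields '-' for matches that disappeared and '+'
# for matches that appeared, the markers the --all flag prints.
import difflib

def _example_difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in a[alo:ahi]:
                yield ('-', line)
        if tag in ('insert', 'replace'):
            for line in b[blo:bhi]:
                yield ('+', line)

# list(_example_difflinestates(['foo = 1'], ['foo = 1', 'foo = 2']))
# gives [('+', 'foo = 2')]: the newly matching line is reported with a '+'.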
1045 def heads(ui, repo, **opts):
1045 def heads(ui, repo, **opts):
1046 """show current repository heads
1046 """show current repository heads
1047
1047
1048 Show all repository head changesets.
1048 Show all repository head changesets.
1049
1049
1050 Repository "heads" are changesets that don't have child
1050 Repository "heads" are changesets that don't have child
1051 changesets. They are where development generally takes place and
1051 changesets. They are where development generally takes place and
1052 are the usual targets for update and merge operations.
1052 are the usual targets for update and merge operations.
1053 """
1053 """
1054 if opts['rev']:
1054 if opts['rev']:
1055 heads = repo.heads(repo.lookup(opts['rev']))
1055 heads = repo.heads(repo.lookup(opts['rev']))
1056 else:
1056 else:
1057 heads = repo.heads()
1057 heads = repo.heads()
1058 displayer = cmdutil.show_changeset(ui, repo, opts)
1058 displayer = cmdutil.show_changeset(ui, repo, opts)
1059 for n in heads:
1059 for n in heads:
1060 displayer.show(changenode=n)
1060 displayer.show(changenode=n)
1061
1061
1062 def help_(ui, name=None, with_version=False):
1062 def help_(ui, name=None, with_version=False):
1063 """show help for a command, extension, or list of commands
1063 """show help for a command, extension, or list of commands
1064
1064
1065 With no arguments, print a list of commands and short help.
1065 With no arguments, print a list of commands and short help.
1066
1066
1067 Given a command name, print help for that command.
1067 Given a command name, print help for that command.
1068
1068
1069 Given an extension name, print help for that extension, and the
1069 Given an extension name, print help for that extension, and the
1070 commands it provides."""
1070 commands it provides."""
1071 option_lists = []
1071 option_lists = []
1072
1072
1073 def helpcmd(name):
1073 def helpcmd(name):
1074 if with_version:
1074 if with_version:
1075 version_(ui)
1075 version_(ui)
1076 ui.write('\n')
1076 ui.write('\n')
1077 aliases, i = findcmd(ui, name)
1077 aliases, i = findcmd(ui, name)
1078 # synopsis
1078 # synopsis
1079 ui.write("%s\n\n" % i[2])
1079 ui.write("%s\n\n" % i[2])
1080
1080
1081 # description
1081 # description
1082 doc = i[0].__doc__
1082 doc = i[0].__doc__
1083 if not doc:
1083 if not doc:
1084 doc = _("(No help text available)")
1084 doc = _("(No help text available)")
1085 if ui.quiet:
1085 if ui.quiet:
1086 doc = doc.splitlines(0)[0]
1086 doc = doc.splitlines(0)[0]
1087 ui.write("%s\n" % doc.rstrip())
1087 ui.write("%s\n" % doc.rstrip())
1088
1088
1089 if not ui.quiet:
1089 if not ui.quiet:
1090 # aliases
1090 # aliases
1091 if len(aliases) > 1:
1091 if len(aliases) > 1:
1092 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1092 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1093
1093
1094 # options
1094 # options
1095 if i[1]:
1095 if i[1]:
1096 option_lists.append(("options", i[1]))
1096 option_lists.append(("options", i[1]))
1097
1097
1098 def helplist(select=None):
1098 def helplist(select=None):
1099 h = {}
1099 h = {}
1100 cmds = {}
1100 cmds = {}
1101 for c, e in table.items():
1101 for c, e in table.items():
1102 f = c.split("|", 1)[0]
1102 f = c.split("|", 1)[0]
1103 if select and not select(f):
1103 if select and not select(f):
1104 continue
1104 continue
1105 if name == "shortlist" and not f.startswith("^"):
1105 if name == "shortlist" and not f.startswith("^"):
1106 continue
1106 continue
1107 f = f.lstrip("^")
1107 f = f.lstrip("^")
1108 if not ui.debugflag and f.startswith("debug"):
1108 if not ui.debugflag and f.startswith("debug"):
1109 continue
1109 continue
1110 doc = e[0].__doc__
1110 doc = e[0].__doc__
1111 if not doc:
1111 if not doc:
1112 doc = _("(No help text available)")
1112 doc = _("(No help text available)")
1113 h[f] = doc.splitlines(0)[0].rstrip()
1113 h[f] = doc.splitlines(0)[0].rstrip()
1114 cmds[f] = c.lstrip("^")
1114 cmds[f] = c.lstrip("^")
1115
1115
1116 fns = h.keys()
1116 fns = h.keys()
1117 fns.sort()
1117 fns.sort()
1118 m = max(map(len, fns))
1118 m = max(map(len, fns))
1119 for f in fns:
1119 for f in fns:
1120 if ui.verbose:
1120 if ui.verbose:
1121 commands = cmds[f].replace("|",", ")
1121 commands = cmds[f].replace("|",", ")
1122 ui.write(" %s:\n %s\n"%(commands, h[f]))
1122 ui.write(" %s:\n %s\n"%(commands, h[f]))
1123 else:
1123 else:
1124 ui.write(' %-*s %s\n' % (m, f, h[f]))
1124 ui.write(' %-*s %s\n' % (m, f, h[f]))
1125
1125
1126 def helpext(name):
1126 def helpext(name):
1127 try:
1127 try:
1128 mod = findext(name)
1128 mod = findext(name)
1129 except KeyError:
1129 except KeyError:
1130 raise UnknownCommand(name)
1130 raise UnknownCommand(name)
1131
1131
1132 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1132 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1133 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1133 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1134 for d in doc[1:]:
1134 for d in doc[1:]:
1135 ui.write(d, '\n')
1135 ui.write(d, '\n')
1136
1136
1137 ui.status('\n')
1137 ui.status('\n')
1138 if ui.verbose:
1138 if ui.verbose:
1139 ui.status(_('list of commands:\n\n'))
1139 ui.status(_('list of commands:\n\n'))
1140 else:
1140 else:
1141 ui.status(_('list of commands (use "hg help -v %s" '
1141 ui.status(_('list of commands (use "hg help -v %s" '
1142 'to show aliases and global options):\n\n') % name)
1142 'to show aliases and global options):\n\n') % name)
1143
1143
1144 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1144 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1145 helplist(modcmds.has_key)
1145 helplist(modcmds.has_key)
1146
1146
1147 if name and name != 'shortlist':
1147 if name and name != 'shortlist':
1148 try:
1148 try:
1149 helpcmd(name)
1149 helpcmd(name)
1150 except UnknownCommand:
1150 except UnknownCommand:
1151 helpext(name)
1151 helpext(name)
1152
1152
1153 else:
1153 else:
1154 # program name
1154 # program name
1155 if ui.verbose or with_version:
1155 if ui.verbose or with_version:
1156 version_(ui)
1156 version_(ui)
1157 else:
1157 else:
1158 ui.status(_("Mercurial Distributed SCM\n"))
1158 ui.status(_("Mercurial Distributed SCM\n"))
1159 ui.status('\n')
1159 ui.status('\n')
1160
1160
1161 # list of commands
1161 # list of commands
1162 if name == "shortlist":
1162 if name == "shortlist":
1163 ui.status(_('basic commands (use "hg help" '
1163 ui.status(_('basic commands (use "hg help" '
1164 'for the full list or option "-v" for details):\n\n'))
1164 'for the full list or option "-v" for details):\n\n'))
1165 elif ui.verbose:
1165 elif ui.verbose:
1166 ui.status(_('list of commands:\n\n'))
1166 ui.status(_('list of commands:\n\n'))
1167 else:
1167 else:
1168 ui.status(_('list of commands (use "hg help -v" '
1168 ui.status(_('list of commands (use "hg help -v" '
1169 'to show aliases and global options):\n\n'))
1169 'to show aliases and global options):\n\n'))
1170
1170
1171 helplist()
1171 helplist()
1172
1172
1173 # global options
1173 # global options
1174 if ui.verbose:
1174 if ui.verbose:
1175 option_lists.append(("global options", globalopts))
1175 option_lists.append(("global options", globalopts))
1176
1176
1177 # list all option lists
1177 # list all option lists
1178 opt_output = []
1178 opt_output = []
1179 for title, options in option_lists:
1179 for title, options in option_lists:
1180 opt_output.append(("\n%s:\n" % title, None))
1180 opt_output.append(("\n%s:\n" % title, None))
1181 for shortopt, longopt, default, desc in options:
1181 for shortopt, longopt, default, desc in options:
1182 if "DEPRECATED" in desc and not ui.verbose: continue
1182 if "DEPRECATED" in desc and not ui.verbose: continue
1183 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1183 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1184 longopt and " --%s" % longopt),
1184 longopt and " --%s" % longopt),
1185 "%s%s" % (desc,
1185 "%s%s" % (desc,
1186 default
1186 default
1187 and _(" (default: %s)") % default
1187 and _(" (default: %s)") % default
1188 or "")))
1188 or "")))
1189
1189
1190 if opt_output:
1190 if opt_output:
1191 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1191 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1192 for first, second in opt_output:
1192 for first, second in opt_output:
1193 if second:
1193 if second:
1194 ui.write(" %-*s %s\n" % (opts_len, first, second))
1194 ui.write(" %-*s %s\n" % (opts_len, first, second))
1195 else:
1195 else:
1196 ui.write("%s\n" % first)
1196 ui.write("%s\n" % first)
1197
1197
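# Illustrative sketch only: how the option tables printed above are aligned.
# The column width is the longest "-x --longname" cell among entries that have
# a description; the option triples below are made up and default values are
# left out for brevity.
def _example_optlines(options):
    cells = [("%2s%s" % (short and "-%s" % short or "",
                         long and " --%s" % long or ""), desc)
             for short, long, desc in options]
    width = max([len(cell) for cell, desc in cells if desc])
    return [" %-*s  %s" % (width, cell, desc) for cell, desc in cells]

# _example_optlines([('v', 'verbose', 'enable additional output'),
#                    ('', 'debug', 'enable debugging output')])
# pads '-v --verbose' and '   --debug' to the same width.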
1198 def identify(ui, repo):
1198 def identify(ui, repo):
1199 """print information about the working copy
1199 """print information about the working copy
1200
1200
1201 Print a short summary of the current state of the repo.
1201 Print a short summary of the current state of the repo.
1202
1202
1203 This summary identifies the repository state using one or two parent
1203 This summary identifies the repository state using one or two parent
1204 hash identifiers, followed by a "+" if there are uncommitted changes
1204 hash identifiers, followed by a "+" if there are uncommitted changes
1205 in the working directory, followed by a list of tags for this revision.
1205 in the working directory, followed by a list of tags for this revision.
1206 """
1206 """
1207 parents = [p for p in repo.dirstate.parents() if p != nullid]
1207 parents = [p for p in repo.dirstate.parents() if p != nullid]
1208 if not parents:
1208 if not parents:
1209 ui.write(_("unknown\n"))
1209 ui.write(_("unknown\n"))
1210 return
1210 return
1211
1211
1212 hexfunc = ui.debugflag and hex or short
1212 hexfunc = ui.debugflag and hex or short
1213 modified, added, removed, deleted = repo.status()[:4]
1213 modified, added, removed, deleted = repo.status()[:4]
1214 output = ["%s%s" %
1214 output = ["%s%s" %
1215 ('+'.join([hexfunc(parent) for parent in parents]),
1215 ('+'.join([hexfunc(parent) for parent in parents]),
1216 (modified or added or removed or deleted) and "+" or "")]
1216 (modified or added or removed or deleted) and "+" or "")]
1217
1217
1218 if not ui.quiet:
1218 if not ui.quiet:
1219
1219
1220 branch = repo.workingctx().branch()
1220 branch = repo.workingctx().branch()
1221 if branch:
1221 if branch:
1222 output.append("(%s)" % branch)
1222 output.append("(%s)" % branch)
1223
1223
1224 # multiple tags for a single parent separated by '/'
1224 # multiple tags for a single parent separated by '/'
1225 parenttags = ['/'.join(tags)
1225 parenttags = ['/'.join(tags)
1226 for tags in map(repo.nodetags, parents) if tags]
1226 for tags in map(repo.nodetags, parents) if tags]
1227 # tags for multiple parents separated by ' + '
1227 # tags for multiple parents separated by ' + '
1228 if parenttags:
1228 if parenttags:
1229 output.append(' + '.join(parenttags))
1229 output.append(' + '.join(parenttags))
1230
1230
1231 ui.write("%s\n" % ' '.join(output))
1231 ui.write("%s\n" % ' '.join(output))
1232
1232
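# Illustrative sketch only: how identify's one-line summary is assembled for a
# single-parent working directory. The helper and the sample values below are
# hypothetical.
def _example_identify_line(parenthashes, dirty, branch, tagnames):
    output = ['+'.join(parenthashes) + (dirty and '+' or '')]
    if branch:
        output.append('(%s)' % branch)
    if tagnames:
        output.append('/'.join(tagnames))
    return ' '.join(output)

# _example_identify_line(['8580ff50825a'], True, 'default', ['tip'])
# returns '8580ff50825a+ (default) tip'.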
1233 def import_(ui, repo, patch1, *patches, **opts):
1233 def import_(ui, repo, patch1, *patches, **opts):
1234 """import an ordered set of patches
1234 """import an ordered set of patches
1235
1235
1236 Import a list of patches and commit them individually.
1236 Import a list of patches and commit them individually.
1237
1237
1238 If there are outstanding changes in the working directory, import
1238 If there are outstanding changes in the working directory, import
1239 will abort unless given the -f flag.
1239 will abort unless given the -f flag.
1240
1240
1241 You can import a patch straight from a mail message. Even patches
1241 You can import a patch straight from a mail message. Even patches
1242 as attachments work (the body part must be of type text/plain or
1242 as attachments work (the body part must be of type text/plain or
1243 text/x-patch to be used). The From and Subject headers of the email
1243 text/x-patch to be used). The From and Subject headers of the email
1244 message are used as the default committer and commit message. All
1244 message are used as the default committer and commit message. All
1245 text/plain body parts before the first diff are added to the commit
1245 text/plain body parts before the first diff are added to the commit
1246 message.
1246 message.
1247
1247
1248 If the imported patch was generated by hg export, the user and
1248 If the imported patch was generated by hg export, the user and
1249 description from the patch override values from the message headers
1249 description from the patch override values from the message headers
1250 and body. Values given on the command line with -m and -u override these.
1250 and body. Values given on the command line with -m and -u override these.
1251
1251
1252 To read a patch from standard input, use patch name "-".
1252 To read a patch from standard input, use patch name "-".
1253 """
1253 """
1254 patches = (patch1,) + patches
1254 patches = (patch1,) + patches
1255
1255
1256 if not opts['force']:
1256 if not opts['force']:
1257 bail_if_changed(repo)
1257 bail_if_changed(repo)
1258
1258
1259 d = opts["base"]
1259 d = opts["base"]
1260 strip = opts["strip"]
1260 strip = opts["strip"]
1261
1261
1262 wlock = repo.wlock()
1262 wlock = repo.wlock()
1263 lock = repo.lock()
1263 lock = repo.lock()
1264
1264
1265 for p in patches:
1265 for p in patches:
1266 pf = os.path.join(d, p)
1266 pf = os.path.join(d, p)
1267
1267
1268 if pf == '-':
1268 if pf == '-':
1269 ui.status(_("applying patch from stdin\n"))
1269 ui.status(_("applying patch from stdin\n"))
1270 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1270 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1271 else:
1271 else:
1272 ui.status(_("applying %s\n") % p)
1272 ui.status(_("applying %s\n") % p)
1273 tmpname, message, user, date = patch.extract(ui, file(pf))
1273 tmpname, message, user, date = patch.extract(ui, file(pf))
1274
1274
1275 if tmpname is None:
1275 if tmpname is None:
1276 raise util.Abort(_('no diffs found'))
1276 raise util.Abort(_('no diffs found'))
1277
1277
1278 try:
1278 try:
1279 if opts['message']:
1279 if opts['message']:
1280 # pickup the cmdline msg
1280 # pickup the cmdline msg
1281 message = opts['message']
1281 message = opts['message']
1282 elif message:
1282 elif message:
1283 # pickup the patch msg
1283 # pickup the patch msg
1284 message = message.strip()
1284 message = message.strip()
1285 else:
1285 else:
1286 # launch the editor
1286 # launch the editor
1287 message = None
1287 message = None
1288 ui.debug(_('message:\n%s\n') % message)
1288 ui.debug(_('message:\n%s\n') % message)
1289
1289
1290 files = {}
1290 files = {}
1291 try:
1291 try:
1292 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1292 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1293 files=files)
1293 files=files)
1294 finally:
1294 finally:
1295 files = patch.updatedir(ui, repo, files, wlock=wlock)
1295 files = patch.updatedir(ui, repo, files, wlock=wlock)
1296 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1296 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1297 finally:
1297 finally:
1298 os.unlink(tmpname)
1298 os.unlink(tmpname)
1299
1299
1300 def incoming(ui, repo, source="default", **opts):
1300 def incoming(ui, repo, source="default", **opts):
1301 """show new changesets found in source
1301 """show new changesets found in source
1302
1302
1303 Show new changesets found in the specified path/URL or the default
1303 Show new changesets found in the specified path/URL or the default
1304 pull location. These are the changesets that would be pulled if a pull
1304 pull location. These are the changesets that would be pulled if a pull
1305 was requested.
1305 was requested.
1306
1306
1307 For a remote repository, using --bundle avoids downloading the changesets
1307 For a remote repository, using --bundle avoids downloading the changesets
1308 twice if the incoming is followed by a pull.
1308 twice if the incoming is followed by a pull.
1309
1309
1310 See pull for valid source format details.
1310 See pull for valid source format details.
1311 """
1311 """
1312 source = ui.expandpath(source)
1312 source = ui.expandpath(source)
1313 setremoteconfig(ui, opts)
1313 setremoteconfig(ui, opts)
1314
1314
1315 other = hg.repository(ui, source)
1315 other = hg.repository(ui, source)
1316 incoming = repo.findincoming(other, force=opts["force"])
1316 incoming = repo.findincoming(other, force=opts["force"])
1317 if not incoming:
1317 if not incoming:
1318 ui.status(_("no changes found\n"))
1318 ui.status(_("no changes found\n"))
1319 return
1319 return
1320
1320
1321 cleanup = None
1321 cleanup = None
1322 try:
1322 try:
1323 fname = opts["bundle"]
1323 fname = opts["bundle"]
1324 if fname or not other.local():
1324 if fname or not other.local():
1325 # create a bundle (uncompressed if other repo is not local)
1325 # create a bundle (uncompressed if other repo is not local)
1326 cg = other.changegroup(incoming, "incoming")
1326 cg = other.changegroup(incoming, "incoming")
1327 bundletype = other.local() and "HG10BZ" or "HG10UN"
1327 bundletype = other.local() and "HG10BZ" or "HG10UN"
1328 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1328 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1329 # keep written bundle?
1329 # keep written bundle?
1330 if opts["bundle"]:
1330 if opts["bundle"]:
1331 cleanup = None
1331 cleanup = None
1332 if not other.local():
1332 if not other.local():
1333 # use the created uncompressed bundlerepo
1333 # use the created uncompressed bundlerepo
1334 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1334 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1335
1335
1336 revs = None
1336 revs = None
1337 if opts['rev']:
1337 if opts['rev']:
1338 revs = [other.lookup(rev) for rev in opts['rev']]
1338 revs = [other.lookup(rev) for rev in opts['rev']]
1339 o = other.changelog.nodesbetween(incoming, revs)[0]
1339 o = other.changelog.nodesbetween(incoming, revs)[0]
1340 if opts['newest_first']:
1340 if opts['newest_first']:
1341 o.reverse()
1341 o.reverse()
1342 displayer = cmdutil.show_changeset(ui, other, opts)
1342 displayer = cmdutil.show_changeset(ui, other, opts)
1343 for n in o:
1343 for n in o:
1344 parents = [p for p in other.changelog.parents(n) if p != nullid]
1344 parents = [p for p in other.changelog.parents(n) if p != nullid]
1345 if opts['no_merges'] and len(parents) == 2:
1345 if opts['no_merges'] and len(parents) == 2:
1346 continue
1346 continue
1347 displayer.show(changenode=n)
1347 displayer.show(changenode=n)
1348 finally:
1348 finally:
1349 if hasattr(other, 'close'):
1349 if hasattr(other, 'close'):
1350 other.close()
1350 other.close()
1351 if cleanup:
1351 if cleanup:
1352 os.unlink(cleanup)
1352 os.unlink(cleanup)
1353
1353
1354 def init(ui, dest=".", **opts):
1354 def init(ui, dest=".", **opts):
1355 """create a new repository in the given directory
1355 """create a new repository in the given directory
1356
1356
1357 Initialize a new repository in the given directory. If the given
1357 Initialize a new repository in the given directory. If the given
1358 directory does not exist, it is created.
1358 directory does not exist, it is created.
1359
1359
1360 If no directory is given, the current directory is used.
1360 If no directory is given, the current directory is used.
1361
1361
1362 It is possible to specify an ssh:// URL as the destination.
1362 It is possible to specify an ssh:// URL as the destination.
1363 Look at the help text for the pull command for important details
1363 Look at the help text for the pull command for important details
1364 about ssh:// URLs.
1364 about ssh:// URLs.
1365 """
1365 """
1366 setremoteconfig(ui, opts)
1366 setremoteconfig(ui, opts)
1367 hg.repository(ui, dest, create=1)
1367 hg.repository(ui, dest, create=1)
1368
1368
1369 def locate(ui, repo, *pats, **opts):
1369 def locate(ui, repo, *pats, **opts):
1370 """locate files matching specific patterns
1370 """locate files matching specific patterns
1371
1371
1372 Print all files under Mercurial control whose names match the
1372 Print all files under Mercurial control whose names match the
1373 given patterns.
1373 given patterns.
1374
1374
1375 This command searches the current directory and its
1375 This command searches the current directory and its
1376 subdirectories. To search an entire repository, move to the root
1376 subdirectories. To search an entire repository, move to the root
1377 of the repository.
1377 of the repository.
1378
1378
1379 If no patterns are given to match, this command prints all file
1379 If no patterns are given to match, this command prints all file
1380 names.
1380 names.
1381
1381
1382 If you want to feed the output of this command into the "xargs"
1382 If you want to feed the output of this command into the "xargs"
1383 command, use the "-0" option to both this command and "xargs".
1383 command, use the "-0" option to both this command and "xargs".
1384 This will avoid the problem of "xargs" treating single filenames
1384 This will avoid the problem of "xargs" treating single filenames
1385 that contain white space as multiple filenames.
1385 that contain white space as multiple filenames.
1386 """
1386 """
1387 end = opts['print0'] and '\0' or '\n'
1387 end = opts['print0'] and '\0' or '\n'
1388 rev = opts['rev']
1388 rev = opts['rev']
1389 if rev:
1389 if rev:
1390 node = repo.lookup(rev)
1390 node = repo.lookup(rev)
1391 else:
1391 else:
1392 node = None
1392 node = None
1393
1393
1394 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1394 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1395 head='(?:.*/|)'):
1395 head='(?:.*/|)'):
1396 if not node and repo.dirstate.state(abs) == '?':
1396 if not node and repo.dirstate.state(abs) == '?':
1397 continue
1397 continue
1398 if opts['fullpath']:
1398 if opts['fullpath']:
1399 ui.write(os.path.join(repo.root, abs), end)
1399 ui.write(os.path.join(repo.root, abs), end)
1400 else:
1400 else:
1401 ui.write(((pats and rel) or abs), end)
1401 ui.write(((pats and rel) or abs), end)
1402
1402
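# Illustrative sketch only: why the -0/--print0 output described above can be
# split mechanically even when file names contain spaces or newlines. The
# helper name is hypothetical.
def _example_split_print0(data):
    # every name, including the last, is terminated by '\0', so drop the
    # trailing empty field
    return data.split('\0')[:-1]

# _example_split_print0('a file.txt\0dir/b c.txt\0') returns
# ['a file.txt', 'dir/b c.txt'].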
1403 def log(ui, repo, *pats, **opts):
1403 def log(ui, repo, *pats, **opts):
1404 """show revision history of entire repository or files
1404 """show revision history of entire repository or files
1405
1405
1406 Print the revision history of the specified files or the entire
1406 Print the revision history of the specified files or the entire
1407 project.
1407 project.
1408
1408
1409 File history is shown without following rename or copy history of
1409 File history is shown without following rename or copy history of
1410 files. Use -f/--follow with a file name to follow history across
1410 files. Use -f/--follow with a file name to follow history across
1411 renames and copies. --follow without a file name will only show
1411 renames and copies. --follow without a file name will only show
1412 ancestors or descendants of the starting revision. --follow-first
1412 ancestors or descendants of the starting revision. --follow-first
1413 only follows the first parent of merge revisions.
1413 only follows the first parent of merge revisions.
1414
1414
1415 If no revision range is specified, the default is tip:0 unless
1415 If no revision range is specified, the default is tip:0 unless
1416 --follow is set, in which case the working directory parent is
1416 --follow is set, in which case the working directory parent is
1417 used as the starting revision.
1417 used as the starting revision.
1418
1418
1419 By default this command outputs: changeset id and hash, tags,
1419 By default this command outputs: changeset id and hash, tags,
1420 non-trivial parents, user, date and time, and a summary for each
1420 non-trivial parents, user, date and time, and a summary for each
1421 commit. When the -v/--verbose switch is used, the list of changed
1421 commit. When the -v/--verbose switch is used, the list of changed
1422 files and full commit message is shown.
1422 files and full commit message is shown.
1423 """
1423 """
1424
1424
1425 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1425 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1426 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1426 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1427
1427
1428 if opts['limit']:
1428 if opts['limit']:
1429 try:
1429 try:
1430 limit = int(opts['limit'])
1430 limit = int(opts['limit'])
1431 except ValueError:
1431 except ValueError:
1432 raise util.Abort(_('limit must be a positive integer'))
1432 raise util.Abort(_('limit must be a positive integer'))
1433 if limit <= 0: raise util.Abort(_('limit must be positive'))
1433 if limit <= 0: raise util.Abort(_('limit must be positive'))
1434 else:
1434 else:
1435 limit = sys.maxint
1435 limit = sys.maxint
1436 count = 0
1436 count = 0
1437
1437
1438 if opts['copies'] and opts['rev']:
1438 if opts['copies'] and opts['rev']:
1439 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1439 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1440 else:
1440 else:
1441 endrev = repo.changelog.count()
1441 endrev = repo.changelog.count()
1442 rcache = {}
1442 rcache = {}
1443 ncache = {}
1443 ncache = {}
1444 dcache = []
1444 dcache = []
1445 def getrenamed(fn, rev, man):
1445 def getrenamed(fn, rev, man):
1446 '''looks up all renames for a file (up to endrev) the first
1446 '''looks up all renames for a file (up to endrev) the first
1447 time the file is given. It indexes on the changerev and only
1447 time the file is given. It indexes on the changerev and only
1448 parses the manifest if linkrev != changerev.
1448 parses the manifest if linkrev != changerev.
1449 Returns rename info for fn at changerev rev.'''
1449 Returns rename info for fn at changerev rev.'''
1450 if fn not in rcache:
1450 if fn not in rcache:
1451 rcache[fn] = {}
1451 rcache[fn] = {}
1452 ncache[fn] = {}
1452 ncache[fn] = {}
1453 fl = repo.file(fn)
1453 fl = repo.file(fn)
1454 for i in xrange(fl.count()):
1454 for i in xrange(fl.count()):
1455 node = fl.node(i)
1455 node = fl.node(i)
1456 lr = fl.linkrev(node)
1456 lr = fl.linkrev(node)
1457 renamed = fl.renamed(node)
1457 renamed = fl.renamed(node)
1458 rcache[fn][lr] = renamed
1458 rcache[fn][lr] = renamed
1459 if renamed:
1459 if renamed:
1460 ncache[fn][node] = renamed
1460 ncache[fn][node] = renamed
1461 if lr >= endrev:
1461 if lr >= endrev:
1462 break
1462 break
1463 if rev in rcache[fn]:
1463 if rev in rcache[fn]:
1464 return rcache[fn][rev]
1464 return rcache[fn][rev]
1465 mr = repo.manifest.rev(man)
1465 mr = repo.manifest.rev(man)
1466 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1466 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1467 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1467 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1468 if not dcache or dcache[0] != man:
1468 if not dcache or dcache[0] != man:
1469 dcache[:] = [man, repo.manifest.readdelta(man)]
1469 dcache[:] = [man, repo.manifest.readdelta(man)]
1470 if fn in dcache[1]:
1470 if fn in dcache[1]:
1471 return ncache[fn].get(dcache[1][fn])
1471 return ncache[fn].get(dcache[1][fn])
1472 return None
1472 return None
1473
1473
1474 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
1474 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
1475 for st, rev, fns in changeiter:
1475 for st, rev, fns in changeiter:
1476 if st == 'add':
1476 if st == 'add':
1477 changenode = repo.changelog.node(rev)
1477 changenode = repo.changelog.node(rev)
1478 parents = [p for p in repo.changelog.parentrevs(rev)
1478 parents = [p for p in repo.changelog.parentrevs(rev)
1479 if p != nullrev]
1479 if p != nullrev]
1480 if opts['no_merges'] and len(parents) == 2:
1480 if opts['no_merges'] and len(parents) == 2:
1481 continue
1481 continue
1482 if opts['only_merges'] and len(parents) != 2:
1482 if opts['only_merges'] and len(parents) != 2:
1483 continue
1483 continue
1484
1484
1485 if opts['keyword']:
1485 if opts['keyword']:
1486 changes = get(rev)
1486 changes = get(rev)
1487 miss = 0
1487 miss = 0
1488 for k in [kw.lower() for kw in opts['keyword']]:
1488 for k in [kw.lower() for kw in opts['keyword']]:
1489 if not (k in changes[1].lower() or
1489 if not (k in changes[1].lower() or
1490 k in changes[4].lower() or
1490 k in changes[4].lower() or
1491 k in " ".join(changes[3][:20]).lower()):
1491 k in " ".join(changes[3][:20]).lower()):
1492 miss = 1
1492 miss = 1
1493 break
1493 break
1494 if miss:
1494 if miss:
1495 continue
1495 continue
1496
1496
1497 copies = []
1497 copies = []
1498 if opts.get('copies') and rev:
1498 if opts.get('copies') and rev:
1499 mf = get(rev)[0]
1499 mf = get(rev)[0]
1500 for fn in get(rev)[3]:
1500 for fn in get(rev)[3]:
1501 rename = getrenamed(fn, rev, mf)
1501 rename = getrenamed(fn, rev, mf)
1502 if rename:
1502 if rename:
1503 copies.append((fn, rename[0]))
1503 copies.append((fn, rename[0]))
1504 displayer.show(rev, changenode, copies=copies)
1504 displayer.show(rev, changenode, copies=copies)
1505 elif st == 'iter':
1505 elif st == 'iter':
1506 if count == limit: break
1506 if count == limit: break
1507 if displayer.flush(rev):
1507 if displayer.flush(rev):
1508 count += 1
1508 count += 1
1509
1509
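# Illustrative sketch only (not part of Mercurial): the -k/--keyword test used
# by log above, standalone. A changeset is kept only when every keyword occurs
# in the committer name, the description, or the first twenty changed file
# names, compared case-insensitively.
def _example_keyword_match(keywords, user, description, files):
    user = user.lower()
    description = description.lower()
    filenames = " ".join(files[:20]).lower()
    for kw in keywords:
        k = kw.lower()
        if not (k in user or k in description or k in filenames):
            return False
    return True

# _example_keyword_match(['grep'], 'mpm', 'fix a bug in grep',
#                        ['mercurial/commands.py'])
# returns True; with ['web'] instead it returns False.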
1510 def manifest(ui, repo, rev=None):
1510 def manifest(ui, repo, rev=None):
1511 """output the latest or given revision of the project manifest
1511 """output the latest or given revision of the project manifest
1512
1512
1513 Print a list of version controlled files for the given revision.
1513 Print a list of version controlled files for the given revision.
1514
1514
1515 The manifest is the list of files being version controlled. If no revision
1515 The manifest is the list of files being version controlled. If no revision
1516 is given then the first parent of the working directory is used.
1516 is given then the first parent of the working directory is used.
1517
1517
1518 With the -v flag, print file permissions. With the --debug flag,
1518 With the -v flag, print file permissions. With the --debug flag,
1519 print file revision hashes.
1519 print file revision hashes.
1520 """
1520 """
1521
1521
1522 m = repo.changectx(rev).manifest()
1522 m = repo.changectx(rev).manifest()
1523 files = m.keys()
1523 files = m.keys()
1524 files.sort()
1524 files.sort()
1525
1525
1526 for f in files:
1526 for f in files:
1527 if ui.debugflag:
1527 if ui.debugflag:
1528 ui.write("%40s " % hex(m[f]))
1528 ui.write("%40s " % hex(m[f]))
1529 if ui.verbose:
1529 if ui.verbose:
1530 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1530 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1531 ui.write("%s\n" % f)
1531 ui.write("%s\n" % f)
1532
1532
1533 def merge(ui, repo, node=None, force=None, branch=None):
1533 def merge(ui, repo, node=None, force=None, branch=None):
1534 """Merge working directory with another revision
1534 """Merge working directory with another revision
1535
1535
1536 Merge the contents of the current working directory and the
1536 Merge the contents of the current working directory and the
1537 requested revision. Files that changed between either parent are
1537 requested revision. Files that changed between either parent are
1538 marked as changed for the next commit and a commit must be
1538 marked as changed for the next commit and a commit must be
1539 performed before any further updates are allowed.
1539 performed before any further updates are allowed.
1540
1540
1541 If no revision is specified, the working directory's parent is a
1541 If no revision is specified, the working directory's parent is a
1542 head revision, and the repository contains exactly one other head,
1542 head revision, and the repository contains exactly one other head,
1543 the other head is merged with by default. Otherwise, an explicit
1543 the other head is merged with by default. Otherwise, an explicit
1544 revision to merge with must be provided.
1544 revision to merge with must be provided.
1545 """
1545 """
1546
1546
1547 if node or branch:
1547 if node or branch:
1548 node = _lookup(repo, node, branch)
1548 node = _lookup(repo, node, branch)
1549 else:
1549 else:
1550 heads = repo.heads()
1550 heads = repo.heads()
1551 if len(heads) > 2:
1551 if len(heads) > 2:
1552 raise util.Abort(_('repo has %d heads - '
1552 raise util.Abort(_('repo has %d heads - '
1553 'please merge with an explicit rev') %
1553 'please merge with an explicit rev') %
1554 len(heads))
1554 len(heads))
1555 if len(heads) == 1:
1555 if len(heads) == 1:
1556 raise util.Abort(_('there is nothing to merge - '
1556 raise util.Abort(_('there is nothing to merge - '
1557 'use "hg update" instead'))
1557 'use "hg update" instead'))
1558 parent = repo.dirstate.parents()[0]
1558 parent = repo.dirstate.parents()[0]
1559 if parent not in heads:
1559 if parent not in heads:
1560 raise util.Abort(_('working dir not at a head rev - '
1560 raise util.Abort(_('working dir not at a head rev - '
1561 'use "hg update" or merge with an explicit rev'))
1561 'use "hg update" or merge with an explicit rev'))
1562 node = parent == heads[0] and heads[-1] or heads[0]
1562 node = parent == heads[0] and heads[-1] or heads[0]
1563 return hg.merge(repo, node, force=force)
1563 return hg.merge(repo, node, force=force)
1564
1564
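# Illustrative sketch only: the head-picking expression at the end of merge()
# written out long-hand. With exactly two heads, it selects whichever head is
# not the working directory's parent.
def _example_pick_other_head(parent, heads):
    if parent == heads[0]:
        return heads[-1]
    return heads[0]

# _example_pick_other_head('a', ['a', 'b']) returns 'b';
# _example_pick_other_head('b', ['a', 'b']) returns 'a'.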
1565 def outgoing(ui, repo, dest=None, **opts):
1565 def outgoing(ui, repo, dest=None, **opts):
1566 """show changesets not found in destination
1566 """show changesets not found in destination
1567
1567
1568 Show changesets not found in the specified destination repository or
1568 Show changesets not found in the specified destination repository or
1569 the default push location. These are the changesets that would be pushed
1569 the default push location. These are the changesets that would be pushed
1570 if a push was requested.
1570 if a push was requested.
1571
1571
1572 See pull for valid destination format details.
1572 See pull for valid destination format details.
1573 """
1573 """
1574 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1574 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1575 setremoteconfig(ui, opts)
1575 setremoteconfig(ui, opts)
1576 revs = None
1576 revs = None
1577 if opts['rev']:
1577 if opts['rev']:
1578 revs = [repo.lookup(rev) for rev in opts['rev']]
1578 revs = [repo.lookup(rev) for rev in opts['rev']]
1579
1579
1580 other = hg.repository(ui, dest)
1580 other = hg.repository(ui, dest)
1581 o = repo.findoutgoing(other, force=opts['force'])
1581 o = repo.findoutgoing(other, force=opts['force'])
1582 if not o:
1582 if not o:
1583 ui.status(_("no changes found\n"))
1583 ui.status(_("no changes found\n"))
1584 return
1584 return
1585 o = repo.changelog.nodesbetween(o, revs)[0]
1585 o = repo.changelog.nodesbetween(o, revs)[0]
1586 if opts['newest_first']:
1586 if opts['newest_first']:
1587 o.reverse()
1587 o.reverse()
1588 displayer = cmdutil.show_changeset(ui, repo, opts)
1588 displayer = cmdutil.show_changeset(ui, repo, opts)
1589 for n in o:
1589 for n in o:
1590 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1590 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1591 if opts['no_merges'] and len(parents) == 2:
1591 if opts['no_merges'] and len(parents) == 2:
1592 continue
1592 continue
1593 displayer.show(changenode=n)
1593 displayer.show(changenode=n)
1594
1594
1595 def parents(ui, repo, file_=None, **opts):
1595 def parents(ui, repo, file_=None, **opts):
1596 """show the parents of the working dir or revision
1596 """show the parents of the working dir or revision
1597
1597
1598 Print the working directory's parent revisions.
1598 Print the working directory's parent revisions.
1599 """
1599 """
1600 rev = opts.get('rev')
1600 rev = opts.get('rev')
1601 if rev:
1601 if rev:
1602 if file_:
1602 if file_:
1603 ctx = repo.filectx(file_, changeid=rev)
1603 ctx = repo.filectx(file_, changeid=rev)
1604 else:
1604 else:
1605 ctx = repo.changectx(rev)
1605 ctx = repo.changectx(rev)
1606 p = [cp.node() for cp in ctx.parents()]
1606 p = [cp.node() for cp in ctx.parents()]
1607 else:
1607 else:
1608 p = repo.dirstate.parents()
1608 p = repo.dirstate.parents()
1609
1609
1610 displayer = cmdutil.show_changeset(ui, repo, opts)
1610 displayer = cmdutil.show_changeset(ui, repo, opts)
1611 for n in p:
1611 for n in p:
1612 if n != nullid:
1612 if n != nullid:
1613 displayer.show(changenode=n)
1613 displayer.show(changenode=n)
1614
1614
1615 def paths(ui, repo, search=None):
1615 def paths(ui, repo, search=None):
1616 """show definition of symbolic path names
1616 """show definition of symbolic path names
1617
1617
1618 Show definition of symbolic path name NAME. If no name is given, show
1618 Show definition of symbolic path name NAME. If no name is given, show
1619 definition of available names.
1619 definition of available names.
1620
1620
1621 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1621 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1622 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1622 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1623 """
1623 """
1624 if search:
1624 if search:
1625 for name, path in ui.configitems("paths"):
1625 for name, path in ui.configitems("paths"):
1626 if name == search:
1626 if name == search:
1627 ui.write("%s\n" % path)
1627 ui.write("%s\n" % path)
1628 return
1628 return
1629 ui.warn(_("not found!\n"))
1629 ui.warn(_("not found!\n"))
1630 return 1
1630 return 1
1631 else:
1631 else:
1632 for name, path in ui.configitems("paths"):
1632 for name, path in ui.configitems("paths"):
1633 ui.write("%s = %s\n" % (name, path))
1633 ui.write("%s = %s\n" % (name, path))
1634
1634
1635 def postincoming(ui, repo, modheads, optupdate):
1635 def postincoming(ui, repo, modheads, optupdate):
1636 if modheads == 0:
1636 if modheads == 0:
1637 return
1637 return
1638 if optupdate:
1638 if optupdate:
1639 if modheads == 1:
1639 if modheads == 1:
1640 return hg.update(repo, repo.changelog.tip()) # update
1640 return hg.update(repo, repo.changelog.tip()) # update
1641 else:
1641 else:
1642 ui.status(_("not updating, since new heads added\n"))
1642 ui.status(_("not updating, since new heads added\n"))
1643 if modheads > 1:
1643 if modheads > 1:
1644 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1644 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1645 else:
1645 else:
1646 ui.status(_("(run 'hg update' to get a working copy)\n"))
1646 ui.status(_("(run 'hg update' to get a working copy)\n"))
1647
1647
1648 def pull(ui, repo, source="default", **opts):
1648 def pull(ui, repo, source="default", **opts):
1649 """pull changes from the specified source
1649 """pull changes from the specified source
1650
1650
1651 Pull changes from a remote repository to a local one.
1651 Pull changes from a remote repository to a local one.
1652
1652
1653 This finds all changes from the repository at the specified path
1653 This finds all changes from the repository at the specified path
1654 or URL and adds them to the local repository. By default, this
1654 or URL and adds them to the local repository. By default, this
1655 does not update the copy of the project in the working directory.
1655 does not update the copy of the project in the working directory.
1656
1656
1657 Valid URLs are of the form:
1657 Valid URLs are of the form:
1658
1658
1659 local/filesystem/path (or file://local/filesystem/path)
1659 local/filesystem/path (or file://local/filesystem/path)
1660 http://[user@]host[:port]/[path]
1660 http://[user@]host[:port]/[path]
1661 https://[user@]host[:port]/[path]
1661 https://[user@]host[:port]/[path]
1662 ssh://[user@]host[:port]/[path]
1662 ssh://[user@]host[:port]/[path]
1663 static-http://host[:port]/[path]
1663 static-http://host[:port]/[path]
1664
1664
1665 Paths in the local filesystem can either point to Mercurial
1665 Paths in the local filesystem can either point to Mercurial
1666 repositories or to bundle files (as created by 'hg bundle' or
1666 repositories or to bundle files (as created by 'hg bundle' or
1667 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1667 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1668 allows access to a Mercurial repository where you simply use a web
1668 allows access to a Mercurial repository where you simply use a web
1669 server to publish the .hg directory as static content.
1669 server to publish the .hg directory as static content.
1670
1670
1671 Some notes about using SSH with Mercurial:
1671 Some notes about using SSH with Mercurial:
1672 - SSH requires an accessible shell account on the destination machine
1672 - SSH requires an accessible shell account on the destination machine
1673 and a copy of hg in the remote path or specified with remotecmd.
1673 and a copy of hg in the remote path or specified with remotecmd.
1674 - path is relative to the remote user's home directory by default.
1674 - path is relative to the remote user's home directory by default.
1675 Use an extra slash at the start of a path to specify an absolute path:
1675 Use an extra slash at the start of a path to specify an absolute path:
1676 ssh://example.com//tmp/repository
1676 ssh://example.com//tmp/repository
1677 - Mercurial doesn't use its own compression via SSH; the right thing
1677 - Mercurial doesn't use its own compression via SSH; the right thing
1678 to do is to configure it in your ~/.ssh/config, e.g.:
1678 to do is to configure it in your ~/.ssh/config, e.g.:
1679 Host *.mylocalnetwork.example.com
1679 Host *.mylocalnetwork.example.com
1680 Compression no
1680 Compression no
1681 Host *
1681 Host *
1682 Compression yes
1682 Compression yes
1683 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1683 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1684 with the --ssh command line option.
1684 with the --ssh command line option.
1685 """
1685 """
1686 source = ui.expandpath(source)
1686 source = ui.expandpath(source)
1687 setremoteconfig(ui, opts)
1687 setremoteconfig(ui, opts)
1688
1688
1689 other = hg.repository(ui, source)
1689 other = hg.repository(ui, source)
1690 ui.status(_('pulling from %s\n') % (source))
1690 ui.status(_('pulling from %s\n') % (source))
1691 revs = None
1691 revs = None
1692 if opts['rev']:
1692 if opts['rev']:
1693 if 'lookup' in other.capabilities:
1693 if 'lookup' in other.capabilities:
1694 revs = [other.lookup(rev) for rev in opts['rev']]
1694 revs = [other.lookup(rev) for rev in opts['rev']]
1695 else:
1695 else:
1696 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1696 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1697 raise util.Abort(error)
1697 raise util.Abort(error)
1698 modheads = repo.pull(other, heads=revs, force=opts['force'])
1698 modheads = repo.pull(other, heads=revs, force=opts['force'])
1699 return postincoming(ui, repo, modheads, opts['update'])
1699 return postincoming(ui, repo, modheads, opts['update'])
1700
1700
1701 def push(ui, repo, dest=None, **opts):
1701 def push(ui, repo, dest=None, **opts):
1702 """push changes to the specified destination
1702 """push changes to the specified destination
1703
1703
1704 Push changes from the local repository to the given destination.
1704 Push changes from the local repository to the given destination.
1705
1705
1706 This is the symmetrical operation of pull: it moves
1706 This is the symmetrical operation of pull: it moves
1707 changes from the current repository to a different one. If the
1707 changes from the current repository to a different one. If the
1708 destination is local, this is identical to a pull in that directory
1708 destination is local, this is identical to a pull in that directory
1709 from the current one.
1709 from the current one.
1710
1710
1711 By default, push will refuse to run if it detects the result would
1711 By default, push will refuse to run if it detects the result would
1712 increase the number of remote heads. This generally indicates that
1712 increase the number of remote heads. This generally indicates that
1713 the client has forgotten to sync and merge before pushing.
1713 the client has forgotten to sync and merge before pushing.
1714
1714
1715 Valid URLs are of the form:
1715 Valid URLs are of the form:
1716
1716
1717 local/filesystem/path (or file://local/filesystem/path)
1717 local/filesystem/path (or file://local/filesystem/path)
1718 ssh://[user@]host[:port]/[path]
1718 ssh://[user@]host[:port]/[path]
1719 http://[user@]host[:port]/[path]
1719 http://[user@]host[:port]/[path]
1720 https://[user@]host[:port]/[path]
1720 https://[user@]host[:port]/[path]
1721
1721
1722 Look at the help text for the pull command for important details
1722 Look at the help text for the pull command for important details
1723 about ssh:// URLs.
1723 about ssh:// URLs.
1724
1724
1725 Pushing to http:// and https:// URLs is only possible if this
1725 Pushing to http:// and https:// URLs is only possible if this
1726 feature is explicitly enabled on the remote Mercurial server.
1726 feature is explicitly enabled on the remote Mercurial server.
1727 """
1727 """
1728 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1728 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1729 setremoteconfig(ui, opts)
1729 setremoteconfig(ui, opts)
1730
1730
1731 other = hg.repository(ui, dest)
1731 other = hg.repository(ui, dest)
1732 ui.status('pushing to %s\n' % (dest))
1732 ui.status('pushing to %s\n' % (dest))
1733 revs = None
1733 revs = None
1734 if opts['rev']:
1734 if opts['rev']:
1735 revs = [repo.lookup(rev) for rev in opts['rev']]
1735 revs = [repo.lookup(rev) for rev in opts['rev']]
1736 r = repo.push(other, opts['force'], revs=revs)
1736 r = repo.push(other, opts['force'], revs=revs)
1737 return r == 0
1737 return r == 0
1738
1738
1739 def rawcommit(ui, repo, *pats, **opts):
1739 def rawcommit(ui, repo, *pats, **opts):
1740 """raw commit interface (DEPRECATED)
1740 """raw commit interface (DEPRECATED)
1741
1741
1742 (DEPRECATED)
1742 (DEPRECATED)
1743 Low-level commit, for use in helper scripts.
1743 Low-level commit, for use in helper scripts.
1744
1744
1745 This command is not intended to be used by normal users, as it is
1745 This command is not intended to be used by normal users, as it is
1746 primarily useful for importing from other SCMs.
1746 primarily useful for importing from other SCMs.
1747
1747
1748 This command is now deprecated and will be removed in a future
1748 This command is now deprecated and will be removed in a future
1749 release; please use debugsetparents and commit instead.
1749 release; please use debugsetparents and commit instead.
1750 """
1750 """
1751
1751
1752 ui.warn(_("(the rawcommit command is deprecated)\n"))
1752 ui.warn(_("(the rawcommit command is deprecated)\n"))
1753
1753
1754 message = logmessage(opts)
1754 message = logmessage(opts)
1755
1755
1756 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1756 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
1757 if opts['files']:
1757 if opts['files']:
1758 files += open(opts['files']).read().splitlines()
1758 files += open(opts['files']).read().splitlines()
1759
1759
1760 parents = [repo.lookup(p) for p in opts['parent']]
1760 parents = [repo.lookup(p) for p in opts['parent']]
1761
1761
1762 try:
1762 try:
1763 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1763 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
1764 except ValueError, inst:
1764 except ValueError, inst:
1765 raise util.Abort(str(inst))
1765 raise util.Abort(str(inst))
1766
1766
1767 def recover(ui, repo):
1767 def recover(ui, repo):
1768 """roll back an interrupted transaction
1768 """roll back an interrupted transaction
1769
1769
1770 Recover from an interrupted commit or pull.
1770 Recover from an interrupted commit or pull.
1771
1771
1772 This command tries to fix the repository status after an interrupted
1772 This command tries to fix the repository status after an interrupted
1773 operation. It should only be necessary when Mercurial suggests it.
1773 operation. It should only be necessary when Mercurial suggests it.
1774 """
1774 """
1775 if repo.recover():
1775 if repo.recover():
1776 return hg.verify(repo)
1776 return hg.verify(repo)
1777 return 1
1777 return 1
1778
1778
1779 def remove(ui, repo, *pats, **opts):
1779 def remove(ui, repo, *pats, **opts):
1780 """remove the specified files on the next commit
1780 """remove the specified files on the next commit
1781
1781
1782 Schedule the indicated files for removal from the repository.
1782 Schedule the indicated files for removal from the repository.
1783
1783
1784 This command schedules the files to be removed at the next commit.
1784 This command schedules the files to be removed at the next commit.
1785 This only removes files from the current branch, not from the
1785 This only removes files from the current branch, not from the
1786 entire project history. If the files still exist in the working
1786 entire project history. If the files still exist in the working
1787 directory, they will be deleted from it. If invoked with --after,
1787 directory, they will be deleted from it. If invoked with --after,
1788 files that have been manually deleted are marked as removed.
1788 files that have been manually deleted are marked as removed.
1789
1789
1790 Modified files and added files are not removed by default. To
1790 Modified files and added files are not removed by default. To
1791 remove them, use the -f/--force option.
1791 remove them, use the -f/--force option.
1792 """
1792 """
1793 names = []
1793 names = []
1794 if not opts['after'] and not pats:
1794 if not opts['after'] and not pats:
1795 raise util.Abort(_('no files specified'))
1795 raise util.Abort(_('no files specified'))
1796 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1796 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1797 exact = dict.fromkeys(files)
1797 exact = dict.fromkeys(files)
1798 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
1798 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
1799 modified, added, removed, deleted, unknown = mardu
1799 modified, added, removed, deleted, unknown = mardu
1800 remove, forget = [], []
1800 remove, forget = [], []
1801 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1801 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1802 reason = None
1802 reason = None
1803 if abs not in deleted and opts['after']:
1803 if abs not in deleted and opts['after']:
1804 reason = _('is still present')
1804 reason = _('is still present')
1805 elif abs in modified and not opts['force']:
1805 elif abs in modified and not opts['force']:
1806 reason = _('is modified (use -f to force removal)')
1806 reason = _('is modified (use -f to force removal)')
1807 elif abs in added:
1807 elif abs in added:
1808 if opts['force']:
1808 if opts['force']:
1809 forget.append(abs)
1809 forget.append(abs)
1810 continue
1810 continue
1811 reason = _('has been marked for add (use -f to force removal)')
1811 reason = _('has been marked for add (use -f to force removal)')
1812 elif abs in unknown:
1812 elif abs in unknown:
1813 reason = _('is not managed')
1813 reason = _('is not managed')
1814 elif abs in removed:
1814 elif abs in removed:
1815 continue
1815 continue
1816 if reason:
1816 if reason:
1817 if exact:
1817 if exact:
1818 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1818 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
1819 else:
1819 else:
1820 if ui.verbose or not exact:
1820 if ui.verbose or not exact:
1821 ui.status(_('removing %s\n') % rel)
1821 ui.status(_('removing %s\n') % rel)
1822 remove.append(abs)
1822 remove.append(abs)
1823 repo.forget(forget)
1823 repo.forget(forget)
1824 repo.remove(remove, unlink=not opts['after'])
1824 repo.remove(remove, unlink=not opts['after'])
1825
1825
1826 def rename(ui, repo, *pats, **opts):
1826 def rename(ui, repo, *pats, **opts):
1827 """rename files; equivalent of copy + remove
1827 """rename files; equivalent of copy + remove
1828
1828
1829 Mark dest as copies of sources; mark sources for deletion. If
1829 Mark dest as copies of sources; mark sources for deletion. If
1830 dest is a directory, copies are put in that directory. If dest is
1830 dest is a directory, copies are put in that directory. If dest is
1831 a file, there can only be one source.
1831 a file, there can only be one source.
1832
1832
1833 By default, this command copies the contents of files as they
1833 By default, this command copies the contents of files as they
1834 stand in the working directory. If invoked with --after, the
1834 stand in the working directory. If invoked with --after, the
1835 operation is recorded, but no copying is performed.
1835 operation is recorded, but no copying is performed.
1836
1836
1837 This command takes effect in the next commit.
1837 This command takes effect in the next commit.
1838 """
1838 """
1839 wlock = repo.wlock(0)
1839 wlock = repo.wlock(0)
1840 errs, copied = docopy(ui, repo, pats, opts, wlock)
1840 errs, copied = docopy(ui, repo, pats, opts, wlock)
1841 names = []
1841 names = []
1842 for abs, rel, exact in copied:
1842 for abs, rel, exact in copied:
1843 if ui.verbose or not exact:
1843 if ui.verbose or not exact:
1844 ui.status(_('removing %s\n') % rel)
1844 ui.status(_('removing %s\n') % rel)
1845 names.append(abs)
1845 names.append(abs)
1846 if not opts.get('dry_run'):
1846 if not opts.get('dry_run'):
1847 repo.remove(names, True, wlock)
1847 repo.remove(names, True, wlock)
1848 return errs
1848 return errs
1849
1849
1850 def revert(ui, repo, *pats, **opts):
1850 def revert(ui, repo, *pats, **opts):
1851 """revert files or dirs to their states as of some revision
1851 """revert files or dirs to their states as of some revision
1852
1852
1853 With no revision specified, revert the named files or directories
1853 With no revision specified, revert the named files or directories
1854 to the contents they had in the parent of the working directory.
1854 to the contents they had in the parent of the working directory.
1855 This restores the contents of the affected files to an unmodified
1855 This restores the contents of the affected files to an unmodified
1856 state. If the working directory has two parents, you must
1856 state. If the working directory has two parents, you must
1857 explicitly specify the revision to revert to.
1857 explicitly specify the revision to revert to.
1858
1858
1859 Modified files are saved with a .orig suffix before reverting.
1859 Modified files are saved with a .orig suffix before reverting.
1860 To disable these backups, use --no-backup.
1860 To disable these backups, use --no-backup.
1861
1861
1862 Using the -r option, revert the given files or directories to their
1862 Using the -r option, revert the given files or directories to their
1863 contents as of a specific revision. This can be helpful to "roll
1863 contents as of a specific revision. This can be helpful to "roll
1864 back" some or all of a change that should not have been committed.
1864 back" some or all of a change that should not have been committed.
1865
1865
1866 Revert modifies the working directory. It does not commit any
1866 Revert modifies the working directory. It does not commit any
1867 changes, or change the parent of the working directory. If you
1867 changes, or change the parent of the working directory. If you
1868 revert to a revision other than the parent of the working
1868 revert to a revision other than the parent of the working
1869 directory, the reverted files will thus appear modified
1869 directory, the reverted files will thus appear modified
1870 afterwards.
1870 afterwards.
1871
1871
1872 If a file has been deleted, it is recreated. If the executable
1872 If a file has been deleted, it is recreated. If the executable
1873 mode of a file was changed, it is reset.
1873 mode of a file was changed, it is reset.
1874
1874
1875 If names are given, all files matching the names are reverted.
1875 If names are given, all files matching the names are reverted.
1876
1876
1877 If no arguments are given, no files are reverted.
1877 If no arguments are given, no files are reverted.
1878 """
1878 """
1879
1879
1880 if not pats and not opts['all']:
1880 if not pats and not opts['all']:
1881 raise util.Abort(_('no files or directories specified; '
1881 raise util.Abort(_('no files or directories specified; '
1882 'use --all to revert the whole repo'))
1882 'use --all to revert the whole repo'))
1883
1883
1884 parent, p2 = repo.dirstate.parents()
1884 parent, p2 = repo.dirstate.parents()
1885 if not opts['rev'] and p2 != nullid:
1885 if not opts['rev'] and p2 != nullid:
1886 raise util.Abort(_('uncommitted merge - please provide a '
1886 raise util.Abort(_('uncommitted merge - please provide a '
1887 'specific revision'))
1887 'specific revision'))
1888 node = repo.changectx(opts['rev']).node()
1888 node = repo.changectx(opts['rev']).node()
1889 mf = repo.manifest.read(repo.changelog.read(node)[0])
1889 mf = repo.manifest.read(repo.changelog.read(node)[0])
1890 if node == parent:
1890 if node == parent:
1891 pmf = mf
1891 pmf = mf
1892 else:
1892 else:
1893 pmf = None
1893 pmf = None
1894
1894
1895 wlock = repo.wlock()
1895 wlock = repo.wlock()
1896
1896
1897 # need all matching names in dirstate and manifest of target rev,
1897 # need all matching names in dirstate and manifest of target rev,
1898 # so have to walk both. do not print errors if files exist in one
1898 # so have to walk both. do not print errors if files exist in one
1899 # but not other.
1899 # but not other.
1900
1900
1901 names = {}
1901 names = {}
1902 target_only = {}
1902 target_only = {}
1903
1903
1904 # walk dirstate.
1904 # walk dirstate.
1905
1905
1906 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
1906 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
1907 badmatch=mf.has_key):
1907 badmatch=mf.has_key):
1908 names[abs] = (rel, exact)
1908 names[abs] = (rel, exact)
1909 if src == 'b':
1909 if src == 'b':
1910 target_only[abs] = True
1910 target_only[abs] = True
1911
1911
1912 # walk target manifest.
1912 # walk target manifest.
1913
1913
1914 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1914 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1915 badmatch=names.has_key):
1915 badmatch=names.has_key):
1916 if abs in names: continue
1916 if abs in names: continue
1917 names[abs] = (rel, exact)
1917 names[abs] = (rel, exact)
1918 target_only[abs] = True
1918 target_only[abs] = True
1919
1919
1920 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
1920 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
1921 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
1921 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
1922
1922
1923 revert = ([], _('reverting %s\n'))
1923 revert = ([], _('reverting %s\n'))
1924 add = ([], _('adding %s\n'))
1924 add = ([], _('adding %s\n'))
1925 remove = ([], _('removing %s\n'))
1925 remove = ([], _('removing %s\n'))
1926 forget = ([], _('forgetting %s\n'))
1926 forget = ([], _('forgetting %s\n'))
1927 undelete = ([], _('undeleting %s\n'))
1927 undelete = ([], _('undeleting %s\n'))
1928 update = {}
1928 update = {}
1929
1929
1930 disptable = (
1930 disptable = (
1931 # dispatch table:
1931 # dispatch table:
1932 # file state
1932 # file state
1933 # action if in target manifest
1933 # action if in target manifest
1934 # action if not in target manifest
1934 # action if not in target manifest
1935 # make backup if in target manifest
1935 # make backup if in target manifest
1936 # make backup if not in target manifest
1936 # make backup if not in target manifest
1937 (modified, revert, remove, True, True),
1937 (modified, revert, remove, True, True),
1938 (added, revert, forget, True, False),
1938 (added, revert, forget, True, False),
1939 (removed, undelete, None, False, False),
1939 (removed, undelete, None, False, False),
1940 (deleted, revert, remove, False, False),
1940 (deleted, revert, remove, False, False),
1941 (unknown, add, None, True, False),
1941 (unknown, add, None, True, False),
1942 (target_only, add, None, False, False),
1942 (target_only, add, None, False, False),
1943 )
1943 )
1944
1944
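# worked example of the dispatch table above (illustrative): a locally
# modified file that is still present in the target manifest matches the
# row (modified, revert, remove, True, True), so handle(revert, True) runs:
# the file is queued on revert[0], a .orig backup of the working copy is
# saved, and "reverting <file>" is printed. had the file been missing from
# the target manifest, handle(remove, True) would schedule it for removal
# instead.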
1945 entries = names.items()
1945 entries = names.items()
1946 entries.sort()
1946 entries.sort()
1947
1947
1948 for abs, (rel, exact) in entries:
1948 for abs, (rel, exact) in entries:
1949 mfentry = mf.get(abs)
1949 mfentry = mf.get(abs)
1950 def handle(xlist, dobackup):
1950 def handle(xlist, dobackup):
1951 xlist[0].append(abs)
1951 xlist[0].append(abs)
1952 update[abs] = 1
1952 update[abs] = 1
1953 if dobackup and not opts['no_backup'] and os.path.exists(rel):
1953 if dobackup and not opts['no_backup'] and os.path.exists(rel):
1954 bakname = "%s.orig" % rel
1954 bakname = "%s.orig" % rel
1955 ui.note(_('saving current version of %s as %s\n') %
1955 ui.note(_('saving current version of %s as %s\n') %
1956 (rel, bakname))
1956 (rel, bakname))
1957 if not opts.get('dry_run'):
1957 if not opts.get('dry_run'):
1958 util.copyfile(rel, bakname)
1958 util.copyfile(rel, bakname)
1959 if ui.verbose or not exact:
1959 if ui.verbose or not exact:
1960 ui.status(xlist[1] % rel)
1960 ui.status(xlist[1] % rel)
1961 for table, hitlist, misslist, backuphit, backupmiss in disptable:
1961 for table, hitlist, misslist, backuphit, backupmiss in disptable:
1962 if abs not in table: continue
1962 if abs not in table: continue
1963 # file has changed in dirstate
1963 # file has changed in dirstate
1964 if mfentry:
1964 if mfentry:
1965 handle(hitlist, backuphit)
1965 handle(hitlist, backuphit)
1966 elif misslist is not None:
1966 elif misslist is not None:
1967 handle(misslist, backupmiss)
1967 handle(misslist, backupmiss)
1968 else:
1968 else:
1969 if exact: ui.warn(_('file not managed: %s\n') % rel)
1969 if exact: ui.warn(_('file not managed: %s\n') % rel)
1970 break
1970 break
1971 else:
1971 else:
1972 # file has not changed in dirstate
1972 # file has not changed in dirstate
1973 if node == parent:
1973 if node == parent:
1974 if exact: ui.warn(_('no changes needed to %s\n') % rel)
1974 if exact: ui.warn(_('no changes needed to %s\n') % rel)
1975 continue
1975 continue
1976 if pmf is None:
1976 if pmf is None:
1977 # only need parent manifest in this unlikely case,
1977 # only need parent manifest in this unlikely case,
1978 # so do not read by default
1978 # so do not read by default
1979 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
1979 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
1980 if abs in pmf:
1980 if abs in pmf:
1981 if mfentry:
1981 if mfentry:
1982 # if version of file is same in parent and target
1982 # if version of file is same in parent and target
1983 # manifests, do nothing
1983 # manifests, do nothing
1984 if pmf[abs] != mfentry:
1984 if pmf[abs] != mfentry:
1985 handle(revert, False)
1985 handle(revert, False)
1986 else:
1986 else:
1987 handle(remove, False)
1987 handle(remove, False)
1988
1988
1989 if not opts.get('dry_run'):
1989 if not opts.get('dry_run'):
1990 repo.dirstate.forget(forget[0])
1990 repo.dirstate.forget(forget[0])
1991 r = hg.revert(repo, node, update.has_key, wlock)
1991 r = hg.revert(repo, node, update.has_key, wlock)
1992 repo.dirstate.update(add[0], 'a')
1992 repo.dirstate.update(add[0], 'a')
1993 repo.dirstate.update(undelete[0], 'n')
1993 repo.dirstate.update(undelete[0], 'n')
1994 repo.dirstate.update(remove[0], 'r')
1994 repo.dirstate.update(remove[0], 'r')
1995 return r
1995 return r
1996
1996
1997 def rollback(ui, repo):
1997 def rollback(ui, repo):
1998 """roll back the last transaction in this repository
1998 """roll back the last transaction in this repository
1999
1999
2000 Roll back the last transaction in this repository, restoring the
2000 Roll back the last transaction in this repository, restoring the
2001 project to its state prior to the transaction.
2001 project to its state prior to the transaction.
2002
2002
2003 Transactions are used to encapsulate the effects of all commands
2003 Transactions are used to encapsulate the effects of all commands
2004 that create new changesets or propagate existing changesets into a
2004 that create new changesets or propagate existing changesets into a
2005 repository. For example, the following commands are transactional,
2005 repository. For example, the following commands are transactional,
2006 and their effects can be rolled back:
2006 and their effects can be rolled back:
2007
2007
2008 commit
2008 commit
2009 import
2009 import
2010 pull
2010 pull
2011 push (with this repository as destination)
2011 push (with this repository as destination)
2012 unbundle
2012 unbundle
2013
2013
2014 This command should be used with care. There is only one level of
2014 This command should be used with care. There is only one level of
2015 rollback, and there is no way to undo a rollback.
2015 rollback, and there is no way to undo a rollback.
2016
2016
2017 This command is not intended for use on public repositories. Once
2017 This command is not intended for use on public repositories. Once
2018 changes are visible for pull by other users, rolling a transaction
2018 changes are visible for pull by other users, rolling a transaction
2019 back locally is ineffective (someone else may already have pulled
2019 back locally is ineffective (someone else may already have pulled
2020 the changes). Furthermore, a race is possible with readers of the
2020 the changes). Furthermore, a race is possible with readers of the
2021 repository; for example, an in-progress pull from the repository
2021 repository; for example, an in-progress pull from the repository
2022 may fail if a rollback is performed.
2022 may fail if a rollback is performed.
2023 """
2023 """
2024 repo.rollback()
2024 repo.rollback()
2025
2025
2026 def root(ui, repo):
2026 def root(ui, repo):
2027 """print the root (top) of the current working dir
2027 """print the root (top) of the current working dir
2028
2028
2029 Print the root directory of the current repository.
2029 Print the root directory of the current repository.
2030 """
2030 """
2031 ui.write(repo.root + "\n")
2031 ui.write(repo.root + "\n")
2032
2032
2033 def serve(ui, repo, **opts):
2033 def serve(ui, repo, **opts):
2034 """export the repository via HTTP
2034 """export the repository via HTTP
2035
2035
2036 Start a local HTTP repository browser and pull server.
2036 Start a local HTTP repository browser and pull server.
2037
2037
2038 By default, the server logs accesses to stdout and errors to
2038 By default, the server logs accesses to stdout and errors to
2039 stderr. Use the "-A" and "-E" options to log to files.
2039 stderr. Use the "-A" and "-E" options to log to files.
2040 """
2040 """
2041
2041
2042 if opts["stdio"]:
2042 if opts["stdio"]:
2043 if repo is None:
2043 if repo is None:
2044 raise hg.RepoError(_("There is no Mercurial repository here"
2044 raise hg.RepoError(_("There is no Mercurial repository here"
2045 " (.hg not found)"))
2045 " (.hg not found)"))
2046 s = sshserver.sshserver(ui, repo)
2046 s = sshserver.sshserver(ui, repo)
2047 s.serve_forever()
2047 s.serve_forever()
2048
2048
2049 optlist = ("name templates style address port ipv6"
2049 optlist = ("name templates style address port ipv6"
2050 " accesslog errorlog webdir_conf")
2050 " accesslog errorlog webdir_conf")
2051 for o in optlist.split():
2051 for o in optlist.split():
2052 if opts[o]:
2052 if opts[o]:
2053 ui.setconfig("web", o, str(opts[o]))
2053 ui.setconfig("web", o, str(opts[o]))
2054
2054
2055 if repo is None and not ui.config("web", "webdir_conf"):
2055 if repo is None and not ui.config("web", "webdir_conf"):
2056 raise hg.RepoError(_("There is no Mercurial repository here"
2056 raise hg.RepoError(_("There is no Mercurial repository here"
2057 " (.hg not found)"))
2057 " (.hg not found)"))
2058
2058
2059 if opts['daemon'] and not opts['daemon_pipefds']:
2059 if opts['daemon'] and not opts['daemon_pipefds']:
2060 rfd, wfd = os.pipe()
2060 rfd, wfd = os.pipe()
2061 args = sys.argv[:]
2061 args = sys.argv[:]
2062 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2062 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2063 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2063 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2064 args[0], args)
2064 args[0], args)
2065 os.close(wfd)
2065 os.close(wfd)
2066 os.read(rfd, 1)
2066 os.read(rfd, 1)
2067 os._exit(0)
2067 os._exit(0)
2068
2068
2069 httpd = hgweb.server.create_server(ui, repo)
2069 httpd = hgweb.server.create_server(ui, repo)
2070
2070
2071 if ui.verbose:
2071 if ui.verbose:
2072 if httpd.port != 80:
2072 if httpd.port != 80:
2073 ui.status(_('listening at http://%s:%d/\n') %
2073 ui.status(_('listening at http://%s:%d/\n') %
2074 (httpd.addr, httpd.port))
2074 (httpd.addr, httpd.port))
2075 else:
2075 else:
2076 ui.status(_('listening at http://%s/\n') % httpd.addr)
2076 ui.status(_('listening at http://%s/\n') % httpd.addr)
2077
2077
2078 if opts['pid_file']:
2078 if opts['pid_file']:
2079 fp = open(opts['pid_file'], 'w')
2079 fp = open(opts['pid_file'], 'w')
2080 fp.write(str(os.getpid()) + '\n')
2080 fp.write(str(os.getpid()) + '\n')
2081 fp.close()
2081 fp.close()
2082
2082
2083 if opts['daemon_pipefds']:
2083 if opts['daemon_pipefds']:
2084 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2084 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2085 os.close(rfd)
2085 os.close(rfd)
2086 os.write(wfd, 'y')
2086 os.write(wfd, 'y')
2087 os.close(wfd)
2087 os.close(wfd)
2088 sys.stdout.flush()
2088 sys.stdout.flush()
2089 sys.stderr.flush()
2089 sys.stderr.flush()
2090 fd = os.open(util.nulldev, os.O_RDWR)
2090 fd = os.open(util.nulldev, os.O_RDWR)
2091 if fd != 0: os.dup2(fd, 0)
2091 if fd != 0: os.dup2(fd, 0)
2092 if fd != 1: os.dup2(fd, 1)
2092 if fd != 1: os.dup2(fd, 1)
2093 if fd != 2: os.dup2(fd, 2)
2093 if fd != 2: os.dup2(fd, 2)
2094 if fd not in (0, 1, 2): os.close(fd)
2094 if fd not in (0, 1, 2): os.close(fd)
2095
2095
2096 httpd.serve_forever()
2096 httpd.serve_forever()
2097
2097
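# serve() daemonizes with a small pipe handshake: the parent re-runs itself
# with --daemon-pipefds and blocks on os.read() until the child, once its
# listener is up, writes a byte back and points stdin/stdout/stderr at the
# null device. a condensed standalone sketch of that handshake follows
# (function names are illustrative, not Mercurial API):
import os, sys

def respawn_and_wait():
    rfd, wfd = os.pipe()
    args = sys.argv + ['--daemon-pipefds=%d,%d' % (rfd, wfd)]
    os.spawnvp(os.P_NOWAIT, args[0], args)   # child inherits both pipe ends
    os.close(wfd)
    os.read(rfd, 1)                          # returns once the child is ready
    os._exit(0)

def signal_ready(rfd, wfd):
    os.close(rfd)
    os.write(wfd, 'y')                       # wake the waiting parent
    os.close(wfd)
    fd = os.open(os.devnull, os.O_RDWR)      # detach stdio from the terminal
    for std in (0, 1, 2):
        if fd != std:
            os.dup2(fd, std)
    if fd not in (0, 1, 2):
        os.close(fd)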
2098 def status(ui, repo, *pats, **opts):
2098 def status(ui, repo, *pats, **opts):
2099 """show changed files in the working directory
2099 """show changed files in the working directory
2100
2100
2101 Show status of files in the repository. If names are given, only
2101 Show status of files in the repository. If names are given, only
2102 files that match are shown. Files that are clean or ignored are
2102 files that match are shown. Files that are clean or ignored are
2103 not listed unless -c (clean), -i (ignored) or -A is given.
2103 not listed unless -c (clean), -i (ignored) or -A is given.
2104
2104
2105 If one revision is given, it is used as the base revision.
2105 If one revision is given, it is used as the base revision.
2106 If two revisions are given, the difference between them is shown.
2106 If two revisions are given, the difference between them is shown.
2107
2107
2108 The codes used to show the status of files are:
2108 The codes used to show the status of files are:
2109 M = modified
2109 M = modified
2110 A = added
2110 A = added
2111 R = removed
2111 R = removed
2112 C = clean
2112 C = clean
2113 ! = deleted, but still tracked
2113 ! = deleted, but still tracked
2114 ? = not tracked
2114 ? = not tracked
2115 I = ignored (not shown by default)
2115 I = ignored (not shown by default)
2116 = the previous added file was copied from here
2116 = the previous added file was copied from here
2117 """
2117 """
2118
2118
2119 all = opts['all']
2119 all = opts['all']
2120 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2120 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2121
2121
2122 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2122 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2123 cwd = (pats and repo.getcwd()) or ''
2123 cwd = (pats and repo.getcwd()) or ''
2124 modified, added, removed, deleted, unknown, ignored, clean = [
2124 modified, added, removed, deleted, unknown, ignored, clean = [
2125 [util.pathto(cwd, x) for x in n]
2125 [util.pathto(cwd, x) for x in n]
2126 for n in repo.status(node1=node1, node2=node2, files=files,
2126 for n in repo.status(node1=node1, node2=node2, files=files,
2127 match=matchfn,
2127 match=matchfn,
2128 list_ignored=all or opts['ignored'],
2128 list_ignored=all or opts['ignored'],
2129 list_clean=all or opts['clean'])]
2129 list_clean=all or opts['clean'])]
2130
2130
2131 changetypes = (('modified', 'M', modified),
2131 changetypes = (('modified', 'M', modified),
2132 ('added', 'A', added),
2132 ('added', 'A', added),
2133 ('removed', 'R', removed),
2133 ('removed', 'R', removed),
2134 ('deleted', '!', deleted),
2134 ('deleted', '!', deleted),
2135 ('unknown', '?', unknown),
2135 ('unknown', '?', unknown),
2136 ('ignored', 'I', ignored))
2136 ('ignored', 'I', ignored))
2137
2137
2138 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2138 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2139
2139
2140 end = opts['print0'] and '\0' or '\n'
2140 end = opts['print0'] and '\0' or '\n'
2141
2141
2142 for opt, char, changes in ([ct for ct in explicit_changetypes
2142 for opt, char, changes in ([ct for ct in explicit_changetypes
2143 if all or opts[ct[0]]]
2143 if all or opts[ct[0]]]
2144 or changetypes):
2144 or changetypes):
2145 if opts['no_status']:
2145 if opts['no_status']:
2146 format = "%%s%s" % end
2146 format = "%%s%s" % end
2147 else:
2147 else:
2148 format = "%s %%s%s" % (char, end)
2148 format = "%s %%s%s" % (char, end)
2149
2149
2150 for f in changes:
2150 for f in changes:
2151 ui.write(format % f)
2151 ui.write(format % f)
2152 if ((all or opts.get('copies')) and not opts.get('no_status')):
2152 if ((all or opts.get('copies')) and not opts.get('no_status')):
2153 copied = repo.dirstate.copied(f)
2153 copied = repo.dirstate.copied(f)
2154 if copied:
2154 if copied:
2155 ui.write(' %s%s' % (copied, end))
2155 ui.write(' %s%s' % (copied, end))
2156
2156
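# status() builds its output template in two passes: the first substitution
# above bakes in the status character and line terminator, while "%%s"
# survives as a literal "%s" placeholder for the filename. a minimal
# illustration (the values are made up):
char, end = 'M', '\n'
fmt = "%s %%s%s" % (char, end)      # -> 'M %s\n'
line = fmt % 'commands.py'          # -> 'M commands.py\n'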
2157 def tag(ui, repo, name, rev_=None, **opts):
2157 def tag(ui, repo, name, rev_=None, **opts):
2158 """add a tag for the current tip or a given revision
2158 """add a tag for the current tip or a given revision
2159
2159
2160 Name a particular revision using <name>.
2160 Name a particular revision using <name>.
2161
2161
2162 Tags are used to name particular revisions of the repository and are
2162 Tags are used to name particular revisions of the repository and are
2163 very useful to compare different revisions, to go back to significant
2163 very useful to compare different revisions, to go back to significant
2164 earlier versions, or to mark branch points as releases, etc.
2164 earlier versions, or to mark branch points as releases, etc.
2165
2165
2166 If no revision is given, the parent of the working directory is used.
2166 If no revision is given, the parent of the working directory is used.
2167
2167
2168 To facilitate version control, distribution, and merging of tags,
2168 To facilitate version control, distribution, and merging of tags,
2169 they are stored as a file named ".hgtags" which is managed
2169 they are stored as a file named ".hgtags" which is managed
2170 similarly to other project files and can be hand-edited if
2170 similarly to other project files and can be hand-edited if
2171 necessary. The file '.hg/localtags' is used for local tags (not
2171 necessary. The file '.hg/localtags' is used for local tags (not
2172 shared among repositories).
2172 shared among repositories).
2173 """
2173 """
2174 if name in ['tip', '.']:
2174 if name in ['tip', '.']:
2175 raise util.Abort(_("the name '%s' is reserved") % name)
2175 raise util.Abort(_("the name '%s' is reserved") % name)
2176 if rev_ is not None:
2176 if rev_ is not None:
2177 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2177 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2178 "please use 'hg tag [-r REV] NAME' instead\n"))
2178 "please use 'hg tag [-r REV] NAME' instead\n"))
2179 if opts['rev']:
2179 if opts['rev']:
2180 raise util.Abort(_("use only one form to specify the revision"))
2180 raise util.Abort(_("use only one form to specify the revision"))
2181 if opts['rev']:
2181 if opts['rev']:
2182 rev_ = opts['rev']
2182 rev_ = opts['rev']
2183 if not rev_ and repo.dirstate.parents()[1] != nullid:
2183 if not rev_ and repo.dirstate.parents()[1] != nullid:
2184 raise util.Abort(_('uncommitted merge - please provide a '
2184 raise util.Abort(_('uncommitted merge - please provide a '
2185 'specific revision'))
2185 'specific revision'))
2186 r = repo.changectx(rev_).node()
2186 r = repo.changectx(rev_).node()
2187
2187
2188 message = opts['message']
2188 message = opts['message']
2189 if not message:
2189 if not message:
2190 message = _('Added tag %s for changeset %s') % (name, short(r))
2190 message = _('Added tag %s for changeset %s') % (name, short(r))
2191
2191
2192 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2192 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2193
2193
2194 def tags(ui, repo):
2194 def tags(ui, repo):
2195 """list repository tags
2195 """list repository tags
2196
2196
2197 List the repository tags.
2197 List the repository tags.
2198
2198
2199 This lists both regular and local tags.
2199 This lists both regular and local tags.
2200 """
2200 """
2201
2201
2202 l = repo.tagslist()
2202 l = repo.tagslist()
2203 l.reverse()
2203 l.reverse()
2204 hexfunc = ui.debugflag and hex or short
2204 hexfunc = ui.debugflag and hex or short
2205 for t, n in l:
2205 for t, n in l:
2206 try:
2206 try:
2207 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2207 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2208 except KeyError:
2208 except KeyError:
2209 r = " ?:?"
2209 r = " ?:?"
2210 if ui.quiet:
2210 if ui.quiet:
2211 ui.write("%s\n" % t)
2211 ui.write("%s\n" % t)
2212 else:
2212 else:
2213 ui.write("%-30s %s\n" % (t, r))
2213 t = util.localsub(t, 30)
2214 t += " " * (30 - util.locallen(t))
2215 ui.write("%s %s\n" % (t, r))
2214
2216
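# the change in tags() above replaces the fixed "%-30s" padding: once tag
# names are transcoded to the local encoding for display, byte length and
# character count can differ, so the column width is now computed with
# util.localsub/util.locallen. a standalone sketch of the same idea
# (hypothetical helper, not the util.py implementation; assumes UTF-8 as
# the local encoding):
def padtag(t, width=30, encoding='utf-8'):
    # count characters rather than bytes, then pad to the column width
    try:
        n = len(t.decode(encoding))
    except UnicodeDecodeError:
        n = len(t)
    return t + " " * max(0, width - n)

# padtag('caf\xc3\xa9') pads with 26 spaces to fill 30 columns, while
# "%-30s" would count 5 bytes and add only 25.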
2215 def tip(ui, repo, **opts):
2217 def tip(ui, repo, **opts):
2216 """show the tip revision
2218 """show the tip revision
2217
2219
2218 Show the tip revision.
2220 Show the tip revision.
2219 """
2221 """
2220 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2222 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2221
2223
2222 def unbundle(ui, repo, fname, **opts):
2224 def unbundle(ui, repo, fname, **opts):
2223 """apply a changegroup file
2225 """apply a changegroup file
2224
2226
2225 Apply a compressed changegroup file generated by the bundle
2227 Apply a compressed changegroup file generated by the bundle
2226 command.
2228 command.
2227 """
2229 """
2228 gen = changegroup.readbundle(urllib.urlopen(fname))
2230 gen = changegroup.readbundle(urllib.urlopen(fname))
2229 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2231 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2230 return postincoming(ui, repo, modheads, opts['update'])
2232 return postincoming(ui, repo, modheads, opts['update'])
2231
2233
2232 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2234 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2233 branch=None):
2235 branch=None):
2234 """update or merge working directory
2236 """update or merge working directory
2235
2237
2236 Update the working directory to the specified revision.
2238 Update the working directory to the specified revision.
2237
2239
2238 If there are no outstanding changes in the working directory and
2240 If there are no outstanding changes in the working directory and
2239 there is a linear relationship between the current version and the
2241 there is a linear relationship between the current version and the
2240 requested version, the result is the requested version.
2242 requested version, the result is the requested version.
2241
2243
2242 To merge the working directory with another revision, use the
2244 To merge the working directory with another revision, use the
2243 merge command.
2245 merge command.
2244
2246
2245 By default, update will refuse to run if doing so would require
2247 By default, update will refuse to run if doing so would require
2246 merging or discarding local changes.
2248 merging or discarding local changes.
2247 """
2249 """
2248 node = _lookup(repo, node, branch)
2250 node = _lookup(repo, node, branch)
2249 if clean:
2251 if clean:
2250 return hg.clean(repo, node)
2252 return hg.clean(repo, node)
2251 else:
2253 else:
2252 return hg.update(repo, node)
2254 return hg.update(repo, node)
2253
2255
2254 def _lookup(repo, node, branch=None):
2256 def _lookup(repo, node, branch=None):
2255 if branch:
2257 if branch:
2256 repo.ui.warn(_("the --branch option is deprecated, "
2258 repo.ui.warn(_("the --branch option is deprecated, "
2257 "please use 'hg branch' instead\n"))
2259 "please use 'hg branch' instead\n"))
2258 br = repo.branchlookup(branch=branch)
2260 br = repo.branchlookup(branch=branch)
2259 found = []
2261 found = []
2260 for x in br:
2262 for x in br:
2261 if branch in br[x]:
2263 if branch in br[x]:
2262 found.append(x)
2264 found.append(x)
2263 if len(found) > 1:
2265 if len(found) > 1:
2264 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2266 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2265 for x in found:
2267 for x in found:
2266 cmdutil.show_changeset(repo.ui, repo, {}).show(changenode=x)
2268 cmdutil.show_changeset(repo.ui, repo, {}).show(changenode=x)
2267 raise util.Abort("")
2269 raise util.Abort("")
2268 if len(found) == 1:
2270 if len(found) == 1:
2269 node = found[0]
2271 node = found[0]
2270 repo.ui.warn(_("Using head %s for branch %s\n")
2272 repo.ui.warn(_("Using head %s for branch %s\n")
2271 % (short(node), branch))
2273 % (short(node), branch))
2272 else:
2274 else:
2273 raise util.Abort(_("branch %s not found") % branch)
2275 raise util.Abort(_("branch %s not found") % branch)
2274 else:
2276 else:
2275 node = node and repo.lookup(node) or repo.changelog.tip()
2277 node = node and repo.lookup(node) or repo.changelog.tip()
2276 return node
2278 return node
2277
2279
2278 def verify(ui, repo):
2280 def verify(ui, repo):
2279 """verify the integrity of the repository
2281 """verify the integrity of the repository
2280
2282
2281 Verify the integrity of the current repository.
2283 Verify the integrity of the current repository.
2282
2284
2283 This will perform an extensive check of the repository's
2285 This will perform an extensive check of the repository's
2284 integrity, validating the hashes and checksums of each entry in
2286 integrity, validating the hashes and checksums of each entry in
2285 the changelog, manifest, and tracked files, as well as the
2287 the changelog, manifest, and tracked files, as well as the
2286 integrity of their crosslinks and indices.
2288 integrity of their crosslinks and indices.
2287 """
2289 """
2288 return hg.verify(repo)
2290 return hg.verify(repo)
2289
2291
2290 def version_(ui):
2292 def version_(ui):
2291 """output version and copyright information"""
2293 """output version and copyright information"""
2292 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2294 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2293 % version.get_version())
2295 % version.get_version())
2294 ui.status(_(
2296 ui.status(_(
2295 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2297 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2296 "This is free software; see the source for copying conditions. "
2298 "This is free software; see the source for copying conditions. "
2297 "There is NO\nwarranty; "
2299 "There is NO\nwarranty; "
2298 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2300 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2299 ))
2301 ))
2300
2302
2301 # Command options and aliases are listed here, alphabetically
2303 # Command options and aliases are listed here, alphabetically
2302
2304
2303 globalopts = [
2305 globalopts = [
2304 ('R', 'repository', '',
2306 ('R', 'repository', '',
2305 _('repository root directory or symbolic path name')),
2307 _('repository root directory or symbolic path name')),
2306 ('', 'cwd', '', _('change working directory')),
2308 ('', 'cwd', '', _('change working directory')),
2307 ('y', 'noninteractive', None,
2309 ('y', 'noninteractive', None,
2308 _('do not prompt, assume \'yes\' for any required answers')),
2310 _('do not prompt, assume \'yes\' for any required answers')),
2309 ('q', 'quiet', None, _('suppress output')),
2311 ('q', 'quiet', None, _('suppress output')),
2310 ('v', 'verbose', None, _('enable additional output')),
2312 ('v', 'verbose', None, _('enable additional output')),
2311 ('', 'config', [], _('set/override config option')),
2313 ('', 'config', [], _('set/override config option')),
2312 ('', 'debug', None, _('enable debugging output')),
2314 ('', 'debug', None, _('enable debugging output')),
2313 ('', 'debugger', None, _('start debugger')),
2315 ('', 'debugger', None, _('start debugger')),
2314 ('', 'lsprof', None, _('print improved command execution profile')),
2316 ('', 'lsprof', None, _('print improved command execution profile')),
2315 ('', 'traceback', None, _('print traceback on exception')),
2317 ('', 'traceback', None, _('print traceback on exception')),
2316 ('', 'time', None, _('time how long the command takes')),
2318 ('', 'time', None, _('time how long the command takes')),
2317 ('', 'profile', None, _('print command execution profile')),
2319 ('', 'profile', None, _('print command execution profile')),
2318 ('', 'version', None, _('output version information and exit')),
2320 ('', 'version', None, _('output version information and exit')),
2319 ('h', 'help', None, _('display help and exit')),
2321 ('h', 'help', None, _('display help and exit')),
2320 ]
2322 ]
2321
2323
2322 dryrunopts = [('n', 'dry-run', None,
2324 dryrunopts = [('n', 'dry-run', None,
2323 _('do not perform actions, just print output'))]
2325 _('do not perform actions, just print output'))]
2324
2326
2325 remoteopts = [
2327 remoteopts = [
2326 ('e', 'ssh', '', _('specify ssh command to use')),
2328 ('e', 'ssh', '', _('specify ssh command to use')),
2327 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2329 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2328 ]
2330 ]
2329
2331
2330 walkopts = [
2332 walkopts = [
2331 ('I', 'include', [], _('include names matching the given patterns')),
2333 ('I', 'include', [], _('include names matching the given patterns')),
2332 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2334 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2333 ]
2335 ]
2334
2336
2335 table = {
2337 table = {
2336 "^add":
2338 "^add":
2337 (add,
2339 (add,
2338 walkopts + dryrunopts,
2340 walkopts + dryrunopts,
2339 _('hg add [OPTION]... [FILE]...')),
2341 _('hg add [OPTION]... [FILE]...')),
2340 "addremove":
2342 "addremove":
2341 (addremove,
2343 (addremove,
2342 [('s', 'similarity', '',
2344 [('s', 'similarity', '',
2343 _('guess renamed files by similarity (0<=s<=100)')),
2345 _('guess renamed files by similarity (0<=s<=100)')),
2344 ] + walkopts + dryrunopts,
2346 ] + walkopts + dryrunopts,
2345 _('hg addremove [OPTION]... [FILE]...')),
2347 _('hg addremove [OPTION]... [FILE]...')),
2346 "^annotate":
2348 "^annotate":
2347 (annotate,
2349 (annotate,
2348 [('r', 'rev', '', _('annotate the specified revision')),
2350 [('r', 'rev', '', _('annotate the specified revision')),
2349 ('f', 'follow', None, _('follow file copies and renames')),
2351 ('f', 'follow', None, _('follow file copies and renames')),
2350 ('a', 'text', None, _('treat all files as text')),
2352 ('a', 'text', None, _('treat all files as text')),
2351 ('u', 'user', None, _('list the author')),
2353 ('u', 'user', None, _('list the author')),
2352 ('d', 'date', None, _('list the date')),
2354 ('d', 'date', None, _('list the date')),
2353 ('n', 'number', None, _('list the revision number (default)')),
2355 ('n', 'number', None, _('list the revision number (default)')),
2354 ('c', 'changeset', None, _('list the changeset')),
2356 ('c', 'changeset', None, _('list the changeset')),
2355 ] + walkopts,
2357 ] + walkopts,
2356 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2358 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2357 "archive":
2359 "archive":
2358 (archive,
2360 (archive,
2359 [('', 'no-decode', None, _('do not pass files through decoders')),
2361 [('', 'no-decode', None, _('do not pass files through decoders')),
2360 ('p', 'prefix', '', _('directory prefix for files in archive')),
2362 ('p', 'prefix', '', _('directory prefix for files in archive')),
2361 ('r', 'rev', '', _('revision to distribute')),
2363 ('r', 'rev', '', _('revision to distribute')),
2362 ('t', 'type', '', _('type of distribution to create')),
2364 ('t', 'type', '', _('type of distribution to create')),
2363 ] + walkopts,
2365 ] + walkopts,
2364 _('hg archive [OPTION]... DEST')),
2366 _('hg archive [OPTION]... DEST')),
2365 "backout":
2367 "backout":
2366 (backout,
2368 (backout,
2367 [('', 'merge', None,
2369 [('', 'merge', None,
2368 _('merge with old dirstate parent after backout')),
2370 _('merge with old dirstate parent after backout')),
2369 ('m', 'message', '', _('use <text> as commit message')),
2371 ('m', 'message', '', _('use <text> as commit message')),
2370 ('l', 'logfile', '', _('read commit message from <file>')),
2372 ('l', 'logfile', '', _('read commit message from <file>')),
2371 ('d', 'date', '', _('record datecode as commit date')),
2373 ('d', 'date', '', _('record datecode as commit date')),
2372 ('', 'parent', '', _('parent to choose when backing out merge')),
2374 ('', 'parent', '', _('parent to choose when backing out merge')),
2373 ('u', 'user', '', _('record user as committer')),
2375 ('u', 'user', '', _('record user as committer')),
2374 ] + walkopts,
2376 ] + walkopts,
2375 _('hg backout [OPTION]... REV')),
2377 _('hg backout [OPTION]... REV')),
2376 "branch": (branch, [], _('hg branch [NAME]')),
2378 "branch": (branch, [], _('hg branch [NAME]')),
2377 "branches": (branches, [], _('hg branches')),
2379 "branches": (branches, [], _('hg branches')),
2378 "bundle":
2380 "bundle":
2379 (bundle,
2381 (bundle,
2380 [('f', 'force', None,
2382 [('f', 'force', None,
2381 _('run even when remote repository is unrelated')),
2383 _('run even when remote repository is unrelated')),
2382 ('r', 'rev', [],
2384 ('r', 'rev', [],
2383 _('a changeset you would like to bundle')),
2385 _('a changeset you would like to bundle')),
2384 ('', 'base', [],
2386 ('', 'base', [],
2385 _('a base changeset to specify instead of a destination')),
2387 _('a base changeset to specify instead of a destination')),
2386 ] + remoteopts,
2388 ] + remoteopts,
2387 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2389 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2388 "cat":
2390 "cat":
2389 (cat,
2391 (cat,
2390 [('o', 'output', '', _('print output to file with formatted name')),
2392 [('o', 'output', '', _('print output to file with formatted name')),
2391 ('r', 'rev', '', _('print the given revision')),
2393 ('r', 'rev', '', _('print the given revision')),
2392 ] + walkopts,
2394 ] + walkopts,
2393 _('hg cat [OPTION]... FILE...')),
2395 _('hg cat [OPTION]... FILE...')),
2394 "^clone":
2396 "^clone":
2395 (clone,
2397 (clone,
2396 [('U', 'noupdate', None, _('do not update the new working directory')),
2398 [('U', 'noupdate', None, _('do not update the new working directory')),
2397 ('r', 'rev', [],
2399 ('r', 'rev', [],
2398 _('a changeset you would like to have after cloning')),
2400 _('a changeset you would like to have after cloning')),
2399 ('', 'pull', None, _('use pull protocol to copy metadata')),
2401 ('', 'pull', None, _('use pull protocol to copy metadata')),
2400 ('', 'uncompressed', None,
2402 ('', 'uncompressed', None,
2401 _('use uncompressed transfer (fast over LAN)')),
2403 _('use uncompressed transfer (fast over LAN)')),
2402 ] + remoteopts,
2404 ] + remoteopts,
2403 _('hg clone [OPTION]... SOURCE [DEST]')),
2405 _('hg clone [OPTION]... SOURCE [DEST]')),
2404 "^commit|ci":
2406 "^commit|ci":
2405 (commit,
2407 (commit,
2406 [('A', 'addremove', None,
2408 [('A', 'addremove', None,
2407 _('mark new/missing files as added/removed before committing')),
2409 _('mark new/missing files as added/removed before committing')),
2408 ('m', 'message', '', _('use <text> as commit message')),
2410 ('m', 'message', '', _('use <text> as commit message')),
2409 ('l', 'logfile', '', _('read the commit message from <file>')),
2411 ('l', 'logfile', '', _('read the commit message from <file>')),
2410 ('d', 'date', '', _('record datecode as commit date')),
2412 ('d', 'date', '', _('record datecode as commit date')),
2411 ('u', 'user', '', _('record user as committer')),
2413 ('u', 'user', '', _('record user as committer')),
2412 ] + walkopts,
2414 ] + walkopts,
2413 _('hg commit [OPTION]... [FILE]...')),
2415 _('hg commit [OPTION]... [FILE]...')),
2414 "copy|cp":
2416 "copy|cp":
2415 (copy,
2417 (copy,
2416 [('A', 'after', None, _('record a copy that has already occurred')),
2418 [('A', 'after', None, _('record a copy that has already occurred')),
2417 ('f', 'force', None,
2419 ('f', 'force', None,
2418 _('forcibly copy over an existing managed file')),
2420 _('forcibly copy over an existing managed file')),
2419 ] + walkopts + dryrunopts,
2421 ] + walkopts + dryrunopts,
2420 _('hg copy [OPTION]... [SOURCE]... DEST')),
2422 _('hg copy [OPTION]... [SOURCE]... DEST')),
2421 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2423 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2422 "debugcomplete":
2424 "debugcomplete":
2423 (debugcomplete,
2425 (debugcomplete,
2424 [('o', 'options', None, _('show the command options'))],
2426 [('o', 'options', None, _('show the command options'))],
2425 _('debugcomplete [-o] CMD')),
2427 _('debugcomplete [-o] CMD')),
2426 "debugrebuildstate":
2428 "debugrebuildstate":
2427 (debugrebuildstate,
2429 (debugrebuildstate,
2428 [('r', 'rev', '', _('revision to rebuild to'))],
2430 [('r', 'rev', '', _('revision to rebuild to'))],
2429 _('debugrebuildstate [-r REV] [REV]')),
2431 _('debugrebuildstate [-r REV] [REV]')),
2430 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2432 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2431 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2433 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2432 "debugstate": (debugstate, [], _('debugstate')),
2434 "debugstate": (debugstate, [], _('debugstate')),
2433 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2435 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2434 "debugindex": (debugindex, [], _('debugindex FILE')),
2436 "debugindex": (debugindex, [], _('debugindex FILE')),
2435 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2437 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2436 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2438 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2437 "debugwalk":
2439 "debugwalk":
2438 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2440 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2439 "^diff":
2441 "^diff":
2440 (diff,
2442 (diff,
2441 [('r', 'rev', [], _('revision')),
2443 [('r', 'rev', [], _('revision')),
2442 ('a', 'text', None, _('treat all files as text')),
2444 ('a', 'text', None, _('treat all files as text')),
2443 ('p', 'show-function', None,
2445 ('p', 'show-function', None,
2444 _('show which function each change is in')),
2446 _('show which function each change is in')),
2445 ('g', 'git', None, _('use git extended diff format')),
2447 ('g', 'git', None, _('use git extended diff format')),
2446 ('', 'nodates', None, _("don't include dates in diff headers")),
2448 ('', 'nodates', None, _("don't include dates in diff headers")),
2447 ('w', 'ignore-all-space', None,
2449 ('w', 'ignore-all-space', None,
2448 _('ignore white space when comparing lines')),
2450 _('ignore white space when comparing lines')),
2449 ('b', 'ignore-space-change', None,
2451 ('b', 'ignore-space-change', None,
2450 _('ignore changes in the amount of white space')),
2452 _('ignore changes in the amount of white space')),
2451 ('B', 'ignore-blank-lines', None,
2453 ('B', 'ignore-blank-lines', None,
2452 _('ignore changes whose lines are all blank')),
2454 _('ignore changes whose lines are all blank')),
2453 ] + walkopts,
2455 ] + walkopts,
2454 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2456 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2455 "^export":
2457 "^export":
2456 (export,
2458 (export,
2457 [('o', 'output', '', _('print output to file with formatted name')),
2459 [('o', 'output', '', _('print output to file with formatted name')),
2458 ('a', 'text', None, _('treat all files as text')),
2460 ('a', 'text', None, _('treat all files as text')),
2459 ('g', 'git', None, _('use git extended diff format')),
2461 ('g', 'git', None, _('use git extended diff format')),
2460 ('', 'nodates', None, _("don't include dates in diff headers")),
2462 ('', 'nodates', None, _("don't include dates in diff headers")),
2461 ('', 'switch-parent', None, _('diff against the second parent'))],
2463 ('', 'switch-parent', None, _('diff against the second parent'))],
2462 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2464 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2463 "grep":
2465 "grep":
2464 (grep,
2466 (grep,
2465 [('0', 'print0', None, _('end fields with NUL')),
2467 [('0', 'print0', None, _('end fields with NUL')),
2466 ('', 'all', None, _('print all revisions that match')),
2468 ('', 'all', None, _('print all revisions that match')),
2467 ('f', 'follow', None,
2469 ('f', 'follow', None,
2468 _('follow changeset history, or file history across copies and renames')),
2470 _('follow changeset history, or file history across copies and renames')),
2469 ('i', 'ignore-case', None, _('ignore case when matching')),
2471 ('i', 'ignore-case', None, _('ignore case when matching')),
2470 ('l', 'files-with-matches', None,
2472 ('l', 'files-with-matches', None,
2471 _('print only filenames and revs that match')),
2473 _('print only filenames and revs that match')),
2472 ('n', 'line-number', None, _('print matching line numbers')),
2474 ('n', 'line-number', None, _('print matching line numbers')),
2473 ('r', 'rev', [], _('search in given revision range')),
2475 ('r', 'rev', [], _('search in given revision range')),
2474 ('u', 'user', None, _('print user who committed change')),
2476 ('u', 'user', None, _('print user who committed change')),
2475 ] + walkopts,
2477 ] + walkopts,
2476 _('hg grep [OPTION]... PATTERN [FILE]...')),
2478 _('hg grep [OPTION]... PATTERN [FILE]...')),
2477 "heads":
2479 "heads":
2478 (heads,
2480 (heads,
2479 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2481 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2480 ('', 'style', '', _('display using template map file')),
2482 ('', 'style', '', _('display using template map file')),
2481 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2483 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2482 ('', 'template', '', _('display with template'))],
2484 ('', 'template', '', _('display with template'))],
2483 _('hg heads [-r REV]')),
2485 _('hg heads [-r REV]')),
2484 "help": (help_, [], _('hg help [COMMAND]')),
2486 "help": (help_, [], _('hg help [COMMAND]')),
2485 "identify|id": (identify, [], _('hg identify')),
2487 "identify|id": (identify, [], _('hg identify')),
2486 "import|patch":
2488 "import|patch":
2487 (import_,
2489 (import_,
2488 [('p', 'strip', 1,
2490 [('p', 'strip', 1,
2489 _('directory strip option for patch. This has the same\n'
2491 _('directory strip option for patch. This has the same\n'
2490 'meaning as the corresponding patch option')),
2492 'meaning as the corresponding patch option')),
2491 ('m', 'message', '', _('use <text> as commit message')),
2493 ('m', 'message', '', _('use <text> as commit message')),
2492 ('b', 'base', '', _('base path (DEPRECATED)')),
2494 ('b', 'base', '', _('base path (DEPRECATED)')),
2493 ('f', 'force', None,
2495 ('f', 'force', None,
2494 _('skip check for outstanding uncommitted changes'))],
2496 _('skip check for outstanding uncommitted changes'))],
2495 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2497 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2496 "incoming|in": (incoming,
2498 "incoming|in": (incoming,
2497 [('M', 'no-merges', None, _('do not show merges')),
2499 [('M', 'no-merges', None, _('do not show merges')),
2498 ('f', 'force', None,
2500 ('f', 'force', None,
2499 _('run even when remote repository is unrelated')),
2501 _('run even when remote repository is unrelated')),
2500 ('', 'style', '', _('display using template map file')),
2502 ('', 'style', '', _('display using template map file')),
2501 ('n', 'newest-first', None, _('show newest record first')),
2503 ('n', 'newest-first', None, _('show newest record first')),
2502 ('', 'bundle', '', _('file to store the bundles into')),
2504 ('', 'bundle', '', _('file to store the bundles into')),
2503 ('p', 'patch', None, _('show patch')),
2505 ('p', 'patch', None, _('show patch')),
2504 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2506 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2505 ('', 'template', '', _('display with template')),
2507 ('', 'template', '', _('display with template')),
2506 ] + remoteopts,
2508 ] + remoteopts,
2507 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2509 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2508 ' [--bundle FILENAME] [SOURCE]')),
2510 ' [--bundle FILENAME] [SOURCE]')),
2509 "^init":
2511 "^init":
2510 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2512 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2511 "locate":
2513 "locate":
2512 (locate,
2514 (locate,
2513 [('r', 'rev', '', _('search the repository as it stood at rev')),
2515 [('r', 'rev', '', _('search the repository as it stood at rev')),
2514 ('0', 'print0', None,
2516 ('0', 'print0', None,
2515 _('end filenames with NUL, for use with xargs')),
2517 _('end filenames with NUL, for use with xargs')),
2516 ('f', 'fullpath', None,
2518 ('f', 'fullpath', None,
2517 _('print complete paths from the filesystem root')),
2519 _('print complete paths from the filesystem root')),
2518 ] + walkopts,
2520 ] + walkopts,
2519 _('hg locate [OPTION]... [PATTERN]...')),
2521 _('hg locate [OPTION]... [PATTERN]...')),
2520 "^log|history":
2522 "^log|history":
2521 (log,
2523 (log,
2522 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2524 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2523 ('f', 'follow', None,
2525 ('f', 'follow', None,
2524 _('follow changeset history, or file history across copies and renames')),
2526 _('follow changeset history, or file history across copies and renames')),
2525 ('', 'follow-first', None,
2527 ('', 'follow-first', None,
2526 _('only follow the first parent of merge changesets')),
2528 _('only follow the first parent of merge changesets')),
2527 ('C', 'copies', None, _('show copied files')),
2529 ('C', 'copies', None, _('show copied files')),
2528 ('k', 'keyword', [], _('search for a keyword')),
2530 ('k', 'keyword', [], _('search for a keyword')),
2529 ('l', 'limit', '', _('limit number of changes displayed')),
2531 ('l', 'limit', '', _('limit number of changes displayed')),
2530 ('r', 'rev', [], _('show the specified revision or range')),
2532 ('r', 'rev', [], _('show the specified revision or range')),
2531 ('', 'removed', None, _('include revs where files were removed')),
2533 ('', 'removed', None, _('include revs where files were removed')),
2532 ('M', 'no-merges', None, _('do not show merges')),
2534 ('M', 'no-merges', None, _('do not show merges')),
2533 ('', 'style', '', _('display using template map file')),
2535 ('', 'style', '', _('display using template map file')),
2534 ('m', 'only-merges', None, _('show only merges')),
2536 ('m', 'only-merges', None, _('show only merges')),
2535 ('p', 'patch', None, _('show patch')),
2537 ('p', 'patch', None, _('show patch')),
2536 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2538 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2537 ('', 'template', '', _('display with template')),
2539 ('', 'template', '', _('display with template')),
2538 ] + walkopts,
2540 ] + walkopts,
2539 _('hg log [OPTION]... [FILE]')),
2541 _('hg log [OPTION]... [FILE]')),
2540 "manifest": (manifest, [], _('hg manifest [REV]')),
2542 "manifest": (manifest, [], _('hg manifest [REV]')),
2541 "merge":
2543 "merge":
2542 (merge,
2544 (merge,
2543 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2545 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2544 ('f', 'force', None, _('force a merge with outstanding changes'))],
2546 ('f', 'force', None, _('force a merge with outstanding changes'))],
2545 _('hg merge [-f] [REV]')),
2547 _('hg merge [-f] [REV]')),
2546 "outgoing|out": (outgoing,
2548 "outgoing|out": (outgoing,
2547 [('M', 'no-merges', None, _('do not show merges')),
2549 [('M', 'no-merges', None, _('do not show merges')),
2548 ('f', 'force', None,
2550 ('f', 'force', None,
2549 _('run even when remote repository is unrelated')),
2551 _('run even when remote repository is unrelated')),
2550 ('p', 'patch', None, _('show patch')),
2552 ('p', 'patch', None, _('show patch')),
2551 ('', 'style', '', _('display using template map file')),
2553 ('', 'style', '', _('display using template map file')),
2552 ('r', 'rev', [], _('a specific revision you would like to push')),
2554 ('r', 'rev', [], _('a specific revision you would like to push')),
2553 ('n', 'newest-first', None, _('show newest record first')),
2555 ('n', 'newest-first', None, _('show newest record first')),
2554 ('', 'template', '', _('display with template')),
2556 ('', 'template', '', _('display with template')),
2555 ] + remoteopts,
2557 ] + remoteopts,
2556 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2558 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2557 "^parents":
2559 "^parents":
2558 (parents,
2560 (parents,
2559 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2561 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2560 ('r', 'rev', '', _('show parents from the specified rev')),
2562 ('r', 'rev', '', _('show parents from the specified rev')),
2561 ('', 'style', '', _('display using template map file')),
2563 ('', 'style', '', _('display using template map file')),
2562 ('', 'template', '', _('display with template'))],
2564 ('', 'template', '', _('display with template'))],
2563 _('hg parents [-r REV] [FILE]')),
2565 _('hg parents [-r REV] [FILE]')),
2564 "paths": (paths, [], _('hg paths [NAME]')),
2566 "paths": (paths, [], _('hg paths [NAME]')),
2565 "^pull":
2567 "^pull":
2566 (pull,
2568 (pull,
2567 [('u', 'update', None,
2569 [('u', 'update', None,
2568 _('update to new tip if changesets were pulled')),
2570 _('update to new tip if changesets were pulled')),
2569 ('f', 'force', None,
2571 ('f', 'force', None,
2570 _('run even when remote repository is unrelated')),
2572 _('run even when remote repository is unrelated')),
2571 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2573 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2572 ] + remoteopts,
2574 ] + remoteopts,
2573 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2575 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2574 "^push":
2576 "^push":
2575 (push,
2577 (push,
2576 [('f', 'force', None, _('force push')),
2578 [('f', 'force', None, _('force push')),
2577 ('r', 'rev', [], _('a specific revision you would like to push')),
2579 ('r', 'rev', [], _('a specific revision you would like to push')),
2578 ] + remoteopts,
2580 ] + remoteopts,
2579 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2581 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2580 "debugrawcommit|rawcommit":
2582 "debugrawcommit|rawcommit":
2581 (rawcommit,
2583 (rawcommit,
2582 [('p', 'parent', [], _('parent')),
2584 [('p', 'parent', [], _('parent')),
2583 ('d', 'date', '', _('date code')),
2585 ('d', 'date', '', _('date code')),
2584 ('u', 'user', '', _('user')),
2586 ('u', 'user', '', _('user')),
2585 ('F', 'files', '', _('file list')),
2587 ('F', 'files', '', _('file list')),
2586 ('m', 'message', '', _('commit message')),
2588 ('m', 'message', '', _('commit message')),
2587 ('l', 'logfile', '', _('commit message file'))],
2589 ('l', 'logfile', '', _('commit message file'))],
2588 _('hg debugrawcommit [OPTION]... [FILE]...')),
2590 _('hg debugrawcommit [OPTION]... [FILE]...')),
2589 "recover": (recover, [], _('hg recover')),
2591 "recover": (recover, [], _('hg recover')),
2590 "^remove|rm":
2592 "^remove|rm":
2591 (remove,
2593 (remove,
2592 [('A', 'after', None, _('record remove that has already occurred')),
2594 [('A', 'after', None, _('record remove that has already occurred')),
2593 ('f', 'force', None, _('remove file even if modified')),
2595 ('f', 'force', None, _('remove file even if modified')),
2594 ] + walkopts,
2596 ] + walkopts,
2595 _('hg remove [OPTION]... FILE...')),
2597 _('hg remove [OPTION]... FILE...')),
2596 "rename|mv":
2598 "rename|mv":
2597 (rename,
2599 (rename,
2598 [('A', 'after', None, _('record a rename that has already occurred')),
2600 [('A', 'after', None, _('record a rename that has already occurred')),
2599 ('f', 'force', None,
2601 ('f', 'force', None,
2600 _('forcibly copy over an existing managed file')),
2602 _('forcibly copy over an existing managed file')),
2601 ] + walkopts + dryrunopts,
2603 ] + walkopts + dryrunopts,
2602 _('hg rename [OPTION]... SOURCE... DEST')),
2604 _('hg rename [OPTION]... SOURCE... DEST')),
2603 "^revert":
2605 "^revert":
2604 (revert,
2606 (revert,
2605 [('a', 'all', None, _('revert all changes when no arguments given')),
2607 [('a', 'all', None, _('revert all changes when no arguments given')),
2606 ('r', 'rev', '', _('revision to revert to')),
2608 ('r', 'rev', '', _('revision to revert to')),
2607 ('', 'no-backup', None, _('do not save backup copies of files')),
2609 ('', 'no-backup', None, _('do not save backup copies of files')),
2608 ] + walkopts + dryrunopts,
2610 ] + walkopts + dryrunopts,
2609 _('hg revert [-r REV] [NAME]...')),
2611 _('hg revert [-r REV] [NAME]...')),
2610 "rollback": (rollback, [], _('hg rollback')),
2612 "rollback": (rollback, [], _('hg rollback')),
2611 "root": (root, [], _('hg root')),
2613 "root": (root, [], _('hg root')),
2612 "showconfig|debugconfig":
2614 "showconfig|debugconfig":
2613 (showconfig,
2615 (showconfig,
2614 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2616 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2615 _('showconfig [-u] [NAME]...')),
2617 _('showconfig [-u] [NAME]...')),
2616 "^serve":
2618 "^serve":
2617 (serve,
2619 (serve,
2618 [('A', 'accesslog', '', _('name of access log file to write to')),
2620 [('A', 'accesslog', '', _('name of access log file to write to')),
2619 ('d', 'daemon', None, _('run server in background')),
2621 ('d', 'daemon', None, _('run server in background')),
2620 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2622 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2621 ('E', 'errorlog', '', _('name of error log file to write to')),
2623 ('E', 'errorlog', '', _('name of error log file to write to')),
2622 ('p', 'port', 0, _('port to use (default: 8000)')),
2624 ('p', 'port', 0, _('port to use (default: 8000)')),
2623 ('a', 'address', '', _('address to use')),
2625 ('a', 'address', '', _('address to use')),
2624 ('n', 'name', '',
2626 ('n', 'name', '',
2625 _('name to show in web pages (default: working dir)')),
2627 _('name to show in web pages (default: working dir)')),
2626 ('', 'webdir-conf', '', _('name of the webdir config file'
2628 ('', 'webdir-conf', '', _('name of the webdir config file'
2627 ' (serve more than one repo)')),
2629 ' (serve more than one repo)')),
2628 ('', 'pid-file', '', _('name of file to write process ID to')),
2630 ('', 'pid-file', '', _('name of file to write process ID to')),
2629 ('', 'stdio', None, _('for remote clients')),
2631 ('', 'stdio', None, _('for remote clients')),
2630 ('t', 'templates', '', _('web templates to use')),
2632 ('t', 'templates', '', _('web templates to use')),
2631 ('', 'style', '', _('template style to use')),
2633 ('', 'style', '', _('template style to use')),
2632 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2634 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2633 _('hg serve [OPTION]...')),
2635 _('hg serve [OPTION]...')),
2634 "^status|st":
2636 "^status|st":
2635 (status,
2637 (status,
2636 [('A', 'all', None, _('show status of all files')),
2638 [('A', 'all', None, _('show status of all files')),
2637 ('m', 'modified', None, _('show only modified files')),
2639 ('m', 'modified', None, _('show only modified files')),
2638 ('a', 'added', None, _('show only added files')),
2640 ('a', 'added', None, _('show only added files')),
2639 ('r', 'removed', None, _('show only removed files')),
2641 ('r', 'removed', None, _('show only removed files')),
2640 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2642 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2641 ('c', 'clean', None, _('show only files without changes')),
2643 ('c', 'clean', None, _('show only files without changes')),
2642 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2644 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2643 ('i', 'ignored', None, _('show ignored files')),
2645 ('i', 'ignored', None, _('show ignored files')),
2644 ('n', 'no-status', None, _('hide status prefix')),
2646 ('n', 'no-status', None, _('hide status prefix')),
2645 ('C', 'copies', None, _('show source of copied files')),
2647 ('C', 'copies', None, _('show source of copied files')),
2646 ('0', 'print0', None,
2648 ('0', 'print0', None,
2647 _('end filenames with NUL, for use with xargs')),
2649 _('end filenames with NUL, for use with xargs')),
2648 ('', 'rev', [], _('show difference from revision')),
2650 ('', 'rev', [], _('show difference from revision')),
2649 ] + walkopts,
2651 ] + walkopts,
2650 _('hg status [OPTION]... [FILE]...')),
2652 _('hg status [OPTION]... [FILE]...')),
2651 "tag":
2653 "tag":
2652 (tag,
2654 (tag,
2653 [('l', 'local', None, _('make the tag local')),
2655 [('l', 'local', None, _('make the tag local')),
2654 ('m', 'message', '', _('message for tag commit log entry')),
2656 ('m', 'message', '', _('message for tag commit log entry')),
2655 ('d', 'date', '', _('record datecode as commit date')),
2657 ('d', 'date', '', _('record datecode as commit date')),
2656 ('u', 'user', '', _('record user as committer')),
2658 ('u', 'user', '', _('record user as committer')),
2657 ('r', 'rev', '', _('revision to tag'))],
2659 ('r', 'rev', '', _('revision to tag'))],
2658 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2660 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2659 "tags": (tags, [], _('hg tags')),
2661 "tags": (tags, [], _('hg tags')),
2660 "tip":
2662 "tip":
2661 (tip,
2663 (tip,
2662 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2664 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2663 ('', 'style', '', _('display using template map file')),
2665 ('', 'style', '', _('display using template map file')),
2664 ('p', 'patch', None, _('show patch')),
2666 ('p', 'patch', None, _('show patch')),
2665 ('', 'template', '', _('display with template'))],
2667 ('', 'template', '', _('display with template'))],
2666 _('hg tip [-p]')),
2668 _('hg tip [-p]')),
2667 "unbundle":
2669 "unbundle":
2668 (unbundle,
2670 (unbundle,
2669 [('u', 'update', None,
2671 [('u', 'update', None,
2670 _('update to new tip if changesets were unbundled'))],
2672 _('update to new tip if changesets were unbundled'))],
2671 _('hg unbundle [-u] FILE')),
2673 _('hg unbundle [-u] FILE')),
2672 "^update|up|checkout|co":
2674 "^update|up|checkout|co":
2673 (update,
2675 (update,
2674 [('b', 'branch', '',
2676 [('b', 'branch', '',
2675 _('checkout the head of a specific branch (DEPRECATED)')),
2677 _('checkout the head of a specific branch (DEPRECATED)')),
2676 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
2678 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
2677 ('C', 'clean', None, _('overwrite locally modified files')),
2679 ('C', 'clean', None, _('overwrite locally modified files')),
2678 ('f', 'force', None, _('force a merge with outstanding changes'))],
2680 ('f', 'force', None, _('force a merge with outstanding changes'))],
2679 _('hg update [-C] [-f] [REV]')),
2681 _('hg update [-C] [-f] [REV]')),
2680 "verify": (verify, [], _('hg verify')),
2682 "verify": (verify, [], _('hg verify')),
2681 "version": (version_, [], _('hg version')),
2683 "version": (version_, [], _('hg version')),
2682 }
2684 }
2683
2685
2684 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2686 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2685 " debugindex debugindexdot")
2687 " debugindex debugindexdot")
2686 optionalrepo = ("paths serve showconfig")
2688 optionalrepo = ("paths serve showconfig")
2687
2689
2688 def findpossible(ui, cmd):
2690 def findpossible(ui, cmd):
2689 """
2691 """
2690 Return cmd -> (aliases, command table entry)
2692 Return cmd -> (aliases, command table entry)
2691 for each matching command.
2693 for each matching command.
2692 Return debug commands (or their aliases) only if no normal command matches.
2694 Return debug commands (or their aliases) only if no normal command matches.
2693 """
2695 """
2694 choice = {}
2696 choice = {}
2695 debugchoice = {}
2697 debugchoice = {}
2696 for e in table.keys():
2698 for e in table.keys():
2697 aliases = e.lstrip("^").split("|")
2699 aliases = e.lstrip("^").split("|")
2698 found = None
2700 found = None
2699 if cmd in aliases:
2701 if cmd in aliases:
2700 found = cmd
2702 found = cmd
2701 elif not ui.config("ui", "strict"):
2703 elif not ui.config("ui", "strict"):
2702 for a in aliases:
2704 for a in aliases:
2703 if a.startswith(cmd):
2705 if a.startswith(cmd):
2704 found = a
2706 found = a
2705 break
2707 break
2706 if found is not None:
2708 if found is not None:
2707 if aliases[0].startswith("debug") or found.startswith("debug"):
2709 if aliases[0].startswith("debug") or found.startswith("debug"):
2708 debugchoice[found] = (aliases, table[e])
2710 debugchoice[found] = (aliases, table[e])
2709 else:
2711 else:
2710 choice[found] = (aliases, table[e])
2712 choice[found] = (aliases, table[e])
2711
2713
2712 if not choice and debugchoice:
2714 if not choice and debugchoice:
2713 choice = debugchoice
2715 choice = debugchoice
2714
2716
2715 return choice
2717 return choice
2716
2718
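As an aside, the alias/prefix matching done by findpossible() above can be exercised in isolation. The toy table, the possible() helper and the strict flag below are stand-ins for this sketch only, not Mercurial's real objects:

    # Standalone sketch of the alias/prefix matching done in findpossible().
    toy_table = {"^update|up|checkout|co": "update entry",
                 "unbundle": "unbundle entry"}

    def possible(cmd, strict=False):
        choice = {}
        for key, entry in toy_table.items():
            aliases = key.lstrip("^").split("|")
            if cmd in aliases:
                choice[cmd] = (aliases, entry)
            elif not strict:
                for a in aliases:
                    if a.startswith(cmd):
                        choice[a] = (aliases, entry)
                        break
        return choice

    print(possible("up"))    # exact alias match -> the update entry
    print(possible("unb"))   # unambiguous prefix -> the unbundle entry
    print(possible("u"))     # two candidates -> would be reported as ambiguous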
2717 def findcmd(ui, cmd):
2719 def findcmd(ui, cmd):
2718 """Return (aliases, command table entry) for command string."""
2720 """Return (aliases, command table entry) for command string."""
2719 choice = findpossible(ui, cmd)
2721 choice = findpossible(ui, cmd)
2720
2722
2721 if choice.has_key(cmd):
2723 if choice.has_key(cmd):
2722 return choice[cmd]
2724 return choice[cmd]
2723
2725
2724 if len(choice) > 1:
2726 if len(choice) > 1:
2725 clist = choice.keys()
2727 clist = choice.keys()
2726 clist.sort()
2728 clist.sort()
2727 raise AmbiguousCommand(cmd, clist)
2729 raise AmbiguousCommand(cmd, clist)
2728
2730
2729 if choice:
2731 if choice:
2730 return choice.values()[0]
2732 return choice.values()[0]
2731
2733
2732 raise UnknownCommand(cmd)
2734 raise UnknownCommand(cmd)
2733
2735
2734 def catchterm(*args):
2736 def catchterm(*args):
2735 raise util.SignalInterrupt
2737 raise util.SignalInterrupt
2736
2738
2737 def run():
2739 def run():
2738 sys.exit(dispatch(sys.argv[1:]))
2740 sys.exit(dispatch(sys.argv[1:]))
2739
2741
2740 class ParseError(Exception):
2742 class ParseError(Exception):
2741 """Exception raised on errors in parsing the command line."""
2743 """Exception raised on errors in parsing the command line."""
2742
2744
2743 def parse(ui, args):
2745 def parse(ui, args):
2744 options = {}
2746 options = {}
2745 cmdoptions = {}
2747 cmdoptions = {}
2746
2748
2747 try:
2749 try:
2748 args = fancyopts.fancyopts(args, globalopts, options)
2750 args = fancyopts.fancyopts(args, globalopts, options)
2749 except fancyopts.getopt.GetoptError, inst:
2751 except fancyopts.getopt.GetoptError, inst:
2750 raise ParseError(None, inst)
2752 raise ParseError(None, inst)
2751
2753
2752 if args:
2754 if args:
2753 cmd, args = args[0], args[1:]
2755 cmd, args = args[0], args[1:]
2754 aliases, i = findcmd(ui, cmd)
2756 aliases, i = findcmd(ui, cmd)
2755 cmd = aliases[0]
2757 cmd = aliases[0]
2756 defaults = ui.config("defaults", cmd)
2758 defaults = ui.config("defaults", cmd)
2757 if defaults:
2759 if defaults:
2758 args = shlex.split(defaults) + args
2760 args = shlex.split(defaults) + args
2759 c = list(i[1])
2761 c = list(i[1])
2760 else:
2762 else:
2761 cmd = None
2763 cmd = None
2762 c = []
2764 c = []
2763
2765
2764 # combine global options into local
2766 # combine global options into local
2765 for o in globalopts:
2767 for o in globalopts:
2766 c.append((o[0], o[1], options[o[1]], o[3]))
2768 c.append((o[0], o[1], options[o[1]], o[3]))
2767
2769
2768 try:
2770 try:
2769 args = fancyopts.fancyopts(args, c, cmdoptions)
2771 args = fancyopts.fancyopts(args, c, cmdoptions)
2770 except fancyopts.getopt.GetoptError, inst:
2772 except fancyopts.getopt.GetoptError, inst:
2771 raise ParseError(cmd, inst)
2773 raise ParseError(cmd, inst)
2772
2774
2773 # separate global options back out
2775 # separate global options back out
2774 for o in globalopts:
2776 for o in globalopts:
2775 n = o[1]
2777 n = o[1]
2776 options[n] = cmdoptions[n]
2778 options[n] = cmdoptions[n]
2777 del cmdoptions[n]
2779 del cmdoptions[n]
2778
2780
2779 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2781 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2780
2782
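For context, the ui.config("defaults", cmd) lookup in parse() above reads the hgrc [defaults] section, whose value is shlex-split and prepended to the command arguments. A rough sketch of just that step; the option string and file name are illustrative only:

    # How a [defaults] value such as "log = -l 10 --no-merges" is folded in.
    import shlex

    defaults = "-l 10 --no-merges"       # illustrative value for the log command
    args = ["somefile.py"]
    args = shlex.split(defaults) + args
    print(args)                          # ['-l', '10', '--no-merges', 'somefile.py']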
2781 external = {}
2783 external = {}
2782
2784
2783 def findext(name):
2785 def findext(name):
2784 '''return module with given extension name'''
2786 '''return module with given extension name'''
2785 try:
2787 try:
2786 return sys.modules[external[name]]
2788 return sys.modules[external[name]]
2787 except KeyError:
2789 except KeyError:
2788 for k, v in external.iteritems():
2790 for k, v in external.iteritems():
2789 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
2791 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
2790 return sys.modules[v]
2792 return sys.modules[v]
2791 raise KeyError(name)
2793 raise KeyError(name)
2792
2794
2793 def load_extensions(ui):
2795 def load_extensions(ui):
2794 added = []
2796 added = []
2795 for ext_name, load_from_name in ui.extensions():
2797 for ext_name, load_from_name in ui.extensions():
2796 if ext_name in external:
2798 if ext_name in external:
2797 continue
2799 continue
2798 try:
2800 try:
2799 if load_from_name:
2801 if load_from_name:
2800 # the module will be loaded in sys.modules
2802 # the module will be loaded in sys.modules
2801 # choose a unique name so that it doesn't
2803 # choose a unique name so that it doesn't
2802 # conflict with other modules
2804 # conflict with other modules
2803 module_name = "hgext_%s" % ext_name.replace('.', '_')
2805 module_name = "hgext_%s" % ext_name.replace('.', '_')
2804 mod = imp.load_source(module_name, load_from_name)
2806 mod = imp.load_source(module_name, load_from_name)
2805 else:
2807 else:
2806 def importh(name):
2808 def importh(name):
2807 mod = __import__(name)
2809 mod = __import__(name)
2808 components = name.split('.')
2810 components = name.split('.')
2809 for comp in components[1:]:
2811 for comp in components[1:]:
2810 mod = getattr(mod, comp)
2812 mod = getattr(mod, comp)
2811 return mod
2813 return mod
2812 try:
2814 try:
2813 mod = importh("hgext.%s" % ext_name)
2815 mod = importh("hgext.%s" % ext_name)
2814 except ImportError:
2816 except ImportError:
2815 mod = importh(ext_name)
2817 mod = importh(ext_name)
2816 external[ext_name] = mod.__name__
2818 external[ext_name] = mod.__name__
2817 added.append((mod, ext_name))
2819 added.append((mod, ext_name))
2818 except (util.SignalInterrupt, KeyboardInterrupt):
2820 except (util.SignalInterrupt, KeyboardInterrupt):
2819 raise
2821 raise
2820 except Exception, inst:
2822 except Exception, inst:
2821 ui.warn(_("*** failed to import extension %s: %s\n") %
2823 ui.warn(_("*** failed to import extension %s: %s\n") %
2822 (ext_name, inst))
2824 (ext_name, inst))
2823 if ui.print_exc():
2825 if ui.print_exc():
2824 return 1
2826 return 1
2825
2827
2826 for mod, name in added:
2828 for mod, name in added:
2827 uisetup = getattr(mod, 'uisetup', None)
2829 uisetup = getattr(mod, 'uisetup', None)
2828 if uisetup:
2830 if uisetup:
2829 uisetup(ui)
2831 uisetup(ui)
2830 cmdtable = getattr(mod, 'cmdtable', {})
2832 cmdtable = getattr(mod, 'cmdtable', {})
2831 for t in cmdtable:
2833 for t in cmdtable:
2832 if t in table:
2834 if t in table:
2833 ui.warn(_("module %s overrides %s\n") % (name, t))
2835 ui.warn(_("module %s overrides %s\n") % (name, t))
2834 table.update(cmdtable)
2836 table.update(cmdtable)
2835
2837
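The importh() helper inside load_extensions() above is the usual __import__-plus-getattr walk for dotted module names. The same logic pulled out and run against a standard library module, purely as an example target:

    # __import__("os.path") returns the top-level "os" package, so the dotted
    # components are resolved with getattr, exactly as importh() does above.
    def importh(name):
        mod = __import__(name)
        for comp in name.split('.')[1:]:
            mod = getattr(mod, comp)
        return mod

    pathmod = importh("os.path")
    print(pathmod.join("a", "b"))   # 'a/b' (or 'a\\b' on Windows)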
2836 def parseconfig(config):
2838 def parseconfig(config):
2837 """parse the --config options from the command line"""
2839 """parse the --config options from the command line"""
2838 parsed = []
2840 parsed = []
2839 for cfg in config:
2841 for cfg in config:
2840 try:
2842 try:
2841 name, value = cfg.split('=', 1)
2843 name, value = cfg.split('=', 1)
2842 section, name = name.split('.', 1)
2844 section, name = name.split('.', 1)
2843 if not section or not name:
2845 if not section or not name:
2844 raise IndexError
2846 raise IndexError
2845 parsed.append((section, name, value))
2847 parsed.append((section, name, value))
2846 except (IndexError, ValueError):
2848 except (IndexError, ValueError):
2847 raise util.Abort(_('malformed --config option: %s') % cfg)
2849 raise util.Abort(_('malformed --config option: %s') % cfg)
2848 return parsed
2850 return parsed
2849
2851
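A quick, simplified stand-in for parseconfig() above, using a plain exception instead of util.Abort; the option strings are made up for the example:

    def parse_config_opts(config):
        # each --config value must look like section.name=value
        parsed = []
        for cfg in config:
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise ValueError('malformed --config option: %s' % cfg)
            parsed.append((section, name, value))
        return parsed

    print(parse_config_opts(["ui.username=Jane Doe", "diff.git=1"]))
    # [('ui', 'username', 'Jane Doe'), ('diff', 'git', '1')]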
2850 def dispatch(args):
2852 def dispatch(args):
2851 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
2853 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
2852 num = getattr(signal, name, None)
2854 num = getattr(signal, name, None)
2853 if num: signal.signal(num, catchterm)
2855 if num: signal.signal(num, catchterm)
2854
2856
2855 try:
2857 try:
2856 u = ui.ui(traceback='--traceback' in sys.argv[1:])
2858 u = ui.ui(traceback='--traceback' in sys.argv[1:])
2857 except util.Abort, inst:
2859 except util.Abort, inst:
2858 sys.stderr.write(_("abort: %s\n") % inst)
2860 sys.stderr.write(_("abort: %s\n") % inst)
2859 return -1
2861 return -1
2860
2862
2861 load_extensions(u)
2863 load_extensions(u)
2862 u.addreadhook(load_extensions)
2864 u.addreadhook(load_extensions)
2863
2865
2864 try:
2866 try:
2865 cmd, func, args, options, cmdoptions = parse(u, args)
2867 cmd, func, args, options, cmdoptions = parse(u, args)
2866 if options["time"]:
2868 if options["time"]:
2867 def get_times():
2869 def get_times():
2868 t = os.times()
2870 t = os.times()
2869 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2871 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2870 t = (t[0], t[1], t[2], t[3], time.clock())
2872 t = (t[0], t[1], t[2], t[3], time.clock())
2871 return t
2873 return t
2872 s = get_times()
2874 s = get_times()
2873 def print_time():
2875 def print_time():
2874 t = get_times()
2876 t = get_times()
2875 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2877 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2876 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2878 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2877 atexit.register(print_time)
2879 atexit.register(print_time)
2878
2880
2879 # enter the debugger before command execution
2881 # enter the debugger before command execution
2880 if options['debugger']:
2882 if options['debugger']:
2881 pdb.set_trace()
2883 pdb.set_trace()
2882
2884
2883 try:
2885 try:
2884 if options['cwd']:
2886 if options['cwd']:
2885 try:
2887 try:
2886 os.chdir(options['cwd'])
2888 os.chdir(options['cwd'])
2887 except OSError, inst:
2889 except OSError, inst:
2888 raise util.Abort('%s: %s' %
2890 raise util.Abort('%s: %s' %
2889 (options['cwd'], inst.strerror))
2891 (options['cwd'], inst.strerror))
2890
2892
2891 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2893 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2892 not options["noninteractive"], options["traceback"],
2894 not options["noninteractive"], options["traceback"],
2893 parseconfig(options["config"]))
2895 parseconfig(options["config"]))
2894
2896
2895 path = u.expandpath(options["repository"]) or ""
2897 path = u.expandpath(options["repository"]) or ""
2896 repo = path and hg.repository(u, path=path) or None
2898 repo = path and hg.repository(u, path=path) or None
2897 if repo and not repo.local():
2899 if repo and not repo.local():
2898 raise util.Abort(_("repository '%s' is not local") % path)
2900 raise util.Abort(_("repository '%s' is not local") % path)
2899
2901
2900 if options['help']:
2902 if options['help']:
2901 return help_(u, cmd, options['version'])
2903 return help_(u, cmd, options['version'])
2902 elif options['version']:
2904 elif options['version']:
2903 return version_(u)
2905 return version_(u)
2904 elif not cmd:
2906 elif not cmd:
2905 return help_(u, 'shortlist')
2907 return help_(u, 'shortlist')
2906
2908
2907 if cmd not in norepo.split():
2909 if cmd not in norepo.split():
2908 try:
2910 try:
2909 if not repo:
2911 if not repo:
2910 repo = hg.repository(u, path=path)
2912 repo = hg.repository(u, path=path)
2911 u = repo.ui
2913 u = repo.ui
2912 for name in external.itervalues():
2914 for name in external.itervalues():
2913 mod = sys.modules[name]
2915 mod = sys.modules[name]
2914 if hasattr(mod, 'reposetup'):
2916 if hasattr(mod, 'reposetup'):
2915 mod.reposetup(u, repo)
2917 mod.reposetup(u, repo)
2916 hg.repo_setup_hooks.append(mod.reposetup)
2918 hg.repo_setup_hooks.append(mod.reposetup)
2917 except hg.RepoError:
2919 except hg.RepoError:
2918 if cmd not in optionalrepo.split():
2920 if cmd not in optionalrepo.split():
2919 raise
2921 raise
2920 d = lambda: func(u, repo, *args, **cmdoptions)
2922 d = lambda: func(u, repo, *args, **cmdoptions)
2921 else:
2923 else:
2922 d = lambda: func(u, *args, **cmdoptions)
2924 d = lambda: func(u, *args, **cmdoptions)
2923
2925
2924 try:
2926 try:
2925 if options['profile']:
2927 if options['profile']:
2926 import hotshot, hotshot.stats
2928 import hotshot, hotshot.stats
2927 prof = hotshot.Profile("hg.prof")
2929 prof = hotshot.Profile("hg.prof")
2928 try:
2930 try:
2929 try:
2931 try:
2930 return prof.runcall(d)
2932 return prof.runcall(d)
2931 except:
2933 except:
2932 try:
2934 try:
2933 u.warn(_('exception raised - generating '
2935 u.warn(_('exception raised - generating '
2934 'profile anyway\n'))
2936 'profile anyway\n'))
2935 except:
2937 except:
2936 pass
2938 pass
2937 raise
2939 raise
2938 finally:
2940 finally:
2939 prof.close()
2941 prof.close()
2940 stats = hotshot.stats.load("hg.prof")
2942 stats = hotshot.stats.load("hg.prof")
2941 stats.strip_dirs()
2943 stats.strip_dirs()
2942 stats.sort_stats('time', 'calls')
2944 stats.sort_stats('time', 'calls')
2943 stats.print_stats(40)
2945 stats.print_stats(40)
2944 elif options['lsprof']:
2946 elif options['lsprof']:
2945 try:
2947 try:
2946 from mercurial import lsprof
2948 from mercurial import lsprof
2947 except ImportError:
2949 except ImportError:
2948 raise util.Abort(_(
2950 raise util.Abort(_(
2949 'lsprof not available - install from '
2951 'lsprof not available - install from '
2950 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
2952 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
2951 p = lsprof.Profiler()
2953 p = lsprof.Profiler()
2952 p.enable(subcalls=True)
2954 p.enable(subcalls=True)
2953 try:
2955 try:
2954 return d()
2956 return d()
2955 finally:
2957 finally:
2956 p.disable()
2958 p.disable()
2957 stats = lsprof.Stats(p.getstats())
2959 stats = lsprof.Stats(p.getstats())
2958 stats.sort()
2960 stats.sort()
2959 stats.pprint(top=10, file=sys.stderr, climit=5)
2961 stats.pprint(top=10, file=sys.stderr, climit=5)
2960 else:
2962 else:
2961 return d()
2963 return d()
2962 finally:
2964 finally:
2963 u.flush()
2965 u.flush()
2964 except:
2966 except:
2965 # enter the debugger when we hit an exception
2967 # enter the debugger when we hit an exception
2966 if options['debugger']:
2968 if options['debugger']:
2967 pdb.post_mortem(sys.exc_info()[2])
2969 pdb.post_mortem(sys.exc_info()[2])
2968 u.print_exc()
2970 u.print_exc()
2969 raise
2971 raise
2970 except ParseError, inst:
2972 except ParseError, inst:
2971 if inst.args[0]:
2973 if inst.args[0]:
2972 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2974 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2973 help_(u, inst.args[0])
2975 help_(u, inst.args[0])
2974 else:
2976 else:
2975 u.warn(_("hg: %s\n") % inst.args[1])
2977 u.warn(_("hg: %s\n") % inst.args[1])
2976 help_(u, 'shortlist')
2978 help_(u, 'shortlist')
2977 except AmbiguousCommand, inst:
2979 except AmbiguousCommand, inst:
2978 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
2980 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
2979 (inst.args[0], " ".join(inst.args[1])))
2981 (inst.args[0], " ".join(inst.args[1])))
2980 except UnknownCommand, inst:
2982 except UnknownCommand, inst:
2981 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2983 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2982 help_(u, 'shortlist')
2984 help_(u, 'shortlist')
2983 except hg.RepoError, inst:
2985 except hg.RepoError, inst:
2984 u.warn(_("abort: %s!\n") % inst)
2986 u.warn(_("abort: %s!\n") % inst)
2985 except lock.LockHeld, inst:
2987 except lock.LockHeld, inst:
2986 if inst.errno == errno.ETIMEDOUT:
2988 if inst.errno == errno.ETIMEDOUT:
2987 reason = _('timed out waiting for lock held by %s') % inst.locker
2989 reason = _('timed out waiting for lock held by %s') % inst.locker
2988 else:
2990 else:
2989 reason = _('lock held by %s') % inst.locker
2991 reason = _('lock held by %s') % inst.locker
2990 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
2992 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
2991 except lock.LockUnavailable, inst:
2993 except lock.LockUnavailable, inst:
2992 u.warn(_("abort: could not lock %s: %s\n") %
2994 u.warn(_("abort: could not lock %s: %s\n") %
2993 (inst.desc or inst.filename, inst.strerror))
2995 (inst.desc or inst.filename, inst.strerror))
2994 except revlog.RevlogError, inst:
2996 except revlog.RevlogError, inst:
2995 u.warn(_("abort: %s!\n") % inst)
2997 u.warn(_("abort: %s!\n") % inst)
2996 except util.SignalInterrupt:
2998 except util.SignalInterrupt:
2997 u.warn(_("killed!\n"))
2999 u.warn(_("killed!\n"))
2998 except KeyboardInterrupt:
3000 except KeyboardInterrupt:
2999 try:
3001 try:
3000 u.warn(_("interrupted!\n"))
3002 u.warn(_("interrupted!\n"))
3001 except IOError, inst:
3003 except IOError, inst:
3002 if inst.errno == errno.EPIPE:
3004 if inst.errno == errno.EPIPE:
3003 if u.debugflag:
3005 if u.debugflag:
3004 u.warn(_("\nbroken pipe\n"))
3006 u.warn(_("\nbroken pipe\n"))
3005 else:
3007 else:
3006 raise
3008 raise
3007 except IOError, inst:
3009 except IOError, inst:
3008 if hasattr(inst, "code"):
3010 if hasattr(inst, "code"):
3009 u.warn(_("abort: %s\n") % inst)
3011 u.warn(_("abort: %s\n") % inst)
3010 elif hasattr(inst, "reason"):
3012 elif hasattr(inst, "reason"):
3011 u.warn(_("abort: error: %s\n") % inst.reason[1])
3013 u.warn(_("abort: error: %s\n") % inst.reason[1])
3012 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3014 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3013 if u.debugflag:
3015 if u.debugflag:
3014 u.warn(_("broken pipe\n"))
3016 u.warn(_("broken pipe\n"))
3015 elif getattr(inst, "strerror", None):
3017 elif getattr(inst, "strerror", None):
3016 if getattr(inst, "filename", None):
3018 if getattr(inst, "filename", None):
3017 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3019 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3018 else:
3020 else:
3019 u.warn(_("abort: %s\n") % inst.strerror)
3021 u.warn(_("abort: %s\n") % inst.strerror)
3020 else:
3022 else:
3021 raise
3023 raise
3022 except OSError, inst:
3024 except OSError, inst:
3023 if getattr(inst, "filename", None):
3025 if getattr(inst, "filename", None):
3024 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3026 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3025 else:
3027 else:
3026 u.warn(_("abort: %s\n") % inst.strerror)
3028 u.warn(_("abort: %s\n") % inst.strerror)
3027 except util.UnexpectedOutput, inst:
3029 except util.UnexpectedOutput, inst:
3028 u.warn(_("abort: %s") % inst[0])
3030 u.warn(_("abort: %s") % inst[0])
3029 if not isinstance(inst[1], basestring):
3031 if not isinstance(inst[1], basestring):
3030 u.warn(" %r\n" % (inst[1],))
3032 u.warn(" %r\n" % (inst[1],))
3031 elif not inst[1]:
3033 elif not inst[1]:
3032 u.warn(_(" empty string\n"))
3034 u.warn(_(" empty string\n"))
3033 else:
3035 else:
3034 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3036 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3035 except util.Abort, inst:
3037 except util.Abort, inst:
3036 u.warn(_("abort: %s\n") % inst)
3038 u.warn(_("abort: %s\n") % inst)
3037 except TypeError, inst:
3039 except TypeError, inst:
3038 # was this an argument error?
3040 # was this an argument error?
3039 tb = traceback.extract_tb(sys.exc_info()[2])
3041 tb = traceback.extract_tb(sys.exc_info()[2])
3040 if len(tb) > 2: # no
3042 if len(tb) > 2: # no
3041 raise
3043 raise
3042 u.debug(inst, "\n")
3044 u.debug(inst, "\n")
3043 u.warn(_("%s: invalid arguments\n") % cmd)
3045 u.warn(_("%s: invalid arguments\n") % cmd)
3044 help_(u, cmd)
3046 help_(u, cmd)
3045 except SystemExit, inst:
3047 except SystemExit, inst:
3046 # Commands shouldn't sys.exit directly, but give a return code.
3048 # Commands shouldn't sys.exit directly, but give a return code.
3047 # Just in case, catch this and pass the exit code to the caller.
3049 # Just in case, catch this and pass the exit code to the caller.
3048 return inst.code
3050 return inst.code
3049 except:
3051 except:
3050 u.warn(_("** unknown exception encountered, details follow\n"))
3052 u.warn(_("** unknown exception encountered, details follow\n"))
3051 u.warn(_("** report bug details to "
3053 u.warn(_("** report bug details to "
3052 "http://www.selenic.com/mercurial/bts\n"))
3054 "http://www.selenic.com/mercurial/bts\n"))
3053 u.warn(_("** or mercurial@selenic.com\n"))
3055 u.warn(_("** or mercurial@selenic.com\n"))
3054 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3056 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3055 % version.get_version())
3057 % version.get_version())
3056 raise
3058 raise
3057
3059
3058 return -1
3060 return -1
@@ -1,1903 +1,1909 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 else:
40 else:
41 raise repo.RepoError(_("repository %s not found") % path)
41 raise repo.RepoError(_("repository %s not found") % path)
42 elif create:
42 elif create:
43 raise repo.RepoError(_("repository %s already exists") % path)
43 raise repo.RepoError(_("repository %s already exists") % path)
44
44
45 self.root = os.path.realpath(path)
45 self.root = os.path.realpath(path)
46 self.origroot = path
46 self.origroot = path
47 self.ui = ui.ui(parentui=parentui)
47 self.ui = ui.ui(parentui=parentui)
48 self.opener = util.opener(self.path)
48 self.opener = util.opener(self.path)
49 self.sopener = util.opener(self.path)
49 self.sopener = util.opener(self.path)
50 self.wopener = util.opener(self.root)
50 self.wopener = util.opener(self.root)
51
51
52 try:
52 try:
53 self.ui.readconfig(self.join("hgrc"), self.root)
53 self.ui.readconfig(self.join("hgrc"), self.root)
54 except IOError:
54 except IOError:
55 pass
55 pass
56
56
57 v = self.ui.configrevlog()
57 v = self.ui.configrevlog()
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 fl = v.get('flags', None)
60 fl = v.get('flags', None)
61 flags = 0
61 flags = 0
62 if fl != None:
62 if fl != None:
63 for x in fl.split():
63 for x in fl.split():
64 flags |= revlog.flagstr(x)
64 flags |= revlog.flagstr(x)
65 elif self.revlogv1:
65 elif self.revlogv1:
66 flags = revlog.REVLOG_DEFAULT_FLAGS
66 flags = revlog.REVLOG_DEFAULT_FLAGS
67
67
68 v = self.revlogversion | flags
68 v = self.revlogversion | flags
69 self.manifest = manifest.manifest(self.sopener, v)
69 self.manifest = manifest.manifest(self.sopener, v)
70 self.changelog = changelog.changelog(self.sopener, v)
70 self.changelog = changelog.changelog(self.sopener, v)
71
71
72 # the changelog might not have the inline index flag
72 # the changelog might not have the inline index flag
73 # on. If the format of the changelog is the same as found in
73 # on. If the format of the changelog is the same as found in
74 # .hgrc, apply any flags found in the .hgrc as well.
74 # .hgrc, apply any flags found in the .hgrc as well.
75 # Otherwise, just use the version from the changelog
75 # Otherwise, just use the version from the changelog
76 v = self.changelog.version
76 v = self.changelog.version
77 if v == self.revlogversion:
77 if v == self.revlogversion:
78 v |= flags
78 v |= flags
79 self.revlogversion = v
79 self.revlogversion = v
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.branchcache = None
82 self.branchcache = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.encodepats = None
84 self.encodepats = None
85 self.decodepats = None
85 self.decodepats = None
86 self.transhandle = None
86 self.transhandle = None
87
87
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
89
89
90 def url(self):
90 def url(self):
91 return 'file:' + self.root
91 return 'file:' + self.root
92
92
93 def hook(self, name, throw=False, **args):
93 def hook(self, name, throw=False, **args):
94 def callhook(hname, funcname):
94 def callhook(hname, funcname):
95 '''call python hook. hook is callable object, looked up as
95 '''call python hook. hook is callable object, looked up as
96 name in python module. if callable returns "true", hook
96 name in python module. if callable returns "true", hook
97 fails, else passes. if hook raises exception, treated as
97 fails, else passes. if hook raises exception, treated as
98 hook failure. exception propagates if throw is "true".
98 hook failure. exception propagates if throw is "true".
99
99
100 reason for "true" meaning "hook failed" is so that
100 reason for "true" meaning "hook failed" is so that
101 unmodified commands (e.g. mercurial.commands.update) can
101 unmodified commands (e.g. mercurial.commands.update) can
102 be run as hooks without wrappers to convert return values.'''
102 be run as hooks without wrappers to convert return values.'''
103
103
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
105 d = funcname.rfind('.')
105 d = funcname.rfind('.')
106 if d == -1:
106 if d == -1:
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
108 % (hname, funcname))
108 % (hname, funcname))
109 modname = funcname[:d]
109 modname = funcname[:d]
110 try:
110 try:
111 obj = __import__(modname)
111 obj = __import__(modname)
112 except ImportError:
112 except ImportError:
113 try:
113 try:
114 # extensions are loaded with hgext_ prefix
114 # extensions are loaded with hgext_ prefix
115 obj = __import__("hgext_%s" % modname)
115 obj = __import__("hgext_%s" % modname)
116 except ImportError:
116 except ImportError:
117 raise util.Abort(_('%s hook is invalid '
117 raise util.Abort(_('%s hook is invalid '
118 '(import of "%s" failed)') %
118 '(import of "%s" failed)') %
119 (hname, modname))
119 (hname, modname))
120 try:
120 try:
121 for p in funcname.split('.')[1:]:
121 for p in funcname.split('.')[1:]:
122 obj = getattr(obj, p)
122 obj = getattr(obj, p)
123 except AttributeError, err:
123 except AttributeError, err:
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not defined)') %
125 '("%s" is not defined)') %
126 (hname, funcname))
126 (hname, funcname))
127 if not callable(obj):
127 if not callable(obj):
128 raise util.Abort(_('%s hook is invalid '
128 raise util.Abort(_('%s hook is invalid '
129 '("%s" is not callable)') %
129 '("%s" is not callable)') %
130 (hname, funcname))
130 (hname, funcname))
131 try:
131 try:
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
133 except (KeyboardInterrupt, util.SignalInterrupt):
133 except (KeyboardInterrupt, util.SignalInterrupt):
134 raise
134 raise
135 except Exception, exc:
135 except Exception, exc:
136 if isinstance(exc, util.Abort):
136 if isinstance(exc, util.Abort):
137 self.ui.warn(_('error: %s hook failed: %s\n') %
137 self.ui.warn(_('error: %s hook failed: %s\n') %
138 (hname, exc.args[0]))
138 (hname, exc.args[0]))
139 else:
139 else:
140 self.ui.warn(_('error: %s hook raised an exception: '
140 self.ui.warn(_('error: %s hook raised an exception: '
141 '%s\n') % (hname, exc))
141 '%s\n') % (hname, exc))
142 if throw:
142 if throw:
143 raise
143 raise
144 self.ui.print_exc()
144 self.ui.print_exc()
145 return True
145 return True
146 if r:
146 if r:
147 if throw:
147 if throw:
148 raise util.Abort(_('%s hook failed') % hname)
148 raise util.Abort(_('%s hook failed') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
150 return r
150 return r
151
151
152 def runhook(name, cmd):
152 def runhook(name, cmd):
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
155 r = util.system(cmd, environ=env, cwd=self.root)
155 r = util.system(cmd, environ=env, cwd=self.root)
156 if r:
156 if r:
157 desc, r = util.explain_exit(r)
157 desc, r = util.explain_exit(r)
158 if throw:
158 if throw:
159 raise util.Abort(_('%s hook %s') % (name, desc))
159 raise util.Abort(_('%s hook %s') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
161 return r
161 return r
162
162
163 r = False
163 r = False
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
165 if hname.split(".", 1)[0] == name and cmd]
165 if hname.split(".", 1)[0] == name and cmd]
166 hooks.sort()
166 hooks.sort()
167 for hname, cmd in hooks:
167 for hname, cmd in hooks:
168 if cmd.startswith('python:'):
168 if cmd.startswith('python:'):
169 r = callhook(hname, cmd[7:].strip()) or r
169 r = callhook(hname, cmd[7:].strip()) or r
170 else:
170 else:
171 r = runhook(hname, cmd) or r
171 r = runhook(hname, cmd) or r
172 return r
172 return r
173
173
174 tag_disallowed = ':\r\n'
174 tag_disallowed = ':\r\n'
175
175
176 def tag(self, name, node, message, local, user, date):
176 def tag(self, name, node, message, local, user, date):
177 '''tag a revision with a symbolic name.
177 '''tag a revision with a symbolic name.
178
178
179 if local is True, the tag is stored in a per-repository file.
179 if local is True, the tag is stored in a per-repository file.
180 otherwise, it is stored in the .hgtags file, and a new
180 otherwise, it is stored in the .hgtags file, and a new
181 changeset is committed with the change.
181 changeset is committed with the change.
182
182
183 keyword arguments:
183 keyword arguments:
184
184
185 local: whether to store tag in non-version-controlled file
185 local: whether to store tag in non-version-controlled file
186 (default False)
186 (default False)
187
187
188 message: commit message to use if committing
188 message: commit message to use if committing
189
189
190 user: name of user to use if committing
190 user: name of user to use if committing
191
191
192 date: date tuple to use if committing'''
192 date: date tuple to use if committing'''
193
193
194 for c in self.tag_disallowed:
194 for c in self.tag_disallowed:
195 if c in name:
195 if c in name:
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
197
197
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
199
199
200 if local:
200 if local:
201 # local tags are stored in the current charset
201 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
203 return
204 return
204
205
205 for x in self.status()[:5]:
206 for x in self.status()[:5]:
206 if '.hgtags' in x:
207 if '.hgtags' in x:
207 raise util.Abort(_('working copy of .hgtags is changed '
208 raise util.Abort(_('working copy of .hgtags is changed '
208 '(please commit .hgtags manually)'))
209 '(please commit .hgtags manually)'))
209
210
210 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
211 # committed tags are stored in UTF-8
212 line = '%s %s\n' % (hex(node), util.fromlocal(name))
213 self.wfile('.hgtags', 'ab').write(line)
211 if self.dirstate.state('.hgtags') == '?':
214 if self.dirstate.state('.hgtags') == '?':
212 self.add(['.hgtags'])
215 self.add(['.hgtags'])
213
216
214 self.commit(['.hgtags'], message, user, date)
217 self.commit(['.hgtags'], message, user, date)
215 self.hook('tag', node=hex(node), tag=name, local=local)
218 self.hook('tag', node=hex(node), tag=name, local=local)
216
219
217 def tags(self):
220 def tags(self):
218 '''return a mapping of tag to node'''
221 '''return a mapping of tag to node'''
219 if not self.tagscache:
222 if not self.tagscache:
220 self.tagscache = {}
223 self.tagscache = {}
221
224
222 def parsetag(line, context):
225 def parsetag(line, context):
223 if not line:
226 if not line:
224 return
227 return
225 s = line.split(" ", 1)
228 s = line.split(" ", 1)
226 if len(s) != 2:
229 if len(s) != 2:
227 self.ui.warn(_("%s: cannot parse entry\n") % context)
230 self.ui.warn(_("%s: cannot parse entry\n") % context)
228 return
231 return
229 node, key = s
232 node, key = s
230 key = key.strip()
233 key = util.tolocal(key.strip()) # stored in UTF-8
231 try:
234 try:
232 bin_n = bin(node)
235 bin_n = bin(node)
233 except TypeError:
236 except TypeError:
234 self.ui.warn(_("%s: node '%s' is not well formed\n") %
237 self.ui.warn(_("%s: node '%s' is not well formed\n") %
235 (context, node))
238 (context, node))
236 return
239 return
237 if bin_n not in self.changelog.nodemap:
240 if bin_n not in self.changelog.nodemap:
238 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
241 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
239 (context, key))
242 (context, key))
240 return
243 return
241 self.tagscache[key] = bin_n
244 self.tagscache[key] = bin_n
242
245
243 # read the tags file from each head, ending with the tip,
246 # read the tags file from each head, ending with the tip,
244 # and add each tag found to the map, with "newer" ones
247 # and add each tag found to the map, with "newer" ones
245 # taking precedence
248 # taking precedence
246 f = None
249 f = None
247 for rev, node, fnode in self._hgtagsnodes():
250 for rev, node, fnode in self._hgtagsnodes():
248 f = (f and f.filectx(fnode) or
251 f = (f and f.filectx(fnode) or
249 self.filectx('.hgtags', fileid=fnode))
252 self.filectx('.hgtags', fileid=fnode))
250 count = 0
253 count = 0
251 for l in f.data().splitlines():
254 for l in f.data().splitlines():
252 count += 1
255 count += 1
253 parsetag(l, _("%s, line %d") % (str(f), count))
256 parsetag(l, _("%s, line %d") % (str(f), count))
254
257
255 try:
258 try:
256 f = self.opener("localtags")
259 f = self.opener("localtags")
257 count = 0
260 count = 0
258 for l in f:
261 for l in f:
262 # localtags are stored in the local character set
263 # while the internal tag table is stored in UTF-8
264 l = util.fromlocal(l)
259 count += 1
265 count += 1
260 parsetag(l, _("localtags, line %d") % count)
266 parsetag(l, _("localtags, line %d") % count)
261 except IOError:
267 except IOError:
262 pass
268 pass
263
269
264 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
265
271
266 return self.tagscache
272 return self.tagscache
267
273
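The transcoding added in this changeset keeps committed .hgtags entries in UTF-8 while localtags and the in-memory table use the local character set, converting on write (util.fromlocal) and on read (util.tolocal). A rough Python 2 illustration of that round trip, pretending the local charset is latin-1 and ignoring the error handling the real helpers provide:

    LOCAL = 'latin-1'                      # stand-in for the real local encoding

    def fromlocal(s):                      # local charset -> UTF-8 (writing .hgtags)
        return s.decode(LOCAL).encode('utf-8')

    def tolocal(s):                        # UTF-8 -> local charset (reading .hgtags)
        return s.decode('utf-8').encode(LOCAL)

    name = '\xe9tiquette'                  # "etiquette" with an accented e, in latin-1
    line = '%s %s\n' % ('0' * 40, fromlocal(name))         # as stored in .hgtags
    assert tolocal(line.split(' ', 1)[1].strip()) == name  # back to the local form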
268 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
269 heads = self.heads()
275 heads = self.heads()
270 heads.reverse()
276 heads.reverse()
271 last = {}
277 last = {}
272 ret = []
278 ret = []
273 for node in heads:
279 for node in heads:
274 c = self.changectx(node)
280 c = self.changectx(node)
275 rev = c.rev()
281 rev = c.rev()
276 try:
282 try:
277 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
278 except repo.LookupError:
284 except repo.LookupError:
279 continue
285 continue
280 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
281 if fnode in last:
287 if fnode in last:
282 ret[last[fnode]] = None
288 ret[last[fnode]] = None
283 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
284 return [item for item in ret if item]
290 return [item for item in ret if item]
285
291
286 def tagslist(self):
292 def tagslist(self):
287 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
288 l = []
294 l = []
289 for t, n in self.tags().items():
295 for t, n in self.tags().items():
290 try:
296 try:
291 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
292 except:
298 except:
293 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
294 l.append((r, t, n))
300 l.append((r, t, n))
295 l.sort()
301 l.sort()
296 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
297
303
298 def nodetags(self, node):
304 def nodetags(self, node):
299 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
300 if not self.nodetagscache:
306 if not self.nodetagscache:
301 self.nodetagscache = {}
307 self.nodetagscache = {}
302 for t, n in self.tags().items():
308 for t, n in self.tags().items():
303 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
304 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
305
311
306 def branchtags(self):
312 def branchtags(self):
307 if self.branchcache != None:
313 if self.branchcache != None:
308 return self.branchcache
314 return self.branchcache
309
315
310 self.branchcache = {} # avoid recursion in changectx
316 self.branchcache = {} # avoid recursion in changectx
311
317
312 partial, last, lrev = self._readbranchcache()
318 partial, last, lrev = self._readbranchcache()
313
319
314 tiprev = self.changelog.count() - 1
320 tiprev = self.changelog.count() - 1
315 if lrev != tiprev:
321 if lrev != tiprev:
316 self._updatebranchcache(partial, lrev+1, tiprev+1)
322 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 self._writebranchcache(partial, self.changelog.tip(), tiprev)
323 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318
324
319 self.branchcache = partial
325 self.branchcache = partial
320 return self.branchcache
326 return self.branchcache
321
327
322 def _readbranchcache(self):
328 def _readbranchcache(self):
323 partial = {}
329 partial = {}
324 try:
330 try:
325 f = self.opener("branches.cache")
331 f = self.opener("branches.cache")
326 lines = f.read().split('\n')
332 lines = f.read().split('\n')
327 f.close()
333 f.close()
328 last, lrev = lines.pop(0).rstrip().split(" ", 1)
334 last, lrev = lines.pop(0).rstrip().split(" ", 1)
329 last, lrev = bin(last), int(lrev)
335 last, lrev = bin(last), int(lrev)
330 if not (lrev < self.changelog.count() and
336 if not (lrev < self.changelog.count() and
331 self.changelog.node(lrev) == last): # sanity check
337 self.changelog.node(lrev) == last): # sanity check
332 # invalidate the cache
338 # invalidate the cache
333 raise ValueError('Invalid branch cache: unknown tip')
339 raise ValueError('Invalid branch cache: unknown tip')
334 for l in lines:
340 for l in lines:
335 if not l: continue
341 if not l: continue
336 node, label = l.rstrip().split(" ", 1)
342 node, label = l.rstrip().split(" ", 1)
337 partial[label] = bin(node)
343 partial[label] = bin(node)
338 except (KeyboardInterrupt, util.SignalInterrupt):
344 except (KeyboardInterrupt, util.SignalInterrupt):
339 raise
345 raise
340 except Exception, inst:
346 except Exception, inst:
341 if self.ui.debugflag:
347 if self.ui.debugflag:
342 self.ui.warn(str(inst), '\n')
348 self.ui.warn(str(inst), '\n')
343 partial, last, lrev = {}, nullid, nullrev
349 partial, last, lrev = {}, nullid, nullrev
344 return partial, last, lrev
350 return partial, last, lrev
345
351
346 def _writebranchcache(self, branches, tip, tiprev):
352 def _writebranchcache(self, branches, tip, tiprev):
347 try:
353 try:
348 f = self.opener("branches.cache", "w")
354 f = self.opener("branches.cache", "w")
349 f.write("%s %s\n" % (hex(tip), tiprev))
355 f.write("%s %s\n" % (hex(tip), tiprev))
350 for label, node in branches.iteritems():
356 for label, node in branches.iteritems():
351 f.write("%s %s\n" % (hex(node), label))
357 f.write("%s %s\n" % (hex(node), label))
352 except IOError:
358 except IOError:
353 pass
359 pass
354
360
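# Illustrative sketch (not part of this changeset): given the writer above, a
# branches.cache file looks roughly like
#
#     <tip node in hex> <tip rev>
#     <node in hex> <branch label>
#     <node in hex> <branch label>
#
# and _readbranchcache() throws the whole file away (the ValueError path) when
# the recorded tip no longer matches the changelog.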
355 def _updatebranchcache(self, partial, start, end):
361 def _updatebranchcache(self, partial, start, end):
356 for r in xrange(start, end):
362 for r in xrange(start, end):
357 c = self.changectx(r)
363 c = self.changectx(r)
358 b = c.branch()
364 b = c.branch()
359 if b:
365 if b:
360 partial[b] = c.node()
366 partial[b] = c.node()
361
367
362 def lookup(self, key):
368 def lookup(self, key):
363 if key == '.':
369 if key == '.':
364 key = self.dirstate.parents()[0]
370 key = self.dirstate.parents()[0]
365 if key == nullid:
371 if key == nullid:
366 raise repo.RepoError(_("no revision checked out"))
372 raise repo.RepoError(_("no revision checked out"))
367 n = self.changelog._match(key)
373 n = self.changelog._match(key)
368 if n:
374 if n:
369 return n
375 return n
370 if key in self.tags():
376 if key in self.tags():
371 return self.tags()[key]
377 return self.tags()[key]
372 if key in self.branchtags():
378 if key in self.branchtags():
373 return self.branchtags()[key]
379 return self.branchtags()[key]
374 n = self.changelog._partialmatch(key)
380 n = self.changelog._partialmatch(key)
375 if n:
381 if n:
376 return n
382 return n
377 raise repo.RepoError(_("unknown revision '%s'") % key)
383 raise repo.RepoError(_("unknown revision '%s'") % key)
378
384
379 def dev(self):
385 def dev(self):
380 return os.lstat(self.path).st_dev
386 return os.lstat(self.path).st_dev
381
387
382 def local(self):
388 def local(self):
383 return True
389 return True
384
390
385 def join(self, f):
391 def join(self, f):
386 return os.path.join(self.path, f)
392 return os.path.join(self.path, f)
387
393
388 def sjoin(self, f):
394 def sjoin(self, f):
389 return os.path.join(self.path, f)
395 return os.path.join(self.path, f)
390
396
391 def wjoin(self, f):
397 def wjoin(self, f):
392 return os.path.join(self.root, f)
398 return os.path.join(self.root, f)
393
399
394 def file(self, f):
400 def file(self, f):
395 if f[0] == '/':
401 if f[0] == '/':
396 f = f[1:]
402 f = f[1:]
397 return filelog.filelog(self.sopener, f, self.revlogversion)
403 return filelog.filelog(self.sopener, f, self.revlogversion)
398
404
399 def changectx(self, changeid=None):
405 def changectx(self, changeid=None):
400 return context.changectx(self, changeid)
406 return context.changectx(self, changeid)
401
407
402 def workingctx(self):
408 def workingctx(self):
403 return context.workingctx(self)
409 return context.workingctx(self)
404
410
405 def parents(self, changeid=None):
411 def parents(self, changeid=None):
406 '''
412 '''
407 get list of changectxs for parents of changeid or working directory
413 get list of changectxs for parents of changeid or working directory
408 '''
414 '''
409 if changeid is None:
415 if changeid is None:
410 pl = self.dirstate.parents()
416 pl = self.dirstate.parents()
411 else:
417 else:
412 n = self.changelog.lookup(changeid)
418 n = self.changelog.lookup(changeid)
413 pl = self.changelog.parents(n)
419 pl = self.changelog.parents(n)
414 if pl[1] == nullid:
420 if pl[1] == nullid:
415 return [self.changectx(pl[0])]
421 return [self.changectx(pl[0])]
416 return [self.changectx(pl[0]), self.changectx(pl[1])]
422 return [self.changectx(pl[0]), self.changectx(pl[1])]
417
423
418 def filectx(self, path, changeid=None, fileid=None):
424 def filectx(self, path, changeid=None, fileid=None):
419 """changeid can be a changeset revision, node, or tag.
425 """changeid can be a changeset revision, node, or tag.
420 fileid can be a file revision or node."""
426 fileid can be a file revision or node."""
421 return context.filectx(self, path, changeid, fileid)
427 return context.filectx(self, path, changeid, fileid)
422
428
423 def getcwd(self):
429 def getcwd(self):
424 return self.dirstate.getcwd()
430 return self.dirstate.getcwd()
425
431
426 def wfile(self, f, mode='r'):
432 def wfile(self, f, mode='r'):
427 return self.wopener(f, mode)
433 return self.wopener(f, mode)
428
434
429 def wread(self, filename):
435 def wread(self, filename):
430 if self.encodepats == None:
436 if self.encodepats == None:
431 l = []
437 l = []
432 for pat, cmd in self.ui.configitems("encode"):
438 for pat, cmd in self.ui.configitems("encode"):
433 mf = util.matcher(self.root, "", [pat], [], [])[1]
439 mf = util.matcher(self.root, "", [pat], [], [])[1]
434 l.append((mf, cmd))
440 l.append((mf, cmd))
435 self.encodepats = l
441 self.encodepats = l
436
442
437 data = self.wopener(filename, 'r').read()
443 data = self.wopener(filename, 'r').read()
438
444
439 for mf, cmd in self.encodepats:
445 for mf, cmd in self.encodepats:
440 if mf(filename):
446 if mf(filename):
441 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
447 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
442 data = util.filter(data, cmd)
448 data = util.filter(data, cmd)
443 break
449 break
444
450
445 return data
451 return data
446
452
447 def wwrite(self, filename, data, fd=None):
453 def wwrite(self, filename, data, fd=None):
448 if self.decodepats == None:
454 if self.decodepats == None:
449 l = []
455 l = []
450 for pat, cmd in self.ui.configitems("decode"):
456 for pat, cmd in self.ui.configitems("decode"):
451 mf = util.matcher(self.root, "", [pat], [], [])[1]
457 mf = util.matcher(self.root, "", [pat], [], [])[1]
452 l.append((mf, cmd))
458 l.append((mf, cmd))
453 self.decodepats = l
459 self.decodepats = l
454
460
455 for mf, cmd in self.decodepats:
461 for mf, cmd in self.decodepats:
456 if mf(filename):
462 if mf(filename):
457 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
463 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
458 data = util.filter(data, cmd)
464 data = util.filter(data, cmd)
459 break
465 break
460
466
461 if fd:
467 if fd:
462 return fd.write(data)
468 return fd.write(data)
463 return self.wopener(filename, 'w').write(data)
469 return self.wopener(filename, 'w').write(data)
464
470
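# Illustrative sketch (not part of this changeset): the [encode]/[decode]
# pattern lists read above come straight from the configuration; a hypothetical
# setup might look like
#
#     [encode]
#     **.txt = cleanup-filter      # made-up command, run via util.filter()
#     [decode]
#     **.txt = expand-filter
#
# wread() pipes working-directory data through the matching [encode] command,
# wwrite() runs the matching [decode] command before writing back out.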
465 def transaction(self):
471 def transaction(self):
466 tr = self.transhandle
472 tr = self.transhandle
467 if tr != None and tr.running():
473 if tr != None and tr.running():
468 return tr.nest()
474 return tr.nest()
469
475
470 # save dirstate for rollback
476 # save dirstate for rollback
471 try:
477 try:
472 ds = self.opener("dirstate").read()
478 ds = self.opener("dirstate").read()
473 except IOError:
479 except IOError:
474 ds = ""
480 ds = ""
475 self.opener("journal.dirstate", "w").write(ds)
481 self.opener("journal.dirstate", "w").write(ds)
476
482
477 tr = transaction.transaction(self.ui.warn, self.sopener,
483 tr = transaction.transaction(self.ui.warn, self.sopener,
478 self.sjoin("journal"),
484 self.sjoin("journal"),
479 aftertrans(self.path))
485 aftertrans(self.path))
480 self.transhandle = tr
486 self.transhandle = tr
481 return tr
487 return tr
482
488
483 def recover(self):
489 def recover(self):
484 l = self.lock()
490 l = self.lock()
485 if os.path.exists(self.sjoin("journal")):
491 if os.path.exists(self.sjoin("journal")):
486 self.ui.status(_("rolling back interrupted transaction\n"))
492 self.ui.status(_("rolling back interrupted transaction\n"))
487 transaction.rollback(self.sopener, self.sjoin("journal"))
493 transaction.rollback(self.sopener, self.sjoin("journal"))
488 self.reload()
494 self.reload()
489 return True
495 return True
490 else:
496 else:
491 self.ui.warn(_("no interrupted transaction available\n"))
497 self.ui.warn(_("no interrupted transaction available\n"))
492 return False
498 return False
493
499
494 def rollback(self, wlock=None):
500 def rollback(self, wlock=None):
495 if not wlock:
501 if not wlock:
496 wlock = self.wlock()
502 wlock = self.wlock()
497 l = self.lock()
503 l = self.lock()
498 if os.path.exists(self.sjoin("undo")):
504 if os.path.exists(self.sjoin("undo")):
499 self.ui.status(_("rolling back last transaction\n"))
505 self.ui.status(_("rolling back last transaction\n"))
500 transaction.rollback(self.sopener, self.sjoin("undo"))
506 transaction.rollback(self.sopener, self.sjoin("undo"))
501 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
507 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
502 self.reload()
508 self.reload()
503 self.wreload()
509 self.wreload()
504 else:
510 else:
505 self.ui.warn(_("no rollback information available\n"))
511 self.ui.warn(_("no rollback information available\n"))
506
512
507 def wreload(self):
513 def wreload(self):
508 self.dirstate.read()
514 self.dirstate.read()
509
515
510 def reload(self):
516 def reload(self):
511 self.changelog.load()
517 self.changelog.load()
512 self.manifest.load()
518 self.manifest.load()
513 self.tagscache = None
519 self.tagscache = None
514 self.nodetagscache = None
520 self.nodetagscache = None
515
521
516 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
522 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
517 desc=None):
523 desc=None):
518 try:
524 try:
519 l = lock.lock(lockname, 0, releasefn, desc=desc)
525 l = lock.lock(lockname, 0, releasefn, desc=desc)
520 except lock.LockHeld, inst:
526 except lock.LockHeld, inst:
521 if not wait:
527 if not wait:
522 raise
528 raise
523 self.ui.warn(_("waiting for lock on %s held by %r\n") %
529 self.ui.warn(_("waiting for lock on %s held by %r\n") %
524 (desc, inst.locker))
530 (desc, inst.locker))
525 # default to 600 seconds timeout
531 # default to 600 seconds timeout
526 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
532 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
527 releasefn, desc=desc)
533 releasefn, desc=desc)
528 if acquirefn:
534 if acquirefn:
529 acquirefn()
535 acquirefn()
530 return l
536 return l
531
537
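# Illustrative sketch (not part of this changeset): the retry path above
# honours the ui.timeout setting, so a configuration such as
#
#     [ui]
#     timeout = 60
#
# makes a blocked lock stop waiting after roughly a minute instead of the
# 600-second default used above.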
532 def lock(self, wait=1):
538 def lock(self, wait=1):
533 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
539 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
534 desc=_('repository %s') % self.origroot)
540 desc=_('repository %s') % self.origroot)
535
541
536 def wlock(self, wait=1):
542 def wlock(self, wait=1):
537 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
543 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
538 self.wreload,
544 self.wreload,
539 desc=_('working directory of %s') % self.origroot)
545 desc=_('working directory of %s') % self.origroot)
540
546
541 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
547 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
542 """
548 """
543 commit an individual file as part of a larger transaction
549 commit an individual file as part of a larger transaction
544 """
550 """
545
551
546 t = self.wread(fn)
552 t = self.wread(fn)
547 fl = self.file(fn)
553 fl = self.file(fn)
548 fp1 = manifest1.get(fn, nullid)
554 fp1 = manifest1.get(fn, nullid)
549 fp2 = manifest2.get(fn, nullid)
555 fp2 = manifest2.get(fn, nullid)
550
556
551 meta = {}
557 meta = {}
552 cp = self.dirstate.copied(fn)
558 cp = self.dirstate.copied(fn)
553 if cp:
559 if cp:
554 meta["copy"] = cp
560 meta["copy"] = cp
555 if not manifest2: # not a branch merge
561 if not manifest2: # not a branch merge
556 meta["copyrev"] = hex(manifest1.get(cp, nullid))
562 meta["copyrev"] = hex(manifest1.get(cp, nullid))
557 fp2 = nullid
563 fp2 = nullid
558 elif fp2 != nullid: # copied on remote side
564 elif fp2 != nullid: # copied on remote side
559 meta["copyrev"] = hex(manifest1.get(cp, nullid))
565 meta["copyrev"] = hex(manifest1.get(cp, nullid))
560 elif fp1 != nullid: # copied on local side, reversed
566 elif fp1 != nullid: # copied on local side, reversed
561 meta["copyrev"] = hex(manifest2.get(cp))
567 meta["copyrev"] = hex(manifest2.get(cp))
562 fp2 = nullid
568 fp2 = nullid
563 else: # directory rename
569 else: # directory rename
564 meta["copyrev"] = hex(manifest1.get(cp, nullid))
570 meta["copyrev"] = hex(manifest1.get(cp, nullid))
565 self.ui.debug(_(" %s: copy %s:%s\n") %
571 self.ui.debug(_(" %s: copy %s:%s\n") %
566 (fn, cp, meta["copyrev"]))
572 (fn, cp, meta["copyrev"]))
567 fp1 = nullid
573 fp1 = nullid
568 elif fp2 != nullid:
574 elif fp2 != nullid:
569 # is one parent an ancestor of the other?
575 # is one parent an ancestor of the other?
570 fpa = fl.ancestor(fp1, fp2)
576 fpa = fl.ancestor(fp1, fp2)
571 if fpa == fp1:
577 if fpa == fp1:
572 fp1, fp2 = fp2, nullid
578 fp1, fp2 = fp2, nullid
573 elif fpa == fp2:
579 elif fpa == fp2:
574 fp2 = nullid
580 fp2 = nullid
575
581
576 # is the file unmodified from the parent? report existing entry
582 # is the file unmodified from the parent? report existing entry
577 if fp2 == nullid and not fl.cmp(fp1, t):
583 if fp2 == nullid and not fl.cmp(fp1, t):
578 return fp1
584 return fp1
579
585
580 changelist.append(fn)
586 changelist.append(fn)
581 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
587 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
582
588
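# Illustrative sketch (not part of this changeset): for a file committed as a
# copy, the metadata built above ends up as something like
#
#     meta = {"copy": "old/name.c",              # hypothetical source path
#             "copyrev": hex(manifest1.get("old/name.c", nullid))}
#
# with fp1/fp2 adjusted (one side forced to nullid) depending on which parent
# the copy came from.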
583 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
589 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
584 if p1 is None:
590 if p1 is None:
585 p1, p2 = self.dirstate.parents()
591 p1, p2 = self.dirstate.parents()
586 return self.commit(files=files, text=text, user=user, date=date,
592 return self.commit(files=files, text=text, user=user, date=date,
587 p1=p1, p2=p2, wlock=wlock)
593 p1=p1, p2=p2, wlock=wlock)
588
594
589 def commit(self, files=None, text="", user=None, date=None,
595 def commit(self, files=None, text="", user=None, date=None,
590 match=util.always, force=False, lock=None, wlock=None,
596 match=util.always, force=False, lock=None, wlock=None,
591 force_editor=False, p1=None, p2=None, extra={}):
597 force_editor=False, p1=None, p2=None, extra={}):
592
598
593 commit = []
599 commit = []
594 remove = []
600 remove = []
595 changed = []
601 changed = []
596 use_dirstate = (p1 is None) # not rawcommit
602 use_dirstate = (p1 is None) # not rawcommit
597 extra = extra.copy()
603 extra = extra.copy()
598
604
599 if use_dirstate:
605 if use_dirstate:
600 if files:
606 if files:
601 for f in files:
607 for f in files:
602 s = self.dirstate.state(f)
608 s = self.dirstate.state(f)
603 if s in 'nmai':
609 if s in 'nmai':
604 commit.append(f)
610 commit.append(f)
605 elif s == 'r':
611 elif s == 'r':
606 remove.append(f)
612 remove.append(f)
607 else:
613 else:
608 self.ui.warn(_("%s not tracked!\n") % f)
614 self.ui.warn(_("%s not tracked!\n") % f)
609 else:
615 else:
610 changes = self.status(match=match)[:5]
616 changes = self.status(match=match)[:5]
611 modified, added, removed, deleted, unknown = changes
617 modified, added, removed, deleted, unknown = changes
612 commit = modified + added
618 commit = modified + added
613 remove = removed
619 remove = removed
614 else:
620 else:
615 commit = files
621 commit = files
616
622
617 if use_dirstate:
623 if use_dirstate:
618 p1, p2 = self.dirstate.parents()
624 p1, p2 = self.dirstate.parents()
619 update_dirstate = True
625 update_dirstate = True
620 else:
626 else:
621 p1, p2 = p1, p2 or nullid
627 p1, p2 = p1, p2 or nullid
622 update_dirstate = (self.dirstate.parents()[0] == p1)
628 update_dirstate = (self.dirstate.parents()[0] == p1)
623
629
624 c1 = self.changelog.read(p1)
630 c1 = self.changelog.read(p1)
625 c2 = self.changelog.read(p2)
631 c2 = self.changelog.read(p2)
626 m1 = self.manifest.read(c1[0]).copy()
632 m1 = self.manifest.read(c1[0]).copy()
627 m2 = self.manifest.read(c2[0])
633 m2 = self.manifest.read(c2[0])
628
634
629 if use_dirstate:
635 if use_dirstate:
630 branchname = self.workingctx().branch()
636 branchname = self.workingctx().branch()
631 else:
637 else:
632 branchname = ""
638 branchname = ""
633
639
634 if use_dirstate:
640 if use_dirstate:
635 oldname = c1[5].get("branch", "")
641 oldname = c1[5].get("branch", "")
636 if not commit and not remove and not force and p2 == nullid and \
642 if not commit and not remove and not force and p2 == nullid and \
637 branchname == oldname:
643 branchname == oldname:
638 self.ui.status(_("nothing changed\n"))
644 self.ui.status(_("nothing changed\n"))
639 return None
645 return None
640
646
641 xp1 = hex(p1)
647 xp1 = hex(p1)
642 if p2 == nullid: xp2 = ''
648 if p2 == nullid: xp2 = ''
643 else: xp2 = hex(p2)
649 else: xp2 = hex(p2)
644
650
645 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
651 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
646
652
647 if not wlock:
653 if not wlock:
648 wlock = self.wlock()
654 wlock = self.wlock()
649 if not lock:
655 if not lock:
650 lock = self.lock()
656 lock = self.lock()
651 tr = self.transaction()
657 tr = self.transaction()
652
658
653 # check in files
659 # check in files
654 new = {}
660 new = {}
655 linkrev = self.changelog.count()
661 linkrev = self.changelog.count()
656 commit.sort()
662 commit.sort()
657 for f in commit:
663 for f in commit:
658 self.ui.note(f + "\n")
664 self.ui.note(f + "\n")
659 try:
665 try:
660 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
666 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
661 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
667 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
662 except IOError:
668 except IOError:
663 if use_dirstate:
669 if use_dirstate:
664 self.ui.warn(_("trouble committing %s!\n") % f)
670 self.ui.warn(_("trouble committing %s!\n") % f)
665 raise
671 raise
666 else:
672 else:
667 remove.append(f)
673 remove.append(f)
668
674
669 # update manifest
675 # update manifest
670 m1.update(new)
676 m1.update(new)
671 remove.sort()
677 remove.sort()
672
678
673 for f in remove:
679 for f in remove:
674 if f in m1:
680 if f in m1:
675 del m1[f]
681 del m1[f]
676 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
682 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
677
683
678 # add changeset
684 # add changeset
679 new = new.keys()
685 new = new.keys()
680 new.sort()
686 new.sort()
681
687
682 user = user or self.ui.username()
688 user = user or self.ui.username()
683 if not text or force_editor:
689 if not text or force_editor:
684 edittext = []
690 edittext = []
685 if text:
691 if text:
686 edittext.append(text)
692 edittext.append(text)
687 edittext.append("")
693 edittext.append("")
688 edittext.append("HG: user: %s" % user)
694 edittext.append("HG: user: %s" % user)
689 if p2 != nullid:
695 if p2 != nullid:
690 edittext.append("HG: branch merge")
696 edittext.append("HG: branch merge")
691 edittext.extend(["HG: changed %s" % f for f in changed])
697 edittext.extend(["HG: changed %s" % f for f in changed])
692 edittext.extend(["HG: removed %s" % f for f in remove])
698 edittext.extend(["HG: removed %s" % f for f in remove])
693 if not changed and not remove:
699 if not changed and not remove:
694 edittext.append("HG: no files changed")
700 edittext.append("HG: no files changed")
695 edittext.append("")
701 edittext.append("")
696 # run editor in the repository root
702 # run editor in the repository root
697 olddir = os.getcwd()
703 olddir = os.getcwd()
698 os.chdir(self.root)
704 os.chdir(self.root)
699 text = self.ui.edit("\n".join(edittext), user)
705 text = self.ui.edit("\n".join(edittext), user)
700 os.chdir(olddir)
706 os.chdir(olddir)
701
707
702 lines = [line.rstrip() for line in text.rstrip().splitlines()]
708 lines = [line.rstrip() for line in text.rstrip().splitlines()]
703 while lines and not lines[0]:
709 while lines and not lines[0]:
704 del lines[0]
710 del lines[0]
705 if not lines:
711 if not lines:
706 return None
712 return None
707 text = '\n'.join(lines)
713 text = '\n'.join(lines)
708 if branchname:
714 if branchname:
709 extra["branch"] = branchname
715 extra["branch"] = branchname
710 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
716 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
711 user, date, extra)
717 user, date, extra)
712 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
718 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
713 parent2=xp2)
719 parent2=xp2)
714 tr.close()
720 tr.close()
715
721
716 if use_dirstate or update_dirstate:
722 if use_dirstate or update_dirstate:
717 self.dirstate.setparents(n)
723 self.dirstate.setparents(n)
718 if use_dirstate:
724 if use_dirstate:
719 self.dirstate.update(new, "n")
725 self.dirstate.update(new, "n")
720 self.dirstate.forget(remove)
726 self.dirstate.forget(remove)
721
727
722 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
728 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
723 return n
729 return n
724
730
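# Illustrative sketch (not part of this changeset): the two entry points above
# boil down to calls like
#
#     repo.commit(text="update docs", user="Jane Doe <jane@example.org>")
#     repo.rawcommit(files, text, user, date, p1, p2)   # explicit parents
#
# (message, user and the variables are made up); rawcommit() forwards to
# commit() with p1/p2 supplied, which makes use_dirstate False and skips most
# of the dirstate bookkeeping.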
725 def walk(self, node=None, files=[], match=util.always, badmatch=None):
731 def walk(self, node=None, files=[], match=util.always, badmatch=None):
726 '''
732 '''
727 walk recursively through the directory tree or a given
733 walk recursively through the directory tree or a given
728 changeset, finding all files matched by the match
734 changeset, finding all files matched by the match
729 function
735 function
730
736
731 results are yielded in a tuple (src, filename), where src
737 results are yielded in a tuple (src, filename), where src
732 is one of:
738 is one of:
733 'f' the file was found in the directory tree
739 'f' the file was found in the directory tree
734 'm' the file was only in the dirstate and not in the tree
740 'm' the file was only in the dirstate and not in the tree
735 'b' file was not found and matched badmatch
741 'b' file was not found and matched badmatch
736 '''
742 '''
737
743
738 if node:
744 if node:
739 fdict = dict.fromkeys(files)
745 fdict = dict.fromkeys(files)
740 for fn in self.manifest.read(self.changelog.read(node)[0]):
746 for fn in self.manifest.read(self.changelog.read(node)[0]):
741 for ffn in fdict:
747 for ffn in fdict:
742 # match if the file is the exact name or a directory
748 # match if the file is the exact name or a directory
743 if ffn == fn or fn.startswith("%s/" % ffn):
749 if ffn == fn or fn.startswith("%s/" % ffn):
744 del fdict[ffn]
750 del fdict[ffn]
745 break
751 break
746 if match(fn):
752 if match(fn):
747 yield 'm', fn
753 yield 'm', fn
748 for fn in fdict:
754 for fn in fdict:
749 if badmatch and badmatch(fn):
755 if badmatch and badmatch(fn):
750 if match(fn):
756 if match(fn):
751 yield 'b', fn
757 yield 'b', fn
752 else:
758 else:
753 self.ui.warn(_('%s: No such file in rev %s\n') % (
759 self.ui.warn(_('%s: No such file in rev %s\n') % (
754 util.pathto(self.getcwd(), fn), short(node)))
760 util.pathto(self.getcwd(), fn), short(node)))
755 else:
761 else:
756 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
762 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
757 yield src, fn
763 yield src, fn
758
764
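# Illustrative sketch (not part of this changeset): a caller typically just
# iterates the (src, filename) pairs yielded above, e.g.
#
#     for src, fn in repo.walk(files=['src']):
#         if src == 'b':        # 'b' entries appear when a badmatch callback is given
#             continue
#         handle(fn)            # handle() and the 'src' path are made up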
759 def status(self, node1=None, node2=None, files=[], match=util.always,
765 def status(self, node1=None, node2=None, files=[], match=util.always,
760 wlock=None, list_ignored=False, list_clean=False):
766 wlock=None, list_ignored=False, list_clean=False):
761 """return status of files between two nodes or node and working directory
767 """return status of files between two nodes or node and working directory
762
768
763 If node1 is None, use the first dirstate parent instead.
769 If node1 is None, use the first dirstate parent instead.
764 If node2 is None, compare node1 with working directory.
770 If node2 is None, compare node1 with working directory.
765 """
771 """
766
772
767 def fcmp(fn, mf):
773 def fcmp(fn, mf):
768 t1 = self.wread(fn)
774 t1 = self.wread(fn)
769 return self.file(fn).cmp(mf.get(fn, nullid), t1)
775 return self.file(fn).cmp(mf.get(fn, nullid), t1)
770
776
771 def mfmatches(node):
777 def mfmatches(node):
772 change = self.changelog.read(node)
778 change = self.changelog.read(node)
773 mf = self.manifest.read(change[0]).copy()
779 mf = self.manifest.read(change[0]).copy()
774 for fn in mf.keys():
780 for fn in mf.keys():
775 if not match(fn):
781 if not match(fn):
776 del mf[fn]
782 del mf[fn]
777 return mf
783 return mf
778
784
779 modified, added, removed, deleted, unknown = [], [], [], [], []
785 modified, added, removed, deleted, unknown = [], [], [], [], []
780 ignored, clean = [], []
786 ignored, clean = [], []
781
787
782 compareworking = False
788 compareworking = False
783 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
789 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
784 compareworking = True
790 compareworking = True
785
791
786 if not compareworking:
792 if not compareworking:
787 # read the manifest from node1 before the manifest from node2,
793 # read the manifest from node1 before the manifest from node2,
788 # so that we'll hit the manifest cache if we're going through
794 # so that we'll hit the manifest cache if we're going through
789 # all the revisions in parent->child order.
795 # all the revisions in parent->child order.
790 mf1 = mfmatches(node1)
796 mf1 = mfmatches(node1)
791
797
792 # are we comparing the working directory?
798 # are we comparing the working directory?
793 if not node2:
799 if not node2:
794 if not wlock:
800 if not wlock:
795 try:
801 try:
796 wlock = self.wlock(wait=0)
802 wlock = self.wlock(wait=0)
797 except lock.LockException:
803 except lock.LockException:
798 wlock = None
804 wlock = None
799 (lookup, modified, added, removed, deleted, unknown,
805 (lookup, modified, added, removed, deleted, unknown,
800 ignored, clean) = self.dirstate.status(files, match,
806 ignored, clean) = self.dirstate.status(files, match,
801 list_ignored, list_clean)
807 list_ignored, list_clean)
802
808
803 # are we comparing working dir against its parent?
809 # are we comparing working dir against its parent?
804 if compareworking:
810 if compareworking:
805 if lookup:
811 if lookup:
806 # do a full compare of any files that might have changed
812 # do a full compare of any files that might have changed
807 mf2 = mfmatches(self.dirstate.parents()[0])
813 mf2 = mfmatches(self.dirstate.parents()[0])
808 for f in lookup:
814 for f in lookup:
809 if fcmp(f, mf2):
815 if fcmp(f, mf2):
810 modified.append(f)
816 modified.append(f)
811 else:
817 else:
812 clean.append(f)
818 clean.append(f)
813 if wlock is not None:
819 if wlock is not None:
814 self.dirstate.update([f], "n")
820 self.dirstate.update([f], "n")
815 else:
821 else:
816 # we are comparing working dir against non-parent
822 # we are comparing working dir against non-parent
817 # generate a pseudo-manifest for the working dir
823 # generate a pseudo-manifest for the working dir
818 # XXX: create it in dirstate.py ?
824 # XXX: create it in dirstate.py ?
819 mf2 = mfmatches(self.dirstate.parents()[0])
825 mf2 = mfmatches(self.dirstate.parents()[0])
820 for f in lookup + modified + added:
826 for f in lookup + modified + added:
821 mf2[f] = ""
827 mf2[f] = ""
822 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
828 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
823 for f in removed:
829 for f in removed:
824 if f in mf2:
830 if f in mf2:
825 del mf2[f]
831 del mf2[f]
826 else:
832 else:
827 # we are comparing two revisions
833 # we are comparing two revisions
828 mf2 = mfmatches(node2)
834 mf2 = mfmatches(node2)
829
835
830 if not compareworking:
836 if not compareworking:
831 # flush lists from dirstate before comparing manifests
837 # flush lists from dirstate before comparing manifests
832 modified, added, clean = [], [], []
838 modified, added, clean = [], [], []
833
839
834 # make sure to sort the files so we talk to the disk in a
840 # make sure to sort the files so we talk to the disk in a
835 # reasonable order
841 # reasonable order
836 mf2keys = mf2.keys()
842 mf2keys = mf2.keys()
837 mf2keys.sort()
843 mf2keys.sort()
838 for fn in mf2keys:
844 for fn in mf2keys:
839 if mf1.has_key(fn):
845 if mf1.has_key(fn):
840 if mf1.flags(fn) != mf2.flags(fn) or \
846 if mf1.flags(fn) != mf2.flags(fn) or \
841 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
847 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
842 modified.append(fn)
848 modified.append(fn)
843 elif list_clean:
849 elif list_clean:
844 clean.append(fn)
850 clean.append(fn)
845 del mf1[fn]
851 del mf1[fn]
846 else:
852 else:
847 added.append(fn)
853 added.append(fn)
848
854
849 removed = mf1.keys()
855 removed = mf1.keys()
850
856
851 # sort and return results:
857 # sort and return results:
852 for l in modified, added, removed, deleted, unknown, ignored, clean:
858 for l in modified, added, removed, deleted, unknown, ignored, clean:
853 l.sort()
859 l.sort()
854 return (modified, added, removed, deleted, unknown, ignored, clean)
860 return (modified, added, removed, deleted, unknown, ignored, clean)
855
861
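# Illustrative sketch (not part of this changeset): callers unpack the 7-tuple
# returned above positionally, e.g.
#
#     (modified, added, removed, deleted,
#      unknown, ignored, clean) = repo.status(list_ignored=True, list_clean=True)
#
# ignored and clean come back empty unless the corresponding list_* flag is set.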
856 def add(self, list, wlock=None):
862 def add(self, list, wlock=None):
857 if not wlock:
863 if not wlock:
858 wlock = self.wlock()
864 wlock = self.wlock()
859 for f in list:
865 for f in list:
860 p = self.wjoin(f)
866 p = self.wjoin(f)
861 if not os.path.exists(p):
867 if not os.path.exists(p):
862 self.ui.warn(_("%s does not exist!\n") % f)
868 self.ui.warn(_("%s does not exist!\n") % f)
863 elif not os.path.isfile(p):
869 elif not os.path.isfile(p):
864 self.ui.warn(_("%s not added: only files supported currently\n")
870 self.ui.warn(_("%s not added: only files supported currently\n")
865 % f)
871 % f)
866 elif self.dirstate.state(f) in 'an':
872 elif self.dirstate.state(f) in 'an':
867 self.ui.warn(_("%s already tracked!\n") % f)
873 self.ui.warn(_("%s already tracked!\n") % f)
868 else:
874 else:
869 self.dirstate.update([f], "a")
875 self.dirstate.update([f], "a")
870
876
871 def forget(self, list, wlock=None):
877 def forget(self, list, wlock=None):
872 if not wlock:
878 if not wlock:
873 wlock = self.wlock()
879 wlock = self.wlock()
874 for f in list:
880 for f in list:
875 if self.dirstate.state(f) not in 'ai':
881 if self.dirstate.state(f) not in 'ai':
876 self.ui.warn(_("%s not added!\n") % f)
882 self.ui.warn(_("%s not added!\n") % f)
877 else:
883 else:
878 self.dirstate.forget([f])
884 self.dirstate.forget([f])
879
885
880 def remove(self, list, unlink=False, wlock=None):
886 def remove(self, list, unlink=False, wlock=None):
881 if unlink:
887 if unlink:
882 for f in list:
888 for f in list:
883 try:
889 try:
884 util.unlink(self.wjoin(f))
890 util.unlink(self.wjoin(f))
885 except OSError, inst:
891 except OSError, inst:
886 if inst.errno != errno.ENOENT:
892 if inst.errno != errno.ENOENT:
887 raise
893 raise
888 if not wlock:
894 if not wlock:
889 wlock = self.wlock()
895 wlock = self.wlock()
890 for f in list:
896 for f in list:
891 p = self.wjoin(f)
897 p = self.wjoin(f)
892 if os.path.exists(p):
898 if os.path.exists(p):
893 self.ui.warn(_("%s still exists!\n") % f)
899 self.ui.warn(_("%s still exists!\n") % f)
894 elif self.dirstate.state(f) == 'a':
900 elif self.dirstate.state(f) == 'a':
895 self.dirstate.forget([f])
901 self.dirstate.forget([f])
896 elif f not in self.dirstate:
902 elif f not in self.dirstate:
897 self.ui.warn(_("%s not tracked!\n") % f)
903 self.ui.warn(_("%s not tracked!\n") % f)
898 else:
904 else:
899 self.dirstate.update([f], "r")
905 self.dirstate.update([f], "r")
900
906
901 def undelete(self, list, wlock=None):
907 def undelete(self, list, wlock=None):
902 p = self.dirstate.parents()[0]
908 p = self.dirstate.parents()[0]
903 mn = self.changelog.read(p)[0]
909 mn = self.changelog.read(p)[0]
904 m = self.manifest.read(mn)
910 m = self.manifest.read(mn)
905 if not wlock:
911 if not wlock:
906 wlock = self.wlock()
912 wlock = self.wlock()
907 for f in list:
913 for f in list:
908 if self.dirstate.state(f) not in "r":
914 if self.dirstate.state(f) not in "r":
909 self.ui.warn("%s not removed!\n" % f)
915 self.ui.warn("%s not removed!\n" % f)
910 else:
916 else:
911 t = self.file(f).read(m[f])
917 t = self.file(f).read(m[f])
912 self.wwrite(f, t)
918 self.wwrite(f, t)
913 util.set_exec(self.wjoin(f), m.execf(f))
919 util.set_exec(self.wjoin(f), m.execf(f))
914 self.dirstate.update([f], "n")
920 self.dirstate.update([f], "n")
915
921
916 def copy(self, source, dest, wlock=None):
922 def copy(self, source, dest, wlock=None):
917 p = self.wjoin(dest)
923 p = self.wjoin(dest)
918 if not os.path.exists(p):
924 if not os.path.exists(p):
919 self.ui.warn(_("%s does not exist!\n") % dest)
925 self.ui.warn(_("%s does not exist!\n") % dest)
920 elif not os.path.isfile(p):
926 elif not os.path.isfile(p):
921 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
927 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
922 else:
928 else:
923 if not wlock:
929 if not wlock:
924 wlock = self.wlock()
930 wlock = self.wlock()
925 if self.dirstate.state(dest) == '?':
931 if self.dirstate.state(dest) == '?':
926 self.dirstate.update([dest], "a")
932 self.dirstate.update([dest], "a")
927 self.dirstate.copy(source, dest)
933 self.dirstate.copy(source, dest)
928
934
929 def heads(self, start=None):
935 def heads(self, start=None):
930 heads = self.changelog.heads(start)
936 heads = self.changelog.heads(start)
931 # sort the output in rev descending order
937 # sort the output in rev descending order
932 heads = [(-self.changelog.rev(h), h) for h in heads]
938 heads = [(-self.changelog.rev(h), h) for h in heads]
933 heads.sort()
939 heads.sort()
934 return [n for (r, n) in heads]
940 return [n for (r, n) in heads]
935
941
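# Illustrative sketch (not part of this changeset): the (-rev, node) tuples in
# heads() above are a decorate-sort-undecorate idiom; on a later Python the
# same ordering of the raw node list could be written as
#
#     heads.sort(key=self.changelog.rev, reverse=True)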
936 # branchlookup returns a dict giving a list of branches for
942 # branchlookup returns a dict giving a list of branches for
937 # each head. A branch is defined as the tag of a node or
943 # each head. A branch is defined as the tag of a node or
938 # the branch of the node's parents. If a node has multiple
944 # the branch of the node's parents. If a node has multiple
939 # branch tags, tags are eliminated if they are visible from other
945 # branch tags, tags are eliminated if they are visible from other
940 # branch tags.
946 # branch tags.
941 #
947 #
942 # So, for this graph: a->b->c->d->e
948 # So, for this graph: a->b->c->d->e
943 #                      \         /
949 #                      \         /
944 #                       aa -----/
950 #                       aa -----/
945 # a has tag 2.6.12
951 # a has tag 2.6.12
946 # d has tag 2.6.13
952 # d has tag 2.6.13
947 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
953 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
948 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
954 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
949 # from the list.
955 # from the list.
950 #
956 #
951 # It is possible that more than one head will have the same branch tag.
957 # It is possible that more than one head will have the same branch tag.
952 # callers need to check the result for multiple heads under the same
958 # callers need to check the result for multiple heads under the same
953 # branch tag if that is a problem for them (ie checkout of a specific
959 # branch tag if that is a problem for them (ie checkout of a specific
954 # branch).
960 # branch).
955 #
961 #
956 # passing in a specific branch will limit the depth of the search
962 # passing in a specific branch will limit the depth of the search
957 # through the parents. It won't limit the branches returned in the
963 # through the parents. It won't limit the branches returned in the
958 # result though.
964 # result though.
959 def branchlookup(self, heads=None, branch=None):
965 def branchlookup(self, heads=None, branch=None):
960 if not heads:
966 if not heads:
961 heads = self.heads()
967 heads = self.heads()
962 headt = [ h for h in heads ]
968 headt = [ h for h in heads ]
963 chlog = self.changelog
969 chlog = self.changelog
964 branches = {}
970 branches = {}
965 merges = []
971 merges = []
966 seenmerge = {}
972 seenmerge = {}
967
973
968 # traverse the tree once for each head, recording in the branches
974 # traverse the tree once for each head, recording in the branches
969 # dict which tags are visible from this head. The branches
975 # dict which tags are visible from this head. The branches
970 # dict also records which tags are visible from each tag
976 # dict also records which tags are visible from each tag
971 # while we traverse.
977 # while we traverse.
972 while headt or merges:
978 while headt or merges:
973 if merges:
979 if merges:
974 n, found = merges.pop()
980 n, found = merges.pop()
975 visit = [n]
981 visit = [n]
976 else:
982 else:
977 h = headt.pop()
983 h = headt.pop()
978 visit = [h]
984 visit = [h]
979 found = [h]
985 found = [h]
980 seen = {}
986 seen = {}
981 while visit:
987 while visit:
982 n = visit.pop()
988 n = visit.pop()
983 if n in seen:
989 if n in seen:
984 continue
990 continue
985 pp = chlog.parents(n)
991 pp = chlog.parents(n)
986 tags = self.nodetags(n)
992 tags = self.nodetags(n)
987 if tags:
993 if tags:
988 for x in tags:
994 for x in tags:
989 if x == 'tip':
995 if x == 'tip':
990 continue
996 continue
991 for f in found:
997 for f in found:
992 branches.setdefault(f, {})[n] = 1
998 branches.setdefault(f, {})[n] = 1
993 branches.setdefault(n, {})[n] = 1
999 branches.setdefault(n, {})[n] = 1
994 break
1000 break
995 if n not in found:
1001 if n not in found:
996 found.append(n)
1002 found.append(n)
997 if branch in tags:
1003 if branch in tags:
998 continue
1004 continue
999 seen[n] = 1
1005 seen[n] = 1
1000 if pp[1] != nullid and n not in seenmerge:
1006 if pp[1] != nullid and n not in seenmerge:
1001 merges.append((pp[1], [x for x in found]))
1007 merges.append((pp[1], [x for x in found]))
1002 seenmerge[n] = 1
1008 seenmerge[n] = 1
1003 if pp[0] != nullid:
1009 if pp[0] != nullid:
1004 visit.append(pp[0])
1010 visit.append(pp[0])
1005 # traverse the branches dict, eliminating branch tags from each
1011 # traverse the branches dict, eliminating branch tags from each
1006 # head that are visible from another branch tag for that head.
1012 # head that are visible from another branch tag for that head.
1007 out = {}
1013 out = {}
1008 viscache = {}
1014 viscache = {}
1009 for h in heads:
1015 for h in heads:
1010 def visible(node):
1016 def visible(node):
1011 if node in viscache:
1017 if node in viscache:
1012 return viscache[node]
1018 return viscache[node]
1013 ret = {}
1019 ret = {}
1014 visit = [node]
1020 visit = [node]
1015 while visit:
1021 while visit:
1016 x = visit.pop()
1022 x = visit.pop()
1017 if x in viscache:
1023 if x in viscache:
1018 ret.update(viscache[x])
1024 ret.update(viscache[x])
1019 elif x not in ret:
1025 elif x not in ret:
1020 ret[x] = 1
1026 ret[x] = 1
1021 if x in branches:
1027 if x in branches:
1022 visit[len(visit):] = branches[x].keys()
1028 visit[len(visit):] = branches[x].keys()
1023 viscache[node] = ret
1029 viscache[node] = ret
1024 return ret
1030 return ret
1025 if h not in branches:
1031 if h not in branches:
1026 continue
1032 continue
1027 # O(n^2), but somewhat limited. This only searches the
1033 # O(n^2), but somewhat limited. This only searches the
1028 # tags visible from a specific head, not all the tags in the
1034 # tags visible from a specific head, not all the tags in the
1029 # whole repo.
1035 # whole repo.
1030 for b in branches[h]:
1036 for b in branches[h]:
1031 vis = False
1037 vis = False
1032 for bb in branches[h].keys():
1038 for bb in branches[h].keys():
1033 if b != bb:
1039 if b != bb:
1034 if b in visible(bb):
1040 if b in visible(bb):
1035 vis = True
1041 vis = True
1036 break
1042 break
1037 if not vis:
1043 if not vis:
1038 l = out.setdefault(h, [])
1044 l = out.setdefault(h, [])
1039 l[len(l):] = self.nodetags(b)
1045 l[len(l):] = self.nodetags(b)
1040 return out
1046 return out
1041
1047
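# Illustrative sketch (not part of this changeset): with the example graph in
# the comment above (a tagged 2.6.12, d tagged 2.6.13, aa branching off a and
# merging back in), the head e keeps only the newer tag:
#
#     repo.branchlookup()    # -> roughly {e: ['2.6.13']}
#
# because 2.6.12 is visible from 2.6.13 and is therefore pruned by visible().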
1042 def branches(self, nodes):
1048 def branches(self, nodes):
1043 if not nodes:
1049 if not nodes:
1044 nodes = [self.changelog.tip()]
1050 nodes = [self.changelog.tip()]
1045 b = []
1051 b = []
1046 for n in nodes:
1052 for n in nodes:
1047 t = n
1053 t = n
1048 while 1:
1054 while 1:
1049 p = self.changelog.parents(n)
1055 p = self.changelog.parents(n)
1050 if p[1] != nullid or p[0] == nullid:
1056 if p[1] != nullid or p[0] == nullid:
1051 b.append((t, n, p[0], p[1]))
1057 b.append((t, n, p[0], p[1]))
1052 break
1058 break
1053 n = p[0]
1059 n = p[0]
1054 return b
1060 return b
1055
1061
1056 def between(self, pairs):
1062 def between(self, pairs):
1057 r = []
1063 r = []
1058
1064
1059 for top, bottom in pairs:
1065 for top, bottom in pairs:
1060 n, l, i = top, [], 0
1066 n, l, i = top, [], 0
1061 f = 1
1067 f = 1
1062
1068
1063 while n != bottom:
1069 while n != bottom:
1064 p = self.changelog.parents(n)[0]
1070 p = self.changelog.parents(n)[0]
1065 if i == f:
1071 if i == f:
1066 l.append(n)
1072 l.append(n)
1067 f = f * 2
1073 f = f * 2
1068 n = p
1074 n = p
1069 i += 1
1075 i += 1
1070
1076
1071 r.append(l)
1077 r.append(l)
1072
1078
1073 return r
1079 return r
1074
1080
1075 def findincoming(self, remote, base=None, heads=None, force=False):
1081 def findincoming(self, remote, base=None, heads=None, force=False):
1076 """Return list of roots of the subsets of missing nodes from remote
1082 """Return list of roots of the subsets of missing nodes from remote
1077
1083
1078 If base dict is specified, assume that these nodes and their parents
1084 If base dict is specified, assume that these nodes and their parents
1079 exist on the remote side and that no child of a node of base exists
1085 exist on the remote side and that no child of a node of base exists
1080 in both remote and self.
1086 in both remote and self.
1081 Furthermore, base will be updated to include the nodes that exist in
1087 Furthermore, base will be updated to include the nodes that exist in
1082 both self and remote but none of whose children do.
1088 both self and remote but none of whose children do.
1083 If a list of heads is specified, return only nodes which are heads
1089 If a list of heads is specified, return only nodes which are heads
1084 or ancestors of these heads.
1090 or ancestors of these heads.
1085
1091
1086 All the ancestors of base are in self and in remote.
1092 All the ancestors of base are in self and in remote.
1087 All the descendants of the list returned are missing in self.
1093 All the descendants of the list returned are missing in self.
1088 (and so we know that the rest of the nodes are missing in remote, see
1094 (and so we know that the rest of the nodes are missing in remote, see
1089 outgoing)
1095 outgoing)
1090 """
1096 """
1091 m = self.changelog.nodemap
1097 m = self.changelog.nodemap
1092 search = []
1098 search = []
1093 fetch = {}
1099 fetch = {}
1094 seen = {}
1100 seen = {}
1095 seenbranch = {}
1101 seenbranch = {}
1096 if base == None:
1102 if base == None:
1097 base = {}
1103 base = {}
1098
1104
1099 if not heads:
1105 if not heads:
1100 heads = remote.heads()
1106 heads = remote.heads()
1101
1107
1102 if self.changelog.tip() == nullid:
1108 if self.changelog.tip() == nullid:
1103 base[nullid] = 1
1109 base[nullid] = 1
1104 if heads != [nullid]:
1110 if heads != [nullid]:
1105 return [nullid]
1111 return [nullid]
1106 return []
1112 return []
1107
1113
1108 # assume we're closer to the tip than the root
1114 # assume we're closer to the tip than the root
1109 # and start by examining the heads
1115 # and start by examining the heads
1110 self.ui.status(_("searching for changes\n"))
1116 self.ui.status(_("searching for changes\n"))
1111
1117
1112 unknown = []
1118 unknown = []
1113 for h in heads:
1119 for h in heads:
1114 if h not in m:
1120 if h not in m:
1115 unknown.append(h)
1121 unknown.append(h)
1116 else:
1122 else:
1117 base[h] = 1
1123 base[h] = 1
1118
1124
1119 if not unknown:
1125 if not unknown:
1120 return []
1126 return []
1121
1127
1122 req = dict.fromkeys(unknown)
1128 req = dict.fromkeys(unknown)
1123 reqcnt = 0
1129 reqcnt = 0
1124
1130
1125 # search through remote branches
1131 # search through remote branches
1126 # a 'branch' here is a linear segment of history, with four parts:
1132 # a 'branch' here is a linear segment of history, with four parts:
1127 # head, root, first parent, second parent
1133 # head, root, first parent, second parent
1128 # (a branch always has two parents (or none) by definition)
1134 # (a branch always has two parents (or none) by definition)
1129 unknown = remote.branches(unknown)
1135 unknown = remote.branches(unknown)
1130 while unknown:
1136 while unknown:
1131 r = []
1137 r = []
1132 while unknown:
1138 while unknown:
1133 n = unknown.pop(0)
1139 n = unknown.pop(0)
1134 if n[0] in seen:
1140 if n[0] in seen:
1135 continue
1141 continue
1136
1142
1137 self.ui.debug(_("examining %s:%s\n")
1143 self.ui.debug(_("examining %s:%s\n")
1138 % (short(n[0]), short(n[1])))
1144 % (short(n[0]), short(n[1])))
1139 if n[0] == nullid: # found the end of the branch
1145 if n[0] == nullid: # found the end of the branch
1140 pass
1146 pass
1141 elif n in seenbranch:
1147 elif n in seenbranch:
1142 self.ui.debug(_("branch already found\n"))
1148 self.ui.debug(_("branch already found\n"))
1143 continue
1149 continue
1144 elif n[1] and n[1] in m: # do we know the base?
1150 elif n[1] and n[1] in m: # do we know the base?
1145 self.ui.debug(_("found incomplete branch %s:%s\n")
1151 self.ui.debug(_("found incomplete branch %s:%s\n")
1146 % (short(n[0]), short(n[1])))
1152 % (short(n[0]), short(n[1])))
1147 search.append(n) # schedule branch range for scanning
1153 search.append(n) # schedule branch range for scanning
1148 seenbranch[n] = 1
1154 seenbranch[n] = 1
1149 else:
1155 else:
1150 if n[1] not in seen and n[1] not in fetch:
1156 if n[1] not in seen and n[1] not in fetch:
1151 if n[2] in m and n[3] in m:
1157 if n[2] in m and n[3] in m:
1152 self.ui.debug(_("found new changeset %s\n") %
1158 self.ui.debug(_("found new changeset %s\n") %
1153 short(n[1]))
1159 short(n[1]))
1154 fetch[n[1]] = 1 # earliest unknown
1160 fetch[n[1]] = 1 # earliest unknown
1155 for p in n[2:4]:
1161 for p in n[2:4]:
1156 if p in m:
1162 if p in m:
1157 base[p] = 1 # latest known
1163 base[p] = 1 # latest known
1158
1164
1159 for p in n[2:4]:
1165 for p in n[2:4]:
1160 if p not in req and p not in m:
1166 if p not in req and p not in m:
1161 r.append(p)
1167 r.append(p)
1162 req[p] = 1
1168 req[p] = 1
1163 seen[n[0]] = 1
1169 seen[n[0]] = 1
1164
1170
1165 if r:
1171 if r:
1166 reqcnt += 1
1172 reqcnt += 1
1167 self.ui.debug(_("request %d: %s\n") %
1173 self.ui.debug(_("request %d: %s\n") %
1168 (reqcnt, " ".join(map(short, r))))
1174 (reqcnt, " ".join(map(short, r))))
1169 for p in xrange(0, len(r), 10):
1175 for p in xrange(0, len(r), 10):
1170 for b in remote.branches(r[p:p+10]):
1176 for b in remote.branches(r[p:p+10]):
1171 self.ui.debug(_("received %s:%s\n") %
1177 self.ui.debug(_("received %s:%s\n") %
1172 (short(b[0]), short(b[1])))
1178 (short(b[0]), short(b[1])))
1173 unknown.append(b)
1179 unknown.append(b)
1174
1180
1175 # do binary search on the branches we found
1181 # do binary search on the branches we found
1176 while search:
1182 while search:
1177 n = search.pop(0)
1183 n = search.pop(0)
1178 reqcnt += 1
1184 reqcnt += 1
1179 l = remote.between([(n[0], n[1])])[0]
1185 l = remote.between([(n[0], n[1])])[0]
1180 l.append(n[1])
1186 l.append(n[1])
1181 p = n[0]
1187 p = n[0]
1182 f = 1
1188 f = 1
1183 for i in l:
1189 for i in l:
1184 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1190 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1185 if i in m:
1191 if i in m:
1186 if f <= 2:
1192 if f <= 2:
1187 self.ui.debug(_("found new branch changeset %s\n") %
1193 self.ui.debug(_("found new branch changeset %s\n") %
1188 short(p))
1194 short(p))
1189 fetch[p] = 1
1195 fetch[p] = 1
1190 base[i] = 1
1196 base[i] = 1
1191 else:
1197 else:
1192 self.ui.debug(_("narrowed branch search to %s:%s\n")
1198 self.ui.debug(_("narrowed branch search to %s:%s\n")
1193 % (short(p), short(i)))
1199 % (short(p), short(i)))
1194 search.append((p, i))
1200 search.append((p, i))
1195 break
1201 break
1196 p, f = i, f * 2
1202 p, f = i, f * 2
1197
1203
1198 # sanity check our fetch list
1204 # sanity check our fetch list
1199 for f in fetch.keys():
1205 for f in fetch.keys():
1200 if f in m:
1206 if f in m:
1201 raise repo.RepoError(_("already have changeset ") + short(f))
1207 raise repo.RepoError(_("already have changeset ") + short(f))
1202
1208
1203 if base.keys() == [nullid]:
1209 if base.keys() == [nullid]:
1204 if force:
1210 if force:
1205 self.ui.warn(_("warning: repository is unrelated\n"))
1211 self.ui.warn(_("warning: repository is unrelated\n"))
1206 else:
1212 else:
1207 raise util.Abort(_("repository is unrelated"))
1213 raise util.Abort(_("repository is unrelated"))
1208
1214
1209 self.ui.debug(_("found new changesets starting at ") +
1215 self.ui.debug(_("found new changesets starting at ") +
1210 " ".join([short(f) for f in fetch]) + "\n")
1216 " ".join([short(f) for f in fetch]) + "\n")
1211
1217
1212 self.ui.debug(_("%d total queries\n") % reqcnt)
1218 self.ui.debug(_("%d total queries\n") % reqcnt)
1213
1219
1214 return fetch.keys()
1220 return fetch.keys()
1215
1221
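# Illustrative sketch (not part of this changeset): pull() below drives this as
#
#     fetch = self.findincoming(remote, force=force)
#
# where an empty list means nothing to pull, [nullid] means the local repo is
# empty and everything must be requested, and a caller-supplied base dict
# (as in prepush()) comes back filled with the common nodes.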
1216 def findoutgoing(self, remote, base=None, heads=None, force=False):
1222 def findoutgoing(self, remote, base=None, heads=None, force=False):
1217 """Return list of nodes that are roots of subsets not in remote
1223 """Return list of nodes that are roots of subsets not in remote
1218
1224
1219 If base dict is specified, assume that these nodes and their parents
1225 If base dict is specified, assume that these nodes and their parents
1220 exist on the remote side.
1226 exist on the remote side.
1221 If a list of heads is specified, return only nodes which are heads
1227 If a list of heads is specified, return only nodes which are heads
1222 or ancestors of these heads, and return a second element which
1228 or ancestors of these heads, and return a second element which
1223 contains all remote heads which get new children.
1229 contains all remote heads which get new children.
1224 """
1230 """
1225 if base == None:
1231 if base == None:
1226 base = {}
1232 base = {}
1227 self.findincoming(remote, base, heads, force=force)
1233 self.findincoming(remote, base, heads, force=force)
1228
1234
1229 self.ui.debug(_("common changesets up to ")
1235 self.ui.debug(_("common changesets up to ")
1230 + " ".join(map(short, base.keys())) + "\n")
1236 + " ".join(map(short, base.keys())) + "\n")
1231
1237
1232 remain = dict.fromkeys(self.changelog.nodemap)
1238 remain = dict.fromkeys(self.changelog.nodemap)
1233
1239
1234 # prune everything remote has from the tree
1240 # prune everything remote has from the tree
1235 del remain[nullid]
1241 del remain[nullid]
1236 remove = base.keys()
1242 remove = base.keys()
1237 while remove:
1243 while remove:
1238 n = remove.pop(0)
1244 n = remove.pop(0)
1239 if n in remain:
1245 if n in remain:
1240 del remain[n]
1246 del remain[n]
1241 for p in self.changelog.parents(n):
1247 for p in self.changelog.parents(n):
1242 remove.append(p)
1248 remove.append(p)
1243
1249
1244 # find every node whose parents have been pruned
1250 # find every node whose parents have been pruned
1245 subset = []
1251 subset = []
1246 # find every remote head that will get new children
1252 # find every remote head that will get new children
1247 updated_heads = {}
1253 updated_heads = {}
1248 for n in remain:
1254 for n in remain:
1249 p1, p2 = self.changelog.parents(n)
1255 p1, p2 = self.changelog.parents(n)
1250 if p1 not in remain and p2 not in remain:
1256 if p1 not in remain and p2 not in remain:
1251 subset.append(n)
1257 subset.append(n)
1252 if heads:
1258 if heads:
1253 if p1 in heads:
1259 if p1 in heads:
1254 updated_heads[p1] = True
1260 updated_heads[p1] = True
1255 if p2 in heads:
1261 if p2 in heads:
1256 updated_heads[p2] = True
1262 updated_heads[p2] = True
1257
1263
1258 # this is the set of all roots we have to push
1264 # this is the set of all roots we have to push
1259 if heads:
1265 if heads:
1260 return subset, updated_heads.keys()
1266 return subset, updated_heads.keys()
1261 else:
1267 else:
1262 return subset
1268 return subset
1263
1269
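# Illustrative sketch (not part of this changeset): note the asymmetric return
# value -- a bare list of roots without heads, a (roots, updated_heads) pair
# with them, which is exactly how prepush() below consumes it:
#
#     update, updated_heads = self.findoutgoing(remote, base, remote_heads)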
    def pull(self, remote, heads=None, force=False, lock=None):
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            if mylock:
                lock.release()

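    # Note on pull(): a fetch list of [nullid] means the two repositories
    # share no history, so everything is requested.  Pulling only selected
    # heads additionally requires the remote to advertise the
    # 'changegroupsubset' capability.  Sketch (somenode is a placeholder):
    #
    #   repo.pull(remote)                    # pull everything new
    #   repo.pull(remote, heads=[somenode])  # pull up to specific heads
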
    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

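    # The capability probe above picks the transport: servers announcing
    # 'unbundle' receive a complete bundle and apply it under their own
    # lock (push_unbundle), while older servers are locked by the client
    # and fed the changegroup directly (push_addchangegroup).  Both paths
    # share prepush() for computing the outgoing set and checking heads.
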
    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

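    # New-head detection in prepush(), in short: start from the heads that
    # would be pushed, then add every remote head that is either unknown
    # locally or has no pushed head descending from it.  If the combined
    # list ends up longer than the remote's current head list, the push
    # would create extra remote heads and is refused unless forced.  For
    # example, with one remote head A and a pushed head B that does not
    # descend from A, newheads becomes [B, A] and the push aborts.
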
    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push. once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force:
                remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

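    # Race detection for unbundle pushes: the remote heads observed by
    # prepush() travel along with the bundle, and the server rejects the
    # push if its heads have changed in the meantime (someone else pushed
    # first).  Sending the literal ['force'] instead asks the server to
    # skip that check, which is what forcing a push does on this path.
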
    def changegroupinfo(self, nodes):
        self.ui.note(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's
        # remove all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Look up the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to look up the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

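    # Stream layout produced by gengroup() above: one delta group for the
    # changelog, one for the manifest, then for each changed file a chunk
    # carrying the file name followed by that file's delta group, and
    # finally a single empty (close) chunk marking the end of the stream.
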
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

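    # changegroup() emits the same stream layout as changegroupsubset(),
    # but skips all the pruning book-keeping: any revision whose linkrev
    # is not in 'revset' is simply assumed to be known to the recipient.
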
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.
        returns number of heads modified or added + 1."""

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not
        # see an inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.sopener,
                                            self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.sopener,
                                             self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1


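    # Hook sequence in addchangegroup(): 'prechangegroup' before anything
    # is read, 'pretxnchangegroup' on the first new changeset while the
    # transaction is still open (a failing hook aborts the whole group),
    # then 'changegroup' once and 'incoming' once per new changeset after
    # the transaction is closed.  The return value is newheads - oldheads
    # + 1, so 1 means "changesets added, head count unchanged" and 0 means
    # the incoming source was empty.
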
    def stream_in(self, remote):
        fp = remote.stream_out()
        l = fp.readline()
        try:
            resp = int(l)
        except ValueError:
            raise util.UnexpectedOutput(
                _('Unexpected response from remote server:'), l)
        if resp == 1:
            raise util.Abort(_('operation forbidden by server'))
        elif resp == 2:
            raise util.Abort(_('locking the remote repository failed'))
        elif resp != 0:
            raise util.Abort(_('the server sent an unknown error code'))
        self.ui.status(_('streaming all changes\n'))
        l = fp.readline()
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise util.UnexpectedOutput(
                _('Unexpected response from remote server:'), l)
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            # XXX doesn't support '\n' or '\r' in filenames
            l = fp.readline()
            try:
                name, size = l.split('\0', 1)
                size = int(size)
            except (ValueError, TypeError):
                raise util.UnexpectedOutput(
                    _('Unexpected response from remote server:'), l)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.sopener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1

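    # Wire format consumed by stream_in(), as parsed above: a numeric
    # status line (0 = ok, 1 = operation forbidden, 2 = could not lock),
    # a line with "<file count> <byte count>", then for each file a header
    # line of the form "<store path>\0<size>" followed by exactly <size>
    # bytes of raw revlog data copied straight into the local store.
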
    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)

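    # clone() takes the uncompressed streaming path only when the caller
    # asked for it, no specific heads were requested, and the remote
    # advertises the 'stream' capability; in every other case it falls
    # back to a regular pull of the requested heads.
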
# used to avoid circular references so destructors work
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True