Handle transcoding of branch names
Matt Mackall
r3773:b1eeaeb9 default
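The only functional change in this hunk is in branches(): instead of padding each branch name with the byte-oriented "%-30s" format, the name is truncated and padded with util.localsub() and util.locallen(). A minimal sketch of why that matters, assuming (this diff does not show it) that those helpers truncate and measure in characters of the local encoding rather than in bytes:

    # -*- coding: utf-8 -*-
    # Illustrative sketch only, not part of the changeset. "name" is a
    # hypothetical UTF-8 branch name; util.localsub/util.locallen are
    # assumed to truncate/measure in characters of the local encoding.
    name = '\xc3\xa9tiquette'              # accented name: 10 bytes, 9 characters

    padded_by_bytes = "%-30s" % name       # old code: pads to 30 bytes -> 29 display columns
    padded_by_chars = name + " " * (30 - len(name.decode('utf-8')))  # new code: 30 columns

    print len(padded_by_bytes), len(padded_by_chars)   # 30 vs 31 bytes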
@@ -1,3060 +1,3062 @@ commands.py
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import demandload
from node import *
from i18n import gettext as _
demandload(globals(), "os re sys signal imp urllib pdb shlex")
demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
demandload(globals(), "difflib patch time")
demandload(globals(), "traceback errno version atexit")
demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            n = _lookup(repo, hex(op1))
            hg.merge(repo, n)
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

def branch(ui, repo, label=None):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.
    """

    if label is not None:
        repo.opener("branch", "w").write(label)
    else:
        b = repo.workingctx().branch()
        if b:
            ui.write("%s\n" % b)

def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
    l.sort()
    for r, n, t in l:
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
-            ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
+            t = util.localsub(t, 30)
+            t += " " * (30 - util.locallen(t))
+            ui.write("%s %s:%s\n" % (t, -r, hexfunc(n)))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")

def cat(ui, repo, file1, *pats, **opts):
    """output the latest or given revisions of files

    Print the specified files as they were at the given revision.
    If no revision is given then working dir parent is used, or tip
    if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
        fp.write(ctx.filectx(abs).data())

def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    setremoteconfig(ui, opts)
    hg.clone(ui, ui.expandpath(source), dest,
             pull=opts['pull'],
             stream=opts['uncompressed'],
             rev=opts['rev'],
             update=not opts['noupdate'])

def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        for f in fns:
            if f not in modified + added + removed:
                if f in unknown:
                    raise util.Abort(_("file %s not tracked!") % f)
                else:
                    raise util.Abort(_("file %s not found!") % f)
    else:
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))

def docopy(ui, repo, pats, opts, wlock):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    errors = 0
    copied = []
    targets = {}

    # abs: hgsep
    # rel: ossep
    # return: hgsep
    def okaytocopy(abs, rel, exact):
        reasons = {'?': _('is not managed'),
                   'a': _('has been marked for add'),
                   'r': _('has been marked for remove')}
        state = repo.dirstate.state(abs)
        reason = reasons.get(state)
        if reason:
            if state == 'a':
                origsrc = repo.dirstate.copied(abs)
                if origsrc is not None:
                    return origsrc
            if exact:
                ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
        else:
            return abs

    # origsrc: hgsep
    # abssrc: hgsep
    # relsrc: ossep
    # target: ossep
    def copy(origsrc, abssrc, relsrc, target, exact):
        abstarget = util.canonpath(repo.root, cwd, target)
        reltarget = util.pathto(cwd, abstarget)
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, util.localpath(abssrc),
                     util.localpath(prevsrc)))
            return
        if (not opts['after'] and os.path.exists(reltarget) or
            opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return
            if not opts['after'] and not opts.get('dry_run'):
                os.unlink(reltarget)
        if opts['after']:
            if not os.path.exists(reltarget):
                return
        else:
            targetdir = os.path.dirname(reltarget) or '.'
            if not os.path.isdir(targetdir) and not opts.get('dry_run'):
                os.makedirs(targetdir)
        try:
            restore = repo.dirstate.state(abstarget) == 'r'
            if restore and not opts.get('dry_run'):
                repo.undelete([abstarget], wlock)
            try:
                if not opts.get('dry_run'):
                    util.copyfile(relsrc, reltarget)
                restore = False
            finally:
                if restore:
                    repo.remove([abstarget], wlock)
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                ui.warn(_('%s: deleted in working copy\n') % relsrc)
            else:
                ui.warn(_('%s: cannot copy - %s\n') %
                        (relsrc, inst.strerror))
            errors += 1
            return
        if ui.verbose or not exact:
            ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
        targets[abstarget] = abssrc
        if abstarget != origsrc and not opts.get('dry_run'):
            repo.copy(origsrc, abstarget, wlock)
        copied.append((abssrc, relsrc, exact))

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                                 os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = list(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest)
    if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
        raise util.Abort(_('with multiple sources, destination must be an '
                           'existing directory'))
    if opts['after']:
        tfn = targetpathafterfn
    else:
        tfn = targetpathfn
    copylist = []
    for pat in pats:
        srcs = []
        for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
            origsrc = okaytocopy(abssrc, relsrc, exact)
            if origsrc:
                srcs.append((origsrc, abssrc, relsrc, exact))
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    for targetpath, srcs in copylist:
        for origsrc, abssrc, relsrc, exact in srcs:
            copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)

    if errors:
        ui.warn(_('(consider using --after)\n'))
    return errors, copied

def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    return errs

def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
    a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(a), hex(a)))

def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = findcmd(ui, cmd)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    clist = findpossible(ui, cmd).keys()
    clist.sort()
    ui.write("%s\n" % "\n".join(clist))

def debugrebuildstate(ui, repo, rev=None):
    """rebuild the dirstate as it would look like for the given revision"""
    if not rev:
        rev = repo.changelog.tip()
    else:
        rev = repo.lookup(rev)
    change = repo.changelog.read(rev)
    n = change[0]
    files = repo.manifest.read(n)
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)

def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    m1n = repo.changelog.read(parent1)[0]
    m2n = repo.changelog.read(parent2)[0]
    m1 = repo.manifest.read(m1n)
    m2 = repo.manifest.read(m2n)
    errors = 0
    for f in dc:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)

def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.write(value, '\n')
        else:
            ui.write('%s=%s\n' % (sectname, value))

def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    if not rev2:
        rev2 = hex(nullid)

    repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))

def debugstate(ui, repo):
    """show the contents of the current dirstate"""
    repo.dirstate.read()
    dc = repo.dirstate.map
    keys = dc.keys()
    keys.sort()
    for file_ in keys:
        ui.write("%c %3o %10d %s %s\n"
                 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
                    time.strftime("%x %X",
                                  time.localtime(dc[file_][3])), file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

def debugdata(ui, file_, rev):
    """dump the contents of an data file revision"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False),
                      file_[:-2] + ".i", file_, 0)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)

def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))

def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
    ui.write("digraph G {\n")
    for i in xrange(r.count()):
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             ctx.node()):
        m = ctx.filectx(abs).renamed()
        if m:
            ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(abs) for (src, abs, rel, exact) in items]),
        max([len(rel) for (src, abs, rel, exact) in items]))
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = cmdutil.revpair(repo, opts['rev'])

    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)

    patch.diff(repo, node1, node2, fns, match=matchfn,
               opts=patch.diffopts(ui, opts))

def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent and commit comment.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
878 ui.note(_('exporting patches:\n'))
877 else:
879 else:
878 ui.note(_('exporting patch:\n'))
880 ui.note(_('exporting patch:\n'))
879 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
881 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
880 switch_parent=opts['switch_parent'],
882 switch_parent=opts['switch_parent'],
881 opts=patch.diffopts(ui, opts))
883 opts=patch.diffopts(ui, opts))
882
884
883 def grep(ui, repo, pattern, *pats, **opts):
885 def grep(ui, repo, pattern, *pats, **opts):
884 """search for a pattern in specified files and revisions
886 """search for a pattern in specified files and revisions
885
887
886 Search revisions of files for a regular expression.
888 Search revisions of files for a regular expression.
887
889
888 This command behaves differently than Unix grep. It only accepts
890 This command behaves differently than Unix grep. It only accepts
889 Python/Perl regexps. It searches repository history, not the
891 Python/Perl regexps. It searches repository history, not the
890 working directory. It always prints the revision number in which
892 working directory. It always prints the revision number in which
891 a match appears.
893 a match appears.
892
894
893 By default, grep only prints output for the first revision of a
895 By default, grep only prints output for the first revision of a
894 file in which it finds a match. To get it to print every revision
896 file in which it finds a match. To get it to print every revision
895 that contains a change in match status ("-" for a match that
897 that contains a change in match status ("-" for a match that
896 becomes a non-match, or "+" for a non-match that becomes a match),
898 becomes a non-match, or "+" for a non-match that becomes a match),
897 use the --all flag.
899 use the --all flag.
898 """
900 """
899 reflags = 0
901 reflags = 0
900 if opts['ignore_case']:
902 if opts['ignore_case']:
901 reflags |= re.I
903 reflags |= re.I
902 regexp = re.compile(pattern, reflags)
904 regexp = re.compile(pattern, reflags)
903 sep, eol = ':', '\n'
905 sep, eol = ':', '\n'
904 if opts['print0']:
906 if opts['print0']:
905 sep = eol = '\0'
907 sep = eol = '\0'
906
908
907 fcache = {}
909 fcache = {}
908 def getfile(fn):
910 def getfile(fn):
909 if fn not in fcache:
911 if fn not in fcache:
910 fcache[fn] = repo.file(fn)
912 fcache[fn] = repo.file(fn)
911 return fcache[fn]
913 return fcache[fn]
912
914
913 def matchlines(body):
915 def matchlines(body):
914 begin = 0
916 begin = 0
915 linenum = 0
917 linenum = 0
916 while True:
918 while True:
917 match = regexp.search(body, begin)
919 match = regexp.search(body, begin)
918 if not match:
920 if not match:
919 break
921 break
920 mstart, mend = match.span()
922 mstart, mend = match.span()
921 linenum += body.count('\n', begin, mstart) + 1
923 linenum += body.count('\n', begin, mstart) + 1
922 lstart = body.rfind('\n', begin, mstart) + 1 or begin
924 lstart = body.rfind('\n', begin, mstart) + 1 or begin
923 lend = body.find('\n', mend)
925 lend = body.find('\n', mend)
924 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
926 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
925 begin = lend + 1
927 begin = lend + 1
926
928
927 class linestate(object):
929 class linestate(object):
928 def __init__(self, line, linenum, colstart, colend):
930 def __init__(self, line, linenum, colstart, colend):
929 self.line = line
931 self.line = line
930 self.linenum = linenum
932 self.linenum = linenum
931 self.colstart = colstart
933 self.colstart = colstart
932 self.colend = colend
934 self.colend = colend
933
935
934 def __eq__(self, other):
936 def __eq__(self, other):
935 return self.line == other.line
937 return self.line == other.line
936
938
937 matches = {}
939 matches = {}
938 copies = {}
940 copies = {}
939 def grepbody(fn, rev, body):
941 def grepbody(fn, rev, body):
940 matches[rev].setdefault(fn, [])
942 matches[rev].setdefault(fn, [])
941 m = matches[rev][fn]
943 m = matches[rev][fn]
942 for lnum, cstart, cend, line in matchlines(body):
944 for lnum, cstart, cend, line in matchlines(body):
943 s = linestate(line, lnum, cstart, cend)
945 s = linestate(line, lnum, cstart, cend)
944 m.append(s)
946 m.append(s)
945
947
946 def difflinestates(a, b):
948 def difflinestates(a, b):
947 sm = difflib.SequenceMatcher(None, a, b)
949 sm = difflib.SequenceMatcher(None, a, b)
948 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
950 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
949 if tag == 'insert':
951 if tag == 'insert':
950 for i in xrange(blo, bhi):
952 for i in xrange(blo, bhi):
951 yield ('+', b[i])
953 yield ('+', b[i])
952 elif tag == 'delete':
954 elif tag == 'delete':
953 for i in xrange(alo, ahi):
955 for i in xrange(alo, ahi):
954 yield ('-', a[i])
956 yield ('-', a[i])
955 elif tag == 'replace':
957 elif tag == 'replace':
956 for i in xrange(alo, ahi):
958 for i in xrange(alo, ahi):
957 yield ('-', a[i])
959 yield ('-', a[i])
958 for i in xrange(blo, bhi):
960 for i in xrange(blo, bhi):
959 yield ('+', b[i])
961 yield ('+', b[i])
960
962
961 prev = {}
963 prev = {}
962 def display(fn, rev, states, prevstates):
964 def display(fn, rev, states, prevstates):
963 counts = {'-': 0, '+': 0}
965 counts = {'-': 0, '+': 0}
964 filerevmatches = {}
966 filerevmatches = {}
965 if incrementing or not opts['all']:
967 if incrementing or not opts['all']:
966 a, b, r = prevstates, states, rev
968 a, b, r = prevstates, states, rev
967 else:
969 else:
968 a, b, r = states, prevstates, prev.get(fn, -1)
970 a, b, r = states, prevstates, prev.get(fn, -1)
969 for change, l in difflinestates(a, b):
971 for change, l in difflinestates(a, b):
970 cols = [fn, str(r)]
972 cols = [fn, str(r)]
971 if opts['line_number']:
973 if opts['line_number']:
972 cols.append(str(l.linenum))
974 cols.append(str(l.linenum))
973 if opts['all']:
975 if opts['all']:
974 cols.append(change)
976 cols.append(change)
975 if opts['user']:
977 if opts['user']:
976 cols.append(ui.shortuser(get(r)[1]))
978 cols.append(ui.shortuser(get(r)[1]))
977 if opts['files_with_matches']:
979 if opts['files_with_matches']:
978 c = (fn, r)
980 c = (fn, r)
979 if c in filerevmatches:
981 if c in filerevmatches:
980 continue
982 continue
981 filerevmatches[c] = 1
983 filerevmatches[c] = 1
982 else:
984 else:
983 cols.append(l.line)
985 cols.append(l.line)
984 ui.write(sep.join(cols), eol)
986 ui.write(sep.join(cols), eol)
985 counts[change] += 1
987 counts[change] += 1
986 return counts['+'], counts['-']
988 return counts['+'], counts['-']
987
989
988 fstate = {}
990 fstate = {}
989 skip = {}
991 skip = {}
990 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
992 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
991 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
993 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
992 count = 0
994 count = 0
993 incrementing = False
995 incrementing = False
994 follow = opts.get('follow')
996 follow = opts.get('follow')
995 for st, rev, fns in changeiter:
997 for st, rev, fns in changeiter:
996 if st == 'window':
998 if st == 'window':
997 incrementing = rev
999 incrementing = rev
998 matches.clear()
1000 matches.clear()
999 elif st == 'add':
1001 elif st == 'add':
1000 mf = repo.changectx(rev).manifest()
1002 mf = repo.changectx(rev).manifest()
1001 matches[rev] = {}
1003 matches[rev] = {}
1002 for fn in fns:
1004 for fn in fns:
1003 if fn in skip:
1005 if fn in skip:
1004 continue
1006 continue
1005 fstate.setdefault(fn, {})
1007 fstate.setdefault(fn, {})
1006 try:
1008 try:
1007 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1009 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1008 if follow:
1010 if follow:
1009 copied = getfile(fn).renamed(mf[fn])
1011 copied = getfile(fn).renamed(mf[fn])
1010 if copied:
1012 if copied:
1011 copies.setdefault(rev, {})[fn] = copied[0]
1013 copies.setdefault(rev, {})[fn] = copied[0]
1012 except KeyError:
1014 except KeyError:
1013 pass
1015 pass
1014 elif st == 'iter':
1016 elif st == 'iter':
1015 states = matches[rev].items()
1017 states = matches[rev].items()
1016 states.sort()
1018 states.sort()
1017 for fn, m in states:
1019 for fn, m in states:
1018 copy = copies.get(rev, {}).get(fn)
1020 copy = copies.get(rev, {}).get(fn)
1019 if fn in skip:
1021 if fn in skip:
1020 if copy:
1022 if copy:
1021 skip[copy] = True
1023 skip[copy] = True
1022 continue
1024 continue
1023 if incrementing or not opts['all'] or fstate[fn]:
1025 if incrementing or not opts['all'] or fstate[fn]:
1024 pos, neg = display(fn, rev, m, fstate[fn])
1026 pos, neg = display(fn, rev, m, fstate[fn])
1025 count += pos + neg
1027 count += pos + neg
1026 if pos and not opts['all']:
1028 if pos and not opts['all']:
1027 skip[fn] = True
1029 skip[fn] = True
1028 if copy:
1030 if copy:
1029 skip[copy] = True
1031 skip[copy] = True
1030 fstate[fn] = m
1032 fstate[fn] = m
1031 if copy:
1033 if copy:
1032 fstate[copy] = m
1034 fstate[copy] = m
1033 prev[fn] = rev
1035 prev[fn] = rev
1034
1036
1035 if not incrementing:
1037 if not incrementing:
1036 fstate = fstate.items()
1038 fstate = fstate.items()
1037 fstate.sort()
1039 fstate.sort()
1038 for fn, state in fstate:
1040 for fn, state in fstate:
1039 if fn in skip:
1041 if fn in skip:
1040 continue
1042 continue
1041 if fn not in copies.get(prev[fn], {}):
1043 if fn not in copies.get(prev[fn], {}):
1042 display(fn, rev, {}, state)
1044 display(fn, rev, {}, state)
1043 return (count == 0 and 1) or 0
1045 return (count == 0 and 1) or 0
1044
1046
1045 def heads(ui, repo, **opts):
1047 def heads(ui, repo, **opts):
1046 """show current repository heads
1048 """show current repository heads
1047
1049
1048 Show all repository head changesets.
1050 Show all repository head changesets.
1049
1051
1050 Repository "heads" are changesets that don't have children
1052 Repository "heads" are changesets that don't have children
1051 changesets. They are where development generally takes place and
1053 changesets. They are where development generally takes place and
1052 are the usual targets for update and merge operations.
1054 are the usual targets for update and merge operations.
1053 """
1055 """
1054 if opts['rev']:
1056 if opts['rev']:
1055 heads = repo.heads(repo.lookup(opts['rev']))
1057 heads = repo.heads(repo.lookup(opts['rev']))
1056 else:
1058 else:
1057 heads = repo.heads()
1059 heads = repo.heads()
1058 displayer = cmdutil.show_changeset(ui, repo, opts)
1060 displayer = cmdutil.show_changeset(ui, repo, opts)
1059 for n in heads:
1061 for n in heads:
1060 displayer.show(changenode=n)
1062 displayer.show(changenode=n)
1061
1063
1062 def help_(ui, name=None, with_version=False):
1064 def help_(ui, name=None, with_version=False):
1063 """show help for a command, extension, or list of commands
1065 """show help for a command, extension, or list of commands
1064
1066
1065 With no arguments, print a list of commands and short help.
1067 With no arguments, print a list of commands and short help.
1066
1068
1067 Given a command name, print help for that command.
1069 Given a command name, print help for that command.
1068
1070
1069 Given an extension name, print help for that extension, and the
1071 Given an extension name, print help for that extension, and the
1070 commands it provides."""
1072 commands it provides."""
1071 option_lists = []
1073 option_lists = []
1072
1074
1073 def helpcmd(name):
1075 def helpcmd(name):
1074 if with_version:
1076 if with_version:
1075 version_(ui)
1077 version_(ui)
1076 ui.write('\n')
1078 ui.write('\n')
1077 aliases, i = findcmd(ui, name)
1079 aliases, i = findcmd(ui, name)
1078 # synopsis
1080 # synopsis
1079 ui.write("%s\n\n" % i[2])
1081 ui.write("%s\n\n" % i[2])
1080
1082
1081 # description
1083 # description
1082 doc = i[0].__doc__
1084 doc = i[0].__doc__
1083 if not doc:
1085 if not doc:
1084 doc = _("(No help text available)")
1086 doc = _("(No help text available)")
1085 if ui.quiet:
1087 if ui.quiet:
1086 doc = doc.splitlines(0)[0]
1088 doc = doc.splitlines(0)[0]
1087 ui.write("%s\n" % doc.rstrip())
1089 ui.write("%s\n" % doc.rstrip())
1088
1090
1089 if not ui.quiet:
1091 if not ui.quiet:
1090 # aliases
1092 # aliases
1091 if len(aliases) > 1:
1093 if len(aliases) > 1:
1092 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1094 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1093
1095
1094 # options
1096 # options
1095 if i[1]:
1097 if i[1]:
1096 option_lists.append(("options", i[1]))
1098 option_lists.append(("options", i[1]))
1097
1099
1098 def helplist(select=None):
1100 def helplist(select=None):
1099 h = {}
1101 h = {}
1100 cmds = {}
1102 cmds = {}
1101 for c, e in table.items():
1103 for c, e in table.items():
1102 f = c.split("|", 1)[0]
1104 f = c.split("|", 1)[0]
1103 if select and not select(f):
1105 if select and not select(f):
1104 continue
1106 continue
1105 if name == "shortlist" and not f.startswith("^"):
1107 if name == "shortlist" and not f.startswith("^"):
1106 continue
1108 continue
1107 f = f.lstrip("^")
1109 f = f.lstrip("^")
1108 if not ui.debugflag and f.startswith("debug"):
1110 if not ui.debugflag and f.startswith("debug"):
1109 continue
1111 continue
1110 doc = e[0].__doc__
1112 doc = e[0].__doc__
1111 if not doc:
1113 if not doc:
1112 doc = _("(No help text available)")
1114 doc = _("(No help text available)")
1113 h[f] = doc.splitlines(0)[0].rstrip()
1115 h[f] = doc.splitlines(0)[0].rstrip()
1114 cmds[f] = c.lstrip("^")
1116 cmds[f] = c.lstrip("^")
1115
1117
1116 fns = h.keys()
1118 fns = h.keys()
1117 fns.sort()
1119 fns.sort()
1118 m = max(map(len, fns))
1120 m = max(map(len, fns))
1119 for f in fns:
1121 for f in fns:
1120 if ui.verbose:
1122 if ui.verbose:
1121 commands = cmds[f].replace("|",", ")
1123 commands = cmds[f].replace("|",", ")
1122 ui.write(" %s:\n %s\n"%(commands, h[f]))
1124 ui.write(" %s:\n %s\n"%(commands, h[f]))
1123 else:
1125 else:
1124 ui.write(' %-*s %s\n' % (m, f, h[f]))
1126 ui.write(' %-*s %s\n' % (m, f, h[f]))
1125
1127
1126 def helpext(name):
1128 def helpext(name):
1127 try:
1129 try:
1128 mod = findext(name)
1130 mod = findext(name)
1129 except KeyError:
1131 except KeyError:
1130 raise UnknownCommand(name)
1132 raise UnknownCommand(name)
1131
1133
1132 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1134 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1133 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1135 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1134 for d in doc[1:]:
1136 for d in doc[1:]:
1135 ui.write(d, '\n')
1137 ui.write(d, '\n')
1136
1138
1137 ui.status('\n')
1139 ui.status('\n')
1138 if ui.verbose:
1140 if ui.verbose:
1139 ui.status(_('list of commands:\n\n'))
1141 ui.status(_('list of commands:\n\n'))
1140 else:
1142 else:
1141 ui.status(_('list of commands (use "hg help -v %s" '
1143 ui.status(_('list of commands (use "hg help -v %s" '
1142 'to show aliases and global options):\n\n') % name)
1144 'to show aliases and global options):\n\n') % name)
1143
1145
1144 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1146 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
1145 helplist(modcmds.has_key)
1147 helplist(modcmds.has_key)
1146
1148
1147 if name and name != 'shortlist':
1149 if name and name != 'shortlist':
1148 try:
1150 try:
1149 helpcmd(name)
1151 helpcmd(name)
1150 except UnknownCommand:
1152 except UnknownCommand:
1151 helpext(name)
1153 helpext(name)
1152
1154
1153 else:
1155 else:
1154 # program name
1156 # program name
1155 if ui.verbose or with_version:
1157 if ui.verbose or with_version:
1156 version_(ui)
1158 version_(ui)
1157 else:
1159 else:
1158 ui.status(_("Mercurial Distributed SCM\n"))
1160 ui.status(_("Mercurial Distributed SCM\n"))
1159 ui.status('\n')
1161 ui.status('\n')
1160
1162
1161 # list of commands
1163 # list of commands
1162 if name == "shortlist":
1164 if name == "shortlist":
1163 ui.status(_('basic commands (use "hg help" '
1165 ui.status(_('basic commands (use "hg help" '
1164 'for the full list or option "-v" for details):\n\n'))
1166 'for the full list or option "-v" for details):\n\n'))
1165 elif ui.verbose:
1167 elif ui.verbose:
1166 ui.status(_('list of commands:\n\n'))
1168 ui.status(_('list of commands:\n\n'))
1167 else:
1169 else:
1168 ui.status(_('list of commands (use "hg help -v" '
1170 ui.status(_('list of commands (use "hg help -v" '
1169 'to show aliases and global options):\n\n'))
1171 'to show aliases and global options):\n\n'))
1170
1172
1171 helplist()
1173 helplist()
1172
1174
1173 # global options
1175 # global options
1174 if ui.verbose:
1176 if ui.verbose:
1175 option_lists.append(("global options", globalopts))
1177 option_lists.append(("global options", globalopts))
1176
1178
1177 # list all option lists
1179 # list all option lists
1178 opt_output = []
1180 opt_output = []
1179 for title, options in option_lists:
1181 for title, options in option_lists:
1180 opt_output.append(("\n%s:\n" % title, None))
1182 opt_output.append(("\n%s:\n" % title, None))
1181 for shortopt, longopt, default, desc in options:
1183 for shortopt, longopt, default, desc in options:
1182 if "DEPRECATED" in desc and not ui.verbose: continue
1184 if "DEPRECATED" in desc and not ui.verbose: continue
1183 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1185 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1184 longopt and " --%s" % longopt),
1186 longopt and " --%s" % longopt),
1185 "%s%s" % (desc,
1187 "%s%s" % (desc,
1186 default
1188 default
1187 and _(" (default: %s)") % default
1189 and _(" (default: %s)") % default
1188 or "")))
1190 or "")))
1189
1191
1190 if opt_output:
1192 if opt_output:
1191 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1193 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1192 for first, second in opt_output:
1194 for first, second in opt_output:
1193 if second:
1195 if second:
1194 ui.write(" %-*s %s\n" % (opts_len, first, second))
1196 ui.write(" %-*s %s\n" % (opts_len, first, second))
1195 else:
1197 else:
1196 ui.write("%s\n" % first)
1198 ui.write("%s\n" % first)
1197
1199
1198 def identify(ui, repo):
1200 def identify(ui, repo):
1199 """print information about the working copy
1201 """print information about the working copy
1200
1202
1201 Print a short summary of the current state of the repo.
1203 Print a short summary of the current state of the repo.
1202
1204
1203 This summary identifies the repository state using one or two parent
1205 This summary identifies the repository state using one or two parent
1204 hash identifiers, followed by a "+" if there are uncommitted changes
1206 hash identifiers, followed by a "+" if there are uncommitted changes
1205 in the working directory, followed by a list of tags for this revision.
1207 in the working directory, followed by a list of tags for this revision.
1206 """
1208 """
1207 parents = [p for p in repo.dirstate.parents() if p != nullid]
1209 parents = [p for p in repo.dirstate.parents() if p != nullid]
1208 if not parents:
1210 if not parents:
1209 ui.write(_("unknown\n"))
1211 ui.write(_("unknown\n"))
1210 return
1212 return
1211
1213
1212 hexfunc = ui.debugflag and hex or short
1214 hexfunc = ui.debugflag and hex or short
1213 modified, added, removed, deleted = repo.status()[:4]
1215 modified, added, removed, deleted = repo.status()[:4]
1214 output = ["%s%s" %
1216 output = ["%s%s" %
1215 ('+'.join([hexfunc(parent) for parent in parents]),
1217 ('+'.join([hexfunc(parent) for parent in parents]),
1216 (modified or added or removed or deleted) and "+" or "")]
1218 (modified or added or removed or deleted) and "+" or "")]
1217
1219
1218 if not ui.quiet:
1220 if not ui.quiet:
1219
1221
1220 branch = repo.workingctx().branch()
1222 branch = repo.workingctx().branch()
1221 if branch:
1223 if branch:
1222 output.append("(%s)" % branch)
1224 output.append("(%s)" % branch)
1223
1225
1224 # multiple tags for a single parent separated by '/'
1226 # multiple tags for a single parent separated by '/'
1225 parenttags = ['/'.join(tags)
1227 parenttags = ['/'.join(tags)
1226 for tags in map(repo.nodetags, parents) if tags]
1228 for tags in map(repo.nodetags, parents) if tags]
1227 # tags for multiple parents separated by ' + '
1229 # tags for multiple parents separated by ' + '
1228 if parenttags:
1230 if parenttags:
1229 output.append(' + '.join(parenttags))
1231 output.append(' + '.join(parenttags))
1230
1232
1231 ui.write("%s\n" % ' '.join(output))
1233 ui.write("%s\n" % ' '.join(output))
1232
1234
1233 def import_(ui, repo, patch1, *patches, **opts):
1235 def import_(ui, repo, patch1, *patches, **opts):
1234 """import an ordered set of patches
1236 """import an ordered set of patches
1235
1237
1236 Import a list of patches and commit them individually.
1238 Import a list of patches and commit them individually.
1237
1239
1238 If there are outstanding changes in the working directory, import
1240 If there are outstanding changes in the working directory, import
1239 will abort unless given the -f flag.
1241 will abort unless given the -f flag.
1240
1242
1241 You can import a patch straight from a mail message. Even patches
1243 You can import a patch straight from a mail message. Even patches
1242 as attachments work (body part must be type text/plain or
1244 as attachments work (body part must be type text/plain or
1243 text/x-patch to be used). From and Subject headers of email
1245 text/x-patch to be used). From and Subject headers of email
1244 message are used as default committer and commit message. All
1246 message are used as default committer and commit message. All
1245 text/plain body parts before first diff are added to commit
1247 text/plain body parts before first diff are added to commit
1246 message.
1248 message.
1247
1249
1248 If imported patch was generated by hg export, user and description
1250 If imported patch was generated by hg export, user and description
1249 from patch override values from message headers and body. Values
1251 from patch override values from message headers and body. Values
1250 given on command line with -m and -u override these.
1252 given on command line with -m and -u override these.
1251
1253
1252 To read a patch from standard input, use patch name "-".
1254 To read a patch from standard input, use patch name "-".
1253 """
1255 """
1254 patches = (patch1,) + patches
1256 patches = (patch1,) + patches
1255
1257
1256 if not opts['force']:
1258 if not opts['force']:
1257 bail_if_changed(repo)
1259 bail_if_changed(repo)
1258
1260
1259 d = opts["base"]
1261 d = opts["base"]
1260 strip = opts["strip"]
1262 strip = opts["strip"]
1261
1263
1262 wlock = repo.wlock()
1264 wlock = repo.wlock()
1263 lock = repo.lock()
1265 lock = repo.lock()
1264
1266
1265 for p in patches:
1267 for p in patches:
1266 pf = os.path.join(d, p)
1268 pf = os.path.join(d, p)
1267
1269
1268 if pf == '-':
1270 if pf == '-':
1269 ui.status(_("applying patch from stdin\n"))
1271 ui.status(_("applying patch from stdin\n"))
1270 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1272 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1271 else:
1273 else:
1272 ui.status(_("applying %s\n") % p)
1274 ui.status(_("applying %s\n") % p)
1273 tmpname, message, user, date = patch.extract(ui, file(pf))
1275 tmpname, message, user, date = patch.extract(ui, file(pf))
1274
1276
1275 if tmpname is None:
1277 if tmpname is None:
1276 raise util.Abort(_('no diffs found'))
1278 raise util.Abort(_('no diffs found'))
1277
1279
1278 try:
1280 try:
1279 if opts['message']:
1281 if opts['message']:
1280 # pickup the cmdline msg
1282 # pickup the cmdline msg
1281 message = opts['message']
1283 message = opts['message']
1282 elif message:
1284 elif message:
1283 # pickup the patch msg
1285 # pickup the patch msg
1284 message = message.strip()
1286 message = message.strip()
1285 else:
1287 else:
1286 # launch the editor
1288 # launch the editor
1287 message = None
1289 message = None
1288 ui.debug(_('message:\n%s\n') % message)
1290 ui.debug(_('message:\n%s\n') % message)
1289
1291
1290 files = {}
1292 files = {}
1291 try:
1293 try:
1292 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1294 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1293 files=files)
1295 files=files)
1294 finally:
1296 finally:
1295 files = patch.updatedir(ui, repo, files, wlock=wlock)
1297 files = patch.updatedir(ui, repo, files, wlock=wlock)
1296 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1298 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1297 finally:
1299 finally:
1298 os.unlink(tmpname)
1300 os.unlink(tmpname)
1299
1301
1300 def incoming(ui, repo, source="default", **opts):
1302 def incoming(ui, repo, source="default", **opts):
1301 """show new changesets found in source
1303 """show new changesets found in source
1302
1304
1303 Show new changesets found in the specified path/URL or the default
1305 Show new changesets found in the specified path/URL or the default
1304 pull location. These are the changesets that would be pulled if a pull
1306 pull location. These are the changesets that would be pulled if a pull
1305 was requested.
1307 was requested.
1306
1308
1307 For remote repository, using --bundle avoids downloading the changesets
1309 For remote repository, using --bundle avoids downloading the changesets
1308 twice if the incoming is followed by a pull.
1310 twice if the incoming is followed by a pull.
1309
1311
1310 See pull for valid source format details.
1312 See pull for valid source format details.
1311 """
1313 """
1312 source = ui.expandpath(source)
1314 source = ui.expandpath(source)
1313 setremoteconfig(ui, opts)
1315 setremoteconfig(ui, opts)
1314
1316
1315 other = hg.repository(ui, source)
1317 other = hg.repository(ui, source)
1316 incoming = repo.findincoming(other, force=opts["force"])
1318 incoming = repo.findincoming(other, force=opts["force"])
1317 if not incoming:
1319 if not incoming:
1318 ui.status(_("no changes found\n"))
1320 ui.status(_("no changes found\n"))
1319 return
1321 return
1320
1322
1321 cleanup = None
1323 cleanup = None
1322 try:
1324 try:
1323 fname = opts["bundle"]
1325 fname = opts["bundle"]
1324 if fname or not other.local():
1326 if fname or not other.local():
1325 # create a bundle (uncompressed if other repo is not local)
1327 # create a bundle (uncompressed if other repo is not local)
1326 cg = other.changegroup(incoming, "incoming")
1328 cg = other.changegroup(incoming, "incoming")
1327 bundletype = other.local() and "HG10BZ" or "HG10UN"
1329 bundletype = other.local() and "HG10BZ" or "HG10UN"
1328 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1330 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1329 # keep written bundle?
1331 # keep written bundle?
1330 if opts["bundle"]:
1332 if opts["bundle"]:
1331 cleanup = None
1333 cleanup = None
1332 if not other.local():
1334 if not other.local():
1333 # use the created uncompressed bundlerepo
1335 # use the created uncompressed bundlerepo
1334 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1336 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1335
1337
1336 revs = None
1338 revs = None
1337 if opts['rev']:
1339 if opts['rev']:
1338 revs = [other.lookup(rev) for rev in opts['rev']]
1340 revs = [other.lookup(rev) for rev in opts['rev']]
1339 o = other.changelog.nodesbetween(incoming, revs)[0]
1341 o = other.changelog.nodesbetween(incoming, revs)[0]
1340 if opts['newest_first']:
1342 if opts['newest_first']:
1341 o.reverse()
1343 o.reverse()
1342 displayer = cmdutil.show_changeset(ui, other, opts)
1344 displayer = cmdutil.show_changeset(ui, other, opts)
1343 for n in o:
1345 for n in o:
1344 parents = [p for p in other.changelog.parents(n) if p != nullid]
1346 parents = [p for p in other.changelog.parents(n) if p != nullid]
1345 if opts['no_merges'] and len(parents) == 2:
1347 if opts['no_merges'] and len(parents) == 2:
1346 continue
1348 continue
1347 displayer.show(changenode=n)
1349 displayer.show(changenode=n)
1348 finally:
1350 finally:
1349 if hasattr(other, 'close'):
1351 if hasattr(other, 'close'):
1350 other.close()
1352 other.close()
1351 if cleanup:
1353 if cleanup:
1352 os.unlink(cleanup)
1354 os.unlink(cleanup)
1353
1355
1354 def init(ui, dest=".", **opts):
1356 def init(ui, dest=".", **opts):
1355 """create a new repository in the given directory
1357 """create a new repository in the given directory
1356
1358
1357 Initialize a new repository in the given directory. If the given
1359 Initialize a new repository in the given directory. If the given
1358 directory does not exist, it is created.
1360 directory does not exist, it is created.
1359
1361
1360 If no directory is given, the current directory is used.
1362 If no directory is given, the current directory is used.
1361
1363
1362 It is possible to specify an ssh:// URL as the destination.
1364 It is possible to specify an ssh:// URL as the destination.
1363 Look at the help text for the pull command for important details
1365 Look at the help text for the pull command for important details
1364 about ssh:// URLs.
1366 about ssh:// URLs.
1365 """
1367 """
1366 setremoteconfig(ui, opts)
1368 setremoteconfig(ui, opts)
1367 hg.repository(ui, dest, create=1)
1369 hg.repository(ui, dest, create=1)
1368
1370
1369 def locate(ui, repo, *pats, **opts):
1371 def locate(ui, repo, *pats, **opts):
1370 """locate files matching specific patterns
1372 """locate files matching specific patterns
1371
1373
1372 Print all files under Mercurial control whose names match the
1374 Print all files under Mercurial control whose names match the
1373 given patterns.
1375 given patterns.
1374
1376
1375 This command searches the current directory and its
1377 This command searches the current directory and its
1376 subdirectories. To search an entire repository, move to the root
1378 subdirectories. To search an entire repository, move to the root
1377 of the repository.
1379 of the repository.
1378
1380
1379 If no patterns are given to match, this command prints all file
1381 If no patterns are given to match, this command prints all file
1380 names.
1382 names.
1381
1383
1382 If you want to feed the output of this command into the "xargs"
1384 If you want to feed the output of this command into the "xargs"
1383 command, use the "-0" option to both this command and "xargs".
1385 command, use the "-0" option to both this command and "xargs".
1384 This will avoid the problem of "xargs" treating single filenames
1386 This will avoid the problem of "xargs" treating single filenames
1385 that contain white space as multiple filenames.
1387 that contain white space as multiple filenames.
1386 """
1388 """
1387 end = opts['print0'] and '\0' or '\n'
1389 end = opts['print0'] and '\0' or '\n'
1388 rev = opts['rev']
1390 rev = opts['rev']
1389 if rev:
1391 if rev:
1390 node = repo.lookup(rev)
1392 node = repo.lookup(rev)
1391 else:
1393 else:
1392 node = None
1394 node = None
1393
1395
1394 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1396 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1395 head='(?:.*/|)'):
1397 head='(?:.*/|)'):
1396 if not node and repo.dirstate.state(abs) == '?':
1398 if not node and repo.dirstate.state(abs) == '?':
1397 continue
1399 continue
1398 if opts['fullpath']:
1400 if opts['fullpath']:
1399 ui.write(os.path.join(repo.root, abs), end)
1401 ui.write(os.path.join(repo.root, abs), end)
1400 else:
1402 else:
1401 ui.write(((pats and rel) or abs), end)
1403 ui.write(((pats and rel) or abs), end)
1402
1404
1403 def log(ui, repo, *pats, **opts):
1405 def log(ui, repo, *pats, **opts):
1404 """show revision history of entire repository or files
1406 """show revision history of entire repository or files
1405
1407
1406 Print the revision history of the specified files or the entire
1408 Print the revision history of the specified files or the entire
1407 project.
1409 project.
1408
1410
1409 File history is shown without following rename or copy history of
1411 File history is shown without following rename or copy history of
1410 files. Use -f/--follow with a file name to follow history across
1412 files. Use -f/--follow with a file name to follow history across
1411 renames and copies. --follow without a file name will only show
1413 renames and copies. --follow without a file name will only show
1412 ancestors or descendants of the starting revision. --follow-first
1414 ancestors or descendants of the starting revision. --follow-first
1413 only follows the first parent of merge revisions.
1415 only follows the first parent of merge revisions.
1414
1416
1415 If no revision range is specified, the default is tip:0 unless
1417 If no revision range is specified, the default is tip:0 unless
1416 --follow is set, in which case the working directory parent is
1418 --follow is set, in which case the working directory parent is
1417 used as the starting revision.
1419 used as the starting revision.
1418
1420
1419 By default this command outputs: changeset id and hash, tags,
1421 By default this command outputs: changeset id and hash, tags,
1420 non-trivial parents, user, date and time, and a summary for each
1422 non-trivial parents, user, date and time, and a summary for each
1421 commit. When the -v/--verbose switch is used, the list of changed
1423 commit. When the -v/--verbose switch is used, the list of changed
1422 files and full commit message is shown.
1424 files and full commit message is shown.
1423 """
1425 """
1424
1426
1425 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1427 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1426 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1428 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1427
1429
1428 if opts['limit']:
1430 if opts['limit']:
1429 try:
1431 try:
1430 limit = int(opts['limit'])
1432 limit = int(opts['limit'])
1431 except ValueError:
1433 except ValueError:
1432 raise util.Abort(_('limit must be a positive integer'))
1434 raise util.Abort(_('limit must be a positive integer'))
1433 if limit <= 0: raise util.Abort(_('limit must be positive'))
1435 if limit <= 0: raise util.Abort(_('limit must be positive'))
1434 else:
1436 else:
1435 limit = sys.maxint
1437 limit = sys.maxint
1436 count = 0
1438 count = 0
1437
1439
1438 if opts['copies'] and opts['rev']:
1440 if opts['copies'] and opts['rev']:
1439 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1441 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1440 else:
1442 else:
1441 endrev = repo.changelog.count()
1443 endrev = repo.changelog.count()
1442 rcache = {}
1444 rcache = {}
1443 ncache = {}
1445 ncache = {}
1444 dcache = []
1446 dcache = []
1445 def getrenamed(fn, rev, man):
1447 def getrenamed(fn, rev, man):
1446 '''looks up all renames for a file (up to endrev) the first
1448 '''looks up all renames for a file (up to endrev) the first
1447 time the file is given. It indexes on the changerev and only
1449 time the file is given. It indexes on the changerev and only
1448 parses the manifest if linkrev != changerev.
1450 parses the manifest if linkrev != changerev.
1449 Returns rename info for fn at changerev rev.'''
1451 Returns rename info for fn at changerev rev.'''
1450 if fn not in rcache:
1452 if fn not in rcache:
1451 rcache[fn] = {}
1453 rcache[fn] = {}
1452 ncache[fn] = {}
1454 ncache[fn] = {}
1453 fl = repo.file(fn)
1455 fl = repo.file(fn)
1454 for i in xrange(fl.count()):
1456 for i in xrange(fl.count()):
1455 node = fl.node(i)
1457 node = fl.node(i)
1456 lr = fl.linkrev(node)
1458 lr = fl.linkrev(node)
1457 renamed = fl.renamed(node)
1459 renamed = fl.renamed(node)
1458 rcache[fn][lr] = renamed
1460 rcache[fn][lr] = renamed
1459 if renamed:
1461 if renamed:
1460 ncache[fn][node] = renamed
1462 ncache[fn][node] = renamed
1461 if lr >= endrev:
1463 if lr >= endrev:
1462 break
1464 break
1463 if rev in rcache[fn]:
1465 if rev in rcache[fn]:
1464 return rcache[fn][rev]
1466 return rcache[fn][rev]
1465 mr = repo.manifest.rev(man)
1467 mr = repo.manifest.rev(man)
1466 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1468 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1467 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1469 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1468 if not dcache or dcache[0] != man:
1470 if not dcache or dcache[0] != man:
1469 dcache[:] = [man, repo.manifest.readdelta(man)]
1471 dcache[:] = [man, repo.manifest.readdelta(man)]
1470 if fn in dcache[1]:
1472 if fn in dcache[1]:
1471 return ncache[fn].get(dcache[1][fn])
1473 return ncache[fn].get(dcache[1][fn])
1472 return None
1474 return None
1473
1475
1474 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
1476 displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
1475 for st, rev, fns in changeiter:
1477 for st, rev, fns in changeiter:
1476 if st == 'add':
1478 if st == 'add':
1477 changenode = repo.changelog.node(rev)
1479 changenode = repo.changelog.node(rev)
1478 parents = [p for p in repo.changelog.parentrevs(rev)
1480 parents = [p for p in repo.changelog.parentrevs(rev)
1479 if p != nullrev]
1481 if p != nullrev]
1480 if opts['no_merges'] and len(parents) == 2:
1482 if opts['no_merges'] and len(parents) == 2:
1481 continue
1483 continue
1482 if opts['only_merges'] and len(parents) != 2:
1484 if opts['only_merges'] and len(parents) != 2:
1483 continue
1485 continue
1484
1486
1485 if opts['keyword']:
1487 if opts['keyword']:
1486 changes = get(rev)
1488 changes = get(rev)
1487 miss = 0
1489 miss = 0
1488 for k in [kw.lower() for kw in opts['keyword']]:
1490 for k in [kw.lower() for kw in opts['keyword']]:
1489 if not (k in changes[1].lower() or
1491 if not (k in changes[1].lower() or
1490 k in changes[4].lower() or
1492 k in changes[4].lower() or
1491 k in " ".join(changes[3][:20]).lower()):
1493 k in " ".join(changes[3][:20]).lower()):
1492 miss = 1
1494 miss = 1
1493 break
1495 break
1494 if miss:
1496 if miss:
1495 continue
1497 continue
1496
1498
1497 copies = []
1499 copies = []
1498 if opts.get('copies') and rev:
1500 if opts.get('copies') and rev:
1499 mf = get(rev)[0]
1501 mf = get(rev)[0]
1500 for fn in get(rev)[3]:
1502 for fn in get(rev)[3]:
1501 rename = getrenamed(fn, rev, mf)
1503 rename = getrenamed(fn, rev, mf)
1502 if rename:
1504 if rename:
1503 copies.append((fn, rename[0]))
1505 copies.append((fn, rename[0]))
1504 displayer.show(rev, changenode, copies=copies)
1506 displayer.show(rev, changenode, copies=copies)
1505 elif st == 'iter':
1507 elif st == 'iter':
1506 if count == limit: break
1508 if count == limit: break
1507 if displayer.flush(rev):
1509 if displayer.flush(rev):
1508 count += 1
1510 count += 1
1509
1511
1510 def manifest(ui, repo, rev=None):
1512 def manifest(ui, repo, rev=None):
1511 """output the latest or given revision of the project manifest
1513 """output the latest or given revision of the project manifest
1512
1514
1513 Print a list of version controlled files for the given revision.
1515 Print a list of version controlled files for the given revision.
1514
1516
1515 The manifest is the list of files being version controlled. If no revision
1517 The manifest is the list of files being version controlled. If no revision
1516 is given then the first parent of the working directory is used.
1518 is given then the first parent of the working directory is used.
1517
1519
1518 With -v flag, print file permissions. With --debug flag, print
1520 With -v flag, print file permissions. With --debug flag, print
1519 file revision hashes.
1521 file revision hashes.
1520 """
1522 """
1521
1523
1522 m = repo.changectx(rev).manifest()
1524 m = repo.changectx(rev).manifest()
1523 files = m.keys()
1525 files = m.keys()
1524 files.sort()
1526 files.sort()
1525
1527
1526 for f in files:
1528 for f in files:
1527 if ui.debugflag:
1529 if ui.debugflag:
1528 ui.write("%40s " % hex(m[f]))
1530 ui.write("%40s " % hex(m[f]))
1529 if ui.verbose:
1531 if ui.verbose:
1530 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1532 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1531 ui.write("%s\n" % f)
1533 ui.write("%s\n" % f)
1532
1534
1533 def merge(ui, repo, node=None, force=None, branch=None):
1535 def merge(ui, repo, node=None, force=None, branch=None):
1534 """Merge working directory with another revision
1536 """Merge working directory with another revision
1535
1537
1536 Merge the contents of the current working directory and the
1538 Merge the contents of the current working directory and the
1537 requested revision. Files that changed between either parent are
1539 requested revision. Files that changed between either parent are
1538 marked as changed for the next commit and a commit must be
1540 marked as changed for the next commit and a commit must be
1539 performed before any further updates are allowed.
1541 performed before any further updates are allowed.
1540
1542
1541 If no revision is specified, the working directory's parent is a
1543 If no revision is specified, the working directory's parent is a
1542 head revision, and the repository contains exactly one other head,
1544 head revision, and the repository contains exactly one other head,
1543 the other head is merged with by default. Otherwise, an explicit
1545 the other head is merged with by default. Otherwise, an explicit
1544 revision to merge with must be provided.
1546 revision to merge with must be provided.
1545 """
1547 """
1546
1548
1547 if node or branch:
1549 if node or branch:
1548 node = _lookup(repo, node, branch)
1550 node = _lookup(repo, node, branch)
1549 else:
1551 else:
1550 heads = repo.heads()
1552 heads = repo.heads()
1551 if len(heads) > 2:
1553 if len(heads) > 2:
1552 raise util.Abort(_('repo has %d heads - '
1554 raise util.Abort(_('repo has %d heads - '
1553 'please merge with an explicit rev') %
1555 'please merge with an explicit rev') %
1554 len(heads))
1556 len(heads))
1555 if len(heads) == 1:
1557 if len(heads) == 1:
1556 raise util.Abort(_('there is nothing to merge - '
1558 raise util.Abort(_('there is nothing to merge - '
1557 'use "hg update" instead'))
1559 'use "hg update" instead'))
1558 parent = repo.dirstate.parents()[0]
1560 parent = repo.dirstate.parents()[0]
1559 if parent not in heads:
1561 if parent not in heads:
1560 raise util.Abort(_('working dir not at a head rev - '
1562 raise util.Abort(_('working dir not at a head rev - '
1561 'use "hg update" or merge with an explicit rev'))
1563 'use "hg update" or merge with an explicit rev'))
1562 node = parent == heads[0] and heads[-1] or heads[0]
1564 node = parent == heads[0] and heads[-1] or heads[0]
1563 return hg.merge(repo, node, force=force)
1565 return hg.merge(repo, node, force=force)
1564
1566
1565 def outgoing(ui, repo, dest=None, **opts):
1567 def outgoing(ui, repo, dest=None, **opts):
1566 """show changesets not found in destination
1568 """show changesets not found in destination
1567
1569
1568 Show changesets not found in the specified destination repository or
1570 Show changesets not found in the specified destination repository or
1569 the default push location. These are the changesets that would be pushed
1571 the default push location. These are the changesets that would be pushed
1570 if a push was requested.
1572 if a push was requested.
1571
1573
1572 See pull for valid destination format details.
1574 See pull for valid destination format details.
1573 """
1575 """
1574 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1576 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1575 setremoteconfig(ui, opts)
1577 setremoteconfig(ui, opts)
1576 revs = None
1578 revs = None
1577 if opts['rev']:
1579 if opts['rev']:
1578 revs = [repo.lookup(rev) for rev in opts['rev']]
1580 revs = [repo.lookup(rev) for rev in opts['rev']]
1579
1581
1580 other = hg.repository(ui, dest)
1582 other = hg.repository(ui, dest)
1581 o = repo.findoutgoing(other, force=opts['force'])
1583 o = repo.findoutgoing(other, force=opts['force'])
1582 if not o:
1584 if not o:
1583 ui.status(_("no changes found\n"))
1585 ui.status(_("no changes found\n"))
1584 return
1586 return
1585 o = repo.changelog.nodesbetween(o, revs)[0]
1587 o = repo.changelog.nodesbetween(o, revs)[0]
1586 if opts['newest_first']:
1588 if opts['newest_first']:
1587 o.reverse()
1589 o.reverse()
1588 displayer = cmdutil.show_changeset(ui, repo, opts)
1590 displayer = cmdutil.show_changeset(ui, repo, opts)
1589 for n in o:
1591 for n in o:
1590 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1592 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1591 if opts['no_merges'] and len(parents) == 2:
1593 if opts['no_merges'] and len(parents) == 2:
1592 continue
1594 continue
1593 displayer.show(changenode=n)
1595 displayer.show(changenode=n)
1594
1596
1595 def parents(ui, repo, file_=None, **opts):
1597 def parents(ui, repo, file_=None, **opts):
1596 """show the parents of the working dir or revision
1598 """show the parents of the working dir or revision
1597
1599
1598 Print the working directory's parent revisions.
1600 Print the working directory's parent revisions.
1599 """
1601 """
1600 rev = opts.get('rev')
1602 rev = opts.get('rev')
1601 if rev:
1603 if rev:
1602 if file_:
1604 if file_:
1603 ctx = repo.filectx(file_, changeid=rev)
1605 ctx = repo.filectx(file_, changeid=rev)
1604 else:
1606 else:
1605 ctx = repo.changectx(rev)
1607 ctx = repo.changectx(rev)
1606 p = [cp.node() for cp in ctx.parents()]
1608 p = [cp.node() for cp in ctx.parents()]
1607 else:
1609 else:
1608 p = repo.dirstate.parents()
1610 p = repo.dirstate.parents()
1609
1611
1610 displayer = cmdutil.show_changeset(ui, repo, opts)
1612 displayer = cmdutil.show_changeset(ui, repo, opts)
1611 for n in p:
1613 for n in p:
1612 if n != nullid:
1614 if n != nullid:
1613 displayer.show(changenode=n)
1615 displayer.show(changenode=n)
1614
1616
1615 def paths(ui, repo, search=None):
1617 def paths(ui, repo, search=None):
1616 """show definition of symbolic path names
1618 """show definition of symbolic path names
1617
1619
1618 Show definition of symbolic path name NAME. If no name is given, show
1620 Show definition of symbolic path name NAME. If no name is given, show
1619 definition of available names.
1621 definition of available names.
1620
1622
1621 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1623 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1622 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1624 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1623 """
1625 """
1624 if search:
1626 if search:
1625 for name, path in ui.configitems("paths"):
1627 for name, path in ui.configitems("paths"):
1626 if name == search:
1628 if name == search:
1627 ui.write("%s\n" % path)
1629 ui.write("%s\n" % path)
1628 return
1630 return
1629 ui.warn(_("not found!\n"))
1631 ui.warn(_("not found!\n"))
1630 return 1
1632 return 1
1631 else:
1633 else:
1632 for name, path in ui.configitems("paths"):
1634 for name, path in ui.configitems("paths"):
1633 ui.write("%s = %s\n" % (name, path))
1635 ui.write("%s = %s\n" % (name, path))
1634
1636
1635 def postincoming(ui, repo, modheads, optupdate):
1637 def postincoming(ui, repo, modheads, optupdate):
1636 if modheads == 0:
1638 if modheads == 0:
1637 return
1639 return
1638 if optupdate:
1640 if optupdate:
1639 if modheads == 1:
1641 if modheads == 1:
1640 return hg.update(repo, repo.changelog.tip()) # update
1642 return hg.update(repo, repo.changelog.tip()) # update
1641 else:
1643 else:
1642 ui.status(_("not updating, since new heads added\n"))
1644 ui.status(_("not updating, since new heads added\n"))
1643 if modheads > 1:
1645 if modheads > 1:
1644 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1646 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1645 else:
1647 else:
1646 ui.status(_("(run 'hg update' to get a working copy)\n"))
1648 ui.status(_("(run 'hg update' to get a working copy)\n"))
1647
1649
1648 def pull(ui, repo, source="default", **opts):
1650 def pull(ui, repo, source="default", **opts):
1649 """pull changes from the specified source
1651 """pull changes from the specified source
1650
1652
1651 Pull changes from a remote repository to a local one.
1653 Pull changes from a remote repository to a local one.
1652
1654
1653 This finds all changes from the repository at the specified path
1655 This finds all changes from the repository at the specified path
1654 or URL and adds them to the local repository. By default, this
1656 or URL and adds them to the local repository. By default, this
1655 does not update the copy of the project in the working directory.
1657 does not update the copy of the project in the working directory.
1656
1658
1657 Valid URLs are of the form:
1659 Valid URLs are of the form:
1658
1660
1659 local/filesystem/path (or file://local/filesystem/path)
1661 local/filesystem/path (or file://local/filesystem/path)
1660 http://[user@]host[:port]/[path]
1662 http://[user@]host[:port]/[path]
1661 https://[user@]host[:port]/[path]
1663 https://[user@]host[:port]/[path]
1662 ssh://[user@]host[:port]/[path]
1664 ssh://[user@]host[:port]/[path]
1663 static-http://host[:port]/[path]
1665 static-http://host[:port]/[path]
1664
1666
1665 Paths in the local filesystem can either point to Mercurial
1667 Paths in the local filesystem can either point to Mercurial
1666 repositories or to bundle files (as created by 'hg bundle' or
1668 repositories or to bundle files (as created by 'hg bundle' or
1667 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1669 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1668 allows access to a Mercurial repository where you simply use a web
1670 allows access to a Mercurial repository where you simply use a web
1669 server to publish the .hg directory as static content.
1671 server to publish the .hg directory as static content.
1670
1672
1671 Some notes about using SSH with Mercurial:
1673 Some notes about using SSH with Mercurial:
1672 - SSH requires an accessible shell account on the destination machine
1674 - SSH requires an accessible shell account on the destination machine
1673 and a copy of hg in the remote path or specified with as remotecmd.
1675 and a copy of hg in the remote path or specified with as remotecmd.
1674 - path is relative to the remote user's home directory by default.
1676 - path is relative to the remote user's home directory by default.
1675 Use an extra slash at the start of a path to specify an absolute path:
1677 Use an extra slash at the start of a path to specify an absolute path:
1676 ssh://example.com//tmp/repository
1678 ssh://example.com//tmp/repository
1677 - Mercurial doesn't use its own compression via SSH; the right thing
1679 - Mercurial doesn't use its own compression via SSH; the right thing
1678 to do is to configure it in your ~/.ssh/config, e.g.:
1680 to do is to configure it in your ~/.ssh/config, e.g.:
1679 Host *.mylocalnetwork.example.com
1681 Host *.mylocalnetwork.example.com
1680 Compression no
1682 Compression no
1681 Host *
1683 Host *
1682 Compression yes
1684 Compression yes
1683 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1685 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1684 with the --ssh command line option.
1686 with the --ssh command line option.
1685 """
1687 """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
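
# Hedged sketch (added for illustration; not part of the original file):
# calling the push command function above directly from Python.  The
# destination URL below is a made-up example, and the keyword opts simply
# mirror the values the command-line parser would normally supply.
def _push_example(destination='ssh://hg@example.com//srv/repo'):
    u = ui.ui()                    # the 'ui' module is demandloaded above
    repo = hg.repository(u, '.')   # repository in the current directory
    # rev, force, ssh and remotecmd are the opts push()/setremoteconfig() read
    return push(u, repo, dest=destination,
                rev=[], force=False, ssh='', remotecmd='')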

def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    (DEPRECATED)
    Low-level commit, for use in helper scripts.

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This command schedules the files to be removed at the next commit.
    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state. If the working directory has two parents, you must
    explicitly specify the revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    node = repo.changectx(opts['rev']).node()
    mf = repo.manifest.read(repo.changelog.read(node)[0])
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=names.has_key):
        if abs in names: continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )
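    # Worked example of the dispatch table (added comment, not in the
    # original): a file in 'modified' that also exists in the target
    # manifest matches the first row, so it is queued on the 'revert'
    # list and a .orig backup is made; if it is absent from the target
    # manifest it is queued on 'remove' instead, again with a backup.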

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and os.path.exists(rel):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.manifest.read(repo.changelog.read(parent)[0])
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

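    # Note (added comment, not in the original): the block above is the
    # parent half of the daemon handshake.  It re-spawns hg with
    # --daemon-pipefds, blocks until the child writes one byte to the pipe,
    # and then exits; the child side further down writes that byte once the
    # server exists and redirects its stdio to the null device.
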
    httpd = hgweb.server.create_server(ui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()

def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        [util.pathto(cwd, x) for x in n]
        for n in repo.status(node1=node1, node2=node2, files=files,
                             match=matchfn,
                             list_ignored=all or opts['ignored'],
                             list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % f)
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (copied, end))

def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current tip or a given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev']:
        rev_ = opts['rev']
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    message = opts['message']
    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])

def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except KeyError:
            r = " ?:?"
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            t = util.localsub(t, 30)
            t += " " * (30 - util.locallen(t))
            ui.write("%s %s\n" % (t, r))
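        # Added note (not in the original): localsub/locallen operate on
        # the tag name in the local encoding, so the 30-column truncation
        # and padding above count characters rather than raw bytes, which
        # keeps the output aligned for multi-byte (transcoded) names.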

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    gen = changegroup.readbundle(urllib.urlopen(fname))
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])

def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    node = _lookup(repo, node, branch)
    if clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

def _lookup(repo, node, branch=None):
    if branch:
        repo.ui.warn(_("the --branch option is deprecated, "
                       "please use 'hg branch' instead\n"))
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                cmdutil.show_changeset(repo.ui, repo, {}).show(changenode=x)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found") % branch)
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return node

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

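# Added note (not in the original): every option below is a
# (short flag, long name, default, help text) tuple in the form consumed
# by fancyopts; a None default marks a plain boolean flag and a list
# default marks an option that may be repeated on the command line.
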
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

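# Added note (not in the original): each entry below maps a command name
# to a (function, options, synopsis) tuple.  A leading "^" marks commands
# shown in the short help listing, and "|" separates a name from its
# aliases, as in "^commit|ci".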
table = {
    "^add":
        (add,
         walkopts + dryrunopts,
         _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
          ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ] + walkopts,
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ] + walkopts,
         _('hg backout [OPTION]... REV')),
    "branch": (branch, [], _('hg branch [NAME]')),
    "branches": (branches, [], _('hg branches')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ] + remoteopts,
         _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ] + walkopts,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ] + walkopts,
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches (DEPRECATED)')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-r REV]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path (DEPRECATED)')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
          ] + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches (DEPRECATED)')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
2529 ('C', 'copies', None, _('show copied files')),
2531 ('C', 'copies', None, _('show copied files')),
2530 ('k', 'keyword', [], _('search for a keyword')),
2532 ('k', 'keyword', [], _('search for a keyword')),
2531 ('l', 'limit', '', _('limit number of changes displayed')),
2533 ('l', 'limit', '', _('limit number of changes displayed')),
2532 ('r', 'rev', [], _('show the specified revision or range')),
2534 ('r', 'rev', [], _('show the specified revision or range')),
2533 ('', 'removed', None, _('include revs where files were removed')),
2535 ('', 'removed', None, _('include revs where files were removed')),
2534 ('M', 'no-merges', None, _('do not show merges')),
2536 ('M', 'no-merges', None, _('do not show merges')),
2535 ('', 'style', '', _('display using template map file')),
2537 ('', 'style', '', _('display using template map file')),
2536 ('m', 'only-merges', None, _('show only merges')),
2538 ('m', 'only-merges', None, _('show only merges')),
2537 ('p', 'patch', None, _('show patch')),
2539 ('p', 'patch', None, _('show patch')),
2538 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2540 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2539 ('', 'template', '', _('display with template')),
2541 ('', 'template', '', _('display with template')),
2540 ] + walkopts,
2542 ] + walkopts,
2541 _('hg log [OPTION]... [FILE]')),
2543 _('hg log [OPTION]... [FILE]')),
2542 "manifest": (manifest, [], _('hg manifest [REV]')),
2544 "manifest": (manifest, [], _('hg manifest [REV]')),
2543 "merge":
2545 "merge":
2544 (merge,
2546 (merge,
2545 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2547 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2546 ('f', 'force', None, _('force a merge with outstanding changes'))],
2548 ('f', 'force', None, _('force a merge with outstanding changes'))],
2547 _('hg merge [-f] [REV]')),
2549 _('hg merge [-f] [REV]')),
2548 "outgoing|out": (outgoing,
2550 "outgoing|out": (outgoing,
2549 [('M', 'no-merges', None, _('do not show merges')),
2551 [('M', 'no-merges', None, _('do not show merges')),
2550 ('f', 'force', None,
2552 ('f', 'force', None,
2551 _('run even when remote repository is unrelated')),
2553 _('run even when remote repository is unrelated')),
2552 ('p', 'patch', None, _('show patch')),
2554 ('p', 'patch', None, _('show patch')),
2553 ('', 'style', '', _('display using template map file')),
2555 ('', 'style', '', _('display using template map file')),
2554 ('r', 'rev', [], _('a specific revision you would like to push')),
2556 ('r', 'rev', [], _('a specific revision you would like to push')),
2555 ('n', 'newest-first', None, _('show newest record first')),
2557 ('n', 'newest-first', None, _('show newest record first')),
2556 ('', 'template', '', _('display with template')),
2558 ('', 'template', '', _('display with template')),
2557 ] + remoteopts,
2559 ] + remoteopts,
2558 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2560 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2559 "^parents":
2561 "^parents":
2560 (parents,
2562 (parents,
2561 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2563 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2562 ('r', 'rev', '', _('show parents from the specified rev')),
2564 ('r', 'rev', '', _('show parents from the specified rev')),
2563 ('', 'style', '', _('display using template map file')),
2565 ('', 'style', '', _('display using template map file')),
2564 ('', 'template', '', _('display with template'))],
2566 ('', 'template', '', _('display with template'))],
2565 _('hg parents [-r REV] [FILE]')),
2567 _('hg parents [-r REV] [FILE]')),
2566 "paths": (paths, [], _('hg paths [NAME]')),
2568 "paths": (paths, [], _('hg paths [NAME]')),
2567 "^pull":
2569 "^pull":
2568 (pull,
2570 (pull,
2569 [('u', 'update', None,
2571 [('u', 'update', None,
2570 _('update to new tip if changesets were pulled')),
2572 _('update to new tip if changesets were pulled')),
2571 ('f', 'force', None,
2573 ('f', 'force', None,
2572 _('run even when remote repository is unrelated')),
2574 _('run even when remote repository is unrelated')),
2573 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2575 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2574 ] + remoteopts,
2576 ] + remoteopts,
2575 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2577 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
2576 "^push":
2578 "^push":
2577 (push,
2579 (push,
2578 [('f', 'force', None, _('force push')),
2580 [('f', 'force', None, _('force push')),
2579 ('r', 'rev', [], _('a specific revision you would like to push')),
2581 ('r', 'rev', [], _('a specific revision you would like to push')),
2580 ] + remoteopts,
2582 ] + remoteopts,
2581 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2583 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
2582 "debugrawcommit|rawcommit":
2584 "debugrawcommit|rawcommit":
2583 (rawcommit,
2585 (rawcommit,
2584 [('p', 'parent', [], _('parent')),
2586 [('p', 'parent', [], _('parent')),
2585 ('d', 'date', '', _('date code')),
2587 ('d', 'date', '', _('date code')),
2586 ('u', 'user', '', _('user')),
2588 ('u', 'user', '', _('user')),
2587 ('F', 'files', '', _('file list')),
2589 ('F', 'files', '', _('file list')),
2588 ('m', 'message', '', _('commit message')),
2590 ('m', 'message', '', _('commit message')),
2589 ('l', 'logfile', '', _('commit message file'))],
2591 ('l', 'logfile', '', _('commit message file'))],
2590 _('hg debugrawcommit [OPTION]... [FILE]...')),
2592 _('hg debugrawcommit [OPTION]... [FILE]...')),
2591 "recover": (recover, [], _('hg recover')),
2593 "recover": (recover, [], _('hg recover')),
2592 "^remove|rm":
2594 "^remove|rm":
2593 (remove,
2595 (remove,
2594 [('A', 'after', None, _('record remove that has already occurred')),
2596 [('A', 'after', None, _('record remove that has already occurred')),
2595 ('f', 'force', None, _('remove file even if modified')),
2597 ('f', 'force', None, _('remove file even if modified')),
2596 ] + walkopts,
2598 ] + walkopts,
2597 _('hg remove [OPTION]... FILE...')),
2599 _('hg remove [OPTION]... FILE...')),
2598 "rename|mv":
2600 "rename|mv":
2599 (rename,
2601 (rename,
2600 [('A', 'after', None, _('record a rename that has already occurred')),
2602 [('A', 'after', None, _('record a rename that has already occurred')),
2601 ('f', 'force', None,
2603 ('f', 'force', None,
2602 _('forcibly copy over an existing managed file')),
2604 _('forcibly copy over an existing managed file')),
2603 ] + walkopts + dryrunopts,
2605 ] + walkopts + dryrunopts,
2604 _('hg rename [OPTION]... SOURCE... DEST')),
2606 _('hg rename [OPTION]... SOURCE... DEST')),
2605 "^revert":
2607 "^revert":
2606 (revert,
2608 (revert,
2607 [('a', 'all', None, _('revert all changes when no arguments given')),
2609 [('a', 'all', None, _('revert all changes when no arguments given')),
2608 ('r', 'rev', '', _('revision to revert to')),
2610 ('r', 'rev', '', _('revision to revert to')),
2609 ('', 'no-backup', None, _('do not save backup copies of files')),
2611 ('', 'no-backup', None, _('do not save backup copies of files')),
2610 ] + walkopts + dryrunopts,
2612 ] + walkopts + dryrunopts,
2611 _('hg revert [-r REV] [NAME]...')),
2613 _('hg revert [-r REV] [NAME]...')),
2612 "rollback": (rollback, [], _('hg rollback')),
2614 "rollback": (rollback, [], _('hg rollback')),
2613 "root": (root, [], _('hg root')),
2615 "root": (root, [], _('hg root')),
2614 "showconfig|debugconfig":
2616 "showconfig|debugconfig":
2615 (showconfig,
2617 (showconfig,
2616 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2618 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2617 _('hg showconfig [-u] [NAME]...')),
2619 _('hg showconfig [-u] [NAME]...')),
2618 "^serve":
2620 "^serve":
2619 (serve,
2621 (serve,
2620 [('A', 'accesslog', '', _('name of access log file to write to')),
2622 [('A', 'accesslog', '', _('name of access log file to write to')),
2621 ('d', 'daemon', None, _('run server in background')),
2623 ('d', 'daemon', None, _('run server in background')),
2622 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2624 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2623 ('E', 'errorlog', '', _('name of error log file to write to')),
2625 ('E', 'errorlog', '', _('name of error log file to write to')),
2624 ('p', 'port', 0, _('port to use (default: 8000)')),
2626 ('p', 'port', 0, _('port to use (default: 8000)')),
2625 ('a', 'address', '', _('address to use')),
2627 ('a', 'address', '', _('address to use')),
2626 ('n', 'name', '',
2628 ('n', 'name', '',
2627 _('name to show in web pages (default: working dir)')),
2629 _('name to show in web pages (default: working dir)')),
2628 ('', 'webdir-conf', '', _('name of the webdir config file'
2630 ('', 'webdir-conf', '', _('name of the webdir config file'
2629 ' (serve more than one repo)')),
2631 ' (serve more than one repo)')),
2630 ('', 'pid-file', '', _('name of file to write process ID to')),
2632 ('', 'pid-file', '', _('name of file to write process ID to')),
2631 ('', 'stdio', None, _('for remote clients')),
2633 ('', 'stdio', None, _('for remote clients')),
2632 ('t', 'templates', '', _('web templates to use')),
2634 ('t', 'templates', '', _('web templates to use')),
2633 ('', 'style', '', _('template style to use')),
2635 ('', 'style', '', _('template style to use')),
2634 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2636 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2635 _('hg serve [OPTION]...')),
2637 _('hg serve [OPTION]...')),
2636 "^status|st":
2638 "^status|st":
2637 (status,
2639 (status,
2638 [('A', 'all', None, _('show status of all files')),
2640 [('A', 'all', None, _('show status of all files')),
2639 ('m', 'modified', None, _('show only modified files')),
2641 ('m', 'modified', None, _('show only modified files')),
2640 ('a', 'added', None, _('show only added files')),
2642 ('a', 'added', None, _('show only added files')),
2641 ('r', 'removed', None, _('show only removed files')),
2643 ('r', 'removed', None, _('show only removed files')),
2642 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2644 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2643 ('c', 'clean', None, _('show only files without changes')),
2645 ('c', 'clean', None, _('show only files without changes')),
2644 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2646 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2645 ('i', 'ignored', None, _('show ignored files')),
2647 ('i', 'ignored', None, _('show ignored files')),
2646 ('n', 'no-status', None, _('hide status prefix')),
2648 ('n', 'no-status', None, _('hide status prefix')),
2647 ('C', 'copies', None, _('show source of copied files')),
2649 ('C', 'copies', None, _('show source of copied files')),
2648 ('0', 'print0', None,
2650 ('0', 'print0', None,
2649 _('end filenames with NUL, for use with xargs')),
2651 _('end filenames with NUL, for use with xargs')),
2650 ('', 'rev', [], _('show difference from revision')),
2652 ('', 'rev', [], _('show difference from revision')),
2651 ] + walkopts,
2653 ] + walkopts,
2652 _('hg status [OPTION]... [FILE]...')),
2654 _('hg status [OPTION]... [FILE]...')),
2653 "tag":
2655 "tag":
2654 (tag,
2656 (tag,
2655 [('l', 'local', None, _('make the tag local')),
2657 [('l', 'local', None, _('make the tag local')),
2656 ('m', 'message', '', _('message for tag commit log entry')),
2658 ('m', 'message', '', _('message for tag commit log entry')),
2657 ('d', 'date', '', _('record datecode as commit date')),
2659 ('d', 'date', '', _('record datecode as commit date')),
2658 ('u', 'user', '', _('record user as committer')),
2660 ('u', 'user', '', _('record user as committer')),
2659 ('r', 'rev', '', _('revision to tag'))],
2661 ('r', 'rev', '', _('revision to tag'))],
2660 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2662 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2661 "tags": (tags, [], _('hg tags')),
2663 "tags": (tags, [], _('hg tags')),
2662 "tip":
2664 "tip":
2663 (tip,
2665 (tip,
2664 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2666 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2665 ('', 'style', '', _('display using template map file')),
2667 ('', 'style', '', _('display using template map file')),
2666 ('p', 'patch', None, _('show patch')),
2668 ('p', 'patch', None, _('show patch')),
2667 ('', 'template', '', _('display with template'))],
2669 ('', 'template', '', _('display with template'))],
2668 _('hg tip [-p]')),
2670 _('hg tip [-p]')),
2669 "unbundle":
2671 "unbundle":
2670 (unbundle,
2672 (unbundle,
2671 [('u', 'update', None,
2673 [('u', 'update', None,
2672 _('update to new tip if changesets were unbundled'))],
2674 _('update to new tip if changesets were unbundled'))],
2673 _('hg unbundle [-u] FILE')),
2675 _('hg unbundle [-u] FILE')),
2674 "^update|up|checkout|co":
2676 "^update|up|checkout|co":
2675 (update,
2677 (update,
2676 [('b', 'branch', '',
2678 [('b', 'branch', '',
2677 _('checkout the head of a specific branch (DEPRECATED)')),
2679 _('checkout the head of a specific branch (DEPRECATED)')),
2678 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
2680 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
2679 ('C', 'clean', None, _('overwrite locally modified files')),
2681 ('C', 'clean', None, _('overwrite locally modified files')),
2680 ('f', 'force', None, _('force a merge with outstanding changes'))],
2682 ('f', 'force', None, _('force a merge with outstanding changes'))],
2681 _('hg update [-C] [-f] [REV]')),
2683 _('hg update [-C] [-f] [REV]')),
2682 "verify": (verify, [], _('hg verify')),
2684 "verify": (verify, [], _('hg verify')),
2683 "version": (version_, [], _('hg version')),
2685 "version": (version_, [], _('hg version')),
2684 }
2686 }
2685
2687
2686 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2688 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2687 " debugindex debugindexdot")
2689 " debugindex debugindexdot")
2688 optionalrepo = ("paths serve showconfig")
2690 optionalrepo = ("paths serve showconfig")
2689
2691
2690 def findpossible(ui, cmd):
2692 def findpossible(ui, cmd):
2691 """
2693 """
2692 Return cmd -> (aliases, command table entry)
2694 Return cmd -> (aliases, command table entry)
2693 for each matching command.
2695 for each matching command.
2694 Return debug commands (or their aliases) only if no normal command matches.
2696 Return debug commands (or their aliases) only if no normal command matches.
2695 """
2697 """
2696 choice = {}
2698 choice = {}
2697 debugchoice = {}
2699 debugchoice = {}
2698 for e in table.keys():
2700 for e in table.keys():
2699 aliases = e.lstrip("^").split("|")
2701 aliases = e.lstrip("^").split("|")
2700 found = None
2702 found = None
2701 if cmd in aliases:
2703 if cmd in aliases:
2702 found = cmd
2704 found = cmd
2703 elif not ui.config("ui", "strict"):
2705 elif not ui.config("ui", "strict"):
2704 for a in aliases:
2706 for a in aliases:
2705 if a.startswith(cmd):
2707 if a.startswith(cmd):
2706 found = a
2708 found = a
2707 break
2709 break
2708 if found is not None:
2710 if found is not None:
2709 if aliases[0].startswith("debug") or found.startswith("debug"):
2711 if aliases[0].startswith("debug") or found.startswith("debug"):
2710 debugchoice[found] = (aliases, table[e])
2712 debugchoice[found] = (aliases, table[e])
2711 else:
2713 else:
2712 choice[found] = (aliases, table[e])
2714 choice[found] = (aliases, table[e])
2713
2715
2714 if not choice and debugchoice:
2716 if not choice and debugchoice:
2715 choice = debugchoice
2717 choice = debugchoice
2716
2718
2717 return choice
2719 return choice
2718
2720
2719 def findcmd(ui, cmd):
2721 def findcmd(ui, cmd):
2720 """Return (aliases, command table entry) for command string."""
2722 """Return (aliases, command table entry) for command string."""
2721 choice = findpossible(ui, cmd)
2723 choice = findpossible(ui, cmd)
2722
2724
2723 if choice.has_key(cmd):
2725 if choice.has_key(cmd):
2724 return choice[cmd]
2726 return choice[cmd]
2725
2727
2726 if len(choice) > 1:
2728 if len(choice) > 1:
2727 clist = choice.keys()
2729 clist = choice.keys()
2728 clist.sort()
2730 clist.sort()
2729 raise AmbiguousCommand(cmd, clist)
2731 raise AmbiguousCommand(cmd, clist)
2730
2732
2731 if choice:
2733 if choice:
2732 return choice.values()[0]
2734 return choice.values()[0]
2733
2735
2734 raise UnknownCommand(cmd)
2736 raise UnknownCommand(cmd)
2735
2737
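Not part of the original changeset: a minimal sketch of how the prefix matching in findpossible/findcmd above resolves abbreviated command names, assuming the command table defined earlier in this file; the abbreviations used here are only examples.

# resolve an abbreviation the same way parse() below does
try:
    aliases, entry = findcmd(u, "rec")        # unique prefix of "recover"
except AmbiguousCommand, inst:
    # e.g. findcmd(u, "s") matches serve, showconfig and status
    print "ambiguous command:", " ".join(inst.args[1])
except UnknownCommand, inst:
    print "unknown command:", inst.args[0]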
2736 def catchterm(*args):
2738 def catchterm(*args):
2737 raise util.SignalInterrupt
2739 raise util.SignalInterrupt
2738
2740
2739 def run():
2741 def run():
2740 sys.exit(dispatch(sys.argv[1:]))
2742 sys.exit(dispatch(sys.argv[1:]))
2741
2743
2742 class ParseError(Exception):
2744 class ParseError(Exception):
2743 """Exception raised on errors in parsing the command line."""
2745 """Exception raised on errors in parsing the command line."""
2744
2746
2745 def parse(ui, args):
2747 def parse(ui, args):
2746 options = {}
2748 options = {}
2747 cmdoptions = {}
2749 cmdoptions = {}
2748
2750
2749 try:
2751 try:
2750 args = fancyopts.fancyopts(args, globalopts, options)
2752 args = fancyopts.fancyopts(args, globalopts, options)
2751 except fancyopts.getopt.GetoptError, inst:
2753 except fancyopts.getopt.GetoptError, inst:
2752 raise ParseError(None, inst)
2754 raise ParseError(None, inst)
2753
2755
2754 if args:
2756 if args:
2755 cmd, args = args[0], args[1:]
2757 cmd, args = args[0], args[1:]
2756 aliases, i = findcmd(ui, cmd)
2758 aliases, i = findcmd(ui, cmd)
2757 cmd = aliases[0]
2759 cmd = aliases[0]
2758 defaults = ui.config("defaults", cmd)
2760 defaults = ui.config("defaults", cmd)
2759 if defaults:
2761 if defaults:
2760 args = shlex.split(defaults) + args
2762 args = shlex.split(defaults) + args
2761 c = list(i[1])
2763 c = list(i[1])
2762 else:
2764 else:
2763 cmd = None
2765 cmd = None
2764 c = []
2766 c = []
2765
2767
2766 # combine global options into local
2768 # combine global options into local
2767 for o in globalopts:
2769 for o in globalopts:
2768 c.append((o[0], o[1], options[o[1]], o[3]))
2770 c.append((o[0], o[1], options[o[1]], o[3]))
2769
2771
2770 try:
2772 try:
2771 args = fancyopts.fancyopts(args, c, cmdoptions)
2773 args = fancyopts.fancyopts(args, c, cmdoptions)
2772 except fancyopts.getopt.GetoptError, inst:
2774 except fancyopts.getopt.GetoptError, inst:
2773 raise ParseError(cmd, inst)
2775 raise ParseError(cmd, inst)
2774
2776
2775 # separate global options back out
2777 # separate global options back out
2776 for o in globalopts:
2778 for o in globalopts:
2777 n = o[1]
2779 n = o[1]
2778 options[n] = cmdoptions[n]
2780 options[n] = cmdoptions[n]
2779 del cmdoptions[n]
2781 del cmdoptions[n]
2780
2782
2781 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2783 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
2782
2784
2783 external = {}
2785 external = {}
2784
2786
2785 def findext(name):
2787 def findext(name):
2786 '''return module with given extension name'''
2788 '''return module with given extension name'''
2787 try:
2789 try:
2788 return sys.modules[external[name]]
2790 return sys.modules[external[name]]
2789 except KeyError:
2791 except KeyError:
2790 for k, v in external.iteritems():
2792 for k, v in external.iteritems():
2791 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
2793 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
2792 return sys.modules[v]
2794 return sys.modules[v]
2793 raise KeyError(name)
2795 raise KeyError(name)
2794
2796
2795 def load_extensions(ui):
2797 def load_extensions(ui):
2796 added = []
2798 added = []
2797 for ext_name, load_from_name in ui.extensions():
2799 for ext_name, load_from_name in ui.extensions():
2798 if ext_name in external:
2800 if ext_name in external:
2799 continue
2801 continue
2800 try:
2802 try:
2801 if load_from_name:
2803 if load_from_name:
2802 # the module will be loaded in sys.modules
2804 # the module will be loaded in sys.modules
2803 # choose a unique name so that it doesn't
2805 # choose a unique name so that it doesn't
2804 # conflict with other modules
2806 # conflict with other modules
2805 module_name = "hgext_%s" % ext_name.replace('.', '_')
2807 module_name = "hgext_%s" % ext_name.replace('.', '_')
2806 mod = imp.load_source(module_name, load_from_name)
2808 mod = imp.load_source(module_name, load_from_name)
2807 else:
2809 else:
2808 def importh(name):
2810 def importh(name):
2809 mod = __import__(name)
2811 mod = __import__(name)
2810 components = name.split('.')
2812 components = name.split('.')
2811 for comp in components[1:]:
2813 for comp in components[1:]:
2812 mod = getattr(mod, comp)
2814 mod = getattr(mod, comp)
2813 return mod
2815 return mod
2814 try:
2816 try:
2815 mod = importh("hgext.%s" % ext_name)
2817 mod = importh("hgext.%s" % ext_name)
2816 except ImportError:
2818 except ImportError:
2817 mod = importh(ext_name)
2819 mod = importh(ext_name)
2818 external[ext_name] = mod.__name__
2820 external[ext_name] = mod.__name__
2819 added.append((mod, ext_name))
2821 added.append((mod, ext_name))
2820 except (util.SignalInterrupt, KeyboardInterrupt):
2822 except (util.SignalInterrupt, KeyboardInterrupt):
2821 raise
2823 raise
2822 except Exception, inst:
2824 except Exception, inst:
2823 ui.warn(_("*** failed to import extension %s: %s\n") %
2825 ui.warn(_("*** failed to import extension %s: %s\n") %
2824 (ext_name, inst))
2826 (ext_name, inst))
2825 if ui.print_exc():
2827 if ui.print_exc():
2826 return 1
2828 return 1
2827
2829
2828 for mod, name in added:
2830 for mod, name in added:
2829 uisetup = getattr(mod, 'uisetup', None)
2831 uisetup = getattr(mod, 'uisetup', None)
2830 if uisetup:
2832 if uisetup:
2831 uisetup(ui)
2833 uisetup(ui)
2832 cmdtable = getattr(mod, 'cmdtable', {})
2834 cmdtable = getattr(mod, 'cmdtable', {})
2833 for t in cmdtable:
2835 for t in cmdtable:
2834 if t in table:
2836 if t in table:
2835 ui.warn(_("module %s overrides %s\n") % (name, t))
2837 ui.warn(_("module %s overrides %s\n") % (name, t))
2836 table.update(cmdtable)
2838 table.update(cmdtable)
2837
2839
2838 def parseconfig(config):
2840 def parseconfig(config):
2839 """parse the --config options from the command line"""
2841 """parse the --config options from the command line"""
2840 parsed = []
2842 parsed = []
2841 for cfg in config:
2843 for cfg in config:
2842 try:
2844 try:
2843 name, value = cfg.split('=', 1)
2845 name, value = cfg.split('=', 1)
2844 section, name = name.split('.', 1)
2846 section, name = name.split('.', 1)
2845 if not section or not name:
2847 if not section or not name:
2846 raise IndexError
2848 raise IndexError
2847 parsed.append((section, name, value))
2849 parsed.append((section, name, value))
2848 except (IndexError, ValueError):
2850 except (IndexError, ValueError):
2849 raise util.Abort(_('malformed --config option: %s') % cfg)
2851 raise util.Abort(_('malformed --config option: %s') % cfg)
2850 return parsed
2852 return parsed
2851
2853
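A minimal sketch, not part of the changeset, of what parseconfig() above produces; the option strings are invented for the example.

parseconfig(['ui.username=Jane Doe <jane@example.com>', 'diff.git=1'])
# -> [('ui', 'username', 'Jane Doe <jane@example.com>'), ('diff', 'git', '1')]
parseconfig(['nodotsection=1'])
# -> raises util.Abort: malformed --config option: nodotsection=1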
2852 def dispatch(args):
2854 def dispatch(args):
2853 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
2855 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
2854 num = getattr(signal, name, None)
2856 num = getattr(signal, name, None)
2855 if num: signal.signal(num, catchterm)
2857 if num: signal.signal(num, catchterm)
2856
2858
2857 try:
2859 try:
2858 u = ui.ui(traceback='--traceback' in sys.argv[1:])
2860 u = ui.ui(traceback='--traceback' in sys.argv[1:])
2859 except util.Abort, inst:
2861 except util.Abort, inst:
2860 sys.stderr.write(_("abort: %s\n") % inst)
2862 sys.stderr.write(_("abort: %s\n") % inst)
2861 return -1
2863 return -1
2862
2864
2863 load_extensions(u)
2865 load_extensions(u)
2864 u.addreadhook(load_extensions)
2866 u.addreadhook(load_extensions)
2865
2867
2866 try:
2868 try:
2867 cmd, func, args, options, cmdoptions = parse(u, args)
2869 cmd, func, args, options, cmdoptions = parse(u, args)
2868 if options["time"]:
2870 if options["time"]:
2869 def get_times():
2871 def get_times():
2870 t = os.times()
2872 t = os.times()
2871 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2873 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
2872 t = (t[0], t[1], t[2], t[3], time.clock())
2874 t = (t[0], t[1], t[2], t[3], time.clock())
2873 return t
2875 return t
2874 s = get_times()
2876 s = get_times()
2875 def print_time():
2877 def print_time():
2876 t = get_times()
2878 t = get_times()
2877 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2879 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
2878 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2880 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
2879 atexit.register(print_time)
2881 atexit.register(print_time)
2880
2882
2881 # enter the debugger before command execution
2883 # enter the debugger before command execution
2882 if options['debugger']:
2884 if options['debugger']:
2883 pdb.set_trace()
2885 pdb.set_trace()
2884
2886
2885 try:
2887 try:
2886 if options['cwd']:
2888 if options['cwd']:
2887 try:
2889 try:
2888 os.chdir(options['cwd'])
2890 os.chdir(options['cwd'])
2889 except OSError, inst:
2891 except OSError, inst:
2890 raise util.Abort('%s: %s' %
2892 raise util.Abort('%s: %s' %
2891 (options['cwd'], inst.strerror))
2893 (options['cwd'], inst.strerror))
2892
2894
2893 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2895 u.updateopts(options["verbose"], options["debug"], options["quiet"],
2894 not options["noninteractive"], options["traceback"],
2896 not options["noninteractive"], options["traceback"],
2895 parseconfig(options["config"]))
2897 parseconfig(options["config"]))
2896
2898
2897 path = u.expandpath(options["repository"]) or ""
2899 path = u.expandpath(options["repository"]) or ""
2898 repo = path and hg.repository(u, path=path) or None
2900 repo = path and hg.repository(u, path=path) or None
2899 if repo and not repo.local():
2901 if repo and not repo.local():
2900 raise util.Abort(_("repository '%s' is not local") % path)
2902 raise util.Abort(_("repository '%s' is not local") % path)
2901
2903
2902 if options['help']:
2904 if options['help']:
2903 return help_(u, cmd, options['version'])
2905 return help_(u, cmd, options['version'])
2904 elif options['version']:
2906 elif options['version']:
2905 return version_(u)
2907 return version_(u)
2906 elif not cmd:
2908 elif not cmd:
2907 return help_(u, 'shortlist')
2909 return help_(u, 'shortlist')
2908
2910
2909 if cmd not in norepo.split():
2911 if cmd not in norepo.split():
2910 try:
2912 try:
2911 if not repo:
2913 if not repo:
2912 repo = hg.repository(u, path=path)
2914 repo = hg.repository(u, path=path)
2913 u = repo.ui
2915 u = repo.ui
2914 for name in external.itervalues():
2916 for name in external.itervalues():
2915 mod = sys.modules[name]
2917 mod = sys.modules[name]
2916 if hasattr(mod, 'reposetup'):
2918 if hasattr(mod, 'reposetup'):
2917 mod.reposetup(u, repo)
2919 mod.reposetup(u, repo)
2918 hg.repo_setup_hooks.append(mod.reposetup)
2920 hg.repo_setup_hooks.append(mod.reposetup)
2919 except hg.RepoError:
2921 except hg.RepoError:
2920 if cmd not in optionalrepo.split():
2922 if cmd not in optionalrepo.split():
2921 raise
2923 raise
2922 d = lambda: func(u, repo, *args, **cmdoptions)
2924 d = lambda: func(u, repo, *args, **cmdoptions)
2923 else:
2925 else:
2924 d = lambda: func(u, *args, **cmdoptions)
2926 d = lambda: func(u, *args, **cmdoptions)
2925
2927
2926 try:
2928 try:
2927 if options['profile']:
2929 if options['profile']:
2928 import hotshot, hotshot.stats
2930 import hotshot, hotshot.stats
2929 prof = hotshot.Profile("hg.prof")
2931 prof = hotshot.Profile("hg.prof")
2930 try:
2932 try:
2931 try:
2933 try:
2932 return prof.runcall(d)
2934 return prof.runcall(d)
2933 except:
2935 except:
2934 try:
2936 try:
2935 u.warn(_('exception raised - generating '
2937 u.warn(_('exception raised - generating '
2936 'profile anyway\n'))
2938 'profile anyway\n'))
2937 except:
2939 except:
2938 pass
2940 pass
2939 raise
2941 raise
2940 finally:
2942 finally:
2941 prof.close()
2943 prof.close()
2942 stats = hotshot.stats.load("hg.prof")
2944 stats = hotshot.stats.load("hg.prof")
2943 stats.strip_dirs()
2945 stats.strip_dirs()
2944 stats.sort_stats('time', 'calls')
2946 stats.sort_stats('time', 'calls')
2945 stats.print_stats(40)
2947 stats.print_stats(40)
2946 elif options['lsprof']:
2948 elif options['lsprof']:
2947 try:
2949 try:
2948 from mercurial import lsprof
2950 from mercurial import lsprof
2949 except ImportError:
2951 except ImportError:
2950 raise util.Abort(_(
2952 raise util.Abort(_(
2951 'lsprof not available - install from '
2953 'lsprof not available - install from '
2952 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
2954 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
2953 p = lsprof.Profiler()
2955 p = lsprof.Profiler()
2954 p.enable(subcalls=True)
2956 p.enable(subcalls=True)
2955 try:
2957 try:
2956 return d()
2958 return d()
2957 finally:
2959 finally:
2958 p.disable()
2960 p.disable()
2959 stats = lsprof.Stats(p.getstats())
2961 stats = lsprof.Stats(p.getstats())
2960 stats.sort()
2962 stats.sort()
2961 stats.pprint(top=10, file=sys.stderr, climit=5)
2963 stats.pprint(top=10, file=sys.stderr, climit=5)
2962 else:
2964 else:
2963 return d()
2965 return d()
2964 finally:
2966 finally:
2965 u.flush()
2967 u.flush()
2966 except:
2968 except:
2967 # enter the debugger when we hit an exception
2969 # enter the debugger when we hit an exception
2968 if options['debugger']:
2970 if options['debugger']:
2969 pdb.post_mortem(sys.exc_info()[2])
2971 pdb.post_mortem(sys.exc_info()[2])
2970 u.print_exc()
2972 u.print_exc()
2971 raise
2973 raise
2972 except ParseError, inst:
2974 except ParseError, inst:
2973 if inst.args[0]:
2975 if inst.args[0]:
2974 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2976 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
2975 help_(u, inst.args[0])
2977 help_(u, inst.args[0])
2976 else:
2978 else:
2977 u.warn(_("hg: %s\n") % inst.args[1])
2979 u.warn(_("hg: %s\n") % inst.args[1])
2978 help_(u, 'shortlist')
2980 help_(u, 'shortlist')
2979 except AmbiguousCommand, inst:
2981 except AmbiguousCommand, inst:
2980 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
2982 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
2981 (inst.args[0], " ".join(inst.args[1])))
2983 (inst.args[0], " ".join(inst.args[1])))
2982 except UnknownCommand, inst:
2984 except UnknownCommand, inst:
2983 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2985 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
2984 help_(u, 'shortlist')
2986 help_(u, 'shortlist')
2985 except hg.RepoError, inst:
2987 except hg.RepoError, inst:
2986 u.warn(_("abort: %s!\n") % inst)
2988 u.warn(_("abort: %s!\n") % inst)
2987 except lock.LockHeld, inst:
2989 except lock.LockHeld, inst:
2988 if inst.errno == errno.ETIMEDOUT:
2990 if inst.errno == errno.ETIMEDOUT:
2989 reason = _('timed out waiting for lock held by %s') % inst.locker
2991 reason = _('timed out waiting for lock held by %s') % inst.locker
2990 else:
2992 else:
2991 reason = _('lock held by %s') % inst.locker
2993 reason = _('lock held by %s') % inst.locker
2992 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
2994 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
2993 except lock.LockUnavailable, inst:
2995 except lock.LockUnavailable, inst:
2994 u.warn(_("abort: could not lock %s: %s\n") %
2996 u.warn(_("abort: could not lock %s: %s\n") %
2995 (inst.desc or inst.filename, inst.strerror))
2997 (inst.desc or inst.filename, inst.strerror))
2996 except revlog.RevlogError, inst:
2998 except revlog.RevlogError, inst:
2997 u.warn(_("abort: %s!\n") % inst)
2999 u.warn(_("abort: %s!\n") % inst)
2998 except util.SignalInterrupt:
3000 except util.SignalInterrupt:
2999 u.warn(_("killed!\n"))
3001 u.warn(_("killed!\n"))
3000 except KeyboardInterrupt:
3002 except KeyboardInterrupt:
3001 try:
3003 try:
3002 u.warn(_("interrupted!\n"))
3004 u.warn(_("interrupted!\n"))
3003 except IOError, inst:
3005 except IOError, inst:
3004 if inst.errno == errno.EPIPE:
3006 if inst.errno == errno.EPIPE:
3005 if u.debugflag:
3007 if u.debugflag:
3006 u.warn(_("\nbroken pipe\n"))
3008 u.warn(_("\nbroken pipe\n"))
3007 else:
3009 else:
3008 raise
3010 raise
3009 except IOError, inst:
3011 except IOError, inst:
3010 if hasattr(inst, "code"):
3012 if hasattr(inst, "code"):
3011 u.warn(_("abort: %s\n") % inst)
3013 u.warn(_("abort: %s\n") % inst)
3012 elif hasattr(inst, "reason"):
3014 elif hasattr(inst, "reason"):
3013 u.warn(_("abort: error: %s\n") % inst.reason[1])
3015 u.warn(_("abort: error: %s\n") % inst.reason[1])
3014 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3016 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3015 if u.debugflag:
3017 if u.debugflag:
3016 u.warn(_("broken pipe\n"))
3018 u.warn(_("broken pipe\n"))
3017 elif getattr(inst, "strerror", None):
3019 elif getattr(inst, "strerror", None):
3018 if getattr(inst, "filename", None):
3020 if getattr(inst, "filename", None):
3019 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3021 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3020 else:
3022 else:
3021 u.warn(_("abort: %s\n") % inst.strerror)
3023 u.warn(_("abort: %s\n") % inst.strerror)
3022 else:
3024 else:
3023 raise
3025 raise
3024 except OSError, inst:
3026 except OSError, inst:
3025 if getattr(inst, "filename", None):
3027 if getattr(inst, "filename", None):
3026 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3028 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3027 else:
3029 else:
3028 u.warn(_("abort: %s\n") % inst.strerror)
3030 u.warn(_("abort: %s\n") % inst.strerror)
3029 except util.UnexpectedOutput, inst:
3031 except util.UnexpectedOutput, inst:
3030 u.warn(_("abort: %s") % inst[0])
3032 u.warn(_("abort: %s") % inst[0])
3031 if not isinstance(inst[1], basestring):
3033 if not isinstance(inst[1], basestring):
3032 u.warn(" %r\n" % (inst[1],))
3034 u.warn(" %r\n" % (inst[1],))
3033 elif not inst[1]:
3035 elif not inst[1]:
3034 u.warn(_(" empty string\n"))
3036 u.warn(_(" empty string\n"))
3035 else:
3037 else:
3036 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3038 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3037 except util.Abort, inst:
3039 except util.Abort, inst:
3038 u.warn(_("abort: %s\n") % inst)
3040 u.warn(_("abort: %s\n") % inst)
3039 except TypeError, inst:
3041 except TypeError, inst:
3040 # was this an argument error?
3042 # was this an argument error?
3041 tb = traceback.extract_tb(sys.exc_info()[2])
3043 tb = traceback.extract_tb(sys.exc_info()[2])
3042 if len(tb) > 2: # no
3044 if len(tb) > 2: # no
3043 raise
3045 raise
3044 u.debug(inst, "\n")
3046 u.debug(inst, "\n")
3045 u.warn(_("%s: invalid arguments\n") % cmd)
3047 u.warn(_("%s: invalid arguments\n") % cmd)
3046 help_(u, cmd)
3048 help_(u, cmd)
3047 except SystemExit, inst:
3049 except SystemExit, inst:
3048 # Commands shouldn't sys.exit directly, but give a return code.
3050 # Commands shouldn't sys.exit directly, but give a return code.
3049 # Just in case, catch this and pass the exit code to the caller.
3051 # Just in case, catch this and pass the exit code to the caller.
3050 return inst.code
3052 return inst.code
3051 except:
3053 except:
3052 u.warn(_("** unknown exception encountered, details follow\n"))
3054 u.warn(_("** unknown exception encountered, details follow\n"))
3053 u.warn(_("** report bug details to "
3055 u.warn(_("** report bug details to "
3054 "http://www.selenic.com/mercurial/bts\n"))
3056 "http://www.selenic.com/mercurial/bts\n"))
3055 u.warn(_("** or mercurial@selenic.com\n"))
3057 u.warn(_("** or mercurial@selenic.com\n"))
3056 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3058 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3057 % version.get_version())
3059 % version.get_version())
3058 raise
3060 raise
3059
3061
3060 return -1
3062 return -1
@@ -1,1909 +1,1912 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 else:
40 else:
41 raise repo.RepoError(_("repository %s not found") % path)
41 raise repo.RepoError(_("repository %s not found") % path)
42 elif create:
42 elif create:
43 raise repo.RepoError(_("repository %s already exists") % path)
43 raise repo.RepoError(_("repository %s already exists") % path)
44
44
45 self.root = os.path.realpath(path)
45 self.root = os.path.realpath(path)
46 self.origroot = path
46 self.origroot = path
47 self.ui = ui.ui(parentui=parentui)
47 self.ui = ui.ui(parentui=parentui)
48 self.opener = util.opener(self.path)
48 self.opener = util.opener(self.path)
49 self.sopener = util.opener(self.path)
49 self.sopener = util.opener(self.path)
50 self.wopener = util.opener(self.root)
50 self.wopener = util.opener(self.root)
51
51
52 try:
52 try:
53 self.ui.readconfig(self.join("hgrc"), self.root)
53 self.ui.readconfig(self.join("hgrc"), self.root)
54 except IOError:
54 except IOError:
55 pass
55 pass
56
56
57 v = self.ui.configrevlog()
57 v = self.ui.configrevlog()
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 fl = v.get('flags', None)
60 fl = v.get('flags', None)
61 flags = 0
61 flags = 0
62 if fl != None:
62 if fl != None:
63 for x in fl.split():
63 for x in fl.split():
64 flags |= revlog.flagstr(x)
64 flags |= revlog.flagstr(x)
65 elif self.revlogv1:
65 elif self.revlogv1:
66 flags = revlog.REVLOG_DEFAULT_FLAGS
66 flags = revlog.REVLOG_DEFAULT_FLAGS
67
67
68 v = self.revlogversion | flags
68 v = self.revlogversion | flags
69 self.manifest = manifest.manifest(self.sopener, v)
69 self.manifest = manifest.manifest(self.sopener, v)
70 self.changelog = changelog.changelog(self.sopener, v)
70 self.changelog = changelog.changelog(self.sopener, v)
71
71
72 # the changelog might not have the inline index flag
72 # the changelog might not have the inline index flag
73 # on. If the format of the changelog is the same as found in
73 # on. If the format of the changelog is the same as found in
74 # .hgrc, apply any flags found in the .hgrc as well.
74 # .hgrc, apply any flags found in the .hgrc as well.
75 # Otherwise, just use the version from the changelog
75 # Otherwise, just use the version from the changelog
76 v = self.changelog.version
76 v = self.changelog.version
77 if v == self.revlogversion:
77 if v == self.revlogversion:
78 v |= flags
78 v |= flags
79 self.revlogversion = v
79 self.revlogversion = v
80
80
81 self.tagscache = None
81 self.tagscache = None
82 self.branchcache = None
82 self.branchcache = None
83 self.nodetagscache = None
83 self.nodetagscache = None
84 self.encodepats = None
84 self.encodepats = None
85 self.decodepats = None
85 self.decodepats = None
86 self.transhandle = None
86 self.transhandle = None
87
87
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
88 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
89
89
90 def url(self):
90 def url(self):
91 return 'file:' + self.root
91 return 'file:' + self.root
92
92
93 def hook(self, name, throw=False, **args):
93 def hook(self, name, throw=False, **args):
94 def callhook(hname, funcname):
94 def callhook(hname, funcname):
95 '''call python hook. hook is callable object, looked up as
95 '''call python hook. hook is callable object, looked up as
96 name in python module. if callable returns "true", hook
96 name in python module. if callable returns "true", hook
97 fails, else passes. if hook raises exception, treated as
97 fails, else passes. if hook raises exception, treated as
98 hook failure. exception propagates if throw is "true".
98 hook failure. exception propagates if throw is "true".
99
99
100 reason for "true" meaning "hook failed" is so that
100 reason for "true" meaning "hook failed" is so that
101 unmodified commands (e.g. mercurial.commands.update) can
101 unmodified commands (e.g. mercurial.commands.update) can
102 be run as hooks without wrappers to convert return values.'''
102 be run as hooks without wrappers to convert return values.'''
103
103
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
104 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
105 d = funcname.rfind('.')
105 d = funcname.rfind('.')
106 if d == -1:
106 if d == -1:
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
107 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
108 % (hname, funcname))
108 % (hname, funcname))
109 modname = funcname[:d]
109 modname = funcname[:d]
110 try:
110 try:
111 obj = __import__(modname)
111 obj = __import__(modname)
112 except ImportError:
112 except ImportError:
113 try:
113 try:
114 # extensions are loaded with hgext_ prefix
114 # extensions are loaded with hgext_ prefix
115 obj = __import__("hgext_%s" % modname)
115 obj = __import__("hgext_%s" % modname)
116 except ImportError:
116 except ImportError:
117 raise util.Abort(_('%s hook is invalid '
117 raise util.Abort(_('%s hook is invalid '
118 '(import of "%s" failed)') %
118 '(import of "%s" failed)') %
119 (hname, modname))
119 (hname, modname))
120 try:
120 try:
121 for p in funcname.split('.')[1:]:
121 for p in funcname.split('.')[1:]:
122 obj = getattr(obj, p)
122 obj = getattr(obj, p)
123 except AttributeError, err:
123 except AttributeError, err:
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not defined)') %
125 '("%s" is not defined)') %
126 (hname, funcname))
126 (hname, funcname))
127 if not callable(obj):
127 if not callable(obj):
128 raise util.Abort(_('%s hook is invalid '
128 raise util.Abort(_('%s hook is invalid '
129 '("%s" is not callable)') %
129 '("%s" is not callable)') %
130 (hname, funcname))
130 (hname, funcname))
131 try:
131 try:
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
132 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
133 except (KeyboardInterrupt, util.SignalInterrupt):
133 except (KeyboardInterrupt, util.SignalInterrupt):
134 raise
134 raise
135 except Exception, exc:
135 except Exception, exc:
136 if isinstance(exc, util.Abort):
136 if isinstance(exc, util.Abort):
137 self.ui.warn(_('error: %s hook failed: %s\n') %
137 self.ui.warn(_('error: %s hook failed: %s\n') %
138 (hname, exc.args[0]))
138 (hname, exc.args[0]))
139 else:
139 else:
140 self.ui.warn(_('error: %s hook raised an exception: '
140 self.ui.warn(_('error: %s hook raised an exception: '
141 '%s\n') % (hname, exc))
141 '%s\n') % (hname, exc))
142 if throw:
142 if throw:
143 raise
143 raise
144 self.ui.print_exc()
144 self.ui.print_exc()
145 return True
145 return True
146 if r:
146 if r:
147 if throw:
147 if throw:
148 raise util.Abort(_('%s hook failed') % hname)
148 raise util.Abort(_('%s hook failed') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
149 self.ui.warn(_('warning: %s hook failed\n') % hname)
150 return r
150 return r
151
151
152 def runhook(name, cmd):
152 def runhook(name, cmd):
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
153 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
154 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
155 r = util.system(cmd, environ=env, cwd=self.root)
155 r = util.system(cmd, environ=env, cwd=self.root)
156 if r:
156 if r:
157 desc, r = util.explain_exit(r)
157 desc, r = util.explain_exit(r)
158 if throw:
158 if throw:
159 raise util.Abort(_('%s hook %s') % (name, desc))
159 raise util.Abort(_('%s hook %s') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
160 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
161 return r
161 return r
162
162
163 r = False
163 r = False
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
164 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
165 if hname.split(".", 1)[0] == name and cmd]
165 if hname.split(".", 1)[0] == name and cmd]
166 hooks.sort()
166 hooks.sort()
167 for hname, cmd in hooks:
167 for hname, cmd in hooks:
168 if cmd.startswith('python:'):
168 if cmd.startswith('python:'):
169 r = callhook(hname, cmd[7:].strip()) or r
169 r = callhook(hname, cmd[7:].strip()) or r
170 else:
170 else:
171 r = runhook(hname, cmd) or r
171 r = runhook(hname, cmd) or r
172 return r
172 return r
173
173
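Not part of the changeset: a sketch of a Python hook of the shape callhook() above expects; the module and hook names are invented. A false return value means the hook passed, while a true value (or an exception) marks it as failed.

def myhook(ui, repo, hooktype, **kwargs):
    ui.note("running %s hook\n" % hooktype)
    return False    # success

# enabled from hgrc with, for example:
#   [hooks]
#   pretag.check = python:myhooks.myhook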
174 tag_disallowed = ':\r\n'
174 tag_disallowed = ':\r\n'
175
175
176 def tag(self, name, node, message, local, user, date):
176 def tag(self, name, node, message, local, user, date):
177 '''tag a revision with a symbolic name.
177 '''tag a revision with a symbolic name.
178
178
179 if local is True, the tag is stored in a per-repository file.
179 if local is True, the tag is stored in a per-repository file.
180 otherwise, it is stored in the .hgtags file, and a new
180 otherwise, it is stored in the .hgtags file, and a new
181 changeset is committed with the change.
181 changeset is committed with the change.
182
182
183 keyword arguments:
183 keyword arguments:
184
184
185 local: whether to store tag in non-version-controlled file
185 local: whether to store tag in non-version-controlled file
186 (default False)
186 (default False)
187
187
188 message: commit message to use if committing
188 message: commit message to use if committing
189
189
190 user: name of user to use if committing
190 user: name of user to use if committing
191
191
192 date: date tuple to use if committing'''
192 date: date tuple to use if committing'''
193
193
194 for c in self.tag_disallowed:
194 for c in self.tag_disallowed:
195 if c in name:
195 if c in name:
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
196 raise util.Abort(_('%r cannot be used in a tag name') % c)
197
197
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
198 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
199
199
200 if local:
200 if local:
201 # local tags are stored in the current charset
201 # local tags are stored in the current charset
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
203 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
204 return
204 return
205
205
206 for x in self.status()[:5]:
206 for x in self.status()[:5]:
207 if '.hgtags' in x:
207 if '.hgtags' in x:
208 raise util.Abort(_('working copy of .hgtags is changed '
208 raise util.Abort(_('working copy of .hgtags is changed '
209 '(please commit .hgtags manually)'))
209 '(please commit .hgtags manually)'))
210
210
211 # committed tags are stored in UTF-8
211 # committed tags are stored in UTF-8
212 line = '%s %s\n' % (hex(node), util.fromlocal(name))
212 line = '%s %s\n' % (hex(node), util.fromlocal(name))
213 self.wfile('.hgtags', 'ab').write(line)
213 self.wfile('.hgtags', 'ab').write(line)
214 if self.dirstate.state('.hgtags') == '?':
214 if self.dirstate.state('.hgtags') == '?':
215 self.add(['.hgtags'])
215 self.add(['.hgtags'])
216
216
217 self.commit(['.hgtags'], message, user, date)
217 self.commit(['.hgtags'], message, user, date)
218 self.hook('tag', node=hex(node), tag=name, local=local)
218 self.hook('tag', node=hex(node), tag=name, local=local)
219
219
220 def tags(self):
220 def tags(self):
221 '''return a mapping of tag to node'''
221 '''return a mapping of tag to node'''
222 if not self.tagscache:
222 if not self.tagscache:
223 self.tagscache = {}
223 self.tagscache = {}
224
224
225 def parsetag(line, context):
225 def parsetag(line, context):
226 if not line:
226 if not line:
227 return
227 return
228 s = line.split(" ", 1)
228 s = line.split(" ", 1)
229 if len(s) != 2:
229 if len(s) != 2:
230 self.ui.warn(_("%s: cannot parse entry\n") % context)
230 self.ui.warn(_("%s: cannot parse entry\n") % context)
231 return
231 return
232 node, key = s
232 node, key = s
233 key = util.tolocal(key.strip()) # stored in UTF-8
233 key = util.tolocal(key.strip()) # stored in UTF-8
234 try:
234 try:
235 bin_n = bin(node)
235 bin_n = bin(node)
236 except TypeError:
236 except TypeError:
237 self.ui.warn(_("%s: node '%s' is not well formed\n") %
237 self.ui.warn(_("%s: node '%s' is not well formed\n") %
238 (context, node))
238 (context, node))
239 return
239 return
240 if bin_n not in self.changelog.nodemap:
240 if bin_n not in self.changelog.nodemap:
241 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
241 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
242 (context, key))
242 (context, key))
243 return
243 return
244 self.tagscache[key] = bin_n
244 self.tagscache[key] = bin_n
245
245
246 # read the tags file from each head, ending with the tip,
246 # read the tags file from each head, ending with the tip,
247 # and add each tag found to the map, with "newer" ones
247 # and add each tag found to the map, with "newer" ones
248 # taking precedence
248 # taking precedence
249 f = None
249 f = None
250 for rev, node, fnode in self._hgtagsnodes():
250 for rev, node, fnode in self._hgtagsnodes():
251 f = (f and f.filectx(fnode) or
251 f = (f and f.filectx(fnode) or
252 self.filectx('.hgtags', fileid=fnode))
252 self.filectx('.hgtags', fileid=fnode))
253 count = 0
253 count = 0
254 for l in f.data().splitlines():
254 for l in f.data().splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _("%s, line %d") % (str(f), count))
256 parsetag(l, _("%s, line %d") % (str(f), count))
257
257
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 # localtags are stored in the local character set
262 # localtags are stored in the local character set
263 # while the internal tag table is stored in UTF-8
263 # while the internal tag table is stored in UTF-8
264 l = util.fromlocal(l)
264 l = util.fromlocal(l)
265 count += 1
265 count += 1
266 parsetag(l, _("localtags, line %d") % count)
266 parsetag(l, _("localtags, line %d") % count)
267 except IOError:
267 except IOError:
268 pass
268 pass
269
269
270 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
271
271
272 return self.tagscache
272 return self.tagscache
273
273
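# Precedence in tags() above: .hgtags is read from every head in
# ascending revision order (ending with the tip), so entries seen later
# overwrite earlier ones in tagscache; entries from the uncommitted
# localtags file are read last and therefore override committed tags,
# and 'tip' is always set at the very end.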
274 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
275 heads = self.heads()
275 heads = self.heads()
276 heads.reverse()
276 heads.reverse()
277 last = {}
277 last = {}
278 ret = []
278 ret = []
279 for node in heads:
279 for node in heads:
280 c = self.changectx(node)
280 c = self.changectx(node)
281 rev = c.rev()
281 rev = c.rev()
282 try:
282 try:
283 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
284 except repo.LookupError:
284 except repo.LookupError:
285 continue
285 continue
286 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
287 if fnode in last:
287 if fnode in last:
288 ret[last[fnode]] = None
288 ret[last[fnode]] = None
289 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
290 return [item for item in ret if item]
290 return [item for item in ret if item]
291
291
292 def tagslist(self):
292 def tagslist(self):
293 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
294 l = []
294 l = []
295 for t, n in self.tags().items():
295 for t, n in self.tags().items():
296 try:
296 try:
297 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
298 except:
298 except:
299 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
300 l.append((r, t, n))
300 l.append((r, t, n))
301 l.sort()
301 l.sort()
302 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
303
303
304 def nodetags(self, node):
304 def nodetags(self, node):
305 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
306 if not self.nodetagscache:
306 if not self.nodetagscache:
307 self.nodetagscache = {}
307 self.nodetagscache = {}
308 for t, n in self.tags().items():
308 for t, n in self.tags().items():
309 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
310 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
311
311
312 def branchtags(self):
312 def branchtags(self):
313 if self.branchcache != None:
313 if self.branchcache != None:
314 return self.branchcache
314 return self.branchcache
315
315
316 self.branchcache = {} # avoid recursion in changectx
316 self.branchcache = {} # avoid recursion in changectx
317
317
318 partial, last, lrev = self._readbranchcache()
318 partial, last, lrev = self._readbranchcache()
319
319
320 tiprev = self.changelog.count() - 1
320 tiprev = self.changelog.count() - 1
321 if lrev != tiprev:
321 if lrev != tiprev:
322 self._updatebranchcache(partial, lrev+1, tiprev+1)
322 self._updatebranchcache(partial, lrev+1, tiprev+1)
323 self._writebranchcache(partial, self.changelog.tip(), tiprev)
323 self._writebranchcache(partial, self.changelog.tip(), tiprev)
324
324
325 self.branchcache = partial
325 # the branch cache is stored on disk as UTF-8, but in the local
326 # charset internally
327 for k, v in partial.items():
328 self.branchcache[util.tolocal(k)] = v
326 return self.branchcache
329 return self.branchcache
327
330
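# Encoding convention used throughout this file: util.fromlocal()
# converts a string from the local character set to UTF-8 before it is
# stored (tag names in .hgtags, the branch name recorded by commit),
# while util.tolocal() converts stored UTF-8 back to the local character
# set when it is read (tagscache keys above, branchcache keys here).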
328 def _readbranchcache(self):
331 def _readbranchcache(self):
329 partial = {}
332 partial = {}
330 try:
333 try:
331 f = self.opener("branches.cache")
334 f = self.opener("branches.cache")
332 lines = f.read().split('\n')
335 lines = f.read().split('\n')
333 f.close()
336 f.close()
334 last, lrev = lines.pop(0).rstrip().split(" ", 1)
337 last, lrev = lines.pop(0).rstrip().split(" ", 1)
335 last, lrev = bin(last), int(lrev)
338 last, lrev = bin(last), int(lrev)
336 if not (lrev < self.changelog.count() and
339 if not (lrev < self.changelog.count() and
337 self.changelog.node(lrev) == last): # sanity check
340 self.changelog.node(lrev) == last): # sanity check
338 # invalidate the cache
341 # invalidate the cache
339 raise ValueError('Invalid branch cache: unknown tip')
342 raise ValueError('Invalid branch cache: unknown tip')
340 for l in lines:
343 for l in lines:
341 if not l: continue
344 if not l: continue
342 node, label = l.rstrip().split(" ", 1)
345 node, label = l.rstrip().split(" ", 1)
343 partial[label] = bin(node)
346 partial[label] = bin(node)
344 except (KeyboardInterrupt, util.SignalInterrupt):
347 except (KeyboardInterrupt, util.SignalInterrupt):
345 raise
348 raise
346 except Exception, inst:
349 except Exception, inst:
347 if self.ui.debugflag:
350 if self.ui.debugflag:
348 self.ui.warn(str(inst), '\n')
351 self.ui.warn(str(inst), '\n')
349 partial, last, lrev = {}, nullid, nullrev
352 partial, last, lrev = {}, nullid, nullrev
350 return partial, last, lrev
353 return partial, last, lrev
351
354
352 def _writebranchcache(self, branches, tip, tiprev):
355 def _writebranchcache(self, branches, tip, tiprev):
353 try:
356 try:
354 f = self.opener("branches.cache", "w")
357 f = self.opener("branches.cache", "w")
355 f.write("%s %s\n" % (hex(tip), tiprev))
358 f.write("%s %s\n" % (hex(tip), tiprev))
356 for label, node in branches.iteritems():
359 for label, node in branches.iteritems():
357 f.write("%s %s\n" % (hex(node), label))
360 f.write("%s %s\n" % (hex(node), label))
358 except IOError:
361 except IOError:
359 pass
362 pass
360
363
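# The branches.cache file read and written above is plain text: a header
# line "<40-char hex of the cached tip> <tip revision>", followed by one
# "<40-char hex of branch head> <branch name>" line per branch, with
# branch names stored on disk as UTF-8 (see branchtags() above for the
# conversion back to the local character set).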
361 def _updatebranchcache(self, partial, start, end):
364 def _updatebranchcache(self, partial, start, end):
362 for r in xrange(start, end):
365 for r in xrange(start, end):
363 c = self.changectx(r)
366 c = self.changectx(r)
364 b = c.branch()
367 b = c.branch()
365 if b:
368 if b:
366 partial[b] = c.node()
369 partial[b] = c.node()
367
370
368 def lookup(self, key):
371 def lookup(self, key):
369 if key == '.':
372 if key == '.':
370 key = self.dirstate.parents()[0]
373 key = self.dirstate.parents()[0]
371 if key == nullid:
374 if key == nullid:
372 raise repo.RepoError(_("no revision checked out"))
375 raise repo.RepoError(_("no revision checked out"))
373 n = self.changelog._match(key)
376 n = self.changelog._match(key)
374 if n:
377 if n:
375 return n
378 return n
376 if key in self.tags():
379 if key in self.tags():
377 return self.tags()[key]
380 return self.tags()[key]
378 if key in self.branchtags():
381 if key in self.branchtags():
379 return self.branchtags()[key]
382 return self.branchtags()[key]
380 n = self.changelog._partialmatch(key)
383 n = self.changelog._partialmatch(key)
381 if n:
384 if n:
382 return n
385 return n
383 raise repo.RepoError(_("unknown revision '%s'") % key)
386 raise repo.RepoError(_("unknown revision '%s'") % key)
384
387
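# lookup() resolution order, as implemented above: "." (first dirstate
# parent), an exact changelog match (revision number or full node), a
# tag name, a branch name (resolving to the tip-most changeset recorded
# for that branch in the branch cache), and finally an unambiguous hex
# prefix; anything else raises RepoError.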
385 def dev(self):
388 def dev(self):
386 return os.lstat(self.path).st_dev
389 return os.lstat(self.path).st_dev
387
390
388 def local(self):
391 def local(self):
389 return True
392 return True
390
393
391 def join(self, f):
394 def join(self, f):
392 return os.path.join(self.path, f)
395 return os.path.join(self.path, f)
393
396
394 def sjoin(self, f):
397 def sjoin(self, f):
395 return os.path.join(self.path, f)
398 return os.path.join(self.path, f)
396
399
397 def wjoin(self, f):
400 def wjoin(self, f):
398 return os.path.join(self.root, f)
401 return os.path.join(self.root, f)
399
402
400 def file(self, f):
403 def file(self, f):
401 if f[0] == '/':
404 if f[0] == '/':
402 f = f[1:]
405 f = f[1:]
403 return filelog.filelog(self.sopener, f, self.revlogversion)
406 return filelog.filelog(self.sopener, f, self.revlogversion)
404
407
405 def changectx(self, changeid=None):
408 def changectx(self, changeid=None):
406 return context.changectx(self, changeid)
409 return context.changectx(self, changeid)
407
410
408 def workingctx(self):
411 def workingctx(self):
409 return context.workingctx(self)
412 return context.workingctx(self)
410
413
411 def parents(self, changeid=None):
414 def parents(self, changeid=None):
412 '''
415 '''
413 get list of changectxs for parents of changeid or working directory
416 get list of changectxs for parents of changeid or working directory
414 '''
417 '''
415 if changeid is None:
418 if changeid is None:
416 pl = self.dirstate.parents()
419 pl = self.dirstate.parents()
417 else:
420 else:
418 n = self.changelog.lookup(changeid)
421 n = self.changelog.lookup(changeid)
419 pl = self.changelog.parents(n)
422 pl = self.changelog.parents(n)
420 if pl[1] == nullid:
423 if pl[1] == nullid:
421 return [self.changectx(pl[0])]
424 return [self.changectx(pl[0])]
422 return [self.changectx(pl[0]), self.changectx(pl[1])]
425 return [self.changectx(pl[0]), self.changectx(pl[1])]
423
426
424 def filectx(self, path, changeid=None, fileid=None):
427 def filectx(self, path, changeid=None, fileid=None):
425 """changeid can be a changeset revision, node, or tag.
428 """changeid can be a changeset revision, node, or tag.
426 fileid can be a file revision or node."""
429 fileid can be a file revision or node."""
427 return context.filectx(self, path, changeid, fileid)
430 return context.filectx(self, path, changeid, fileid)
428
431
429 def getcwd(self):
432 def getcwd(self):
430 return self.dirstate.getcwd()
433 return self.dirstate.getcwd()
431
434
432 def wfile(self, f, mode='r'):
435 def wfile(self, f, mode='r'):
433 return self.wopener(f, mode)
436 return self.wopener(f, mode)
434
437
435 def wread(self, filename):
438 def wread(self, filename):
436 if self.encodepats == None:
439 if self.encodepats == None:
437 l = []
440 l = []
438 for pat, cmd in self.ui.configitems("encode"):
441 for pat, cmd in self.ui.configitems("encode"):
439 mf = util.matcher(self.root, "", [pat], [], [])[1]
442 mf = util.matcher(self.root, "", [pat], [], [])[1]
440 l.append((mf, cmd))
443 l.append((mf, cmd))
441 self.encodepats = l
444 self.encodepats = l
442
445
443 data = self.wopener(filename, 'r').read()
446 data = self.wopener(filename, 'r').read()
444
447
445 for mf, cmd in self.encodepats:
448 for mf, cmd in self.encodepats:
446 if mf(filename):
449 if mf(filename):
447 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
450 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
448 data = util.filter(data, cmd)
451 data = util.filter(data, cmd)
449 break
452 break
450
453
451 return data
454 return data
452
455
453 def wwrite(self, filename, data, fd=None):
456 def wwrite(self, filename, data, fd=None):
454 if self.decodepats == None:
457 if self.decodepats == None:
455 l = []
458 l = []
456 for pat, cmd in self.ui.configitems("decode"):
459 for pat, cmd in self.ui.configitems("decode"):
457 mf = util.matcher(self.root, "", [pat], [], [])[1]
460 mf = util.matcher(self.root, "", [pat], [], [])[1]
458 l.append((mf, cmd))
461 l.append((mf, cmd))
459 self.decodepats = l
462 self.decodepats = l
460
463
461 for mf, cmd in self.decodepats:
464 for mf, cmd in self.decodepats:
462 if mf(filename):
465 if mf(filename):
463 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
466 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
464 data = util.filter(data, cmd)
467 data = util.filter(data, cmd)
465 break
468 break
466
469
467 if fd:
470 if fd:
468 return fd.write(data)
471 return fd.write(data)
469 return self.wopener(filename, 'w').write(data)
472 return self.wopener(filename, 'w').write(data)
470
473
471 def transaction(self):
474 def transaction(self):
472 tr = self.transhandle
475 tr = self.transhandle
473 if tr != None and tr.running():
476 if tr != None and tr.running():
474 return tr.nest()
477 return tr.nest()
475
478
476 # save dirstate for rollback
479 # save dirstate for rollback
477 try:
480 try:
478 ds = self.opener("dirstate").read()
481 ds = self.opener("dirstate").read()
479 except IOError:
482 except IOError:
480 ds = ""
483 ds = ""
481 self.opener("journal.dirstate", "w").write(ds)
484 self.opener("journal.dirstate", "w").write(ds)
482
485
483 tr = transaction.transaction(self.ui.warn, self.sopener,
486 tr = transaction.transaction(self.ui.warn, self.sopener,
484 self.sjoin("journal"),
487 self.sjoin("journal"),
485 aftertrans(self.path))
488 aftertrans(self.path))
486 self.transhandle = tr
489 self.transhandle = tr
487 return tr
490 return tr
488
491
489 def recover(self):
492 def recover(self):
490 l = self.lock()
493 l = self.lock()
491 if os.path.exists(self.sjoin("journal")):
494 if os.path.exists(self.sjoin("journal")):
492 self.ui.status(_("rolling back interrupted transaction\n"))
495 self.ui.status(_("rolling back interrupted transaction\n"))
493 transaction.rollback(self.sopener, self.sjoin("journal"))
496 transaction.rollback(self.sopener, self.sjoin("journal"))
494 self.reload()
497 self.reload()
495 return True
498 return True
496 else:
499 else:
497 self.ui.warn(_("no interrupted transaction available\n"))
500 self.ui.warn(_("no interrupted transaction available\n"))
498 return False
501 return False
499
502
500 def rollback(self, wlock=None):
503 def rollback(self, wlock=None):
501 if not wlock:
504 if not wlock:
502 wlock = self.wlock()
505 wlock = self.wlock()
503 l = self.lock()
506 l = self.lock()
504 if os.path.exists(self.sjoin("undo")):
507 if os.path.exists(self.sjoin("undo")):
505 self.ui.status(_("rolling back last transaction\n"))
508 self.ui.status(_("rolling back last transaction\n"))
506 transaction.rollback(self.sopener, self.sjoin("undo"))
509 transaction.rollback(self.sopener, self.sjoin("undo"))
507 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
510 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
508 self.reload()
511 self.reload()
509 self.wreload()
512 self.wreload()
510 else:
513 else:
511 self.ui.warn(_("no rollback information available\n"))
514 self.ui.warn(_("no rollback information available\n"))
512
515
513 def wreload(self):
516 def wreload(self):
514 self.dirstate.read()
517 self.dirstate.read()
515
518
516 def reload(self):
519 def reload(self):
517 self.changelog.load()
520 self.changelog.load()
518 self.manifest.load()
521 self.manifest.load()
519 self.tagscache = None
522 self.tagscache = None
520 self.nodetagscache = None
523 self.nodetagscache = None
521
524
522 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
525 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
523 desc=None):
526 desc=None):
524 try:
527 try:
525 l = lock.lock(lockname, 0, releasefn, desc=desc)
528 l = lock.lock(lockname, 0, releasefn, desc=desc)
526 except lock.LockHeld, inst:
529 except lock.LockHeld, inst:
527 if not wait:
530 if not wait:
528 raise
531 raise
529 self.ui.warn(_("waiting for lock on %s held by %r\n") %
532 self.ui.warn(_("waiting for lock on %s held by %r\n") %
530 (desc, inst.locker))
533 (desc, inst.locker))
531 # default to 600 seconds timeout
534 # default to 600 seconds timeout
532 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
535 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
533 releasefn, desc=desc)
536 releasefn, desc=desc)
534 if acquirefn:
537 if acquirefn:
535 acquirefn()
538 acquirefn()
536 return l
539 return l
537
540
538 def lock(self, wait=1):
541 def lock(self, wait=1):
539 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
542 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
540 desc=_('repository %s') % self.origroot)
543 desc=_('repository %s') % self.origroot)
541
544
542 def wlock(self, wait=1):
545 def wlock(self, wait=1):
543 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
546 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
544 self.wreload,
547 self.wreload,
545 desc=_('working directory of %s') % self.origroot)
548 desc=_('working directory of %s') % self.origroot)
546
549
547 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
550 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
548 """
551 """
549 commit an individual file as part of a larger transaction
552 commit an individual file as part of a larger transaction
550 """
553 """
551
554
552 t = self.wread(fn)
555 t = self.wread(fn)
553 fl = self.file(fn)
556 fl = self.file(fn)
554 fp1 = manifest1.get(fn, nullid)
557 fp1 = manifest1.get(fn, nullid)
555 fp2 = manifest2.get(fn, nullid)
558 fp2 = manifest2.get(fn, nullid)
556
559
557 meta = {}
560 meta = {}
558 cp = self.dirstate.copied(fn)
561 cp = self.dirstate.copied(fn)
559 if cp:
562 if cp:
560 meta["copy"] = cp
563 meta["copy"] = cp
561 if not manifest2: # not a branch merge
564 if not manifest2: # not a branch merge
562 meta["copyrev"] = hex(manifest1.get(cp, nullid))
565 meta["copyrev"] = hex(manifest1.get(cp, nullid))
563 fp2 = nullid
566 fp2 = nullid
564 elif fp2 != nullid: # copied on remote side
567 elif fp2 != nullid: # copied on remote side
565 meta["copyrev"] = hex(manifest1.get(cp, nullid))
568 meta["copyrev"] = hex(manifest1.get(cp, nullid))
566 elif fp1 != nullid: # copied on local side, reversed
569 elif fp1 != nullid: # copied on local side, reversed
567 meta["copyrev"] = hex(manifest2.get(cp))
570 meta["copyrev"] = hex(manifest2.get(cp))
568 fp2 = nullid
571 fp2 = nullid
569 else: # directory rename
572 else: # directory rename
570 meta["copyrev"] = hex(manifest1.get(cp, nullid))
573 meta["copyrev"] = hex(manifest1.get(cp, nullid))
571 self.ui.debug(_(" %s: copy %s:%s\n") %
574 self.ui.debug(_(" %s: copy %s:%s\n") %
572 (fn, cp, meta["copyrev"]))
575 (fn, cp, meta["copyrev"]))
573 fp1 = nullid
576 fp1 = nullid
574 elif fp2 != nullid:
577 elif fp2 != nullid:
575 # is one parent an ancestor of the other?
578 # is one parent an ancestor of the other?
576 fpa = fl.ancestor(fp1, fp2)
579 fpa = fl.ancestor(fp1, fp2)
577 if fpa == fp1:
580 if fpa == fp1:
578 fp1, fp2 = fp2, nullid
581 fp1, fp2 = fp2, nullid
579 elif fpa == fp2:
582 elif fpa == fp2:
580 fp2 = nullid
583 fp2 = nullid
581
584
582 # is the file unmodified from the parent? report existing entry
585 # is the file unmodified from the parent? report existing entry
583 if fp2 == nullid and not fl.cmp(fp1, t):
586 if fp2 == nullid and not fl.cmp(fp1, t):
584 return fp1
587 return fp1
585
588
586 changelist.append(fn)
589 changelist.append(fn)
587 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
590 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
588
591
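# Copy metadata produced by filecommit() above: when the dirstate says
# the file was copied, meta["copy"] holds the source path and
# meta["copyrev"] the hex node of the source file in the relevant parent
# manifest; both are stored in the filelog entry by fl.add().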
589 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
592 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
590 if p1 is None:
593 if p1 is None:
591 p1, p2 = self.dirstate.parents()
594 p1, p2 = self.dirstate.parents()
592 return self.commit(files=files, text=text, user=user, date=date,
595 return self.commit(files=files, text=text, user=user, date=date,
593 p1=p1, p2=p2, wlock=wlock)
596 p1=p1, p2=p2, wlock=wlock)
594
597
595 def commit(self, files=None, text="", user=None, date=None,
598 def commit(self, files=None, text="", user=None, date=None,
596 match=util.always, force=False, lock=None, wlock=None,
599 match=util.always, force=False, lock=None, wlock=None,
597 force_editor=False, p1=None, p2=None, extra={}):
600 force_editor=False, p1=None, p2=None, extra={}):
598
601
599 commit = []
602 commit = []
600 remove = []
603 remove = []
601 changed = []
604 changed = []
602 use_dirstate = (p1 is None) # not rawcommit
605 use_dirstate = (p1 is None) # not rawcommit
603 extra = extra.copy()
606 extra = extra.copy()
604
607
605 if use_dirstate:
608 if use_dirstate:
606 if files:
609 if files:
607 for f in files:
610 for f in files:
608 s = self.dirstate.state(f)
611 s = self.dirstate.state(f)
609 if s in 'nmai':
612 if s in 'nmai':
610 commit.append(f)
613 commit.append(f)
611 elif s == 'r':
614 elif s == 'r':
612 remove.append(f)
615 remove.append(f)
613 else:
616 else:
614 self.ui.warn(_("%s not tracked!\n") % f)
617 self.ui.warn(_("%s not tracked!\n") % f)
615 else:
618 else:
616 changes = self.status(match=match)[:5]
619 changes = self.status(match=match)[:5]
617 modified, added, removed, deleted, unknown = changes
620 modified, added, removed, deleted, unknown = changes
618 commit = modified + added
621 commit = modified + added
619 remove = removed
622 remove = removed
620 else:
623 else:
621 commit = files
624 commit = files
622
625
623 if use_dirstate:
626 if use_dirstate:
624 p1, p2 = self.dirstate.parents()
627 p1, p2 = self.dirstate.parents()
625 update_dirstate = True
628 update_dirstate = True
626 else:
629 else:
627 p1, p2 = p1, p2 or nullid
630 p1, p2 = p1, p2 or nullid
628 update_dirstate = (self.dirstate.parents()[0] == p1)
631 update_dirstate = (self.dirstate.parents()[0] == p1)
629
632
630 c1 = self.changelog.read(p1)
633 c1 = self.changelog.read(p1)
631 c2 = self.changelog.read(p2)
634 c2 = self.changelog.read(p2)
632 m1 = self.manifest.read(c1[0]).copy()
635 m1 = self.manifest.read(c1[0]).copy()
633 m2 = self.manifest.read(c2[0])
636 m2 = self.manifest.read(c2[0])
634
637
635 if use_dirstate:
638 if use_dirstate:
636 branchname = self.workingctx().branch()
639 branchname = util.fromlocal(self.workingctx().branch())
637 else:
640 else:
638 branchname = ""
641 branchname = ""
639
642
640 if use_dirstate:
643 if use_dirstate:
641 oldname = c1[5].get("branch", "")
644 oldname = c1[5].get("branch", "") # stored in UTF-8
642 if not commit and not remove and not force and p2 == nullid and \
645 if not commit and not remove and not force and p2 == nullid and \
643 branchname == oldname:
646 branchname == oldname:
644 self.ui.status(_("nothing changed\n"))
647 self.ui.status(_("nothing changed\n"))
645 return None
648 return None
646
649
647 xp1 = hex(p1)
650 xp1 = hex(p1)
648 if p2 == nullid: xp2 = ''
651 if p2 == nullid: xp2 = ''
649 else: xp2 = hex(p2)
652 else: xp2 = hex(p2)
650
653
651 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
654 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
652
655
653 if not wlock:
656 if not wlock:
654 wlock = self.wlock()
657 wlock = self.wlock()
655 if not lock:
658 if not lock:
656 lock = self.lock()
659 lock = self.lock()
657 tr = self.transaction()
660 tr = self.transaction()
658
661
659 # check in files
662 # check in files
660 new = {}
663 new = {}
661 linkrev = self.changelog.count()
664 linkrev = self.changelog.count()
662 commit.sort()
665 commit.sort()
663 for f in commit:
666 for f in commit:
664 self.ui.note(f + "\n")
667 self.ui.note(f + "\n")
665 try:
668 try:
666 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
669 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
667 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
670 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
668 except IOError:
671 except IOError:
669 if use_dirstate:
672 if use_dirstate:
670 self.ui.warn(_("trouble committing %s!\n") % f)
673 self.ui.warn(_("trouble committing %s!\n") % f)
671 raise
674 raise
672 else:
675 else:
673 remove.append(f)
676 remove.append(f)
674
677
675 # update manifest
678 # update manifest
676 m1.update(new)
679 m1.update(new)
677 remove.sort()
680 remove.sort()
678
681
679 for f in remove:
682 for f in remove:
680 if f in m1:
683 if f in m1:
681 del m1[f]
684 del m1[f]
682 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
685 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
683
686
684 # add changeset
687 # add changeset
685 new = new.keys()
688 new = new.keys()
686 new.sort()
689 new.sort()
687
690
688 user = user or self.ui.username()
691 user = user or self.ui.username()
689 if not text or force_editor:
692 if not text or force_editor:
690 edittext = []
693 edittext = []
691 if text:
694 if text:
692 edittext.append(text)
695 edittext.append(text)
693 edittext.append("")
696 edittext.append("")
694 edittext.append("HG: user: %s" % user)
697 edittext.append("HG: user: %s" % user)
695 if p2 != nullid:
698 if p2 != nullid:
696 edittext.append("HG: branch merge")
699 edittext.append("HG: branch merge")
697 edittext.extend(["HG: changed %s" % f for f in changed])
700 edittext.extend(["HG: changed %s" % f for f in changed])
698 edittext.extend(["HG: removed %s" % f for f in remove])
701 edittext.extend(["HG: removed %s" % f for f in remove])
699 if not changed and not remove:
702 if not changed and not remove:
700 edittext.append("HG: no files changed")
703 edittext.append("HG: no files changed")
701 edittext.append("")
704 edittext.append("")
702 # run editor in the repository root
705 # run editor in the repository root
703 olddir = os.getcwd()
706 olddir = os.getcwd()
704 os.chdir(self.root)
707 os.chdir(self.root)
705 text = self.ui.edit("\n".join(edittext), user)
708 text = self.ui.edit("\n".join(edittext), user)
706 os.chdir(olddir)
709 os.chdir(olddir)
707
710
708 lines = [line.rstrip() for line in text.rstrip().splitlines()]
711 lines = [line.rstrip() for line in text.rstrip().splitlines()]
709 while lines and not lines[0]:
712 while lines and not lines[0]:
710 del lines[0]
713 del lines[0]
711 if not lines:
714 if not lines:
712 return None
715 return None
713 text = '\n'.join(lines)
716 text = '\n'.join(lines)
714 if branchname:
717 if branchname:
715 extra["branch"] = branchname
718 extra["branch"] = branchname
716 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
719 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
717 user, date, extra)
720 user, date, extra)
718 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
721 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
719 parent2=xp2)
722 parent2=xp2)
720 tr.close()
723 tr.close()
721
724
722 if use_dirstate or update_dirstate:
725 if use_dirstate or update_dirstate:
723 self.dirstate.setparents(n)
726 self.dirstate.setparents(n)
724 if use_dirstate:
727 if use_dirstate:
725 self.dirstate.update(new, "n")
728 self.dirstate.update(new, "n")
726 self.dirstate.forget(remove)
729 self.dirstate.forget(remove)
727
730
728 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
731 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
729 return n
732 return n
730
733
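# Hook sequence in commit() above: 'precommit' fires before the locks
# and transaction are taken, 'pretxncommit' fires after the new
# changeset is written but before the transaction is closed (so it can
# still abort it), and 'commit' fires at the very end.  The working
# directory's branch name, if any, is stored UTF-8 encoded in
# extra["branch"].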
731 def walk(self, node=None, files=[], match=util.always, badmatch=None):
734 def walk(self, node=None, files=[], match=util.always, badmatch=None):
732 '''
735 '''
733 walk recursively through the directory tree or a given
736 walk recursively through the directory tree or a given
734 changeset, finding all files matched by the match
737 changeset, finding all files matched by the match
735 function
738 function
736
739
737 results are yielded in a tuple (src, filename), where src
740 results are yielded in a tuple (src, filename), where src
738 is one of:
741 is one of:
739 'f' the file was found in the directory tree
742 'f' the file was found in the directory tree
740 'm' the file was only in the dirstate and not in the tree
743 'm' the file was only in the dirstate and not in the tree
741 'b' file was not found and matched badmatch
744 'b' file was not found and matched badmatch
742 '''
745 '''
743
746
744 if node:
747 if node:
745 fdict = dict.fromkeys(files)
748 fdict = dict.fromkeys(files)
746 for fn in self.manifest.read(self.changelog.read(node)[0]):
749 for fn in self.manifest.read(self.changelog.read(node)[0]):
747 for ffn in fdict:
750 for ffn in fdict:
748 # match if the file is the exact name or a directory
751 # match if the file is the exact name or a directory
749 if ffn == fn or fn.startswith("%s/" % ffn):
752 if ffn == fn or fn.startswith("%s/" % ffn):
750 del fdict[ffn]
753 del fdict[ffn]
751 break
754 break
752 if match(fn):
755 if match(fn):
753 yield 'm', fn
756 yield 'm', fn
754 for fn in fdict:
757 for fn in fdict:
755 if badmatch and badmatch(fn):
758 if badmatch and badmatch(fn):
756 if match(fn):
759 if match(fn):
757 yield 'b', fn
760 yield 'b', fn
758 else:
761 else:
759 self.ui.warn(_('%s: No such file in rev %s\n') % (
762 self.ui.warn(_('%s: No such file in rev %s\n') % (
760 util.pathto(self.getcwd(), fn), short(node)))
763 util.pathto(self.getcwd(), fn), short(node)))
761 else:
764 else:
762 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
765 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
763 yield src, fn
766 yield src, fn
764
767
765 def status(self, node1=None, node2=None, files=[], match=util.always,
768 def status(self, node1=None, node2=None, files=[], match=util.always,
766 wlock=None, list_ignored=False, list_clean=False):
769 wlock=None, list_ignored=False, list_clean=False):
767 """return status of files between two nodes or node and working directory
770 """return status of files between two nodes or node and working directory
768
771
769 If node1 is None, use the first dirstate parent instead.
772 If node1 is None, use the first dirstate parent instead.
770 If node2 is None, compare node1 with working directory.
773 If node2 is None, compare node1 with working directory.
771 """
774 """
772
775
773 def fcmp(fn, mf):
776 def fcmp(fn, mf):
774 t1 = self.wread(fn)
777 t1 = self.wread(fn)
775 return self.file(fn).cmp(mf.get(fn, nullid), t1)
778 return self.file(fn).cmp(mf.get(fn, nullid), t1)
776
779
777 def mfmatches(node):
780 def mfmatches(node):
778 change = self.changelog.read(node)
781 change = self.changelog.read(node)
779 mf = self.manifest.read(change[0]).copy()
782 mf = self.manifest.read(change[0]).copy()
780 for fn in mf.keys():
783 for fn in mf.keys():
781 if not match(fn):
784 if not match(fn):
782 del mf[fn]
785 del mf[fn]
783 return mf
786 return mf
784
787
785 modified, added, removed, deleted, unknown = [], [], [], [], []
788 modified, added, removed, deleted, unknown = [], [], [], [], []
786 ignored, clean = [], []
789 ignored, clean = [], []
787
790
788 compareworking = False
791 compareworking = False
789 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
792 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
790 compareworking = True
793 compareworking = True
791
794
792 if not compareworking:
795 if not compareworking:
793 # read the manifest from node1 before the manifest from node2,
796 # read the manifest from node1 before the manifest from node2,
794 # so that we'll hit the manifest cache if we're going through
797 # so that we'll hit the manifest cache if we're going through
795 # all the revisions in parent->child order.
798 # all the revisions in parent->child order.
796 mf1 = mfmatches(node1)
799 mf1 = mfmatches(node1)
797
800
798 # are we comparing the working directory?
801 # are we comparing the working directory?
799 if not node2:
802 if not node2:
800 if not wlock:
803 if not wlock:
801 try:
804 try:
802 wlock = self.wlock(wait=0)
805 wlock = self.wlock(wait=0)
803 except lock.LockException:
806 except lock.LockException:
804 wlock = None
807 wlock = None
805 (lookup, modified, added, removed, deleted, unknown,
808 (lookup, modified, added, removed, deleted, unknown,
806 ignored, clean) = self.dirstate.status(files, match,
809 ignored, clean) = self.dirstate.status(files, match,
807 list_ignored, list_clean)
810 list_ignored, list_clean)
808
811
809 # are we comparing working dir against its parent?
812 # are we comparing working dir against its parent?
810 if compareworking:
813 if compareworking:
811 if lookup:
814 if lookup:
812 # do a full compare of any files that might have changed
815 # do a full compare of any files that might have changed
813 mf2 = mfmatches(self.dirstate.parents()[0])
816 mf2 = mfmatches(self.dirstate.parents()[0])
814 for f in lookup:
817 for f in lookup:
815 if fcmp(f, mf2):
818 if fcmp(f, mf2):
816 modified.append(f)
819 modified.append(f)
817 else:
820 else:
818 clean.append(f)
821 clean.append(f)
819 if wlock is not None:
822 if wlock is not None:
820 self.dirstate.update([f], "n")
823 self.dirstate.update([f], "n")
821 else:
824 else:
822 # we are comparing working dir against non-parent
825 # we are comparing working dir against non-parent
823 # generate a pseudo-manifest for the working dir
826 # generate a pseudo-manifest for the working dir
824 # XXX: create it in dirstate.py ?
827 # XXX: create it in dirstate.py ?
825 mf2 = mfmatches(self.dirstate.parents()[0])
828 mf2 = mfmatches(self.dirstate.parents()[0])
826 for f in lookup + modified + added:
829 for f in lookup + modified + added:
827 mf2[f] = ""
830 mf2[f] = ""
828 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
831 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
829 for f in removed:
832 for f in removed:
830 if f in mf2:
833 if f in mf2:
831 del mf2[f]
834 del mf2[f]
832 else:
835 else:
833 # we are comparing two revisions
836 # we are comparing two revisions
834 mf2 = mfmatches(node2)
837 mf2 = mfmatches(node2)
835
838
836 if not compareworking:
839 if not compareworking:
837 # flush lists from dirstate before comparing manifests
840 # flush lists from dirstate before comparing manifests
838 modified, added, clean = [], [], []
841 modified, added, clean = [], [], []
839
842
840 # make sure to sort the files so we talk to the disk in a
843 # make sure to sort the files so we talk to the disk in a
841 # reasonable order
844 # reasonable order
842 mf2keys = mf2.keys()
845 mf2keys = mf2.keys()
843 mf2keys.sort()
846 mf2keys.sort()
844 for fn in mf2keys:
847 for fn in mf2keys:
845 if mf1.has_key(fn):
848 if mf1.has_key(fn):
846 if mf1.flags(fn) != mf2.flags(fn) or \
849 if mf1.flags(fn) != mf2.flags(fn) or \
847 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
850 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
848 modified.append(fn)
851 modified.append(fn)
849 elif list_clean:
852 elif list_clean:
850 clean.append(fn)
853 clean.append(fn)
851 del mf1[fn]
854 del mf1[fn]
852 else:
855 else:
853 added.append(fn)
856 added.append(fn)
854
857
855 removed = mf1.keys()
858 removed = mf1.keys()
856
859
857 # sort and return results:
860 # sort and return results:
858 for l in modified, added, removed, deleted, unknown, ignored, clean:
861 for l in modified, added, removed, deleted, unknown, ignored, clean:
859 l.sort()
862 l.sort()
860 return (modified, added, removed, deleted, unknown, ignored, clean)
863 return (modified, added, removed, deleted, unknown, ignored, clean)
861
864
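# status() above always returns a 7-tuple of sorted lists in the order
# (modified, added, removed, deleted, unknown, ignored, clean); the
# ignored and clean lists are only populated when list_ignored or
# list_clean is passed as True.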
862 def add(self, list, wlock=None):
865 def add(self, list, wlock=None):
863 if not wlock:
866 if not wlock:
864 wlock = self.wlock()
867 wlock = self.wlock()
865 for f in list:
868 for f in list:
866 p = self.wjoin(f)
869 p = self.wjoin(f)
867 if not os.path.exists(p):
870 if not os.path.exists(p):
868 self.ui.warn(_("%s does not exist!\n") % f)
871 self.ui.warn(_("%s does not exist!\n") % f)
869 elif not os.path.isfile(p):
872 elif not os.path.isfile(p):
870 self.ui.warn(_("%s not added: only files supported currently\n")
873 self.ui.warn(_("%s not added: only files supported currently\n")
871 % f)
874 % f)
872 elif self.dirstate.state(f) in 'an':
875 elif self.dirstate.state(f) in 'an':
873 self.ui.warn(_("%s already tracked!\n") % f)
876 self.ui.warn(_("%s already tracked!\n") % f)
874 else:
877 else:
875 self.dirstate.update([f], "a")
878 self.dirstate.update([f], "a")
876
879
877 def forget(self, list, wlock=None):
880 def forget(self, list, wlock=None):
878 if not wlock:
881 if not wlock:
879 wlock = self.wlock()
882 wlock = self.wlock()
880 for f in list:
883 for f in list:
881 if self.dirstate.state(f) not in 'ai':
884 if self.dirstate.state(f) not in 'ai':
882 self.ui.warn(_("%s not added!\n") % f)
885 self.ui.warn(_("%s not added!\n") % f)
883 else:
886 else:
884 self.dirstate.forget([f])
887 self.dirstate.forget([f])
885
888
886 def remove(self, list, unlink=False, wlock=None):
889 def remove(self, list, unlink=False, wlock=None):
887 if unlink:
890 if unlink:
888 for f in list:
891 for f in list:
889 try:
892 try:
890 util.unlink(self.wjoin(f))
893 util.unlink(self.wjoin(f))
891 except OSError, inst:
894 except OSError, inst:
892 if inst.errno != errno.ENOENT:
895 if inst.errno != errno.ENOENT:
893 raise
896 raise
894 if not wlock:
897 if not wlock:
895 wlock = self.wlock()
898 wlock = self.wlock()
896 for f in list:
899 for f in list:
897 p = self.wjoin(f)
900 p = self.wjoin(f)
898 if os.path.exists(p):
901 if os.path.exists(p):
899 self.ui.warn(_("%s still exists!\n") % f)
902 self.ui.warn(_("%s still exists!\n") % f)
900 elif self.dirstate.state(f) == 'a':
903 elif self.dirstate.state(f) == 'a':
901 self.dirstate.forget([f])
904 self.dirstate.forget([f])
902 elif f not in self.dirstate:
905 elif f not in self.dirstate:
903 self.ui.warn(_("%s not tracked!\n") % f)
906 self.ui.warn(_("%s not tracked!\n") % f)
904 else:
907 else:
905 self.dirstate.update([f], "r")
908 self.dirstate.update([f], "r")
906
909
907 def undelete(self, list, wlock=None):
910 def undelete(self, list, wlock=None):
908 p = self.dirstate.parents()[0]
911 p = self.dirstate.parents()[0]
909 mn = self.changelog.read(p)[0]
912 mn = self.changelog.read(p)[0]
910 m = self.manifest.read(mn)
913 m = self.manifest.read(mn)
911 if not wlock:
914 if not wlock:
912 wlock = self.wlock()
915 wlock = self.wlock()
913 for f in list:
916 for f in list:
914 if self.dirstate.state(f) not in "r":
917 if self.dirstate.state(f) not in "r":
915 self.ui.warn("%s not removed!\n" % f)
918 self.ui.warn("%s not removed!\n" % f)
916 else:
919 else:
917 t = self.file(f).read(m[f])
920 t = self.file(f).read(m[f])
918 self.wwrite(f, t)
921 self.wwrite(f, t)
919 util.set_exec(self.wjoin(f), m.execf(f))
922 util.set_exec(self.wjoin(f), m.execf(f))
920 self.dirstate.update([f], "n")
923 self.dirstate.update([f], "n")
921
924
922 def copy(self, source, dest, wlock=None):
925 def copy(self, source, dest, wlock=None):
923 p = self.wjoin(dest)
926 p = self.wjoin(dest)
924 if not os.path.exists(p):
927 if not os.path.exists(p):
925 self.ui.warn(_("%s does not exist!\n") % dest)
928 self.ui.warn(_("%s does not exist!\n") % dest)
926 elif not os.path.isfile(p):
929 elif not os.path.isfile(p):
927 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
930 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
928 else:
931 else:
929 if not wlock:
932 if not wlock:
930 wlock = self.wlock()
933 wlock = self.wlock()
931 if self.dirstate.state(dest) == '?':
934 if self.dirstate.state(dest) == '?':
932 self.dirstate.update([dest], "a")
935 self.dirstate.update([dest], "a")
933 self.dirstate.copy(source, dest)
936 self.dirstate.copy(source, dest)
934
937
935 def heads(self, start=None):
938 def heads(self, start=None):
936 heads = self.changelog.heads(start)
939 heads = self.changelog.heads(start)
937 # sort the output in rev descending order
940 # sort the output in rev descending order
938 heads = [(-self.changelog.rev(h), h) for h in heads]
941 heads = [(-self.changelog.rev(h), h) for h in heads]
939 heads.sort()
942 heads.sort()
940 return [n for (r, n) in heads]
943 return [n for (r, n) in heads]
941
944
942 # branchlookup returns a dict giving a list of branches for
945 # branchlookup returns a dict giving a list of branches for
943 # each head. A branch is defined as the tag of a node or
946 # each head. A branch is defined as the tag of a node or
944 # the branch of the node's parents. If a node has multiple
947 # the branch of the node's parents. If a node has multiple
945 # branch tags, tags are eliminated if they are visible from other
948 # branch tags, tags are eliminated if they are visible from other
946 # branch tags.
949 # branch tags.
947 #
950 #
948 # So, for this graph: a->b->c->d->e
951 # So, for this graph: a->b->c->d->e
949 #                       \         /
952 #                       \         /
950 #                        aa -----/
953 #                        aa -----/
951 # a has tag 2.6.12
954 # a has tag 2.6.12
952 # d has tag 2.6.13
955 # d has tag 2.6.13
953 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
956 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
954 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
957 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
955 # from the list.
958 # from the list.
956 #
959 #
957 # It is possible that more than one head will have the same branch tag.
960 # It is possible that more than one head will have the same branch tag.
958 # callers need to check the result for multiple heads under the same
961 # callers need to check the result for multiple heads under the same
959 # branch tag if that is a problem for them (i.e. checkout of a specific
962 # branch tag if that is a problem for them (i.e. checkout of a specific
960 # branch).
963 # branch).
961 #
964 #
962 # passing in a specific branch will limit the depth of the search
965 # passing in a specific branch will limit the depth of the search
963 # through the parents. It won't limit the branches returned in the
966 # through the parents. It won't limit the branches returned in the
964 # result though.
967 # result though.
965 def branchlookup(self, heads=None, branch=None):
968 def branchlookup(self, heads=None, branch=None):
966 if not heads:
969 if not heads:
967 heads = self.heads()
970 heads = self.heads()
968 headt = [ h for h in heads ]
971 headt = [ h for h in heads ]
969 chlog = self.changelog
972 chlog = self.changelog
970 branches = {}
973 branches = {}
971 merges = []
974 merges = []
972 seenmerge = {}
975 seenmerge = {}
973
976
974 # traverse the tree once for each head, recording in the branches
977 # traverse the tree once for each head, recording in the branches
975 # dict which tags are visible from this head. The branches
978 # dict which tags are visible from this head. The branches
976 # dict also records which tags are visible from each tag
979 # dict also records which tags are visible from each tag
977 # while we traverse.
980 # while we traverse.
978 while headt or merges:
981 while headt or merges:
979 if merges:
982 if merges:
980 n, found = merges.pop()
983 n, found = merges.pop()
981 visit = [n]
984 visit = [n]
982 else:
985 else:
983 h = headt.pop()
986 h = headt.pop()
984 visit = [h]
987 visit = [h]
985 found = [h]
988 found = [h]
986 seen = {}
989 seen = {}
987 while visit:
990 while visit:
988 n = visit.pop()
991 n = visit.pop()
989 if n in seen:
992 if n in seen:
990 continue
993 continue
991 pp = chlog.parents(n)
994 pp = chlog.parents(n)
992 tags = self.nodetags(n)
995 tags = self.nodetags(n)
993 if tags:
996 if tags:
994 for x in tags:
997 for x in tags:
995 if x == 'tip':
998 if x == 'tip':
996 continue
999 continue
997 for f in found:
1000 for f in found:
998 branches.setdefault(f, {})[n] = 1
1001 branches.setdefault(f, {})[n] = 1
999 branches.setdefault(n, {})[n] = 1
1002 branches.setdefault(n, {})[n] = 1
1000 break
1003 break
1001 if n not in found:
1004 if n not in found:
1002 found.append(n)
1005 found.append(n)
1003 if branch in tags:
1006 if branch in tags:
1004 continue
1007 continue
1005 seen[n] = 1
1008 seen[n] = 1
1006 if pp[1] != nullid and n not in seenmerge:
1009 if pp[1] != nullid and n not in seenmerge:
1007 merges.append((pp[1], [x for x in found]))
1010 merges.append((pp[1], [x for x in found]))
1008 seenmerge[n] = 1
1011 seenmerge[n] = 1
1009 if pp[0] != nullid:
1012 if pp[0] != nullid:
1010 visit.append(pp[0])
1013 visit.append(pp[0])
1011 # traverse the branches dict, eliminating branch tags from each
1014 # traverse the branches dict, eliminating branch tags from each
1012 # head that are visible from another branch tag for that head.
1015 # head that are visible from another branch tag for that head.
1013 out = {}
1016 out = {}
1014 viscache = {}
1017 viscache = {}
1015 for h in heads:
1018 for h in heads:
1016 def visible(node):
1019 def visible(node):
1017 if node in viscache:
1020 if node in viscache:
1018 return viscache[node]
1021 return viscache[node]
1019 ret = {}
1022 ret = {}
1020 visit = [node]
1023 visit = [node]
1021 while visit:
1024 while visit:
1022 x = visit.pop()
1025 x = visit.pop()
1023 if x in viscache:
1026 if x in viscache:
1024 ret.update(viscache[x])
1027 ret.update(viscache[x])
1025 elif x not in ret:
1028 elif x not in ret:
1026 ret[x] = 1
1029 ret[x] = 1
1027 if x in branches:
1030 if x in branches:
1028 visit[len(visit):] = branches[x].keys()
1031 visit[len(visit):] = branches[x].keys()
1029 viscache[node] = ret
1032 viscache[node] = ret
1030 return ret
1033 return ret
1031 if h not in branches:
1034 if h not in branches:
1032 continue
1035 continue
1033 # O(n^2), but somewhat limited. This only searches the
1036 # O(n^2), but somewhat limited. This only searches the
1034 # tags visible from a specific head, not all the tags in the
1037 # tags visible from a specific head, not all the tags in the
1035 # whole repo.
1038 # whole repo.
1036 for b in branches[h]:
1039 for b in branches[h]:
1037 vis = False
1040 vis = False
1038 for bb in branches[h].keys():
1041 for bb in branches[h].keys():
1039 if b != bb:
1042 if b != bb:
1040 if b in visible(bb):
1043 if b in visible(bb):
1041 vis = True
1044 vis = True
1042 break
1045 break
1043 if not vis:
1046 if not vis:
1044 l = out.setdefault(h, [])
1047 l = out.setdefault(h, [])
1045 l[len(l):] = self.nodetags(b)
1048 l[len(l):] = self.nodetags(b)
1046 return out
1049 return out
1047
1050
1048 def branches(self, nodes):
1051 def branches(self, nodes):
1049 if not nodes:
1052 if not nodes:
1050 nodes = [self.changelog.tip()]
1053 nodes = [self.changelog.tip()]
1051 b = []
1054 b = []
1052 for n in nodes:
1055 for n in nodes:
1053 t = n
1056 t = n
1054 while 1:
1057 while 1:
1055 p = self.changelog.parents(n)
1058 p = self.changelog.parents(n)
1056 if p[1] != nullid or p[0] == nullid:
1059 if p[1] != nullid or p[0] == nullid:
1057 b.append((t, n, p[0], p[1]))
1060 b.append((t, n, p[0], p[1]))
1058 break
1061 break
1059 n = p[0]
1062 n = p[0]
1060 return b
1063 return b
1061
1064
1062 def between(self, pairs):
1065 def between(self, pairs):
1063 r = []
1066 r = []
1064
1067
1065 for top, bottom in pairs:
1068 for top, bottom in pairs:
1066 n, l, i = top, [], 0
1069 n, l, i = top, [], 0
1067 f = 1
1070 f = 1
1068
1071
1069 while n != bottom:
1072 while n != bottom:
1070 p = self.changelog.parents(n)[0]
1073 p = self.changelog.parents(n)[0]
1071 if i == f:
1074 if i == f:
1072 l.append(n)
1075 l.append(n)
1073 f = f * 2
1076 f = f * 2
1074 n = p
1077 n = p
1075 i += 1
1078 i += 1
1076
1079
1077 r.append(l)
1080 r.append(l)
1078
1081
1079 return r
1082 return r
1080
1083
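# between() above samples each (top, bottom) range at exponentially
# growing distances: walking first parents from top towards bottom, it
# records the nodes 1, 2, 4, 8, ... steps away, which is what lets
# findincoming() below narrow an unknown range with only a few round
# trips.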
1081 def findincoming(self, remote, base=None, heads=None, force=False):
1084 def findincoming(self, remote, base=None, heads=None, force=False):
1082 """Return list of roots of the subsets of missing nodes from remote
1085 """Return list of roots of the subsets of missing nodes from remote
1083
1086
1084 If base dict is specified, assume that these nodes and their parents
1087 If base dict is specified, assume that these nodes and their parents
1085 exist on the remote side and that no child of a node of base exists
1088 exist on the remote side and that no child of a node of base exists
1086 in both remote and self.
1089 in both remote and self.
1087 Furthermore base will be updated to include the nodes that exist
1090 Furthermore base will be updated to include the nodes that exist
1088 in both self and remote but whose children do not exist in both.
1091 in both self and remote but whose children do not exist in both.
1089 If a list of heads is specified, return only nodes which are heads
1092 If a list of heads is specified, return only nodes which are heads
1090 or ancestors of these heads.
1093 or ancestors of these heads.
1091
1094
1092 All the ancestors of base are in self and in remote.
1095 All the ancestors of base are in self and in remote.
1093 All the descendants of the list returned are missing in self.
1096 All the descendants of the list returned are missing in self.
1094 (and so we know that the rest of the nodes are missing in remote, see
1097 (and so we know that the rest of the nodes are missing in remote, see
1095 outgoing)
1098 outgoing)
1096 """
1099 """
1097 m = self.changelog.nodemap
1100 m = self.changelog.nodemap
1098 search = []
1101 search = []
1099 fetch = {}
1102 fetch = {}
1100 seen = {}
1103 seen = {}
1101 seenbranch = {}
1104 seenbranch = {}
1102 if base == None:
1105 if base == None:
1103 base = {}
1106 base = {}
1104
1107
1105 if not heads:
1108 if not heads:
1106 heads = remote.heads()
1109 heads = remote.heads()
1107
1110
1108 if self.changelog.tip() == nullid:
1111 if self.changelog.tip() == nullid:
1109 base[nullid] = 1
1112 base[nullid] = 1
1110 if heads != [nullid]:
1113 if heads != [nullid]:
1111 return [nullid]
1114 return [nullid]
1112 return []
1115 return []
1113
1116
1114 # assume we're closer to the tip than the root
1117 # assume we're closer to the tip than the root
1115 # and start by examining the heads
1118 # and start by examining the heads
1116 self.ui.status(_("searching for changes\n"))
1119 self.ui.status(_("searching for changes\n"))
1117
1120
1118 unknown = []
1121 unknown = []
1119 for h in heads:
1122 for h in heads:
1120 if h not in m:
1123 if h not in m:
1121 unknown.append(h)
1124 unknown.append(h)
1122 else:
1125 else:
1123 base[h] = 1
1126 base[h] = 1
1124
1127
1125 if not unknown:
1128 if not unknown:
1126 return []
1129 return []
1127
1130
1128 req = dict.fromkeys(unknown)
1131 req = dict.fromkeys(unknown)
1129 reqcnt = 0
1132 reqcnt = 0
1130
1133
1131 # search through remote branches
1134 # search through remote branches
1132 # a 'branch' here is a linear segment of history, with four parts:
1135 # a 'branch' here is a linear segment of history, with four parts:
1133 # head, root, first parent, second parent
1136 # head, root, first parent, second parent
1134 # (a branch always has two parents (or none) by definition)
1137 # (a branch always has two parents (or none) by definition)
1135 unknown = remote.branches(unknown)
1138 unknown = remote.branches(unknown)
1136 while unknown:
1139 while unknown:
1137 r = []
1140 r = []
1138 while unknown:
1141 while unknown:
1139 n = unknown.pop(0)
1142 n = unknown.pop(0)
1140 if n[0] in seen:
1143 if n[0] in seen:
1141 continue
1144 continue
1142
1145
1143 self.ui.debug(_("examining %s:%s\n")
1146 self.ui.debug(_("examining %s:%s\n")
1144 % (short(n[0]), short(n[1])))
1147 % (short(n[0]), short(n[1])))
1145 if n[0] == nullid: # found the end of the branch
1148 if n[0] == nullid: # found the end of the branch
1146 pass
1149 pass
1147 elif n in seenbranch:
1150 elif n in seenbranch:
1148 self.ui.debug(_("branch already found\n"))
1151 self.ui.debug(_("branch already found\n"))
1149 continue
1152 continue
1150 elif n[1] and n[1] in m: # do we know the base?
1153 elif n[1] and n[1] in m: # do we know the base?
1151 self.ui.debug(_("found incomplete branch %s:%s\n")
1154 self.ui.debug(_("found incomplete branch %s:%s\n")
1152 % (short(n[0]), short(n[1])))
1155 % (short(n[0]), short(n[1])))
1153 search.append(n) # schedule branch range for scanning
1156 search.append(n) # schedule branch range for scanning
1154 seenbranch[n] = 1
1157 seenbranch[n] = 1
1155 else:
1158 else:
1156 if n[1] not in seen and n[1] not in fetch:
1159 if n[1] not in seen and n[1] not in fetch:
1157 if n[2] in m and n[3] in m:
1160 if n[2] in m and n[3] in m:
1158 self.ui.debug(_("found new changeset %s\n") %
1161 self.ui.debug(_("found new changeset %s\n") %
1159 short(n[1]))
1162 short(n[1]))
1160 fetch[n[1]] = 1 # earliest unknown
1163 fetch[n[1]] = 1 # earliest unknown
1161 for p in n[2:4]:
1164 for p in n[2:4]:
1162 if p in m:
1165 if p in m:
1163 base[p] = 1 # latest known
1166 base[p] = 1 # latest known
1164
1167
1165 for p in n[2:4]:
1168 for p in n[2:4]:
1166 if p not in req and p not in m:
1169 if p not in req and p not in m:
1167 r.append(p)
1170 r.append(p)
1168 req[p] = 1
1171 req[p] = 1
1169 seen[n[0]] = 1
1172 seen[n[0]] = 1
1170
1173
1171 if r:
1174 if r:
1172 reqcnt += 1
1175 reqcnt += 1
1173 self.ui.debug(_("request %d: %s\n") %
1176 self.ui.debug(_("request %d: %s\n") %
1174 (reqcnt, " ".join(map(short, r))))
1177 (reqcnt, " ".join(map(short, r))))
1175 for p in xrange(0, len(r), 10):
1178 for p in xrange(0, len(r), 10):
1176 for b in remote.branches(r[p:p+10]):
1179 for b in remote.branches(r[p:p+10]):
1177 self.ui.debug(_("received %s:%s\n") %
1180 self.ui.debug(_("received %s:%s\n") %
1178 (short(b[0]), short(b[1])))
1181 (short(b[0]), short(b[1])))
1179 unknown.append(b)
1182 unknown.append(b)
1180
1183
1181 # do binary search on the branches we found
1184 # do binary search on the branches we found
1182 while search:
1185 while search:
1183 n = search.pop(0)
1186 n = search.pop(0)
1184 reqcnt += 1
1187 reqcnt += 1
1185 l = remote.between([(n[0], n[1])])[0]
1188 l = remote.between([(n[0], n[1])])[0]
1186 l.append(n[1])
1189 l.append(n[1])
1187 p = n[0]
1190 p = n[0]
1188 f = 1
1191 f = 1
1189 for i in l:
1192 for i in l:
1190 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1193 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1191 if i in m:
1194 if i in m:
1192 if f <= 2:
1195 if f <= 2:
1193 self.ui.debug(_("found new branch changeset %s\n") %
1196 self.ui.debug(_("found new branch changeset %s\n") %
1194 short(p))
1197 short(p))
1195 fetch[p] = 1
1198 fetch[p] = 1
1196 base[i] = 1
1199 base[i] = 1
1197 else:
1200 else:
1198 self.ui.debug(_("narrowed branch search to %s:%s\n")
1201 self.ui.debug(_("narrowed branch search to %s:%s\n")
1199 % (short(p), short(i)))
1202 % (short(p), short(i)))
1200 search.append((p, i))
1203 search.append((p, i))
1201 break
1204 break
1202 p, f = i, f * 2
1205 p, f = i, f * 2
1203
1206
1204 # sanity check our fetch list
1207 # sanity check our fetch list
1205 for f in fetch.keys():
1208 for f in fetch.keys():
1206 if f in m:
1209 if f in m:
1207 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1210 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1208
1211
1209 if base.keys() == [nullid]:
1212 if base.keys() == [nullid]:
1210 if force:
1213 if force:
1211 self.ui.warn(_("warning: repository is unrelated\n"))
1214 self.ui.warn(_("warning: repository is unrelated\n"))
1212 else:
1215 else:
1213 raise util.Abort(_("repository is unrelated"))
1216 raise util.Abort(_("repository is unrelated"))
1214
1217
1215 self.ui.debug(_("found new changesets starting at ") +
1218 self.ui.debug(_("found new changesets starting at ") +
1216 " ".join([short(f) for f in fetch]) + "\n")
1219 " ".join([short(f) for f in fetch]) + "\n")
1217
1220
1218 self.ui.debug(_("%d total queries\n") % reqcnt)
1221 self.ui.debug(_("%d total queries\n") % reqcnt)
1219
1222
1220 return fetch.keys()
1223 return fetch.keys()
1221
1224
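# The "binary search" loop in findincoming() above repeatedly asks the
# remote for its exponentially spaced samples of a branch (via between)
# and keeps the first locally-known sample, shrinking the unknown span
# until the earliest missing changeset on that branch is pinned down;
# reqcnt counts the resulting wire requests for the debug output.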
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
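
    # Illustrative usage (a sketch, assuming `remote` is a peer repository
    # object such as the one returned by hg.repository(ui, path)):
    #
    #   roots = repo.findoutgoing(remote)                      # roots only
    #   roots, updated = repo.findoutgoing(remote, heads=remote.heads())
    #
    # The second form also reports which remote heads would gain children.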

    def pull(self, remote, heads=None, force=False, lock=None):
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            if mylock:
                lock.release()

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)
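
    # Illustrative control flow (a sketch): a peer served over HTTP or by a
    # recent ssh server normally advertises 'unbundle', so push() takes the
    # push_unbundle() path; pushing into a repository on the local filesystem
    # typically lacks that capability and falls back to push_addchangegroup(),
    # which locks the destination directly.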

    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))

        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
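
    # Worked example of the head-counting check above (a sketch): suppose the
    # remote has the single head A and the local push would leave heads B and
    # C, where B descends from A but C does not.  For r = A the descendant
    # lookup finds B, so A is not re-added; newheads stays [B, C] and
    # len(newheads) = 2 > len(remote_heads) = 1, so the push is refused unless
    # forced with -f.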

    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push.  once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force: remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

    def changegroupinfo(self, nodes):
        self.ui.note(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
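
    # Illustrative usage (a sketch): this is what backs partial pulls and
    # pushes of specific revisions, e.g.
    #
    #   cg = repo.changegroupsubset(bases, heads, 'pull')
    #
    # where `bases` are nodes the receiver is assumed to have and `heads`
    # bound what is sent.  As gengroup() above shows, the stream carries the
    # changelog group first, then the manifest group, then one group per
    # changed file, and finally a closing chunk.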

    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
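
    # Illustrative usage (a sketch): the push path above does essentially
    #
    #   cg = repo.changegroup(repo.findoutgoing(remote), 'push')
    #
    # where `basenodes` are the roots of what to send; everything descending
    # from them is bundled without the per-node pruning changegroupsubset()
    # has to perform.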

    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.
        returns number of heads modified or added + 1."""

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.sopener,
                                            self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.sopener,
                                             self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1
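
    # Hook ordering in the method above: 'prechangegroup' fires before any
    # data is read, 'pretxnchangegroup' can still abort while the transaction
    # is open, and 'changegroup'/'incoming' only run after tr.close(), with
    # 'incoming' invoked once per added changeset.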

    def stream_in(self, remote):
        fp = remote.stream_out()
        l = fp.readline()
        try:
            resp = int(l)
        except ValueError:
            raise util.UnexpectedOutput(
                _('Unexpected response from remote server:'), l)
        if resp == 1:
            raise util.Abort(_('operation forbidden by server'))
        elif resp == 2:
            raise util.Abort(_('locking the remote repository failed'))
        elif resp != 0:
            raise util.Abort(_('the server sent an unknown error code'))
        self.ui.status(_('streaming all changes\n'))
        l = fp.readline()
        try:
            total_files, total_bytes = map(int, l.split(' ', 1))
        except (ValueError, TypeError):
            raise util.UnexpectedOutput(
                _('Unexpected response from remote server:'), l)
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            # XXX doesn't support '\n' or '\r' in filenames
            l = fp.readline()
            try:
                name, size = l.split('\0', 1)
                size = int(size)
            except (ValueError, TypeError):
                raise util.UnexpectedOutput(
                    _('Unexpected response from remote server:'), l)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.sopener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1
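
    # Wire format consumed above (a sketch of what a compliant server sends):
    #
    #   <status>\n                   0 = OK; 1 and 2 are the errors handled above
    #   <filecount> <bytecount>\n
    #   <storepath>\0<size>\n        followed by exactly <size> bytes of data,
    #                                repeated <filecount> times
    #
    # so the loop simply copies each store file verbatim into self.sopener.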

    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)
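
    # Illustrative usage (a sketch): the higher-level clone helper ends up
    # here with something like
    #
    #   dest_repo.clone(src_repo, stream=pull_uncompressed)
    #
    # so an uncompressed streaming clone is only attempted when no specific
    # heads were requested and the source advertises the 'stream' capability;
    # otherwise it degrades to a normal pull.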

# used to avoid circular references so destructors work
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True
@@ -1,483 +1,484 b''
# merge.py - directory-level update/merge handling for Mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from node import *
from i18n import gettext as _
from demandload import *
demandload(globals(), "errno util os tempfile")

def filemerge(repo, fw, fo, wctx, mctx):
    """perform a 3-way merge in the working directory

    fw = filename in the working directory
    fo = filename in other parent
    wctx, mctx = working and merge changecontexts
    """

    def temp(prefix, ctx):
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        f = os.fdopen(fd, "wb")
        repo.wwrite(ctx.path(), ctx.data(), f)
        f.close()
        return name

    fcm = wctx.filectx(fw)
    fco = mctx.filectx(fo)

    if not fco.cmp(fcm.data()): # files identical?
        return None

    fca = fcm.ancestor(fco)
    if not fca:
        fca = repo.filectx(fw, fileid=nullrev)
    a = repo.wjoin(fw)
    b = temp("base", fca)
    c = temp("other", fco)

    if fw != fo:
        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
    else:
        repo.ui.status(_("merging %s\n") % fw)

    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))

    cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
           or "hgmerge")
    r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
                    environ={'HG_FILE': fw,
                             'HG_MY_NODE': str(wctx.parents()[0]),
                             'HG_OTHER_NODE': str(mctx)})
    if r:
        repo.ui.warn(_("merging %s failed!\n") % fw)

    os.unlink(b)
    os.unlink(c)
    return r

def checkunknown(wctx, mctx):
    "check for collisions between unknown files and files in mctx"
    man = mctx.manifest()
    for f in wctx.unknown():
        if f in man:
            if mctx.filectx(f).cmp(wctx.filectx(f).data()):
                raise util.Abort(_("untracked local file '%s' differs"
                                   " from remote version") % f)

def forgetremoved(wctx, mctx):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.
    """

    action = []
    man = mctx.manifest()
    for f in wctx.deleted() + wctx.removed():
        if f not in man:
            action.append((f, "f"))

    return action

def findcopies(repo, m1, m2, ma, limit):
    """
    Find moves and copies between m1 and m2 back to limit linkrev
    """

    def findold(fctx):
        "find files that path was copied from, back to linkrev limit"
        old = {}
        orig = fctx.path()
        visit = [fctx]
        while visit:
            fc = visit.pop()
            if fc.rev() < limit:
                continue
            if fc.path() != orig and fc.path() not in old:
                old[fc.path()] = 1
            visit += fc.parents()

        old = old.keys()
        old.sort()
        return old

    def nonoverlap(d1, d2, d3):
        "Return list of elements in d1 not in d2 or d3"
        l = [d for d in d1 if d not in d3 and d not in d2]
        l.sort()
        return l

    def checkcopies(c, man):
        '''check possible copies for filectx c'''
        for of in findold(c):
            if of not in man:
                return
            c2 = ctx(of, man[of])
            ca = c.ancestor(c2)
            if not ca: # unrelated
                return
            if ca.path() == c.path() or ca.path() == c2.path():
                fullcopy[c.path()] = of
                if c == ca or c2 == ca: # no merge needed, ignore copy
                    return
                copy[c.path()] = of

    def dirs(files):
        d = {}
        for f in files:
            d[os.path.dirname(f)] = True
        return d

    if not repo.ui.configbool("merge", "followcopies", True):
        return {}

    # avoid silly behavior for update from empty dir
    if not m1 or not m2 or not ma:
        return {}

    dcopies = repo.dirstate.copies()
    copy = {}
    fullcopy = {}
    u1 = nonoverlap(m1, m2, ma)
    u2 = nonoverlap(m2, m1, ma)
    ctx = util.cachefunc(lambda f, n: repo.filectx(f, fileid=n[:20]))

    for f in u1:
        checkcopies(ctx(dcopies.get(f, f), m1[f]), m2)

    for f in u2:
        checkcopies(ctx(f, m2[f]), m1)

    if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
        return copy

    # generate a directory move map
    d1, d2 = dirs(m1), dirs(m2)
    invalid = {}
    dirmove = {}

    for dst, src in fullcopy.items():
        dsrc, ddst = os.path.dirname(src), os.path.dirname(dst)
        if dsrc in invalid:
            continue
        elif (dsrc in d1 and ddst in d1) or (dsrc in d2 and ddst in d2):
            invalid[dsrc] = True
        elif dsrc in dirmove and dirmove[dsrc] != ddst:
            invalid[dsrc] = True
            del dirmove[dsrc]
        else:
            dirmove[dsrc] = ddst

    del d1, d2, invalid

    if not dirmove:
        return copy

    # check unaccounted nonoverlapping files
    for f in u1 + u2:
        if f not in fullcopy:
            d = os.path.dirname(f)
            if d in dirmove:
                copy[f] = dirmove[d] + "/" + os.path.basename(f)

    return copy

def manifestmerge(repo, p1, p2, pa, overwrite, partial):
    """
    Merge p1 and p2 with ancestor pa and generate merge action list

    overwrite = whether we clobber working files
    partial = function to filter file lists
    """

    repo.ui.note(_("resolving manifests\n"))
    repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
    repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))

    m1 = p1.manifest()
    m2 = p2.manifest()
    ma = pa.manifest()
    backwards = (pa == p2)
    action = []
    copy = {}

    def fmerge(f, f2=None, fa=None):
        """merge executable flags"""
        if not f2:
            f2 = f
            fa = f
        a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
        return ((a^b) | (a^c)) ^ a
219
219
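    # fmerge above resolves the exec bit three ways: a is the ancestor's flag,
    # b the local flag and c the remote flag.  ((a^b) | (a^c)) ^ a keeps the
    # ancestor's value when neither side changed it and otherwise takes the
    # changed value; if both sides changed it, both can only have flipped to
    # the same non-ancestor state, so the result agrees with either.
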
    def act(msg, m, f, *args):
        repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
        action.append((f, m) + args)

    if not (backwards or overwrite):
        copy = findcopies(repo, m1, m2, ma, pa.rev())
    copied = dict.fromkeys(copy.values())

    # Compare manifests
    for f, n in m1.iteritems():
        if partial and not partial(f):
            continue
        if f in m2:
            # are files different?
            if n != m2[f]:
                a = ma.get(f, nullid)
                # are both different from the ancestor?
                if not overwrite and n != a and m2[f] != a:
                    act("versions differ", "m", f, f, f, fmerge(f), False)
                # are we clobbering?
                # is remote's version newer?
                # or are we going back in time and clean?
                elif overwrite or m2[f] != a or (backwards and not n[20:]):
                    act("remote is newer", "g", f, m2.execf(f))
                # local is newer, not overwrite, check mode bits
                elif fmerge(f) != m1.execf(f):
                    act("update permissions", "e", f, m2.execf(f))
            # contents same, check mode bits
            elif m1.execf(f) != m2.execf(f):
                if overwrite or fmerge(f) != m1.execf(f):
                    act("update permissions", "e", f, m2.execf(f))
        elif f in copied:
            continue
        elif f in copy:
            f2 = copy[f]
            if f2 not in m2: # directory rename
                act("remote renamed directory to " + f2, "d",
                    f, None, f2, m1.execf(f))
            elif f2 in m1: # case 2 A,B/B/B
                act("local copied to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
            else: # case 4,21 A/B/B
                act("local moved to " + f2, "m",
                    f, f2, f, fmerge(f, f2, f2), False)
        elif f in ma:
            if n != ma[f] and not overwrite:
                if repo.ui.prompt(
                    (_(" local changed %s which remote deleted\n") % f) +
                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
                    act("prompt delete", "r", f)
            else:
                act("other deleted", "r", f)
        else:
            # file is created on branch or in working directory
            if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
                act("remote deleted", "r", f)

    for f, n in m2.iteritems():
        if partial and not partial(f):
            continue
        if f in m1:
            continue
        if f in copied:
            continue
        if f in copy:
            f2 = copy[f]
            if f2 not in m1: # directory rename
                act("local renamed directory to " + f2, "d",
                    None, f, f2, m2.execf(f))
            elif f2 in m2: # rename case 1, A/A,B/A
                act("remote copied to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), False)
            else: # case 3,20 A/B/A
                act("remote moved to " + f, "m",
                    f2, f, f, fmerge(f2, f, f2), True)
        elif f in ma:
            if overwrite or backwards:
                act("recreating", "g", f, m2.execf(f))
            elif n != ma[f]:
                if repo.ui.prompt(
                    (_("remote changed %s which local deleted\n") % f) +
                    _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
                    act("prompt recreating", "g", f, m2.execf(f))
        else:
            act("remote created", "g", f, m2.execf(f))

    return action

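Each tuple appended by act() starts with a file name and the one-letter action
code; the remaining fields depend on the code and are unpacked by applyupdates()
below. A quick reference, reconstructed from the calls above rather than quoted
from any table in the source:

    # (f, "r")                        remove f
    # (f, "g", flag)                  get f from the other side, set exec flag
    # (f, "e", flag)                  update f's exec flag only
    # (f, "m", f2, fd, flag, move)    merge f with f2 into fd, optionally moving
    # (f, "d", f2, fd, flag)          directory rename; f or f2 is None,
    #                                 depending on which side renamed the dir
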
def applyupdates(repo, action, wctx, mctx):
    "apply the merge action list to the working directory"

    updated, merged, removed, unresolved = 0, 0, 0, 0
    action.sort()
    for a in action:
        f, m = a[:2]
        if f and f[0] == "/":
            continue
        if m == "r": # remove
            repo.ui.note(_("removing %s\n") % f)
            util.audit_path(f)
            try:
                util.unlink(repo.wjoin(f))
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    repo.ui.warn(_("update failed to remove %s: %s!\n") %
                                 (f, inst.strerror))
            removed += 1
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            r = filemerge(repo, f, f2, wctx, mctx)
            if r > 0:
                unresolved += 1
            else:
                if r is None:
                    updated += 1
                else:
                    merged += 1
            if f != fd:
                repo.ui.debug(_("copying %s to %s\n") % (f, fd))
                repo.wwrite(fd, repo.wread(f))
                if move:
                    repo.ui.debug(_("removing %s\n") % f)
                    os.unlink(repo.wjoin(f))
            util.set_exec(repo.wjoin(fd), flag)
        elif m == "g": # get
            flag = a[2]
            repo.ui.note(_("getting %s\n") % f)
            t = mctx.filectx(f).data()
            repo.wwrite(f, t)
            util.set_exec(repo.wjoin(f), flag)
            updated += 1
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if f:
                repo.ui.note(_("moving %s to %s\n") % (f, fd))
                t = wctx.filectx(f).data()
                repo.wwrite(fd, t)
                util.set_exec(repo.wjoin(fd), flag)
                util.unlink(repo.wjoin(f))
            if f2:
                repo.ui.note(_("getting %s to %s\n") % (f2, fd))
                t = mctx.filectx(f2).data()
                repo.wwrite(fd, t)
                util.set_exec(repo.wjoin(fd), flag)
            updated += 1
        elif m == "e": # exec
            flag = a[2]
            util.set_exec(repo.wjoin(f), flag)

    return updated, merged, removed, unresolved

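The four counters returned here feed the familiar end-of-update summary. A small
sketch of how a caller might format them; the exact wording hg prints lives in a
wrapper outside this file, so treat the message below as illustrative only:

    def summarize(stats):
        # stats is the tuple returned by applyupdates()
        updated, merged, removed, unresolved = stats
        return ("%d files updated, %d files merged, "
                "%d files removed, %d files unresolved" %
                (updated, merged, removed, unresolved))
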
def recordupdates(repo, action, branchmerge):
    "record merge actions to the dirstate"

    for a in action:
        f, m = a[:2]
        if m == "r": # remove
            if branchmerge:
                repo.dirstate.update([f], 'r')
            else:
                repo.dirstate.forget([f])
        elif m == "f": # forget
            repo.dirstate.forget([f])
        elif m == "g": # get
            if branchmerge:
                repo.dirstate.update([f], 'n', st_mtime=-1)
            else:
                repo.dirstate.update([f], 'n')
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            if branchmerge:
                # We've done a branch merge, mark this file as merged
                # so that we properly record the merger later
                repo.dirstate.update([fd], 'm')
                if f != f2: # copy/rename
                    if move:
                        repo.dirstate.update([f], 'r')
                    if f != fd:
                        repo.dirstate.copy(f, fd)
                    else:
                        repo.dirstate.copy(f2, fd)
            else:
                # We've update-merged a locally modified file, so
                # we set the dirstate to emulate a normal checkout
                # of that file some time in the past. Thus our
                # merge will appear as a normal local file
                # modification.
                repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
                if move:
                    repo.dirstate.forget([f])
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if branchmerge:
                repo.dirstate.update([fd], 'a')
                if f:
                    repo.dirstate.update([f], 'r')
                    repo.dirstate.copy(f, fd)
                if f2:
                    repo.dirstate.copy(f2, fd)
            else:
                repo.dirstate.update([fd], 'n')
                if f:
                    repo.dirstate.forget([f])

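recordupdates() leans on the single-letter dirstate states used throughout
Mercurial. As a reminder (these meanings are general dirstate behaviour, not
something introduced by this changeset):

    # 'n'  normal: tracked and assumed clean; st_size/st_mtime of -1 force a
    #      later content comparison instead of trusting the stat data
    # 'a'  added: scheduled for addition at the next commit
    # 'r'  removed: scheduled for removal
    # 'm'  merged: record the file as merged when the merge is committed
    # forget() drops the file from the dirstate entirely
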
def update(repo, node, branchmerge, force, partial, wlock):
    """
    Perform a merge between the working directory and the given node

    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    partial = a function to filter file lists (dirstate not updated)
    wlock = working dir lock, if already held
    """

    if not wlock:
        wlock = repo.wlock()

    overwrite = force and not branchmerge
    forcemerge = force and branchmerge
    wc = repo.workingctx()
    pl = wc.parents()
    p1, p2 = pl[0], repo.changectx(node)
    pa = p1.ancestor(p2)
    fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)

    ### check phase
    if not overwrite and len(pl) > 1:
        raise util.Abort(_("outstanding uncommitted merges"))
    if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
        if branchmerge:
            raise util.Abort(_("there is nothing to merge, just use "
                               "'hg update' or look at 'hg heads'"))
    elif not (overwrite or branchmerge):
        raise util.Abort(_("update spans branches, use 'hg merge' "
                           "or 'hg update -C' to lose changes"))
    if branchmerge and not forcemerge:
        if wc.files():
            raise util.Abort(_("outstanding uncommitted changes"))

    ### calculate phase
    action = []
    if not force:
        checkunknown(wc, p2)
    if not branchmerge:
        action += forgetremoved(wc, p2)
    action += manifestmerge(repo, wc, p2, pa, overwrite, partial)

    ### apply phase
    if not branchmerge: # just jump to the new rev
        fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
    if not partial:
        repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)

    stats = applyupdates(repo, action, wc, p2)

    if not partial:
        recordupdates(repo, action, branchmerge)
        repo.dirstate.setparents(fp1, fp2)
        repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
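        # Only a plain update lands the working directory on p2, so only then
        # is p2's branch name remembered in .hg/branch below; util.tolocal is
        # presumably the encoding helper that converts the internally stored
        # name to the local character set before it is written out.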
        if not branchmerge:
            b = util.tolocal(p2.branch())
            repo.opener("branch", "w").write(b + "\n")

    return stats
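
For reference, a minimal sketch of how this entry point might be driven for a
plain checkout versus a branch merge. The wrapper names below are made up; hg's
real commands reach update() through helper functions elsewhere in the tree:

    from mercurial import merge

    def checkout(repo, node):
        # plain update to node: the dirstate ends up with a single parent
        return merge.update(repo, node, False, False, None, None)

    def mergewith(repo, node):
        # branch merge with node: the dirstate keeps two parents until commit
        return merge.update(repo, node, True, False, None, None)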