Merge with crew
Matt Mackall
r4276:cb6107f7 merge default
@@ -1,3376 +1,3377 @@
# commands.py - command processing for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import demandimport; demandimport.enable()
from node import *
from i18n import _
import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
import fancyopts, ui, hg, util, lock, revlog, bundlerepo
import difflib, patch, time, help, mdiff, tempfile
import traceback, errno, version, atexit, socket
import archival, changegroup, cmdutil, hgweb.server, sshserver

class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""

def bail_if_changed(repo):
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_("outstanding uncommitted changes"))

def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    if opts.get('ssh'):
        ui.setconfig("ui", "ssh", opts['ssh'])
    if opts.get('remotecmd'):
        ui.setconfig("ui", "remotecmd", opts['remotecmd'])

# Commands start here, listed alphabetically

def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
            names.append(abs)
        elif repo.dirstate.state(abs) == '?':
            ui.status(_('adding %s\n') % rel)
            names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)

def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    sim = float(opts.get('similarity') or 0)
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)

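# Editor's note (illustrative, not part of the original changeset): the
# -s/--similarity option documented above takes a percentage, so for example
#
#   $ hg addremove -s 75
#
# would record removed/added file pairs that are at least 75% similar as
# renames rather than as a delete plus an add.
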
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    getdate = util.cachefunc(lambda x: util.datestr(x.date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    opmap = [['user', lambda x: ui.shortuser(x.user())],
             ['number', lambda x: str(x.rev())],
             ['changeset', lambda x: short(x.node())],
             ['date', getdate], ['follow', lambda x: x.path()]]
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'))
        pieces = []

        for o, f in opmap:
            if opts[o]:
                l = [f(n) for n, dummy in lines]
                if l:
                    m = max(map(len, l))
                    pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))

def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    node = repo.changectx(opts['rev']).node()
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)

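# Editor's note (illustrative, not part of the original changeset): a typical
# invocation of the command above, archiving the tip revision as a gzipped
# tarball with the default directory prefix:
#
#   $ hg archive -t tgz -r tip ../myproject.tgz
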
def backout(ui, repo, rev, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''

    bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))

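# Editor's note (illustrative, not part of the original changeset): per the
# docstring above, backing out an older changeset and immediately merging the
# backout into the previous working directory parent can be done in one step:
#
#   $ hg backout --merge <REV>
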
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With <name>, set the current branch name. Otherwise, show the
    current branch name.

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if label:
        if not opts.get('force') and label in repo.branchtags():
            if label not in [p.branch() for p in repo.workingctx().parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   ' (use --force to override)'))
        repo.dirstate.setbranch(util.fromlocal(label))
    else:
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))

def branches(ui, repo):
    """list repository named branches

    List the repository's named branches.
    """
    b = repo.branchtags()
    l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
    l.sort()
    for r, n, t in l:
        hexfunc = ui.debugflag and hex or short
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))

def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        setremoteconfig(ui, opts)
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")

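# Editor's note (illustrative, not part of the original changeset): as
# described above, a bundle holds the changesets missing from another
# repository and is later applied there with unbundle or pull, e.g.
#
#   $ hg bundle changes.hg http://example.com/otherrepo
#   $ hg unbundle changes.hg      (run in the receiving repository)
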
344 def cat(ui, repo, file1, *pats, **opts):
344 def cat(ui, repo, file1, *pats, **opts):
345 """output the current or given revision of files
345 """output the current or given revision of files
346
346
347 Print the specified files as they were at the given revision.
347 Print the specified files as they were at the given revision.
348 If no revision is given, the parent of the working directory is used,
348 If no revision is given, the parent of the working directory is used,
349 or tip if no revision is checked out.
349 or tip if no revision is checked out.
350
350
351 Output may be to a file, in which case the name of the file is
351 Output may be to a file, in which case the name of the file is
352 given using a format string. The formatting rules are the same as
352 given using a format string. The formatting rules are the same as
353 for the export command, with the following additions:
353 for the export command, with the following additions:
354
354
355 %s basename of file being printed
355 %s basename of file being printed
356 %d dirname of file being printed, or '.' if in repo root
356 %d dirname of file being printed, or '.' if in repo root
357 %p root-relative path name of file being printed
357 %p root-relative path name of file being printed
358 """
358 """
359 ctx = repo.changectx(opts['rev'])
359 ctx = repo.changectx(opts['rev'])
360 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
360 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
361 ctx.node()):
361 ctx.node()):
362 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
362 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
363 fp.write(ctx.filectx(abs).data())
363 fp.write(ctx.filectx(abs).data())
364
364
365 def clone(ui, source, dest=None, **opts):
365 def clone(ui, source, dest=None, **opts):
366 """make a copy of an existing repository
366 """make a copy of an existing repository
367
367
368 Create a copy of an existing repository in a new directory.
368 Create a copy of an existing repository in a new directory.
369
369
370 If no destination directory name is specified, it defaults to the
370 If no destination directory name is specified, it defaults to the
371 basename of the source.
371 basename of the source.
372
372
373 The location of the source is added to the new repository's
373 The location of the source is added to the new repository's
374 .hg/hgrc file, as the default to be used for future pulls.
374 .hg/hgrc file, as the default to be used for future pulls.
375
375
376 For efficiency, hardlinks are used for cloning whenever the source
376 For efficiency, hardlinks are used for cloning whenever the source
377 and destination are on the same filesystem (note this applies only
377 and destination are on the same filesystem (note this applies only
378 to the repository data, not to the checked out files). Some
378 to the repository data, not to the checked out files). Some
379 filesystems, such as AFS, implement hardlinking incorrectly, but
379 filesystems, such as AFS, implement hardlinking incorrectly, but
380 do not report errors. In these cases, use the --pull option to
380 do not report errors. In these cases, use the --pull option to
381 avoid hardlinking.
381 avoid hardlinking.
382
382
383 You can safely clone repositories and checked out files using full
383 You can safely clone repositories and checked out files using full
384 hardlinks with
384 hardlinks with
385
385
386 $ cp -al REPO REPOCLONE
386 $ cp -al REPO REPOCLONE
387
387
388 which is the fastest way to clone. However, the operation is not
388 which is the fastest way to clone. However, the operation is not
389 atomic (making sure REPO is not modified during the operation is
389 atomic (making sure REPO is not modified during the operation is
390 up to you) and you have to make sure your editor breaks hardlinks
390 up to you) and you have to make sure your editor breaks hardlinks
391 (Emacs and most Linux Kernel tools do so).
391 (Emacs and most Linux Kernel tools do so).
392
392
393 If you use the -r option to clone up to a specific revision, no
393 If you use the -r option to clone up to a specific revision, no
394 subsequent revisions will be present in the cloned repository.
394 subsequent revisions will be present in the cloned repository.
395 This option implies --pull, even on local repositories.
395 This option implies --pull, even on local repositories.
396
396
397 See pull for valid source format details.
397 See pull for valid source format details.
398
398
399 It is possible to specify an ssh:// URL as the destination, but no
399 It is possible to specify an ssh:// URL as the destination, but no
400 .hg/hgrc and working directory will be created on the remote side.
400 .hg/hgrc and working directory will be created on the remote side.
401 Look at the help text for the pull command for important details
401 Look at the help text for the pull command for important details
402 about ssh:// URLs.
402 about ssh:// URLs.
403 """
403 """
404 setremoteconfig(ui, opts)
404 setremoteconfig(ui, opts)
405 hg.clone(ui, ui.expandpath(source), dest,
405 hg.clone(ui, ui.expandpath(source), dest,
406 pull=opts['pull'],
406 pull=opts['pull'],
407 stream=opts['uncompressed'],
407 stream=opts['uncompressed'],
408 rev=opts['rev'],
408 rev=opts['rev'],
409 update=not opts['noupdate'])
409 update=not opts['noupdate'])
410
410
411 def commit(ui, repo, *pats, **opts):
411 def commit(ui, repo, *pats, **opts):
412 """commit the specified files or all outstanding changes
412 """commit the specified files or all outstanding changes
413
413
414 Commit changes to the given files into the repository.
414 Commit changes to the given files into the repository.
415
415
416 If a list of files is omitted, all changes reported by "hg status"
416 If a list of files is omitted, all changes reported by "hg status"
417 will be committed.
417 will be committed.
418
418
419 If no commit message is specified, the editor configured in your hgrc
419 If no commit message is specified, the editor configured in your hgrc
420 or in the EDITOR environment variable is started to enter a message.
420 or in the EDITOR environment variable is started to enter a message.
421 """
421 """
422 message = logmessage(opts)
422 message = logmessage(opts)
423
423
424 if opts['addremove']:
424 if opts['addremove']:
425 cmdutil.addremove(repo, pats, opts)
425 cmdutil.addremove(repo, pats, opts)
426 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
426 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
427 if pats:
427 if pats:
428 status = repo.status(files=fns, match=match)
428 status = repo.status(files=fns, match=match)
429 modified, added, removed, deleted, unknown = status[:5]
429 modified, added, removed, deleted, unknown = status[:5]
430 files = modified + added + removed
430 files = modified + added + removed
431 slist = None
431 slist = None
432 for f in fns:
432 for f in fns:
433 if f == '.':
433 if f == '.':
434 continue
434 continue
435 if f not in files:
435 if f not in files:
436 rf = repo.wjoin(f)
436 rf = repo.wjoin(f)
437 if f in unknown:
437 if f in unknown:
438 raise util.Abort(_("file %s not tracked!") % rf)
438 raise util.Abort(_("file %s not tracked!") % rf)
439 try:
439 try:
440 mode = os.lstat(rf)[stat.ST_MODE]
440 mode = os.lstat(rf)[stat.ST_MODE]
441 except OSError:
441 except OSError:
442 raise util.Abort(_("file %s not found!") % rf)
442 raise util.Abort(_("file %s not found!") % rf)
443 if stat.S_ISDIR(mode):
443 if stat.S_ISDIR(mode):
444 name = f + '/'
444 name = f + '/'
445 if slist is None:
445 if slist is None:
446 slist = list(files)
446 slist = list(files)
447 slist.sort()
447 slist.sort()
448 i = bisect.bisect(slist, name)
448 i = bisect.bisect(slist, name)
449 if i >= len(slist) or not slist[i].startswith(name):
449 if i >= len(slist) or not slist[i].startswith(name):
450 raise util.Abort(_("no match under directory %s!")
450 raise util.Abort(_("no match under directory %s!")
451 % rf)
451 % rf)
452 elif not stat.S_ISREG(mode):
452 elif not stat.S_ISREG(mode):
453 raise util.Abort(_("can't commit %s: "
453 raise util.Abort(_("can't commit %s: "
454 "unsupported file type!") % rf)
454 "unsupported file type!") % rf)
455 else:
455 else:
456 files = []
456 files = []
457 try:
457 try:
458 repo.commit(files, message, opts['user'], opts['date'], match,
458 repo.commit(files, message, opts['user'], opts['date'], match,
459 force_editor=opts.get('force_editor'))
459 force_editor=opts.get('force_editor'))
460 except ValueError, inst:
460 except ValueError, inst:
461 raise util.Abort(str(inst))
461 raise util.Abort(str(inst))
462
462
463 def docopy(ui, repo, pats, opts, wlock):
463 def docopy(ui, repo, pats, opts, wlock):
464 # called with the repo lock held
464 # called with the repo lock held
465 #
465 #
466 # hgsep => pathname that uses "/" to separate directories
466 # hgsep => pathname that uses "/" to separate directories
467 # ossep => pathname that uses os.sep to separate directories
467 # ossep => pathname that uses os.sep to separate directories
468 cwd = repo.getcwd()
468 cwd = repo.getcwd()
469 errors = 0
469 errors = 0
470 copied = []
470 copied = []
471 targets = {}
471 targets = {}
472
472
473 # abs: hgsep
473 # abs: hgsep
474 # rel: ossep
474 # rel: ossep
475 # return: hgsep
475 # return: hgsep
476 def okaytocopy(abs, rel, exact):
476 def okaytocopy(abs, rel, exact):
477 reasons = {'?': _('is not managed'),
477 reasons = {'?': _('is not managed'),
478 'a': _('has been marked for add'),
478 'a': _('has been marked for add'),
479 'r': _('has been marked for remove')}
479 'r': _('has been marked for remove')}
480 state = repo.dirstate.state(abs)
480 state = repo.dirstate.state(abs)
481 reason = reasons.get(state)
481 reason = reasons.get(state)
482 if reason:
482 if reason:
483 if state == 'a':
483 if state == 'a':
484 origsrc = repo.dirstate.copied(abs)
484 origsrc = repo.dirstate.copied(abs)
485 if origsrc is not None:
485 if origsrc is not None:
486 return origsrc
486 return origsrc
487 if exact:
487 if exact:
488 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
488 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 else:
489 else:
490 return abs
490 return abs
491
491
492 # origsrc: hgsep
492 # origsrc: hgsep
493 # abssrc: hgsep
493 # abssrc: hgsep
494 # relsrc: ossep
494 # relsrc: ossep
495 # target: ossep
495 # target: ossep
496 def copy(origsrc, abssrc, relsrc, target, exact):
496 def copy(origsrc, abssrc, relsrc, target, exact):
497 abstarget = util.canonpath(repo.root, cwd, target)
497 abstarget = util.canonpath(repo.root, cwd, target)
498 reltarget = util.pathto(repo.root, cwd, abstarget)
498 reltarget = util.pathto(repo.root, cwd, abstarget)
499 prevsrc = targets.get(abstarget)
499 prevsrc = targets.get(abstarget)
500 if prevsrc is not None:
500 if prevsrc is not None:
501 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
501 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
502 (reltarget, util.localpath(abssrc),
502 (reltarget, util.localpath(abssrc),
503 util.localpath(prevsrc)))
503 util.localpath(prevsrc)))
504 return
504 return
505 if (not opts['after'] and os.path.exists(reltarget) or
505 if (not opts['after'] and os.path.exists(reltarget) or
506 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
506 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
507 if not opts['force']:
507 if not opts['force']:
508 ui.warn(_('%s: not overwriting - file exists\n') %
508 ui.warn(_('%s: not overwriting - file exists\n') %
509 reltarget)
509 reltarget)
510 return
510 return
511 if not opts['after'] and not opts.get('dry_run'):
511 if not opts['after'] and not opts.get('dry_run'):
512 os.unlink(reltarget)
512 os.unlink(reltarget)
513 if opts['after']:
513 if opts['after']:
514 if not os.path.exists(reltarget):
514 if not os.path.exists(reltarget):
515 return
515 return
516 else:
516 else:
517 targetdir = os.path.dirname(reltarget) or '.'
517 targetdir = os.path.dirname(reltarget) or '.'
518 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
518 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
519 os.makedirs(targetdir)
519 os.makedirs(targetdir)
520 try:
520 try:
521 restore = repo.dirstate.state(abstarget) == 'r'
521 restore = repo.dirstate.state(abstarget) == 'r'
522 if restore and not opts.get('dry_run'):
522 if restore and not opts.get('dry_run'):
523 repo.undelete([abstarget], wlock)
523 repo.undelete([abstarget], wlock)
524 try:
524 try:
525 if not opts.get('dry_run'):
525 if not opts.get('dry_run'):
526 util.copyfile(relsrc, reltarget)
526 util.copyfile(relsrc, reltarget)
527 restore = False
527 restore = False
528 finally:
528 finally:
529 if restore:
529 if restore:
530 repo.remove([abstarget], wlock)
530 repo.remove([abstarget], wlock)
531 except IOError, inst:
531 except IOError, inst:
532 if inst.errno == errno.ENOENT:
532 if inst.errno == errno.ENOENT:
533 ui.warn(_('%s: deleted in working copy\n') % relsrc)
533 ui.warn(_('%s: deleted in working copy\n') % relsrc)
534 else:
534 else:
535 ui.warn(_('%s: cannot copy - %s\n') %
535 ui.warn(_('%s: cannot copy - %s\n') %
536 (relsrc, inst.strerror))
536 (relsrc, inst.strerror))
537 errors += 1
537 errors += 1
538 return
538 return
539 if ui.verbose or not exact:
539 if ui.verbose or not exact:
540 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
540 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
541 targets[abstarget] = abssrc
541 targets[abstarget] = abssrc
542 if abstarget != origsrc and not opts.get('dry_run'):
542 if abstarget != origsrc and not opts.get('dry_run'):
543 repo.copy(origsrc, abstarget, wlock)
543 repo.copy(origsrc, abstarget, wlock)
544 copied.append((abssrc, relsrc, exact))
544 copied.append((abssrc, relsrc, exact))
545
545
546 # pat: ossep
546 # pat: ossep
547 # dest ossep
547 # dest ossep
548 # srcs: list of (hgsep, hgsep, ossep, bool)
548 # srcs: list of (hgsep, hgsep, ossep, bool)
549 # return: function that takes hgsep and returns ossep
549 # return: function that takes hgsep and returns ossep
550 def targetpathfn(pat, dest, srcs):
550 def targetpathfn(pat, dest, srcs):
551 if os.path.isdir(pat):
551 if os.path.isdir(pat):
552 abspfx = util.canonpath(repo.root, cwd, pat)
552 abspfx = util.canonpath(repo.root, cwd, pat)
553 abspfx = util.localpath(abspfx)
553 abspfx = util.localpath(abspfx)
554 if destdirexists:
554 if destdirexists:
555 striplen = len(os.path.split(abspfx)[0])
555 striplen = len(os.path.split(abspfx)[0])
556 else:
556 else:
557 striplen = len(abspfx)
557 striplen = len(abspfx)
558 if striplen:
558 if striplen:
559 striplen += len(os.sep)
559 striplen += len(os.sep)
560 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
560 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
561 elif destdirexists:
561 elif destdirexists:
562 res = lambda p: os.path.join(dest,
562 res = lambda p: os.path.join(dest,
563 os.path.basename(util.localpath(p)))
563 os.path.basename(util.localpath(p)))
564 else:
564 else:
565 res = lambda p: dest
565 res = lambda p: dest
566 return res
566 return res
567
567
568 # pat: ossep
568 # pat: ossep
569 # dest ossep
569 # dest ossep
570 # srcs: list of (hgsep, hgsep, ossep, bool)
570 # srcs: list of (hgsep, hgsep, ossep, bool)
571 # return: function that takes hgsep and returns ossep
571 # return: function that takes hgsep and returns ossep
572 def targetpathafterfn(pat, dest, srcs):
572 def targetpathafterfn(pat, dest, srcs):
573 if util.patkind(pat, None)[0]:
573 if util.patkind(pat, None)[0]:
574 # a mercurial pattern
574 # a mercurial pattern
575 res = lambda p: os.path.join(dest,
575 res = lambda p: os.path.join(dest,
576 os.path.basename(util.localpath(p)))
576 os.path.basename(util.localpath(p)))
577 else:
577 else:
578 abspfx = util.canonpath(repo.root, cwd, pat)
578 abspfx = util.canonpath(repo.root, cwd, pat)
579 if len(abspfx) < len(srcs[0][0]):
579 if len(abspfx) < len(srcs[0][0]):
580 # A directory. Either the target path contains the last
580 # A directory. Either the target path contains the last
581 # component of the source path or it does not.
581 # component of the source path or it does not.
582 def evalpath(striplen):
582 def evalpath(striplen):
583 score = 0
583 score = 0
584 for s in srcs:
584 for s in srcs:
585 t = os.path.join(dest, util.localpath(s[0])[striplen:])
585 t = os.path.join(dest, util.localpath(s[0])[striplen:])
586 if os.path.exists(t):
586 if os.path.exists(t):
587 score += 1
587 score += 1
588 return score
588 return score
589
589
590 abspfx = util.localpath(abspfx)
590 abspfx = util.localpath(abspfx)
591 striplen = len(abspfx)
591 striplen = len(abspfx)
592 if striplen:
592 if striplen:
593 striplen += len(os.sep)
593 striplen += len(os.sep)
594 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
594 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
595 score = evalpath(striplen)
595 score = evalpath(striplen)
596 striplen1 = len(os.path.split(abspfx)[0])
596 striplen1 = len(os.path.split(abspfx)[0])
597 if striplen1:
597 if striplen1:
598 striplen1 += len(os.sep)
598 striplen1 += len(os.sep)
599 if evalpath(striplen1) > score:
599 if evalpath(striplen1) > score:
600 striplen = striplen1
600 striplen = striplen1
601 res = lambda p: os.path.join(dest,
601 res = lambda p: os.path.join(dest,
602 util.localpath(p)[striplen:])
602 util.localpath(p)[striplen:])
603 else:
603 else:
604 # a file
604 # a file
605 if destdirexists:
605 if destdirexists:
606 res = lambda p: os.path.join(dest,
606 res = lambda p: os.path.join(dest,
607 os.path.basename(util.localpath(p)))
607 os.path.basename(util.localpath(p)))
608 else:
608 else:
609 res = lambda p: dest
609 res = lambda p: dest
610 return res
610 return res
611
611
612
612
613 pats = util.expand_glob(pats)
613 pats = util.expand_glob(pats)
614 if not pats:
614 if not pats:
615 raise util.Abort(_('no source or destination specified'))
615 raise util.Abort(_('no source or destination specified'))
616 if len(pats) == 1:
616 if len(pats) == 1:
617 raise util.Abort(_('no destination specified'))
617 raise util.Abort(_('no destination specified'))
618 dest = pats.pop()
618 dest = pats.pop()
619 destdirexists = os.path.isdir(dest)
619 destdirexists = os.path.isdir(dest)
620 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
620 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
621 raise util.Abort(_('with multiple sources, destination must be an '
621 raise util.Abort(_('with multiple sources, destination must be an '
622 'existing directory'))
622 'existing directory'))
623 if opts['after']:
623 if opts['after']:
624 tfn = targetpathafterfn
624 tfn = targetpathafterfn
625 else:
625 else:
626 tfn = targetpathfn
626 tfn = targetpathfn
627 copylist = []
627 copylist = []
628 for pat in pats:
628 for pat in pats:
629 srcs = []
629 srcs = []
630 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
630 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
631 globbed=True):
631 globbed=True):
632 origsrc = okaytocopy(abssrc, relsrc, exact)
632 origsrc = okaytocopy(abssrc, relsrc, exact)
633 if origsrc:
633 if origsrc:
634 srcs.append((origsrc, abssrc, relsrc, exact))
634 srcs.append((origsrc, abssrc, relsrc, exact))
635 if not srcs:
635 if not srcs:
636 continue
636 continue
637 copylist.append((tfn(pat, dest, srcs), srcs))
637 copylist.append((tfn(pat, dest, srcs), srcs))
638 if not copylist:
638 if not copylist:
639 raise util.Abort(_('no files to copy'))
639 raise util.Abort(_('no files to copy'))
640
640
641 for targetpath, srcs in copylist:
641 for targetpath, srcs in copylist:
642 for origsrc, abssrc, relsrc, exact in srcs:
642 for origsrc, abssrc, relsrc, exact in srcs:
643 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
643 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
644
644
645 if errors:
645 if errors:
646 ui.warn(_('(consider using --after)\n'))
646 ui.warn(_('(consider using --after)\n'))
647 return errors, copied
647 return errors, copied
648
648
649 def copy(ui, repo, *pats, **opts):
649 def copy(ui, repo, *pats, **opts):
650 """mark files as copied for the next commit
650 """mark files as copied for the next commit
651
651
652 Mark dest as having copies of source files. If dest is a
652 Mark dest as having copies of source files. If dest is a
653 directory, copies are put in that directory. If dest is a file,
653 directory, copies are put in that directory. If dest is a file,
654 there can only be one source.
654 there can only be one source.
655
655
656 By default, this command copies the contents of files as they
656 By default, this command copies the contents of files as they
657 stand in the working directory. If invoked with --after, the
657 stand in the working directory. If invoked with --after, the
658 operation is recorded, but no copying is performed.
658 operation is recorded, but no copying is performed.
659
659
660 This command takes effect in the next commit. To undo a copy
660 This command takes effect in the next commit. To undo a copy
661 before that, see hg revert.
661 before that, see hg revert.
662 """
662 """
663 wlock = repo.wlock(0)
663 wlock = repo.wlock(0)
664 errs, copied = docopy(ui, repo, pats, opts, wlock)
664 errs, copied = docopy(ui, repo, pats, opts, wlock)
665 return errs
665 return errs
666
666
667 def debugancestor(ui, index, rev1, rev2):
667 def debugancestor(ui, index, rev1, rev2):
668 """find the ancestor revision of two revisions in a given index"""
668 """find the ancestor revision of two revisions in a given index"""
669 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
669 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
670 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
670 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
671 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
671 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
672
672
673 def debugcomplete(ui, cmd='', **opts):
673 def debugcomplete(ui, cmd='', **opts):
674 """returns the completion list associated with the given command"""
674 """returns the completion list associated with the given command"""
675
675
676 if opts['options']:
676 if opts['options']:
677 options = []
677 options = []
678 otables = [globalopts]
678 otables = [globalopts]
679 if cmd:
679 if cmd:
680 aliases, entry = findcmd(ui, cmd)
680 aliases, entry = findcmd(ui, cmd)
681 otables.append(entry[1])
681 otables.append(entry[1])
682 for t in otables:
682 for t in otables:
683 for o in t:
683 for o in t:
684 if o[0]:
684 if o[0]:
685 options.append('-%s' % o[0])
685 options.append('-%s' % o[0])
686 options.append('--%s' % o[1])
686 options.append('--%s' % o[1])
687 ui.write("%s\n" % "\n".join(options))
687 ui.write("%s\n" % "\n".join(options))
688 return
688 return
689
689
690 clist = findpossible(ui, cmd).keys()
690 clist = findpossible(ui, cmd).keys()
691 clist.sort()
691 clist.sort()
692 ui.write("%s\n" % "\n".join(clist))
692 ui.write("%s\n" % "\n".join(clist))
693
693
694 def debugrebuildstate(ui, repo, rev=""):
694 def debugrebuildstate(ui, repo, rev=""):
695 """rebuild the dirstate as it would look like for the given revision"""
695 """rebuild the dirstate as it would look like for the given revision"""
696 if rev == "":
696 if rev == "":
697 rev = repo.changelog.tip()
697 rev = repo.changelog.tip()
698 ctx = repo.changectx(rev)
698 ctx = repo.changectx(rev)
699 files = ctx.manifest()
699 files = ctx.manifest()
700 wlock = repo.wlock()
700 wlock = repo.wlock()
701 repo.dirstate.rebuild(rev, files)
701 repo.dirstate.rebuild(rev, files)
702
702
703 def debugcheckstate(ui, repo):
703 def debugcheckstate(ui, repo):
704 """validate the correctness of the current dirstate"""
704 """validate the correctness of the current dirstate"""
705 parent1, parent2 = repo.dirstate.parents()
705 parent1, parent2 = repo.dirstate.parents()
706 repo.dirstate.read()
706 repo.dirstate.read()
707 dc = repo.dirstate.map
707 dc = repo.dirstate.map
708 keys = dc.keys()
708 keys = dc.keys()
709 keys.sort()
709 keys.sort()
710 m1 = repo.changectx(parent1).manifest()
710 m1 = repo.changectx(parent1).manifest()
711 m2 = repo.changectx(parent2).manifest()
711 m2 = repo.changectx(parent2).manifest()
712 errors = 0
712 errors = 0
713 for f in dc:
713 for f in dc:
714 state = repo.dirstate.state(f)
714 state = repo.dirstate.state(f)
715 if state in "nr" and f not in m1:
715 if state in "nr" and f not in m1:
716 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
717 errors += 1
717 errors += 1
718 if state in "a" and f in m1:
718 if state in "a" and f in m1:
719 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
720 errors += 1
720 errors += 1
721 if state in "m" and f not in m1 and f not in m2:
721 if state in "m" and f not in m1 and f not in m2:
722 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 ui.warn(_("%s in state %s, but not in either manifest\n") %
723 (f, state))
723 (f, state))
724 errors += 1
724 errors += 1
725 for f in m1:
725 for f in m1:
726 state = repo.dirstate.state(f)
726 state = repo.dirstate.state(f)
727 if state not in "nrm":
727 if state not in "nrm":
728 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
729 errors += 1
729 errors += 1
730 if errors:
730 if errors:
731 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 error = _(".hg/dirstate inconsistent with current parent's manifest")
732 raise util.Abort(error)
732 raise util.Abort(error)
733
733
734 def showconfig(ui, repo, *values, **opts):
734 def showconfig(ui, repo, *values, **opts):
735 """show combined config settings from all hgrc files
735 """show combined config settings from all hgrc files
736
736
737 With no args, print names and values of all config items.
737 With no args, print names and values of all config items.
738
738
739 With one arg of the form section.name, print just the value of
739 With one arg of the form section.name, print just the value of
740 that config item.
740 that config item.
741
741
742 With multiple args, print names and values of all config items
742 With multiple args, print names and values of all config items
743 with matching section names."""
743 with matching section names."""
744
744
745 untrusted = bool(opts.get('untrusted'))
745 untrusted = bool(opts.get('untrusted'))
746 if values:
746 if values:
747 if len([v for v in values if '.' in v]) > 1:
747 if len([v for v in values if '.' in v]) > 1:
748 raise util.Abort(_('only one config item permitted'))
748 raise util.Abort(_('only one config item permitted'))
749 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 for section, name, value in ui.walkconfig(untrusted=untrusted):
750 sectname = section + '.' + name
750 sectname = section + '.' + name
751 if values:
751 if values:
752 for v in values:
752 for v in values:
753 if v == section:
753 if v == section:
754 ui.write('%s=%s\n' % (sectname, value))
754 ui.write('%s=%s\n' % (sectname, value))
755 elif v == sectname:
755 elif v == sectname:
756 ui.write(value, '\n')
756 ui.write(value, '\n')
757 else:
757 else:
758 ui.write('%s=%s\n' % (sectname, value))
758 ui.write('%s=%s\n' % (sectname, value))
759
759
760 def debugsetparents(ui, repo, rev1, rev2=None):
760 def debugsetparents(ui, repo, rev1, rev2=None):
761 """manually set the parents of the current working directory
761 """manually set the parents of the current working directory
762
762
763 This is useful for writing repository conversion tools, but should
763 This is useful for writing repository conversion tools, but should
764 be used with care.
764 be used with care.
765 """
765 """
766
766
767 if not rev2:
767 if not rev2:
768 rev2 = hex(nullid)
768 rev2 = hex(nullid)
769
769
770 wlock = repo.wlock()
770 wlock = repo.wlock()
771 try:
771 try:
772 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
772 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
773 finally:
773 finally:
774 wlock.release()
774 wlock.release()
775
775
776 def debugstate(ui, repo):
776 def debugstate(ui, repo):
777 """show the contents of the current dirstate"""
777 """show the contents of the current dirstate"""
778 repo.dirstate.read()
778 repo.dirstate.read()
779 dc = repo.dirstate.map
779 dc = repo.dirstate.map
780 keys = dc.keys()
780 keys = dc.keys()
781 keys.sort()
781 keys.sort()
782 for file_ in keys:
782 for file_ in keys:
783 if dc[file_][3] == -1:
783 if dc[file_][3] == -1:
784 # Pad or slice to locale representation
784 # Pad or slice to locale representation
785 locale_len = len(time.strftime("%x %X", time.localtime(0)))
785 locale_len = len(time.strftime("%x %X", time.localtime(0)))
786 timestr = 'unset'
786 timestr = 'unset'
787 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
787 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
788 else:
788 else:
789 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
789 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
790 ui.write("%c %3o %10d %s %s\n"
790 ui.write("%c %3o %10d %s %s\n"
791 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
791 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
792 timestr, file_))
792 timestr, file_))
793 for f in repo.dirstate.copies():
793 for f in repo.dirstate.copies():
794 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
794 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
795
795
796 def debugdata(ui, file_, rev):
796 def debugdata(ui, file_, rev):
797 """dump the contents of a data file revision"""
797 """dump the contents of a data file revision"""
798 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
798 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
799 try:
799 try:
800 ui.write(r.revision(r.lookup(rev)))
800 ui.write(r.revision(r.lookup(rev)))
801 except KeyError:
801 except KeyError:
802 raise util.Abort(_('invalid revision identifier %s') % rev)
802 raise util.Abort(_('invalid revision identifier %s') % rev)
803
803
804 def debugdate(ui, date, range=None, **opts):
804 def debugdate(ui, date, range=None, **opts):
805 """parse and display a date"""
805 """parse and display a date"""
806 if opts["extended"]:
806 if opts["extended"]:
807 d = util.parsedate(date, util.extendeddateformats)
807 d = util.parsedate(date, util.extendeddateformats)
808 else:
808 else:
809 d = util.parsedate(date)
809 d = util.parsedate(date)
810 ui.write("internal: %s %s\n" % d)
810 ui.write("internal: %s %s\n" % d)
811 ui.write("standard: %s\n" % util.datestr(d))
811 ui.write("standard: %s\n" % util.datestr(d))
812 if range:
812 if range:
813 m = util.matchdate(range)
813 m = util.matchdate(range)
814 ui.write("match: %s\n" % m(d[0]))
814 ui.write("match: %s\n" % m(d[0]))
815
815
816 def debugindex(ui, file_):
816 def debugindex(ui, file_):
817 """dump the contents of an index file"""
817 """dump the contents of an index file"""
818 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
818 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
819 ui.write(" rev offset length base linkrev" +
819 ui.write(" rev offset length base linkrev" +
820 " nodeid p1 p2\n")
820 " nodeid p1 p2\n")
821 for i in xrange(r.count()):
821 for i in xrange(r.count()):
822 node = r.node(i)
822 node = r.node(i)
823 pp = r.parents(node)
823 pp = r.parents(node)
824 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
824 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
825 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
825 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
826 short(node), short(pp[0]), short(pp[1])))
826 short(node), short(pp[0]), short(pp[1])))
827
827
828 def debugindexdot(ui, file_):
828 def debugindexdot(ui, file_):
829 """dump an index DAG as a .dot file"""
829 """dump an index DAG as a .dot file"""
830 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
830 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
831 ui.write("digraph G {\n")
831 ui.write("digraph G {\n")
832 for i in xrange(r.count()):
832 for i in xrange(r.count()):
833 node = r.node(i)
833 node = r.node(i)
834 pp = r.parents(node)
834 pp = r.parents(node)
835 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
835 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
836 if pp[1] != nullid:
836 if pp[1] != nullid:
837 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
837 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
838 ui.write("}\n")
838 ui.write("}\n")
839
839
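# Editor's sketch (not part of commands.py): the .dot output of debugindexdot
# above is just "parent -> child" edges wrapped in "digraph G { ... }".  The
# parent list below is hypothetical sample data, not read from a revlog.
def dag_to_dot(parentrevs):
    # parentrevs: list indexed by revision; each entry lists parent revisions
    lines = ["digraph G {"]
    for rev, parents in enumerate(parentrevs):
        for p in parents:
            lines.append("\t%d -> %d" % (p, rev))
    lines.append("}")
    return "\n".join(lines) + "\n"

# dag_to_dot([[], [0], [0], [1, 2]])   # revision 3 is a merge of 1 and 2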
840 def debuginstall(ui):
840 def debuginstall(ui):
841 '''test Mercurial installation'''
841 '''test Mercurial installation'''
842
842
843 def writetemp(contents):
843 def writetemp(contents):
844 (fd, name) = tempfile.mkstemp()
844 (fd, name) = tempfile.mkstemp()
845 f = os.fdopen(fd, "wb")
845 f = os.fdopen(fd, "wb")
846 f.write(contents)
846 f.write(contents)
847 f.close()
847 f.close()
848 return name
848 return name
849
849
850 problems = 0
850 problems = 0
851
851
852 # encoding
852 # encoding
853 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
853 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
854 try:
854 try:
855 util.fromlocal("test")
855 util.fromlocal("test")
856 except util.Abort, inst:
856 except util.Abort, inst:
857 ui.write(" %s\n" % inst)
857 ui.write(" %s\n" % inst)
858 ui.write(_(" (check that your locale is properly set)\n"))
858 ui.write(_(" (check that your locale is properly set)\n"))
859 problems += 1
859 problems += 1
860
860
861 # compiled modules
861 # compiled modules
862 ui.status(_("Checking extensions...\n"))
862 ui.status(_("Checking extensions...\n"))
863 try:
863 try:
864 import bdiff, mpatch, base85
864 import bdiff, mpatch, base85
865 except Exception, inst:
865 except Exception, inst:
866 ui.write(" %s\n" % inst)
866 ui.write(" %s\n" % inst)
867 ui.write(_(" One or more extensions could not be found"))
867 ui.write(_(" One or more extensions could not be found"))
868 ui.write(_(" (check that you compiled the extensions)\n"))
868 ui.write(_(" (check that you compiled the extensions)\n"))
869 problems += 1
869 problems += 1
870
870
871 # templates
871 # templates
872 ui.status(_("Checking templates...\n"))
872 ui.status(_("Checking templates...\n"))
873 try:
873 try:
874 import templater
874 import templater
875 t = templater.templater(templater.templatepath("map-cmdline.default"))
875 t = templater.templater(templater.templatepath("map-cmdline.default"))
876 except Exception, inst:
876 except Exception, inst:
877 ui.write(" %s\n" % inst)
877 ui.write(" %s\n" % inst)
878 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
878 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
879 problems += 1
879 problems += 1
880
880
881 # patch
881 # patch
882 ui.status(_("Checking patch...\n"))
882 ui.status(_("Checking patch...\n"))
883 path = os.environ.get('PATH', '')
883 path = os.environ.get('PATH', '')
884 patcher = util.find_in_path('gpatch', path,
884 patcher = util.find_in_path('gpatch', path,
885 util.find_in_path('patch', path, None))
885 util.find_in_path('patch', path, None))
886 if not patcher:
886 if not patcher:
887 ui.write(_(" Can't find patch or gpatch in PATH\n"))
887 ui.write(_(" Can't find patch or gpatch in PATH\n"))
888 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
888 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
889 problems += 1
889 problems += 1
890 else:
890 else:
891 # actually attempt a patch here
891 # actually attempt a patch here
892 a = "1\n2\n3\n4\n"
892 a = "1\n2\n3\n4\n"
893 b = "1\n2\n3\ninsert\n4\n"
893 b = "1\n2\n3\ninsert\n4\n"
894 d = mdiff.unidiff(a, None, b, None, "a")
894 d = mdiff.unidiff(a, None, b, None, "a")
895 fa = writetemp(a)
895 fa = writetemp(a)
896 fd = writetemp(d)
896 fd = writetemp(d)
897 fp = os.popen('%s %s %s' % (patcher, fa, fd))
897 fp = os.popen('%s %s %s' % (patcher, fa, fd))
898 files = []
898 files = []
899 output = ""
899 output = ""
900 for line in fp:
900 for line in fp:
901 output += line
901 output += line
902 if line.startswith('patching file '):
902 if line.startswith('patching file '):
903 pf = util.parse_patch_output(line.rstrip())
903 pf = util.parse_patch_output(line.rstrip())
904 files.append(pf)
904 files.append(pf)
905 if files != [fa]:
905 if files != [fa]:
906 ui.write(_(" unexpected patch output!"))
906 ui.write(_(" unexpected patch output!"))
907 ui.write(_(" (you may have an incompatible version of patch)\n"))
907 ui.write(_(" (you may have an incompatible version of patch)\n"))
908 ui.write(output)
908 ui.write(output)
909 problems += 1
909 problems += 1
910 a = file(fa).read()
910 a = file(fa).read()
911 if a != b:
911 if a != b:
912 ui.write(_(" patch test failed!"))
912 ui.write(_(" patch test failed!"))
913 ui.write(_(" (you may have an incompatible version of patch)\n"))
913 ui.write(_(" (you may have an incompatible version of patch)\n"))
914 problems += 1
914 problems += 1
915 os.unlink(fa)
915 os.unlink(fa)
916 os.unlink(fd)
916 os.unlink(fd)
917
917
918 # merge helper
918 # merge helper
919 ui.status(_("Checking merge helper...\n"))
919 ui.status(_("Checking merge helper...\n"))
920 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
920 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
921 or "hgmerge")
921 or "hgmerge")
922 cmdpath = util.find_in_path(cmd, path)
922 cmdpath = util.find_in_path(cmd, path)
923 if not cmdpath:
923 if not cmdpath:
924 cmdpath = util.find_in_path(cmd.split()[0], path)
924 cmdpath = util.find_in_path(cmd.split()[0], path)
925 if not cmdpath:
925 if not cmdpath:
926 if cmd == 'hgmerge':
926 if cmd == 'hgmerge':
927 ui.write(_(" No merge helper set and can't find default"
927 ui.write(_(" No merge helper set and can't find default"
928 " hgmerge script in PATH\n"))
928 " hgmerge script in PATH\n"))
929 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
929 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
930 else:
930 else:
931 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
931 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
932 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
932 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
933 problems += 1
933 problems += 1
934 else:
934 else:
935 # actually attempt a patch here
935 # actually attempt a patch here
936 fa = writetemp("1\n2\n3\n4\n")
936 fa = writetemp("1\n2\n3\n4\n")
937 fl = writetemp("1\n2\n3\ninsert\n4\n")
937 fl = writetemp("1\n2\n3\ninsert\n4\n")
938 fr = writetemp("begin\n1\n2\n3\n4\n")
938 fr = writetemp("begin\n1\n2\n3\n4\n")
939 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
939 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
940 if r:
940 if r:
941 ui.write(_(" got unexpected merge error %d!") % r)
941 ui.write(_(" got unexpected merge error %d!") % r)
942 problems += 1
942 problems += 1
943 m = file(fl).read()
943 m = file(fl).read()
944 if m != "begin\n1\n2\n3\ninsert\n4\n":
944 if m != "begin\n1\n2\n3\ninsert\n4\n":
945 ui.write(_(" got unexpected merge results!") % r)
945 ui.write(_(" got unexpected merge results!") % r)
946 ui.write(_(" (your merge helper may have the"
946 ui.write(_(" (your merge helper may have the"
947 " wrong argument order)\n"))
947 " wrong argument order)\n"))
948 ui.write(m)
948 ui.write(m)
949 os.unlink(fa)
949 os.unlink(fa)
950 os.unlink(fl)
950 os.unlink(fl)
951 os.unlink(fr)
951 os.unlink(fr)
952
952
953 # editor
953 # editor
954 ui.status(_("Checking commit editor...\n"))
954 ui.status(_("Checking commit editor...\n"))
955 editor = (os.environ.get("HGEDITOR") or
955 editor = (os.environ.get("HGEDITOR") or
956 ui.config("ui", "editor") or
956 ui.config("ui", "editor") or
957 os.environ.get("EDITOR", "vi"))
957 os.environ.get("EDITOR", "vi"))
958 cmdpath = util.find_in_path(editor, path)
958 cmdpath = util.find_in_path(editor, path)
959 if not cmdpath:
959 if not cmdpath:
960 cmdpath = util.find_in_path(editor.split()[0], path)
960 cmdpath = util.find_in_path(editor.split()[0], path)
961 if not cmdpath:
961 if not cmdpath:
962 if editor == 'vi':
962 if editor == 'vi':
963 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
963 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
964 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
964 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
965 else:
965 else:
966 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
966 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
967 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
967 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
968 problems += 1
968 problems += 1
969
969
970 # check username
970 # check username
971 ui.status(_("Checking username...\n"))
971 ui.status(_("Checking username...\n"))
972 user = os.environ.get("HGUSER")
972 user = os.environ.get("HGUSER")
973 if user is None:
973 if user is None:
974 user = ui.config("ui", "username")
974 user = ui.config("ui", "username")
975 if user is None:
975 if user is None:
976 user = os.environ.get("EMAIL")
976 user = os.environ.get("EMAIL")
977 if not user:
977 if not user:
978 ui.warn(" ")
978 ui.warn(" ")
979 ui.username()
979 ui.username()
980 ui.write(_(" (specify a username in your .hgrc file)\n"))
980 ui.write(_(" (specify a username in your .hgrc file)\n"))
981
981
982 if not problems:
982 if not problems:
983 ui.status(_("No problems detected\n"))
983 ui.status(_("No problems detected\n"))
984 else:
984 else:
985 ui.write(_("%s problems detected,"
985 ui.write(_("%s problems detected,"
986 " please check your install!\n") % problems)
986 " please check your install!\n") % problems)
987
987
988 return problems
988 return problems
989
989
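# Editor's sketch (not part of commands.py): the temp-file pattern debuginstall
# relies on above -- mkstemp, write, hand the file to an external tool, unlink.
# Wrapping the cleanup in try/finally is a variation on what debuginstall does,
# shown here so the file is removed even when the check in the middle fails.
import os, tempfile

def with_temp_contents(contents, use):
    fd, name = tempfile.mkstemp()
    f = os.fdopen(fd, "wb")
    f.write(contents)
    f.close()
    try:
        return use(name)             # e.g. run patch or a merge helper on it
    finally:
        os.unlink(name)              # always remove the temporary file

# with_temp_contents("1\n2\n3\n4\n", lambda name: file(name).read())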
990 def debugrename(ui, repo, file1, *pats, **opts):
990 def debugrename(ui, repo, file1, *pats, **opts):
991 """dump rename information"""
991 """dump rename information"""
992
992
993 ctx = repo.changectx(opts.get('rev', 'tip'))
993 ctx = repo.changectx(opts.get('rev', 'tip'))
994 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
994 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
995 ctx.node()):
995 ctx.node()):
996 m = ctx.filectx(abs).renamed()
996 m = ctx.filectx(abs).renamed()
997 if m:
997 if m:
998 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
998 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
999 else:
999 else:
1000 ui.write(_("%s not renamed\n") % rel)
1000 ui.write(_("%s not renamed\n") % rel)
1001
1001
1002 def debugwalk(ui, repo, *pats, **opts):
1002 def debugwalk(ui, repo, *pats, **opts):
1003 """show how files match on given patterns"""
1003 """show how files match on given patterns"""
1004 items = list(cmdutil.walk(repo, pats, opts))
1004 items = list(cmdutil.walk(repo, pats, opts))
1005 if not items:
1005 if not items:
1006 return
1006 return
1007 fmt = '%%s %%-%ds %%-%ds %%s' % (
1007 fmt = '%%s %%-%ds %%-%ds %%s' % (
1008 max([len(abs) for (src, abs, rel, exact) in items]),
1008 max([len(abs) for (src, abs, rel, exact) in items]),
1009 max([len(rel) for (src, abs, rel, exact) in items]))
1009 max([len(rel) for (src, abs, rel, exact) in items]))
1010 for src, abs, rel, exact in items:
1010 for src, abs, rel, exact in items:
1011 line = fmt % (src, abs, rel, exact and 'exact' or '')
1011 line = fmt % (src, abs, rel, exact and 'exact' or '')
1012 ui.write("%s\n" % line.rstrip())
1012 ui.write("%s\n" % line.rstrip())
1013
1013
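# Editor's sketch (not part of commands.py): how debugwalk builds its column
# layout above.  Doubling the percent signs leaves literal %-Ns specifiers in
# place after the first pass, so the second pass can fill in the real values.
def columns(rows):
    # rows: list of (abs, rel) pairs; the width comes from the longest abs name
    fmt = '%%-%ds  %%s' % max([len(abs_) for abs_, rel in rows])
    return [fmt % (abs_, rel) for abs_, rel in rows]

# columns([('dir/file.txt', 'file.txt'), ('a', 'a')]) lines up the second column.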
1014 def diff(ui, repo, *pats, **opts):
1014 def diff(ui, repo, *pats, **opts):
1015 """diff repository (or selected files)
1015 """diff repository (or selected files)
1016
1016
1017 Show differences between revisions for the specified files.
1017 Show differences between revisions for the specified files.
1018
1018
1019 Differences between files are shown using the unified diff format.
1019 Differences between files are shown using the unified diff format.
1020
1020
1021 NOTE: diff may generate unexpected results for merges, as it will
1021 NOTE: diff may generate unexpected results for merges, as it will
1022 default to comparing against the working directory's first parent
1022 default to comparing against the working directory's first parent
1023 changeset if no revisions are specified.
1023 changeset if no revisions are specified.
1024
1024
1025 When two revision arguments are given, then changes are shown
1025 When two revision arguments are given, then changes are shown
1026 between those revisions. If only one revision is specified then
1026 between those revisions. If only one revision is specified then
1027 that revision is compared to the working directory, and, when no
1027 that revision is compared to the working directory, and, when no
1028 revisions are specified, the working directory is compared
1028 revisions are specified, the working directory is compared
1029 to its parent.
1029 to its parent.
1030
1030
1031 Without the -a option, diff will avoid generating diffs of files
1031 Without the -a option, diff will avoid generating diffs of files
1032 it detects as binary. With -a, diff will generate a diff anyway,
1032 it detects as binary. With -a, diff will generate a diff anyway,
1033 probably with undesirable results.
1033 probably with undesirable results.
1034 """
1034 """
1035 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1035 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1036
1036
1037 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1037 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1038
1038
1039 patch.diff(repo, node1, node2, fns, match=matchfn,
1039 patch.diff(repo, node1, node2, fns, match=matchfn,
1040 opts=patch.diffopts(ui, opts))
1040 opts=patch.diffopts(ui, opts))
1041
1041
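# Editor's sketch (not part of commands.py): hg diff above renders unified
# diffs through mercurial's own patch/mdiff code; the stdlib difflib produces
# the same format and is handy for illustrating what "unified diff" means.
import difflib

def unified(a, b, name):
    # a, b: old and new file contents as strings; name labels both sides
    lines = difflib.unified_diff(a.splitlines(True), b.splitlines(True),
                                 fromfile="a/" + name, tofile="b/" + name)
    return "".join(lines)

# print unified("1\n2\n3\n4\n", "1\n2\n3\ninsert\n4\n", "sample.txt")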
1042 def export(ui, repo, *changesets, **opts):
1042 def export(ui, repo, *changesets, **opts):
1043 """dump the header and diffs for one or more changesets
1043 """dump the header and diffs for one or more changesets
1044
1044
1045 Print the changeset header and diffs for one or more revisions.
1045 Print the changeset header and diffs for one or more revisions.
1046
1046
1047 The information shown in the changeset header is: author,
1047 The information shown in the changeset header is: author,
1048 changeset hash, parent(s) and commit comment.
1048 changeset hash, parent(s) and commit comment.
1049
1049
1050 NOTE: export may generate unexpected diff output for merge changesets,
1050 NOTE: export may generate unexpected diff output for merge changesets,
1051 as it will compare the merge changeset against its first parent only.
1051 as it will compare the merge changeset against its first parent only.
1052
1052
1053 Output may be to a file, in which case the name of the file is
1053 Output may be to a file, in which case the name of the file is
1054 given using a format string. The formatting rules are as follows:
1054 given using a format string. The formatting rules are as follows:
1055
1055
1056 %% literal "%" character
1056 %% literal "%" character
1057 %H changeset hash (40 bytes of hexadecimal)
1057 %H changeset hash (40 bytes of hexadecimal)
1058 %N number of patches being generated
1058 %N number of patches being generated
1059 %R changeset revision number
1059 %R changeset revision number
1060 %b basename of the exporting repository
1060 %b basename of the exporting repository
1061 %h short-form changeset hash (12 bytes of hexadecimal)
1061 %h short-form changeset hash (12 bytes of hexadecimal)
1062 %n zero-padded sequence number, starting at 1
1062 %n zero-padded sequence number, starting at 1
1063 %r zero-padded changeset revision number
1063 %r zero-padded changeset revision number
1064
1064
1065 Without the -a option, export will avoid generating diffs of files
1065 Without the -a option, export will avoid generating diffs of files
1066 it detects as binary. With -a, export will generate a diff anyway,
1066 it detects as binary. With -a, export will generate a diff anyway,
1067 probably with undesirable results.
1067 probably with undesirable results.
1068
1068
1069 With the --switch-parent option, the diff will be against the second
1069 With the --switch-parent option, the diff will be against the second
1070 parent. This can be useful for reviewing a merge.
1070 parent. This can be useful for reviewing a merge.
1071 """
1071 """
1072 if not changesets:
1072 if not changesets:
1073 raise util.Abort(_("export requires at least one changeset"))
1073 raise util.Abort(_("export requires at least one changeset"))
1074 revs = cmdutil.revrange(repo, changesets)
1074 revs = cmdutil.revrange(repo, changesets)
1075 if len(revs) > 1:
1075 if len(revs) > 1:
1076 ui.note(_('exporting patches:\n'))
1076 ui.note(_('exporting patches:\n'))
1077 else:
1077 else:
1078 ui.note(_('exporting patch:\n'))
1078 ui.note(_('exporting patch:\n'))
1079 patch.export(repo, revs, template=opts['output'],
1079 patch.export(repo, revs, template=opts['output'],
1080 switch_parent=opts['switch_parent'],
1080 switch_parent=opts['switch_parent'],
1081 opts=patch.diffopts(ui, opts))
1081 opts=patch.diffopts(ui, opts))
1082
1082
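# Editor's sketch (not part of commands.py): a simplified expansion of the
# output-filename format keys documented for export above.  Only some keys are
# shown (%b needs the repository basename and is omitted), the scan is a naive
# left-to-right pass, and all values here are hypothetical.
def expandname(pattern, node, rev, total, seqno, revwidth):
    replacements = {
        '%': '%',                                  # %% -> literal "%"
        'H': node,                                 # full 40-char hex hash
        'h': node[:12],                            # short-form hash
        'N': str(total),                           # number of patches
        'R': str(rev),                             # revision number
        'n': '%0*d' % (len(str(total)), seqno),    # zero-padded sequence number
        'r': '%0*d' % (revwidth, rev),             # zero-padded revision number
    }
    out, i = [], 0
    while i < len(pattern):
        if pattern[i] == '%' and pattern[i + 1:i + 2] in replacements:
            out.append(replacements[pattern[i + 1]])
            i += 2
        else:
            out.append(pattern[i])
            i += 1
    return ''.join(out)

# expandname('%h-%n.patch', node='a' * 40, rev=42, total=3, seqno=1, revwidth=2)
# -> 'aaaaaaaaaaaa-1.patch'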
1083 def grep(ui, repo, pattern, *pats, **opts):
1083 def grep(ui, repo, pattern, *pats, **opts):
1084 """search for a pattern in specified files and revisions
1084 """search for a pattern in specified files and revisions
1085
1085
1086 Search revisions of files for a regular expression.
1086 Search revisions of files for a regular expression.
1087
1087
1088 This command behaves differently from Unix grep. It only accepts
1088 This command behaves differently from Unix grep. It only accepts
1089 Python/Perl regexps. It searches repository history, not the
1089 Python/Perl regexps. It searches repository history, not the
1090 working directory. It always prints the revision number in which
1090 working directory. It always prints the revision number in which
1091 a match appears.
1091 a match appears.
1092
1092
1093 By default, grep only prints output for the first revision of a
1093 By default, grep only prints output for the first revision of a
1094 file in which it finds a match. To get it to print every revision
1094 file in which it finds a match. To get it to print every revision
1095 that contains a change in match status ("-" for a match that
1095 that contains a change in match status ("-" for a match that
1096 becomes a non-match, or "+" for a non-match that becomes a match),
1096 becomes a non-match, or "+" for a non-match that becomes a match),
1097 use the --all flag.
1097 use the --all flag.
1098 """
1098 """
1099 reflags = 0
1099 reflags = 0
1100 if opts['ignore_case']:
1100 if opts['ignore_case']:
1101 reflags |= re.I
1101 reflags |= re.I
1102 regexp = re.compile(pattern, reflags)
1102 regexp = re.compile(pattern, reflags)
1103 sep, eol = ':', '\n'
1103 sep, eol = ':', '\n'
1104 if opts['print0']:
1104 if opts['print0']:
1105 sep = eol = '\0'
1105 sep = eol = '\0'
1106
1106
1107 fcache = {}
1107 fcache = {}
1108 def getfile(fn):
1108 def getfile(fn):
1109 if fn not in fcache:
1109 if fn not in fcache:
1110 fcache[fn] = repo.file(fn)
1110 fcache[fn] = repo.file(fn)
1111 return fcache[fn]
1111 return fcache[fn]
1112
1112
1113 def matchlines(body):
1113 def matchlines(body):
1114 begin = 0
1114 begin = 0
1115 linenum = 0
1115 linenum = 0
1116 while True:
1116 while True:
1117 match = regexp.search(body, begin)
1117 match = regexp.search(body, begin)
1118 if not match:
1118 if not match:
1119 break
1119 break
1120 mstart, mend = match.span()
1120 mstart, mend = match.span()
1121 linenum += body.count('\n', begin, mstart) + 1
1121 linenum += body.count('\n', begin, mstart) + 1
1122 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1122 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1123 lend = body.find('\n', mend)
1123 lend = body.find('\n', mend)
1124 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1124 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1125 begin = lend + 1
1125 begin = lend + 1
1126
1126
1127 class linestate(object):
1127 class linestate(object):
1128 def __init__(self, line, linenum, colstart, colend):
1128 def __init__(self, line, linenum, colstart, colend):
1129 self.line = line
1129 self.line = line
1130 self.linenum = linenum
1130 self.linenum = linenum
1131 self.colstart = colstart
1131 self.colstart = colstart
1132 self.colend = colend
1132 self.colend = colend
1133
1133
1134 def __eq__(self, other):
1134 def __eq__(self, other):
1135 return self.line == other.line
1135 return self.line == other.line
1136
1136
1137 matches = {}
1137 matches = {}
1138 copies = {}
1138 copies = {}
1139 def grepbody(fn, rev, body):
1139 def grepbody(fn, rev, body):
1140 matches[rev].setdefault(fn, [])
1140 matches[rev].setdefault(fn, [])
1141 m = matches[rev][fn]
1141 m = matches[rev][fn]
1142 for lnum, cstart, cend, line in matchlines(body):
1142 for lnum, cstart, cend, line in matchlines(body):
1143 s = linestate(line, lnum, cstart, cend)
1143 s = linestate(line, lnum, cstart, cend)
1144 m.append(s)
1144 m.append(s)
1145
1145
1146 def difflinestates(a, b):
1146 def difflinestates(a, b):
1147 sm = difflib.SequenceMatcher(None, a, b)
1147 sm = difflib.SequenceMatcher(None, a, b)
1148 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1148 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1149 if tag == 'insert':
1149 if tag == 'insert':
1150 for i in xrange(blo, bhi):
1150 for i in xrange(blo, bhi):
1151 yield ('+', b[i])
1151 yield ('+', b[i])
1152 elif tag == 'delete':
1152 elif tag == 'delete':
1153 for i in xrange(alo, ahi):
1153 for i in xrange(alo, ahi):
1154 yield ('-', a[i])
1154 yield ('-', a[i])
1155 elif tag == 'replace':
1155 elif tag == 'replace':
1156 for i in xrange(alo, ahi):
1156 for i in xrange(alo, ahi):
1157 yield ('-', a[i])
1157 yield ('-', a[i])
1158 for i in xrange(blo, bhi):
1158 for i in xrange(blo, bhi):
1159 yield ('+', b[i])
1159 yield ('+', b[i])
1160
1160
1161 prev = {}
1161 prev = {}
1162 def display(fn, rev, states, prevstates):
1162 def display(fn, rev, states, prevstates):
1163 found = False
1163 found = False
1164 filerevmatches = {}
1164 filerevmatches = {}
1165 r = prev.get(fn, -1)
1165 r = prev.get(fn, -1)
1166 if opts['all']:
1166 if opts['all']:
1167 iter = difflinestates(states, prevstates)
1167 iter = difflinestates(states, prevstates)
1168 else:
1168 else:
1169 iter = [('', l) for l in prevstates]
1169 iter = [('', l) for l in prevstates]
1170 for change, l in iter:
1170 for change, l in iter:
1171 cols = [fn, str(r)]
1171 cols = [fn, str(r)]
1172 if opts['line_number']:
1172 if opts['line_number']:
1173 cols.append(str(l.linenum))
1173 cols.append(str(l.linenum))
1174 if opts['all']:
1174 if opts['all']:
1175 cols.append(change)
1175 cols.append(change)
1176 if opts['user']:
1176 if opts['user']:
1177 cols.append(ui.shortuser(get(r)[1]))
1177 cols.append(ui.shortuser(get(r)[1]))
1178 if opts['files_with_matches']:
1178 if opts['files_with_matches']:
1179 c = (fn, r)
1179 c = (fn, r)
1180 if c in filerevmatches:
1180 if c in filerevmatches:
1181 continue
1181 continue
1182 filerevmatches[c] = 1
1182 filerevmatches[c] = 1
1183 else:
1183 else:
1184 cols.append(l.line)
1184 cols.append(l.line)
1185 ui.write(sep.join(cols), eol)
1185 ui.write(sep.join(cols), eol)
1186 found = True
1186 found = True
1187 return found
1187 return found
1188
1188
1189 fstate = {}
1189 fstate = {}
1190 skip = {}
1190 skip = {}
1191 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1191 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1192 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1192 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1193 found = False
1193 found = False
1194 follow = opts.get('follow')
1194 follow = opts.get('follow')
1195 for st, rev, fns in changeiter:
1195 for st, rev, fns in changeiter:
1196 if st == 'window':
1196 if st == 'window':
1197 matches.clear()
1197 matches.clear()
1198 elif st == 'add':
1198 elif st == 'add':
1199 mf = repo.changectx(rev).manifest()
1199 mf = repo.changectx(rev).manifest()
1200 matches[rev] = {}
1200 matches[rev] = {}
1201 for fn in fns:
1201 for fn in fns:
1202 if fn in skip:
1202 if fn in skip:
1203 continue
1203 continue
1204 fstate.setdefault(fn, {})
1204 fstate.setdefault(fn, {})
1205 try:
1205 try:
1206 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1206 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1207 if follow:
1207 if follow:
1208 copied = getfile(fn).renamed(mf[fn])
1208 copied = getfile(fn).renamed(mf[fn])
1209 if copied:
1209 if copied:
1210 copies.setdefault(rev, {})[fn] = copied[0]
1210 copies.setdefault(rev, {})[fn] = copied[0]
1211 except KeyError:
1211 except KeyError:
1212 pass
1212 pass
1213 elif st == 'iter':
1213 elif st == 'iter':
1214 states = matches[rev].items()
1214 states = matches[rev].items()
1215 states.sort()
1215 states.sort()
1216 for fn, m in states:
1216 for fn, m in states:
1217 copy = copies.get(rev, {}).get(fn)
1217 copy = copies.get(rev, {}).get(fn)
1218 if fn in skip:
1218 if fn in skip:
1219 if copy:
1219 if copy:
1220 skip[copy] = True
1220 skip[copy] = True
1221 continue
1221 continue
1222 if fn in prev or fstate[fn]:
1222 if fn in prev or fstate[fn]:
1223 r = display(fn, rev, m, fstate[fn])
1223 r = display(fn, rev, m, fstate[fn])
1224 found = found or r
1224 found = found or r
1225 if r and not opts['all']:
1225 if r and not opts['all']:
1226 skip[fn] = True
1226 skip[fn] = True
1227 if copy:
1227 if copy:
1228 skip[copy] = True
1228 skip[copy] = True
1229 fstate[fn] = m
1229 fstate[fn] = m
1230 if copy:
1230 if copy:
1231 fstate[copy] = m
1231 fstate[copy] = m
1232 prev[fn] = rev
1232 prev[fn] = rev
1233
1233
1234 fstate = fstate.items()
1234 fstate = fstate.items()
1235 fstate.sort()
1235 fstate.sort()
1236 for fn, state in fstate:
1236 for fn, state in fstate:
1237 if fn in skip:
1237 if fn in skip:
1238 continue
1238 continue
1239 if fn not in copies.get(prev[fn], {}):
1239 if fn not in copies.get(prev[fn], {}):
1240 found = display(fn, rev, {}, state) or found
1240 found = display(fn, rev, {}, state) or found
1241 return (not found and 1) or 0
1241 return (not found and 1) or 0
1242
1242
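# Editor's sketch (not part of commands.py): the "-"/"+" match-status output of
# grep --all above comes from comparing the matching lines of two revisions
# with difflib.SequenceMatcher, much as difflinestates does.  Plain strings
# stand in here for the linestate objects grep really uses.
import difflib

def diffstates(old, new):
    # yield ('-', line) for matches that disappear, ('+', line) for new matches
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old[alo:ahi]:
                yield '-', line
        if tag in ('insert', 'replace'):
            for line in new[blo:bhi]:
                yield '+', line

# list(diffstates(['foo bar'], ['foo bar', 'foo baz']))  ->  [('+', 'foo baz')]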
1243 def heads(ui, repo, **opts):
1243 def heads(ui, repo, **opts):
1244 """show current repository heads
1244 """show current repository heads
1245
1245
1246 Show all repository head changesets.
1246 Show all repository head changesets.
1247
1247
1248 Repository "heads" are changesets that don't have children
1248 Repository "heads" are changesets that don't have children
1249 changesets. They are where development generally takes place and
1249 changesets. They are where development generally takes place and
1250 are the usual targets for update and merge operations.
1250 are the usual targets for update and merge operations.
1251 """
1251 """
1252 if opts['rev']:
1252 if opts['rev']:
1253 heads = repo.heads(repo.lookup(opts['rev']))
1253 heads = repo.heads(repo.lookup(opts['rev']))
1254 else:
1254 else:
1255 heads = repo.heads()
1255 heads = repo.heads()
1256 displayer = cmdutil.show_changeset(ui, repo, opts)
1256 displayer = cmdutil.show_changeset(ui, repo, opts)
1257 for n in heads:
1257 for n in heads:
1258 displayer.show(changenode=n)
1258 displayer.show(changenode=n)
1259
1259
1260 def help_(ui, name=None, with_version=False):
1260 def help_(ui, name=None, with_version=False):
1261 """show help for a command, extension, or list of commands
1261 """show help for a command, extension, or list of commands
1262
1262
1263 With no arguments, print a list of commands and short help.
1263 With no arguments, print a list of commands and short help.
1264
1264
1265 Given a command name, print help for that command.
1265 Given a command name, print help for that command.
1266
1266
1267 Given an extension name, print help for that extension, and the
1267 Given an extension name, print help for that extension, and the
1268 commands it provides."""
1268 commands it provides."""
1269 option_lists = []
1269 option_lists = []
1270
1270
1271 def helpcmd(name):
1271 def helpcmd(name):
1272 if with_version:
1272 if with_version:
1273 version_(ui)
1273 version_(ui)
1274 ui.write('\n')
1274 ui.write('\n')
1275 aliases, i = findcmd(ui, name)
1275 aliases, i = findcmd(ui, name)
1276 # synopsis
1276 # synopsis
1277 ui.write("%s\n\n" % i[2])
1277 ui.write("%s\n\n" % i[2])
1278
1278
1279 # description
1279 # description
1280 doc = i[0].__doc__
1280 doc = i[0].__doc__
1281 if not doc:
1281 if not doc:
1282 doc = _("(No help text available)")
1282 doc = _("(No help text available)")
1283 if ui.quiet:
1283 if ui.quiet:
1284 doc = doc.splitlines(0)[0]
1284 doc = doc.splitlines(0)[0]
1285 ui.write("%s\n" % doc.rstrip())
1285 ui.write("%s\n" % doc.rstrip())
1286
1286
1287 if not ui.quiet:
1287 if not ui.quiet:
1288 # aliases
1288 # aliases
1289 if len(aliases) > 1:
1289 if len(aliases) > 1:
1290 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1290 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1291
1291
1292 # options
1292 # options
1293 if i[1]:
1293 if i[1]:
1294 option_lists.append(("options", i[1]))
1294 option_lists.append(("options", i[1]))
1295
1295
1296 def helplist(select=None):
1296 def helplist(select=None):
1297 h = {}
1297 h = {}
1298 cmds = {}
1298 cmds = {}
1299 for c, e in table.items():
1299 for c, e in table.items():
1300 f = c.split("|", 1)[0]
1300 f = c.split("|", 1)[0]
1301 if select and not select(f):
1301 if select and not select(f):
1302 continue
1302 continue
1303 if name == "shortlist" and not f.startswith("^"):
1303 if name == "shortlist" and not f.startswith("^"):
1304 continue
1304 continue
1305 f = f.lstrip("^")
1305 f = f.lstrip("^")
1306 if not ui.debugflag and f.startswith("debug"):
1306 if not ui.debugflag and f.startswith("debug"):
1307 continue
1307 continue
1308 doc = e[0].__doc__
1308 doc = e[0].__doc__
1309 if not doc:
1309 if not doc:
1310 doc = _("(No help text available)")
1310 doc = _("(No help text available)")
1311 h[f] = doc.splitlines(0)[0].rstrip()
1311 h[f] = doc.splitlines(0)[0].rstrip()
1312 cmds[f] = c.lstrip("^")
1312 cmds[f] = c.lstrip("^")
1313
1313
1314 fns = h.keys()
1314 fns = h.keys()
1315 fns.sort()
1315 fns.sort()
1316 m = max(map(len, fns))
1316 m = max(map(len, fns))
1317 for f in fns:
1317 for f in fns:
1318 if ui.verbose:
1318 if ui.verbose:
1319 commands = cmds[f].replace("|",", ")
1319 commands = cmds[f].replace("|",", ")
1320 ui.write(" %s:\n %s\n"%(commands, h[f]))
1320 ui.write(" %s:\n %s\n"%(commands, h[f]))
1321 else:
1321 else:
1322 ui.write(' %-*s %s\n' % (m, f, h[f]))
1322 ui.write(' %-*s %s\n' % (m, f, h[f]))
1323
1323
1324 def helptopic(name):
1324 def helptopic(name):
1325 v = None
1325 v = None
1326 for i in help.helptable:
1326 for i in help.helptable:
1327 l = i.split('|')
1327 l = i.split('|')
1328 if name in l:
1328 if name in l:
1329 v = i
1329 v = i
1330 header = l[-1]
1330 header = l[-1]
1331 if not v:
1331 if not v:
1332 raise UnknownCommand(name)
1332 raise UnknownCommand(name)
1333
1333
1334 # description
1334 # description
1335 doc = help.helptable[v]
1335 doc = help.helptable[v]
1336 if not doc:
1336 if not doc:
1337 doc = _("(No help text available)")
1337 doc = _("(No help text available)")
1338 if callable(doc):
1338 if callable(doc):
1339 doc = doc()
1339 doc = doc()
1340
1340
1341 ui.write("%s\n" % header)
1341 ui.write("%s\n" % header)
1342 ui.write("%s\n" % doc.rstrip())
1342 ui.write("%s\n" % doc.rstrip())
1343
1343
1344 def helpext(name):
1344 def helpext(name):
1345 try:
1345 try:
1346 mod = findext(name)
1346 mod = findext(name)
1347 except KeyError:
1347 except KeyError:
1348 raise UnknownCommand(name)
1348 raise UnknownCommand(name)
1349
1349
1350 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1350 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1351 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1351 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1352 for d in doc[1:]:
1352 for d in doc[1:]:
1353 ui.write(d, '\n')
1353 ui.write(d, '\n')
1354
1354
1355 ui.status('\n')
1355 ui.status('\n')
1356
1356
1357 try:
1357 try:
1358 ct = mod.cmdtable
1358 ct = mod.cmdtable
1359 except AttributeError:
1359 except AttributeError:
1360 ui.status(_('no commands defined\n'))
1360 ui.status(_('no commands defined\n'))
1361 return
1361 return
1362
1362
1363 if ui.verbose:
1363 if ui.verbose:
1364 ui.status(_('list of commands:\n\n'))
1364 ui.status(_('list of commands:\n\n'))
1365 else:
1365 else:
1366 ui.status(_('list of commands (use "hg help -v %s" '
1366 ui.status(_('list of commands (use "hg help -v %s" '
1367 'to show aliases and global options):\n\n') % name)
1367 'to show aliases and global options):\n\n') % name)
1368
1368
1369 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1369 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1370 helplist(modcmds.has_key)
1370 helplist(modcmds.has_key)
1371
1371
1372 if name and name != 'shortlist':
1372 if name and name != 'shortlist':
1373 i = None
1373 i = None
1374 for f in (helpcmd, helptopic, helpext):
1374 for f in (helpcmd, helptopic, helpext):
1375 try:
1375 try:
1376 f(name)
1376 f(name)
1377 i = None
1377 i = None
1378 break
1378 break
1379 except UnknownCommand, inst:
1379 except UnknownCommand, inst:
1380 i = inst
1380 i = inst
1381 if i:
1381 if i:
1382 raise i
1382 raise i
1383
1383
1384 else:
1384 else:
1385 # program name
1385 # program name
1386 if ui.verbose or with_version:
1386 if ui.verbose or with_version:
1387 version_(ui)
1387 version_(ui)
1388 else:
1388 else:
1389 ui.status(_("Mercurial Distributed SCM\n"))
1389 ui.status(_("Mercurial Distributed SCM\n"))
1390 ui.status('\n')
1390 ui.status('\n')
1391
1391
1392 # list of commands
1392 # list of commands
1393 if name == "shortlist":
1393 if name == "shortlist":
1394 ui.status(_('basic commands (use "hg help" '
1394 ui.status(_('basic commands (use "hg help" '
1395 'for the full list or option "-v" for details):\n\n'))
1395 'for the full list or option "-v" for details):\n\n'))
1396 elif ui.verbose:
1396 elif ui.verbose:
1397 ui.status(_('list of commands:\n\n'))
1397 ui.status(_('list of commands:\n\n'))
1398 else:
1398 else:
1399 ui.status(_('list of commands (use "hg help -v" '
1399 ui.status(_('list of commands (use "hg help -v" '
1400 'to show aliases and global options):\n\n'))
1400 'to show aliases and global options):\n\n'))
1401
1401
1402 helplist()
1402 helplist()
1403
1403
1404 # global options
1404 # global options
1405 if ui.verbose:
1405 if ui.verbose:
1406 option_lists.append(("global options", globalopts))
1406 option_lists.append(("global options", globalopts))
1407
1407
1408 # list all option lists
1408 # list all option lists
1409 opt_output = []
1409 opt_output = []
1410 for title, options in option_lists:
1410 for title, options in option_lists:
1411 opt_output.append(("\n%s:\n" % title, None))
1411 opt_output.append(("\n%s:\n" % title, None))
1412 for shortopt, longopt, default, desc in options:
1412 for shortopt, longopt, default, desc in options:
1413 if "DEPRECATED" in desc and not ui.verbose: continue
1413 if "DEPRECATED" in desc and not ui.verbose: continue
1414 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1414 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1415 longopt and " --%s" % longopt),
1415 longopt and " --%s" % longopt),
1416 "%s%s" % (desc,
1416 "%s%s" % (desc,
1417 default
1417 default
1418 and _(" (default: %s)") % default
1418 and _(" (default: %s)") % default
1419 or "")))
1419 or "")))
1420
1420
1421 if opt_output:
1421 if opt_output:
1422 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1422 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1423 for first, second in opt_output:
1423 for first, second in opt_output:
1424 if second:
1424 if second:
1425 ui.write(" %-*s %s\n" % (opts_len, first, second))
1425 ui.write(" %-*s %s\n" % (opts_len, first, second))
1426 else:
1426 else:
1427 ui.write("%s\n" % first)
1427 ui.write("%s\n" % first)
1428
1428
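# Editor's sketch (not part of commands.py): the option-list alignment help_
# performs above.  '%-*s' takes its field width as an argument, so one pass
# finds the widest left column and a second pass pads everything to match.
def align(pairs):
    # pairs: (flag text, description); returns lines with descriptions lined up
    width = max([len(first) for first, second in pairs])
    return [" %-*s  %s" % (width, first, second) for first, second in pairs]

# align([("-v --verbose", "enable additional output"),
#        ("-q --quiet", "suppress output")])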
1429 def identify(ui, repo):
1429 def identify(ui, repo):
1430 """print information about the working copy
1430 """print information about the working copy
1431
1431
1432 Print a short summary of the current state of the repo.
1432 Print a short summary of the current state of the repo.
1433
1433
1434 This summary identifies the repository state using one or two parent
1434 This summary identifies the repository state using one or two parent
1435 hash identifiers, followed by a "+" if there are uncommitted changes
1435 hash identifiers, followed by a "+" if there are uncommitted changes
1436 in the working directory, followed by a list of tags for this revision.
1436 in the working directory, followed by a list of tags for this revision.
1437 """
1437 """
1438 parents = [p for p in repo.dirstate.parents() if p != nullid]
1438 parents = [p for p in repo.dirstate.parents() if p != nullid]
1439 if not parents:
1439 if not parents:
1440 ui.write(_("unknown\n"))
1440 ui.write(_("unknown\n"))
1441 return
1441 return
1442
1442
1443 hexfunc = ui.debugflag and hex or short
1443 hexfunc = ui.debugflag and hex or short
1444 modified, added, removed, deleted = repo.status()[:4]
1444 modified, added, removed, deleted = repo.status()[:4]
1445 output = ["%s%s" %
1445 output = ["%s%s" %
1446 ('+'.join([hexfunc(parent) for parent in parents]),
1446 ('+'.join([hexfunc(parent) for parent in parents]),
1447 (modified or added or removed or deleted) and "+" or "")]
1447 (modified or added or removed or deleted) and "+" or "")]
1448
1448
1449 if not ui.quiet:
1449 if not ui.quiet:
1450
1450
1451 branch = util.tolocal(repo.workingctx().branch())
1451 branch = util.tolocal(repo.workingctx().branch())
1452 if branch != 'default':
1452 if branch != 'default':
1453 output.append("(%s)" % branch)
1453 output.append("(%s)" % branch)
1454
1454
1455 # multiple tags for a single parent separated by '/'
1455 # multiple tags for a single parent separated by '/'
1456 parenttags = ['/'.join(tags)
1456 parenttags = ['/'.join(tags)
1457 for tags in map(repo.nodetags, parents) if tags]
1457 for tags in map(repo.nodetags, parents) if tags]
1458 # tags for multiple parents separated by ' + '
1458 # tags for multiple parents separated by ' + '
1459 if parenttags:
1459 if parenttags:
1460 output.append(' + '.join(parenttags))
1460 output.append(' + '.join(parenttags))
1461
1461
1462 ui.write("%s\n" % ' '.join(output))
1462 ui.write("%s\n" % ' '.join(output))
1463
1463
1464 def import_(ui, repo, patch1, *patches, **opts):
1464 def import_(ui, repo, patch1, *patches, **opts):
1465 """import an ordered set of patches
1465 """import an ordered set of patches
1466
1466
1467 Import a list of patches and commit them individually.
1467 Import a list of patches and commit them individually.
1468
1468
1469 If there are outstanding changes in the working directory, import
1469 If there are outstanding changes in the working directory, import
1470 will abort unless given the -f flag.
1470 will abort unless given the -f flag.
1471
1471
1472 You can import a patch straight from a mail message. Even patches
1472 You can import a patch straight from a mail message. Even patches
1473 as attachments work (body part must be type text/plain or
1473 as attachments work (body part must be type text/plain or
1474 text/x-patch to be used). The From and Subject headers of the email
1474 text/x-patch to be used). The From and Subject headers of the email
1475 message are used as the default committer and commit message. All
1475 message are used as the default committer and commit message. All
1476 text/plain body parts before the first diff are added to the commit
1476 text/plain body parts before the first diff are added to the commit
1477 message.
1477 message.
1478
1478
1479 If the imported patch was generated by hg export, user and description
1479 If the imported patch was generated by hg export, user and description
1480 from patch override values from message headers and body. Values
1480 from patch override values from message headers and body. Values
1481 given on command line with -m and -u override these.
1481 given on command line with -m and -u override these.
1482
1482
1483 If --exact is specified, import will set the working directory
1483 If --exact is specified, import will set the working directory
1484 to the parent of each patch before applying it, and will abort
1484 to the parent of each patch before applying it, and will abort
1485 if the resulting changeset has a different ID than the one
1485 if the resulting changeset has a different ID than the one
1486 recorded in the patch. This may happen due to character set
1486 recorded in the patch. This may happen due to character set
1487 problems or other deficiencies in the text patch format.
1487 problems or other deficiencies in the text patch format.
1488
1488
1489 To read a patch from standard input, use patch name "-".
1489 To read a patch from standard input, use patch name "-".
1490 """
1490 """
1491 patches = (patch1,) + patches
1491 patches = (patch1,) + patches
1492
1492
1493 if opts.get('exact') or not opts['force']:
1493 if opts.get('exact') or not opts['force']:
1494 bail_if_changed(repo)
1494 bail_if_changed(repo)
1495
1495
1496 d = opts["base"]
1496 d = opts["base"]
1497 strip = opts["strip"]
1497 strip = opts["strip"]
1498
1498
1499 wlock = repo.wlock()
1499 wlock = repo.wlock()
1500 lock = repo.lock()
1500 lock = repo.lock()
1501
1501
1502 for p in patches:
1502 for p in patches:
1503 pf = os.path.join(d, p)
1503 pf = os.path.join(d, p)
1504
1504
1505 if pf == '-':
1505 if pf == '-':
1506 ui.status(_("applying patch from stdin\n"))
1506 ui.status(_("applying patch from stdin\n"))
1507 tmpname, message, user, date, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1507 tmpname, message, user, date, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1508 else:
1508 else:
1509 ui.status(_("applying %s\n") % p)
1509 ui.status(_("applying %s\n") % p)
1510 tmpname, message, user, date, nodeid, p1, p2 = patch.extract(ui, file(pf))
1510 tmpname, message, user, date, nodeid, p1, p2 = patch.extract(ui, file(pf))
1511
1511
1512 if tmpname is None:
1512 if tmpname is None:
1513 raise util.Abort(_('no diffs found'))
1513 raise util.Abort(_('no diffs found'))
1514
1514
1515 try:
1515 try:
1516 cmdline_message = logmessage(opts)
1516 cmdline_message = logmessage(opts)
1517 if cmdline_message:
1517 if cmdline_message:
1518 # pickup the cmdline msg
1518 # pickup the cmdline msg
1519 message = cmdline_message
1519 message = cmdline_message
1520 elif message:
1520 elif message:
1521 # pickup the patch msg
1521 # pickup the patch msg
1522 message = message.strip()
1522 message = message.strip()
1523 else:
1523 else:
1524 # launch the editor
1524 # launch the editor
1525 message = None
1525 message = None
1526 ui.debug(_('message:\n%s\n') % message)
1526 ui.debug(_('message:\n%s\n') % message)
1527
1527
1528 wp = repo.workingctx().parents()
1528 wp = repo.workingctx().parents()
1529 if opts.get('exact'):
1529 if opts.get('exact'):
1530 if not nodeid or not p1:
1530 if not nodeid or not p1:
1531 raise util.Abort(_('not a mercurial patch'))
1531 raise util.Abort(_('not a mercurial patch'))
1532 p1 = repo.lookup(p1)
1532 p1 = repo.lookup(p1)
1533 p2 = repo.lookup(p2 or hex(nullid))
1533 p2 = repo.lookup(p2 or hex(nullid))
1534
1534
1535 if p1 != wp[0].node():
1535 if p1 != wp[0].node():
1536 hg.clean(repo, p1, wlock=wlock)
1536 hg.clean(repo, p1, wlock=wlock)
1537 repo.dirstate.setparents(p1, p2)
1537 repo.dirstate.setparents(p1, p2)
1538 elif p2:
1538 elif p2:
1539 try:
1539 try:
1540 p1 = repo.lookup(p1)
1540 p1 = repo.lookup(p1)
1541 p2 = repo.lookup(p2)
1541 p2 = repo.lookup(p2)
1542 if p1 == wp[0].node():
1542 if p1 == wp[0].node():
1543 repo.dirstate.setparents(p1, p2)
1543 repo.dirstate.setparents(p1, p2)
1544 except RepoError:
1544 except RepoError:
1545 pass
1545 pass
1546
1546
1547 files = {}
1547 files = {}
1548 try:
1548 try:
1549 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1549 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1550 files=files)
1550 files=files)
1551 finally:
1551 finally:
1552 files = patch.updatedir(ui, repo, files, wlock=wlock)
1552 files = patch.updatedir(ui, repo, files, wlock=wlock)
1553 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1553 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1554 if opts.get('exact'):
1554 if opts.get('exact'):
1555 if hex(n) != nodeid:
1555 if hex(n) != nodeid:
1556 repo.rollback()
1556 repo.rollback()
1557 raise util.Abort(_('patch is damaged or loses information'))
1557 raise util.Abort(_('patch is damaged or loses information'))
1558 finally:
1558 finally:
1559 os.unlink(tmpname)
1559 os.unlink(tmpname)
1560
1560
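# Editor's sketch (not part of commands.py): the 'patch name "-" means stdin'
# convention that import_ follows above.  open_patch is a hypothetical helper,
# not a mercurial API.
import sys

def open_patch(name):
    # "-" selects standard input; anything else is treated as a file path
    if name == '-':
        return sys.stdin
    return file(name)

# for p in patches: fp = open_patch(p); ...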
1561 def incoming(ui, repo, source="default", **opts):
1561 def incoming(ui, repo, source="default", **opts):
1562 """show new changesets found in source
1562 """show new changesets found in source
1563
1563
1564 Show new changesets found in the specified path/URL or the default
1564 Show new changesets found in the specified path/URL or the default
1565 pull location. These are the changesets that would be pulled if a pull
1565 pull location. These are the changesets that would be pulled if a pull
1566 was requested.
1566 was requested.
1567
1567
1568 For a remote repository, using --bundle avoids downloading the changesets
1568 For a remote repository, using --bundle avoids downloading the changesets
1569 twice if the incoming command is followed by a pull.
1569 twice if the incoming command is followed by a pull.
1570
1570
1571 See pull for valid source format details.
1571 See pull for valid source format details.
1572 """
1572 """
1573 source = ui.expandpath(source)
1573 source = ui.expandpath(source)
1574 setremoteconfig(ui, opts)
1574 setremoteconfig(ui, opts)
1575
1575
1576 other = hg.repository(ui, source)
1576 other = hg.repository(ui, source)
1577 ui.status(_('comparing with %s\n') % source)
1577 ui.status(_('comparing with %s\n') % source)
1578 incoming = repo.findincoming(other, force=opts["force"])
1578 incoming = repo.findincoming(other, force=opts["force"])
1579 if not incoming:
1579 if not incoming:
1580 try:
1580 try:
1581 os.unlink(opts["bundle"])
1581 os.unlink(opts["bundle"])
1582 except:
1582 except:
1583 pass
1583 pass
1584 ui.status(_("no changes found\n"))
1584 ui.status(_("no changes found\n"))
1585 return 1
1585 return 1
1586
1586
1587 cleanup = None
1587 cleanup = None
1588 try:
1588 try:
1589 fname = opts["bundle"]
1589 fname = opts["bundle"]
1590 if fname or not other.local():
1590 if fname or not other.local():
1591 # create a bundle (uncompressed if other repo is not local)
1591 # create a bundle (uncompressed if other repo is not local)
1592 cg = other.changegroup(incoming, "incoming")
1592 cg = other.changegroup(incoming, "incoming")
1593 bundletype = other.local() and "HG10BZ" or "HG10UN"
1593 bundletype = other.local() and "HG10BZ" or "HG10UN"
1594 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1594 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1595 # keep written bundle?
1595 # keep written bundle?
1596 if opts["bundle"]:
1596 if opts["bundle"]:
1597 cleanup = None
1597 cleanup = None
1598 if not other.local():
1598 if not other.local():
1599 # use the created uncompressed bundlerepo
1599 # use the created uncompressed bundlerepo
1600 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1600 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1601
1601
1602 revs = None
1602 revs = None
1603 if opts['rev']:
1603 if opts['rev']:
1604 revs = [other.lookup(rev) for rev in opts['rev']]
1604 revs = [other.lookup(rev) for rev in opts['rev']]
1605 o = other.changelog.nodesbetween(incoming, revs)[0]
1605 o = other.changelog.nodesbetween(incoming, revs)[0]
1606 if opts['newest_first']:
1606 if opts['newest_first']:
1607 o.reverse()
1607 o.reverse()
1608 displayer = cmdutil.show_changeset(ui, other, opts)
1608 displayer = cmdutil.show_changeset(ui, other, opts)
1609 for n in o:
1609 for n in o:
1610 parents = [p for p in other.changelog.parents(n) if p != nullid]
1610 parents = [p for p in other.changelog.parents(n) if p != nullid]
1611 if opts['no_merges'] and len(parents) == 2:
1611 if opts['no_merges'] and len(parents) == 2:
1612 continue
1612 continue
1613 displayer.show(changenode=n)
1613 displayer.show(changenode=n)
1614 finally:
1614 finally:
1615 if hasattr(other, 'close'):
1615 if hasattr(other, 'close'):
1616 other.close()
1616 other.close()
1617 if cleanup:
1617 if cleanup:
1618 os.unlink(cleanup)
1618 os.unlink(cleanup)
1619
1619
1620 def init(ui, dest=".", **opts):
1620 def init(ui, dest=".", **opts):
1621 """create a new repository in the given directory
1621 """create a new repository in the given directory
1622
1622
1623 Initialize a new repository in the given directory. If the given
1623 Initialize a new repository in the given directory. If the given
1624 directory does not exist, it is created.
1624 directory does not exist, it is created.
1625
1625
1626 If no directory is given, the current directory is used.
1626 If no directory is given, the current directory is used.
1627
1627
1628 It is possible to specify an ssh:// URL as the destination.
1628 It is possible to specify an ssh:// URL as the destination.
1629 Look at the help text for the pull command for important details
1629 Look at the help text for the pull command for important details
1630 about ssh:// URLs.
1630 about ssh:// URLs.
1631 """
1631 """
1632 setremoteconfig(ui, opts)
1632 setremoteconfig(ui, opts)
1633 hg.repository(ui, dest, create=1)
1633 hg.repository(ui, dest, create=1)
1634
1634
1635 def locate(ui, repo, *pats, **opts):
1635 def locate(ui, repo, *pats, **opts):
1636 """locate files matching specific patterns
1636 """locate files matching specific patterns
1637
1637
1638 Print all files under Mercurial control whose names match the
1638 Print all files under Mercurial control whose names match the
1639 given patterns.
1639 given patterns.
1640
1640
1641 This command searches the entire repository by default. To search
1641 This command searches the entire repository by default. To search
1642 just the current directory and its subdirectories, use "--include .".
1642 just the current directory and its subdirectories, use "--include .".
1643
1643
1644 If no patterns are given to match, this command prints all file
1644 If no patterns are given to match, this command prints all file
1645 names.
1645 names.
1646
1646
1647 If you want to feed the output of this command into the "xargs"
1647 If you want to feed the output of this command into the "xargs"
1648 command, use the "-0" option to both this command and "xargs".
1648 command, use the "-0" option to both this command and "xargs".
1649 This will avoid the problem of "xargs" treating single filenames
1649 This will avoid the problem of "xargs" treating single filenames
1650 that contain white space as multiple filenames.
1650 that contain white space as multiple filenames.
1651 """
1651 """
1652 end = opts['print0'] and '\0' or '\n'
1652 end = opts['print0'] and '\0' or '\n'
1653 rev = opts['rev']
1653 rev = opts['rev']
1654 if rev:
1654 if rev:
1655 node = repo.lookup(rev)
1655 node = repo.lookup(rev)
1656 else:
1656 else:
1657 node = None
1657 node = None
1658
1658
1659 ret = 1
1659 ret = 1
1660 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1660 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1661 default='relglob'):
1661 default='relglob'):
1662 if not node and repo.dirstate.state(abs) == '?':
1662 if not node and repo.dirstate.state(abs) == '?':
1663 continue
1663 continue
1664 if opts['fullpath']:
1664 if opts['fullpath']:
1665 ui.write(os.path.join(repo.root, abs), end)
1665 ui.write(os.path.join(repo.root, abs), end)
1666 else:
1666 else:
1667 ui.write(((pats and rel) or abs), end)
1667 ui.write(((pats and rel) or abs), end)
1668 ret = 0
1668 ret = 0
1669
1669
1670 return ret
1670 return ret
1671
1671
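# Editor's sketch (not part of commands.py): why locate's -0/--print0 option
# above pairs with "xargs -0".  A NUL byte cannot appear in a file name, so it
# is a safe separator even for names containing spaces or newlines.
import sys

def write_names(names, print0=False):
    end = print0 and '\0' or '\n'     # the same idiom locate uses to pick "end"
    for name in names:
        sys.stdout.write(name + end)

# shell usage:  hg locate -0 | xargs -0 wc -l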
1672 def log(ui, repo, *pats, **opts):
1672 def log(ui, repo, *pats, **opts):
1673 """show revision history of entire repository or files
1673 """show revision history of entire repository or files
1674
1674
1675 Print the revision history of the specified files or the entire
1675 Print the revision history of the specified files or the entire
1676 project.
1676 project.
1677
1677
1678 File history is shown without following rename or copy history of
1678 File history is shown without following rename or copy history of
1679 files. Use -f/--follow with a file name to follow history across
1679 files. Use -f/--follow with a file name to follow history across
1680 renames and copies. --follow without a file name will only show
1680 renames and copies. --follow without a file name will only show
1681 ancestors or descendants of the starting revision. --follow-first
1681 ancestors or descendants of the starting revision. --follow-first
1682 only follows the first parent of merge revisions.
1682 only follows the first parent of merge revisions.
1683
1683
1684 If no revision range is specified, the default is tip:0 unless
1684 If no revision range is specified, the default is tip:0 unless
1685 --follow is set, in which case the working directory parent is
1685 --follow is set, in which case the working directory parent is
1686 used as the starting revision.
1686 used as the starting revision.
1687
1687
1688 By default this command outputs: changeset id and hash, tags,
1688 By default this command outputs: changeset id and hash, tags,
1689 non-trivial parents, user, date and time, and a summary for each
1689 non-trivial parents, user, date and time, and a summary for each
1690 commit. When the -v/--verbose switch is used, the list of changed
1690 commit. When the -v/--verbose switch is used, the list of changed
1691 files and the full commit message are shown.
1691 files and the full commit message are shown.
1692
1692
1693 NOTE: log -p may generate unexpected diff output for merge
1693 NOTE: log -p may generate unexpected diff output for merge
1694 changesets, as it will compare the merge changeset against its
1694 changesets, as it will compare the merge changeset against its
1695 first parent only. Also, the files: list will only reflect files
1695 first parent only. Also, the files: list will only reflect files
1696 that are different from BOTH parents.
1696 that are different from BOTH parents.
1697
1697
1698 """
1698 """
1699
1699
1700 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1700 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1701 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1701 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1702
1702
1703 if opts['limit']:
1703 if opts['limit']:
1704 try:
1704 try:
1705 limit = int(opts['limit'])
1705 limit = int(opts['limit'])
1706 except ValueError:
1706 except ValueError:
1707 raise util.Abort(_('limit must be a positive integer'))
1707 raise util.Abort(_('limit must be a positive integer'))
1708 if limit <= 0: raise util.Abort(_('limit must be positive'))
1708 if limit <= 0: raise util.Abort(_('limit must be positive'))
1709 else:
1709 else:
1710 limit = sys.maxint
1710 limit = sys.maxint
1711 count = 0
1711 count = 0
1712
1712
1713 if opts['copies'] and opts['rev']:
1713 if opts['copies'] and opts['rev']:
1714 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1714 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1715 else:
1715 else:
1716 endrev = repo.changelog.count()
1716 endrev = repo.changelog.count()
1717 rcache = {}
1717 rcache = {}
1718 ncache = {}
1718 ncache = {}
1719 dcache = []
1719 dcache = []
1720 def getrenamed(fn, rev, man):
1720 def getrenamed(fn, rev, man):
1721 '''looks up all renames for a file (up to endrev) the first
1721 '''looks up all renames for a file (up to endrev) the first
1722 time the file is given. It indexes on the changerev and only
1722 time the file is given. It indexes on the changerev and only
1723 parses the manifest if linkrev != changerev.
1723 parses the manifest if linkrev != changerev.
1724 Returns rename info for fn at changerev rev.'''
1724 Returns rename info for fn at changerev rev.'''
1725 if fn not in rcache:
1725 if fn not in rcache:
1726 rcache[fn] = {}
1726 rcache[fn] = {}
1727 ncache[fn] = {}
1727 ncache[fn] = {}
1728 fl = repo.file(fn)
1728 fl = repo.file(fn)
1729 for i in xrange(fl.count()):
1729 for i in xrange(fl.count()):
1730 node = fl.node(i)
1730 node = fl.node(i)
1731 lr = fl.linkrev(node)
1731 lr = fl.linkrev(node)
1732 renamed = fl.renamed(node)
1732 renamed = fl.renamed(node)
1733 rcache[fn][lr] = renamed
1733 rcache[fn][lr] = renamed
1734 if renamed:
1734 if renamed:
1735 ncache[fn][node] = renamed
1735 ncache[fn][node] = renamed
1736 if lr >= endrev:
1736 if lr >= endrev:
1737 break
1737 break
1738 if rev in rcache[fn]:
1738 if rev in rcache[fn]:
1739 return rcache[fn][rev]
1739 return rcache[fn][rev]
1740 mr = repo.manifest.rev(man)
1740 mr = repo.manifest.rev(man)
1741 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1741 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1742 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1742 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1743 if not dcache or dcache[0] != man:
1743 if not dcache or dcache[0] != man:
1744 dcache[:] = [man, repo.manifest.readdelta(man)]
1744 dcache[:] = [man, repo.manifest.readdelta(man)]
1745 if fn in dcache[1]:
1745 if fn in dcache[1]:
1746 return ncache[fn].get(dcache[1][fn])
1746 return ncache[fn].get(dcache[1][fn])
1747 return None
1747 return None
1748
1748
1749 df = False
1749 df = False
1750 if opts["date"]:
1750 if opts["date"]:
1751 df = util.matchdate(opts["date"])
1751 df = util.matchdate(opts["date"])
1752
1752
1753 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1753 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1754 for st, rev, fns in changeiter:
1754 for st, rev, fns in changeiter:
1755 if st == 'add':
1755 if st == 'add':
1756 changenode = repo.changelog.node(rev)
1756 changenode = repo.changelog.node(rev)
1757 parents = [p for p in repo.changelog.parentrevs(rev)
1757 parents = [p for p in repo.changelog.parentrevs(rev)
1758 if p != nullrev]
1758 if p != nullrev]
1759 if opts['no_merges'] and len(parents) == 2:
1759 if opts['no_merges'] and len(parents) == 2:
1760 continue
1760 continue
1761 if opts['only_merges'] and len(parents) != 2:
1761 if opts['only_merges'] and len(parents) != 2:
1762 continue
1762 continue
1763
1763
1764 if df:
1764 if df:
1765 changes = get(rev)
1765 changes = get(rev)
1766 if not df(changes[2][0]):
1766 if not df(changes[2][0]):
1767 continue
1767 continue
1768
1768
1769 if opts['keyword']:
1769 if opts['keyword']:
1770 changes = get(rev)
1770 changes = get(rev)
1771 miss = 0
1771 miss = 0
1772 for k in [kw.lower() for kw in opts['keyword']]:
1772 for k in [kw.lower() for kw in opts['keyword']]:
1773 if not (k in changes[1].lower() or
1773 if not (k in changes[1].lower() or
1774 k in changes[4].lower() or
1774 k in changes[4].lower() or
1775 k in " ".join(changes[3][:20]).lower()):
1775 k in " ".join(changes[3][:20]).lower()):
1776 miss = 1
1776 miss = 1
1777 break
1777 break
1778 if miss:
1778 if miss:
1779 continue
1779 continue
1780
1780
1781 copies = []
1781 copies = []
1782 if opts.get('copies') and rev:
1782 if opts.get('copies') and rev:
1783 mf = get(rev)[0]
1783 mf = get(rev)[0]
1784 for fn in get(rev)[3]:
1784 for fn in get(rev)[3]:
1785 rename = getrenamed(fn, rev, mf)
1785 rename = getrenamed(fn, rev, mf)
1786 if rename:
1786 if rename:
1787 copies.append((fn, rename[0]))
1787 copies.append((fn, rename[0]))
1788 displayer.show(rev, changenode, copies=copies)
1788 displayer.show(rev, changenode, copies=copies)
1789 elif st == 'iter':
1789 elif st == 'iter':
1790 if count == limit: break
1790 if count == limit: break
1791 if displayer.flush(rev):
1791 if displayer.flush(rev):
1792 count += 1
1792 count += 1
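
# Note on the loop above: cmdutil.walkchangerevs yields 'add' entries while
# it is still discovering matching revisions, and 'iter' entries in the order
# the revisions should be printed; show() queues output during the 'add'
# phase and flush() emits it (and counts toward --limit) during 'iter'.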

def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    The manifest is the list of files being version controlled. Print
    that list for the given revision; if no revision is given, the
    first parent of the working directory is used, or tip if no
    revision is checked out.

    With the -v flag, print file permissions. With the --debug flag,
    print file revision hashes.
    """

    m = repo.changectx(rev).manifest()
    files = m.keys()
    files.sort()

    for f in files:
        if ui.debugflag:
            ui.write("%40s " % hex(m[f]))
        if ui.verbose:
            ui.write("%3s " % (m.execf(f) and "755" or "644"))
        ui.write("%s\n" % f)
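
# Illustrative output (file names hypothetical): plain 'hg manifest' prints
# one path per line; with -v a permission column ("755" or "644") precedes
# the path, and with --debug a 40-character file revision hash comes first.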

def merge(ui, repo, node=None, force=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed relative to either parent
    are marked as changed for the next commit, and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    that other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if not node:
        heads = repo.heads()
        if len(heads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(heads))
        if len(heads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in heads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        node = parent == heads[0] and heads[-1] or heads[0]
    return hg.merge(repo, node, force=force)
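
# The head-selection expression above uses the old "cond and a or b" idiom;
# it is equivalent to "heads[-1] if parent == heads[0] else heads[0]", i.e.
# pick whichever of the two heads is not the working directory's parent.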

def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in o:
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            continue
        displayer.show(changenode=n)

def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions.
    """
    rev = opts.get('rev')
    if rev:
        if file_:
            ctx = repo.filectx(file_, changeid=rev)
        else:
            ctx = repo.changectx(rev)
        p = [cp.node() for cp in ctx.parents()]
    else:
        p = repo.dirstate.parents()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)

def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if search:
        for name, path in ui.configitems("paths"):
            if name == search:
                ui.write("%s\n" % path)
                return
        ui.warn(_("not found!\n"))
        return 1
    else:
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, path))
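
# Illustrative [paths] section for an hgrc (names and URLs hypothetical):
#
#   [paths]
#   default      = http://hg.example.com/project
#   default-push = ssh://hg@example.com/project
#
# 'hg paths default' would print the first URL; 'hg paths' alone lists both.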

def postincoming(ui, repo, modheads, optupdate):
    if modheads == 0:
        return
    if optupdate:
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        else:
            ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path, or one specified with remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source = ui.expandpath(source)
    setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    revs = None
    if opts['rev']:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in opts['rev']]
        else:
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'])
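
# Illustrative invocations (URLs and revisions are hypothetical):
#   hg pull                                    # pull from the 'default' path
#   hg pull -u http://hg.example.com/project   # pull, then update unless new
#                                              # heads were added
#   hg pull -r 1a2b3c4d ssh://user@example.com//srv/hg/project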

def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates that
    the client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    revs = None
    if opts['rev']:
        revs = [repo.lookup(rev) for rev in opts['rev']]
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0

def rawcommit(ui, repo, *pats, **opts):
    """raw commit interface (DEPRECATED)

    Low-level commit, for use in helper scripts. (DEPRECATED)

    This command is not intended to be used by normal users, as it is
    primarily useful for importing from other SCMs.

    This command is now deprecated and will be removed in a future
    release; please use debugsetparents and commit instead.
    """

    ui.warn(_("(the rawcommit command is deprecated)\n"))

    message = logmessage(opts)

    files, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if opts['files']:
        files += open(opts['files']).read().splitlines()

    parents = [repo.lookup(p) for p in opts['parent']]

    try:
        repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
    except ValueError, inst:
        raise util.Abort(str(inst))

def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files that have been manually deleted are marked as removed.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    names = []
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    exact = dict.fromkeys(files)
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs not in deleted and opts['after']:
            reason = _('is still present')
        elif abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif abs in unknown:
            reason = _('is not managed')
        elif abs in removed:
            continue
        if reason:
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=not opts['after'])

def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    names = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        names.append(abs)
    if not opts.get('dry_run'):
        repo.remove(names, True, wlock)
    return errs

def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is recreated. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if (dobackup and not opts['no_backup'] and
                (os.path.islink(rel) or os.path.exists(rel))):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(rel, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        repo.dirstate.forget(forget[0])
        r = hg.revert(repo, node, update.has_key, wlock)
        repo.dirstate.update(add[0], 'a')
        repo.dirstate.update(undelete[0], 'n')
        repo.dirstate.update(remove[0], 'r')
        return r
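
# Reading the dispatch table above: a locally modified file that is still
# present in the target manifest matches (modified, revert, remove, True,
# True), so it is reverted to the target revision after the working copy is
# saved as a .orig backup; the same file missing from the target manifest
# would be removed instead.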

def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    if opts['daemon'] and not opts['daemon_pipefds']:
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)

    httpd = hgweb.server.create_server(parentui, repo)

    if ui.verbose:
        if httpd.port != 80:
            ui.status(_('listening at http://%s:%d/\n') %
                      (httpd.addr, httpd.port))
        else:
            ui.status(_('listening at http://%s/\n') % httpd.addr)

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    httpd.serve_forever()
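
# --daemon handshake, as implemented above: the initial process creates a
# pipe, re-spawns itself with --daemon-pipefds=<rfd>,<wfd>, and blocks on a
# one-byte read from the pipe; the respawned process writes 'y' once the HTTP
# server is set up and then redirects stdin/stdout/stderr to the null device,
# so the parent exits only when the server is already listening.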

def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % util.pathto(repo.root, cwd, f))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
                                        end))
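
# Illustrative 'hg status' output (file names hypothetical):
#   M hgext/mq.py
#   A tests/test-new
#   R old-script
#   ? scratch.txt
# With -A or --copies, the source of a copied file is printed on the next
# line, prefixed with a single space and no status code.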

def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
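
# The .hgtags file written by repo.tag() is versioned like any other file;
# each line pairs a full 40-character changeset hash with a tag name, e.g.
# (hash hypothetical):
#   0123456789abcdef0123456789abcdef01234567 release-1.0
# Local tags go to .hg/localtags in the same format but are never committed.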

def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except revlog.LookupError:
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())

def unbundle(ui, repo, fname, **opts):
    """apply a changegroup file

    Apply a compressed changegroup file generated by the bundle
    command.
    """
    if os.path.exists(fname):
        f = open(fname, "rb")
    else:
        f = urllib.urlopen(fname)
    gen = changegroup.readbundle(f, fname)
    modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    return postincoming(ui, repo, modheads, opts['update'])
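
# Illustrative usage (file and URL hypothetical): as the urllib fallback
# above shows, the argument may be a local bundle file or a URL to fetch:
#   hg unbundle changes.hg
#   hg unbundle http://example.com/nightly.hg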

def update(ui, repo, node=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if date:
        if node:
            raise util.Abort(_("you can't specify a revision and a date"))
        node = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)
2584
2585
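# A short sketch of the branches above (the revision and date are
# hypothetical):
#
#   hg update 42                  # linear update; refuses to discard changes
#   hg update -C 42               # clean update; overwrites modified files
#   hg update -d "2006-12-01"     # tipmost revision matching the date,
#                                 # resolved first via cmdutil.finddate()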
2585 def verify(ui, repo):
2586 def verify(ui, repo):
2586 """verify the integrity of the repository
2587 """verify the integrity of the repository
2587
2588
2588 Verify the integrity of the current repository.
2589 Verify the integrity of the current repository.
2589
2590
2590 This will perform an extensive check of the repository's
2591 This will perform an extensive check of the repository's
2591 integrity, validating the hashes and checksums of each entry in
2592 integrity, validating the hashes and checksums of each entry in
2592 the changelog, manifest, and tracked files, as well as the
2593 the changelog, manifest, and tracked files, as well as the
2593 integrity of their crosslinks and indices.
2594 integrity of their crosslinks and indices.
2594 """
2595 """
2595 return hg.verify(repo)
2596 return hg.verify(repo)
2596
2597
2597 def version_(ui):
2598 def version_(ui):
2598 """output version and copyright information"""
2599 """output version and copyright information"""
2599 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2600 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2600 % version.get_version())
2601 % version.get_version())
2601 ui.status(_(
2602 ui.status(_(
2602 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2603 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2603 "This is free software; see the source for copying conditions. "
2604 "This is free software; see the source for copying conditions. "
2604 "There is NO\nwarranty; "
2605 "There is NO\nwarranty; "
2605 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2606 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2606 ))
2607 ))
2607
2608
2608 # Command options and aliases are listed here, alphabetically
2609 # Command options and aliases are listed here, alphabetically
2609
2610
2610 globalopts = [
2611 globalopts = [
2611 ('R', 'repository', '',
2612 ('R', 'repository', '',
2612 _('repository root directory or symbolic path name')),
2613 _('repository root directory or symbolic path name')),
2613 ('', 'cwd', '', _('change working directory')),
2614 ('', 'cwd', '', _('change working directory')),
2614 ('y', 'noninteractive', None,
2615 ('y', 'noninteractive', None,
2615 _('do not prompt, assume \'yes\' for any required answers')),
2616 _('do not prompt, assume \'yes\' for any required answers')),
2616 ('q', 'quiet', None, _('suppress output')),
2617 ('q', 'quiet', None, _('suppress output')),
2617 ('v', 'verbose', None, _('enable additional output')),
2618 ('v', 'verbose', None, _('enable additional output')),
2618 ('', 'config', [], _('set/override config option')),
2619 ('', 'config', [], _('set/override config option')),
2619 ('', 'debug', None, _('enable debugging output')),
2620 ('', 'debug', None, _('enable debugging output')),
2620 ('', 'debugger', None, _('start debugger')),
2621 ('', 'debugger', None, _('start debugger')),
2621 ('', 'encoding', util._encoding, _('set the charset encoding')),
2622 ('', 'encoding', util._encoding, _('set the charset encoding')),
2622 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2623 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2623 ('', 'lsprof', None, _('print improved command execution profile')),
2624 ('', 'lsprof', None, _('print improved command execution profile')),
2624 ('', 'traceback', None, _('print traceback on exception')),
2625 ('', 'traceback', None, _('print traceback on exception')),
2625 ('', 'time', None, _('time how long the command takes')),
2626 ('', 'time', None, _('time how long the command takes')),
2626 ('', 'profile', None, _('print command execution profile')),
2627 ('', 'profile', None, _('print command execution profile')),
2627 ('', 'version', None, _('output version information and exit')),
2628 ('', 'version', None, _('output version information and exit')),
2628 ('h', 'help', None, _('display help and exit')),
2629 ('h', 'help', None, _('display help and exit')),
2629 ]
2630 ]
2630
2631
2631 dryrunopts = [('n', 'dry-run', None,
2632 dryrunopts = [('n', 'dry-run', None,
2632 _('do not perform actions, just print output'))]
2633 _('do not perform actions, just print output'))]
2633
2634
2634 remoteopts = [
2635 remoteopts = [
2635 ('e', 'ssh', '', _('specify ssh command to use')),
2636 ('e', 'ssh', '', _('specify ssh command to use')),
2636 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2637 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2637 ]
2638 ]
2638
2639
2639 walkopts = [
2640 walkopts = [
2640 ('I', 'include', [], _('include names matching the given patterns')),
2641 ('I', 'include', [], _('include names matching the given patterns')),
2641 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2642 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2642 ]
2643 ]
2643
2644
2644 commitopts = [
2645 commitopts = [
2645 ('m', 'message', '', _('use <text> as commit message')),
2646 ('m', 'message', '', _('use <text> as commit message')),
2646 ('l', 'logfile', '', _('read commit message from <file>')),
2647 ('l', 'logfile', '', _('read commit message from <file>')),
2647 ]
2648 ]
2648
2649
2649 table = {
2650 table = {
2650 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2651 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2651 "addremove":
2652 "addremove":
2652 (addremove,
2653 (addremove,
2653 [('s', 'similarity', '',
2654 [('s', 'similarity', '',
2654 _('guess renamed files by similarity (0<=s<=100)')),
2655 _('guess renamed files by similarity (0<=s<=100)')),
2655 ] + walkopts + dryrunopts,
2656 ] + walkopts + dryrunopts,
2656 _('hg addremove [OPTION]... [FILE]...')),
2657 _('hg addremove [OPTION]... [FILE]...')),
2657 "^annotate":
2658 "^annotate":
2658 (annotate,
2659 (annotate,
2659 [('r', 'rev', '', _('annotate the specified revision')),
2660 [('r', 'rev', '', _('annotate the specified revision')),
2660 ('f', 'follow', None, _('follow file copies and renames')),
2661 ('f', 'follow', None, _('follow file copies and renames')),
2661 ('a', 'text', None, _('treat all files as text')),
2662 ('a', 'text', None, _('treat all files as text')),
2662 ('u', 'user', None, _('list the author')),
2663 ('u', 'user', None, _('list the author')),
2663 ('d', 'date', None, _('list the date')),
2664 ('d', 'date', None, _('list the date')),
2664 ('n', 'number', None, _('list the revision number (default)')),
2665 ('n', 'number', None, _('list the revision number (default)')),
2665 ('c', 'changeset', None, _('list the changeset')),
2666 ('c', 'changeset', None, _('list the changeset')),
2666 ] + walkopts,
2667 ] + walkopts,
2667 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2668 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2668 "archive":
2669 "archive":
2669 (archive,
2670 (archive,
2670 [('', 'no-decode', None, _('do not pass files through decoders')),
2671 [('', 'no-decode', None, _('do not pass files through decoders')),
2671 ('p', 'prefix', '', _('directory prefix for files in archive')),
2672 ('p', 'prefix', '', _('directory prefix for files in archive')),
2672 ('r', 'rev', '', _('revision to distribute')),
2673 ('r', 'rev', '', _('revision to distribute')),
2673 ('t', 'type', '', _('type of distribution to create')),
2674 ('t', 'type', '', _('type of distribution to create')),
2674 ] + walkopts,
2675 ] + walkopts,
2675 _('hg archive [OPTION]... DEST')),
2676 _('hg archive [OPTION]... DEST')),
2676 "backout":
2677 "backout":
2677 (backout,
2678 (backout,
2678 [('', 'merge', None,
2679 [('', 'merge', None,
2679 _('merge with old dirstate parent after backout')),
2680 _('merge with old dirstate parent after backout')),
2680 ('d', 'date', '', _('record datecode as commit date')),
2681 ('d', 'date', '', _('record datecode as commit date')),
2681 ('', 'parent', '', _('parent to choose when backing out merge')),
2682 ('', 'parent', '', _('parent to choose when backing out merge')),
2682 ('u', 'user', '', _('record user as committer')),
2683 ('u', 'user', '', _('record user as committer')),
2683 ] + walkopts + commitopts,
2684 ] + walkopts + commitopts,
2684 _('hg backout [OPTION]... REV')),
2685 _('hg backout [OPTION]... REV')),
2685 "branch": (branch,
2686 "branch": (branch,
2686 [('f', 'force', None,
2687 [('f', 'force', None,
2687 _('set branch name even if it shadows an existing branch'))],
2688 _('set branch name even if it shadows an existing branch'))],
2688 _('hg branch [NAME]')),
2689 _('hg branch [NAME]')),
2689 "branches": (branches, [], _('hg branches')),
2690 "branches": (branches, [], _('hg branches')),
2690 "bundle":
2691 "bundle":
2691 (bundle,
2692 (bundle,
2692 [('f', 'force', None,
2693 [('f', 'force', None,
2693 _('run even when remote repository is unrelated')),
2694 _('run even when remote repository is unrelated')),
2694 ('r', 'rev', [],
2695 ('r', 'rev', [],
2695 _('a changeset you would like to bundle')),
2696 _('a changeset you would like to bundle')),
2696 ('', 'base', [],
2697 ('', 'base', [],
2697 _('a base changeset to specify instead of a destination')),
2698 _('a base changeset to specify instead of a destination')),
2698 ] + remoteopts,
2699 ] + remoteopts,
2699 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2700 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2700 "cat":
2701 "cat":
2701 (cat,
2702 (cat,
2702 [('o', 'output', '', _('print output to file with formatted name')),
2703 [('o', 'output', '', _('print output to file with formatted name')),
2703 ('r', 'rev', '', _('print the given revision')),
2704 ('r', 'rev', '', _('print the given revision')),
2704 ] + walkopts,
2705 ] + walkopts,
2705 _('hg cat [OPTION]... FILE...')),
2706 _('hg cat [OPTION]... FILE...')),
2706 "^clone":
2707 "^clone":
2707 (clone,
2708 (clone,
2708 [('U', 'noupdate', None, _('do not update the new working directory')),
2709 [('U', 'noupdate', None, _('do not update the new working directory')),
2709 ('r', 'rev', [],
2710 ('r', 'rev', [],
2710 _('a changeset you would like to have after cloning')),
2711 _('a changeset you would like to have after cloning')),
2711 ('', 'pull', None, _('use pull protocol to copy metadata')),
2712 ('', 'pull', None, _('use pull protocol to copy metadata')),
2712 ('', 'uncompressed', None,
2713 ('', 'uncompressed', None,
2713 _('use uncompressed transfer (fast over LAN)')),
2714 _('use uncompressed transfer (fast over LAN)')),
2714 ] + remoteopts,
2715 ] + remoteopts,
2715 _('hg clone [OPTION]... SOURCE [DEST]')),
2716 _('hg clone [OPTION]... SOURCE [DEST]')),
2716 "^commit|ci":
2717 "^commit|ci":
2717 (commit,
2718 (commit,
2718 [('A', 'addremove', None,
2719 [('A', 'addremove', None,
2719 _('mark new/missing files as added/removed before committing')),
2720 _('mark new/missing files as added/removed before committing')),
2720 ('d', 'date', '', _('record datecode as commit date')),
2721 ('d', 'date', '', _('record datecode as commit date')),
2721 ('u', 'user', '', _('record user as committer')),
2722 ('u', 'user', '', _('record user as committer')),
2722 ] + walkopts + commitopts,
2723 ] + walkopts + commitopts,
2723 _('hg commit [OPTION]... [FILE]...')),
2724 _('hg commit [OPTION]... [FILE]...')),
2724 "copy|cp":
2725 "copy|cp":
2725 (copy,
2726 (copy,
2726 [('A', 'after', None, _('record a copy that has already occurred')),
2727 [('A', 'after', None, _('record a copy that has already occurred')),
2727 ('f', 'force', None,
2728 ('f', 'force', None,
2728 _('forcibly copy over an existing managed file')),
2729 _('forcibly copy over an existing managed file')),
2729 ] + walkopts + dryrunopts,
2730 ] + walkopts + dryrunopts,
2730 _('hg copy [OPTION]... [SOURCE]... DEST')),
2731 _('hg copy [OPTION]... [SOURCE]... DEST')),
2731 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2732 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2732 "debugcomplete":
2733 "debugcomplete":
2733 (debugcomplete,
2734 (debugcomplete,
2734 [('o', 'options', None, _('show the command options'))],
2735 [('o', 'options', None, _('show the command options'))],
2735 _('debugcomplete [-o] CMD')),
2736 _('debugcomplete [-o] CMD')),
2736 "debuginstall": (debuginstall, [], _('debuginstall')),
2737 "debuginstall": (debuginstall, [], _('debuginstall')),
2737 "debugrebuildstate":
2738 "debugrebuildstate":
2738 (debugrebuildstate,
2739 (debugrebuildstate,
2739 [('r', 'rev', '', _('revision to rebuild to'))],
2740 [('r', 'rev', '', _('revision to rebuild to'))],
2740 _('debugrebuildstate [-r REV] [REV]')),
2741 _('debugrebuildstate [-r REV] [REV]')),
2741 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2742 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2742 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2743 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2743 "debugstate": (debugstate, [], _('debugstate')),
2744 "debugstate": (debugstate, [], _('debugstate')),
2744 "debugdate":
2745 "debugdate":
2745 (debugdate,
2746 (debugdate,
2746 [('e', 'extended', None, _('try extended date formats'))],
2747 [('e', 'extended', None, _('try extended date formats'))],
2747 _('debugdate [-e] DATE [RANGE]')),
2748 _('debugdate [-e] DATE [RANGE]')),
2748 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2749 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2749 "debugindex": (debugindex, [], _('debugindex FILE')),
2750 "debugindex": (debugindex, [], _('debugindex FILE')),
2750 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2751 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2751 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2752 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2752 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2753 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2753 "^diff":
2754 "^diff":
2754 (diff,
2755 (diff,
2755 [('r', 'rev', [], _('revision')),
2756 [('r', 'rev', [], _('revision')),
2756 ('a', 'text', None, _('treat all files as text')),
2757 ('a', 'text', None, _('treat all files as text')),
2757 ('p', 'show-function', None,
2758 ('p', 'show-function', None,
2758 _('show which function each change is in')),
2759 _('show which function each change is in')),
2759 ('g', 'git', None, _('use git extended diff format')),
2760 ('g', 'git', None, _('use git extended diff format')),
2760 ('', 'nodates', None, _("don't include dates in diff headers")),
2761 ('', 'nodates', None, _("don't include dates in diff headers")),
2761 ('w', 'ignore-all-space', None,
2762 ('w', 'ignore-all-space', None,
2762 _('ignore white space when comparing lines')),
2763 _('ignore white space when comparing lines')),
2763 ('b', 'ignore-space-change', None,
2764 ('b', 'ignore-space-change', None,
2764 _('ignore changes in the amount of white space')),
2765 _('ignore changes in the amount of white space')),
2765 ('B', 'ignore-blank-lines', None,
2766 ('B', 'ignore-blank-lines', None,
2766 _('ignore changes whose lines are all blank')),
2767 _('ignore changes whose lines are all blank')),
2767 ] + walkopts,
2768 ] + walkopts,
2768 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2769 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2769 "^export":
2770 "^export":
2770 (export,
2771 (export,
2771 [('o', 'output', '', _('print output to file with formatted name')),
2772 [('o', 'output', '', _('print output to file with formatted name')),
2772 ('a', 'text', None, _('treat all files as text')),
2773 ('a', 'text', None, _('treat all files as text')),
2773 ('g', 'git', None, _('use git extended diff format')),
2774 ('g', 'git', None, _('use git extended diff format')),
2774 ('', 'nodates', None, _("don't include dates in diff headers")),
2775 ('', 'nodates', None, _("don't include dates in diff headers")),
2775 ('', 'switch-parent', None, _('diff against the second parent'))],
2776 ('', 'switch-parent', None, _('diff against the second parent'))],
2776 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2777 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2777 "grep":
2778 "grep":
2778 (grep,
2779 (grep,
2779 [('0', 'print0', None, _('end fields with NUL')),
2780 [('0', 'print0', None, _('end fields with NUL')),
2780 ('', 'all', None, _('print all revisions that match')),
2781 ('', 'all', None, _('print all revisions that match')),
2781 ('f', 'follow', None,
2782 ('f', 'follow', None,
2782 _('follow changeset history, or file history across copies and renames')),
2783 _('follow changeset history, or file history across copies and renames')),
2783 ('i', 'ignore-case', None, _('ignore case when matching')),
2784 ('i', 'ignore-case', None, _('ignore case when matching')),
2784 ('l', 'files-with-matches', None,
2785 ('l', 'files-with-matches', None,
2785 _('print only filenames and revs that match')),
2786 _('print only filenames and revs that match')),
2786 ('n', 'line-number', None, _('print matching line numbers')),
2787 ('n', 'line-number', None, _('print matching line numbers')),
2787 ('r', 'rev', [], _('search in given revision range')),
2788 ('r', 'rev', [], _('search in given revision range')),
2788 ('u', 'user', None, _('print user who committed change')),
2789 ('u', 'user', None, _('print user who committed change')),
2789 ] + walkopts,
2790 ] + walkopts,
2790 _('hg grep [OPTION]... PATTERN [FILE]...')),
2791 _('hg grep [OPTION]... PATTERN [FILE]...')),
2791 "heads":
2792 "heads":
2792 (heads,
2793 (heads,
2793 [('', 'style', '', _('display using template map file')),
2794 [('', 'style', '', _('display using template map file')),
2794 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2795 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2795 ('', 'template', '', _('display with template'))],
2796 ('', 'template', '', _('display with template'))],
2796 _('hg heads [-r REV]')),
2797 _('hg heads [-r REV]')),
2797 "help": (help_, [], _('hg help [COMMAND]')),
2798 "help": (help_, [], _('hg help [COMMAND]')),
2798 "identify|id": (identify, [], _('hg identify')),
2799 "identify|id": (identify, [], _('hg identify')),
2799 "import|patch":
2800 "import|patch":
2800 (import_,
2801 (import_,
2801 [('p', 'strip', 1,
2802 [('p', 'strip', 1,
2802 _('directory strip option for patch. This has the same\n'
2803 _('directory strip option for patch. This has the same\n'
2803 'meaning as the corresponding patch option')),
2804 'meaning as the corresponding patch option')),
2804 ('b', 'base', '', _('base path')),
2805 ('b', 'base', '', _('base path')),
2805 ('f', 'force', None,
2806 ('f', 'force', None,
2806 _('skip check for outstanding uncommitted changes')),
2807 _('skip check for outstanding uncommitted changes')),
2807 ('', 'exact', None,
2808 ('', 'exact', None,
2808 _('apply patch to the nodes from which it was generated'))] + commitopts,
2809 _('apply patch to the nodes from which it was generated'))] + commitopts,
2809 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2810 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2810 "incoming|in": (incoming,
2811 "incoming|in": (incoming,
2811 [('M', 'no-merges', None, _('do not show merges')),
2812 [('M', 'no-merges', None, _('do not show merges')),
2812 ('f', 'force', None,
2813 ('f', 'force', None,
2813 _('run even when remote repository is unrelated')),
2814 _('run even when remote repository is unrelated')),
2814 ('', 'style', '', _('display using template map file')),
2815 ('', 'style', '', _('display using template map file')),
2815 ('n', 'newest-first', None, _('show newest record first')),
2816 ('n', 'newest-first', None, _('show newest record first')),
2816 ('', 'bundle', '', _('file to store the bundles into')),
2817 ('', 'bundle', '', _('file to store the bundles into')),
2817 ('p', 'patch', None, _('show patch')),
2818 ('p', 'patch', None, _('show patch')),
2818 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2819 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2819 ('', 'template', '', _('display with template')),
2820 ('', 'template', '', _('display with template')),
2820 ] + remoteopts,
2821 ] + remoteopts,
2821 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2822 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2822 ' [--bundle FILENAME] [SOURCE]')),
2823 ' [--bundle FILENAME] [SOURCE]')),
2823 "^init":
2824 "^init":
2824 (init,
2825 (init,
2825 remoteopts,
2826 remoteopts,
2826 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2827 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2827 "locate":
2828 "locate":
2828 (locate,
2829 (locate,
2829 [('r', 'rev', '', _('search the repository as it stood at rev')),
2830 [('r', 'rev', '', _('search the repository as it stood at rev')),
2830 ('0', 'print0', None,
2831 ('0', 'print0', None,
2831 _('end filenames with NUL, for use with xargs')),
2832 _('end filenames with NUL, for use with xargs')),
2832 ('f', 'fullpath', None,
2833 ('f', 'fullpath', None,
2833 _('print complete paths from the filesystem root')),
2834 _('print complete paths from the filesystem root')),
2834 ] + walkopts,
2835 ] + walkopts,
2835 _('hg locate [OPTION]... [PATTERN]...')),
2836 _('hg locate [OPTION]... [PATTERN]...')),
2836 "^log|history":
2837 "^log|history":
2837 (log,
2838 (log,
2838 [('f', 'follow', None,
2839 [('f', 'follow', None,
2839 _('follow changeset history, or file history across copies and renames')),
2840 _('follow changeset history, or file history across copies and renames')),
2840 ('', 'follow-first', None,
2841 ('', 'follow-first', None,
2841 _('only follow the first parent of merge changesets')),
2842 _('only follow the first parent of merge changesets')),
2842 ('d', 'date', '', _('show revs matching date spec')),
2843 ('d', 'date', '', _('show revs matching date spec')),
2843 ('C', 'copies', None, _('show copied files')),
2844 ('C', 'copies', None, _('show copied files')),
2844 ('k', 'keyword', [], _('search for a keyword')),
2845 ('k', 'keyword', [], _('search for a keyword')),
2845 ('l', 'limit', '', _('limit number of changes displayed')),
2846 ('l', 'limit', '', _('limit number of changes displayed')),
2846 ('r', 'rev', [], _('show the specified revision or range')),
2847 ('r', 'rev', [], _('show the specified revision or range')),
2847 ('', 'removed', None, _('include revs where files were removed')),
2848 ('', 'removed', None, _('include revs where files were removed')),
2848 ('M', 'no-merges', None, _('do not show merges')),
2849 ('M', 'no-merges', None, _('do not show merges')),
2849 ('', 'style', '', _('display using template map file')),
2850 ('', 'style', '', _('display using template map file')),
2850 ('m', 'only-merges', None, _('show only merges')),
2851 ('m', 'only-merges', None, _('show only merges')),
2851 ('p', 'patch', None, _('show patch')),
2852 ('p', 'patch', None, _('show patch')),
2852 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2853 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2853 ('', 'template', '', _('display with template')),
2854 ('', 'template', '', _('display with template')),
2854 ] + walkopts,
2855 ] + walkopts,
2855 _('hg log [OPTION]... [FILE]')),
2856 _('hg log [OPTION]... [FILE]')),
2856 "manifest": (manifest, [], _('hg manifest [REV]')),
2857 "manifest": (manifest, [], _('hg manifest [REV]')),
2857 "^merge":
2858 "^merge":
2858 (merge,
2859 (merge,
2859 [('f', 'force', None, _('force a merge with outstanding changes'))],
2860 [('f', 'force', None, _('force a merge with outstanding changes'))],
2860 _('hg merge [-f] [REV]')),
2861 _('hg merge [-f] [REV]')),
2861 "outgoing|out": (outgoing,
2862 "outgoing|out": (outgoing,
2862 [('M', 'no-merges', None, _('do not show merges')),
2863 [('M', 'no-merges', None, _('do not show merges')),
2863 ('f', 'force', None,
2864 ('f', 'force', None,
2864 _('run even when remote repository is unrelated')),
2865 _('run even when remote repository is unrelated')),
2865 ('p', 'patch', None, _('show patch')),
2866 ('p', 'patch', None, _('show patch')),
2866 ('', 'style', '', _('display using template map file')),
2867 ('', 'style', '', _('display using template map file')),
2867 ('r', 'rev', [], _('a specific revision you would like to push')),
2868 ('r', 'rev', [], _('a specific revision you would like to push')),
2868 ('n', 'newest-first', None, _('show newest record first')),
2869 ('n', 'newest-first', None, _('show newest record first')),
2869 ('', 'template', '', _('display with template')),
2870 ('', 'template', '', _('display with template')),
2870 ] + remoteopts,
2871 ] + remoteopts,
2871 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2872 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2872 "^parents":
2873 "^parents":
2873 (parents,
2874 (parents,
2874 [('r', 'rev', '', _('show parents from the specified rev')),
2875 [('r', 'rev', '', _('show parents from the specified rev')),
2875 ('', 'style', '', _('display using template map file')),
2876 ('', 'style', '', _('display using template map file')),
2876 ('', 'template', '', _('display with template'))],
2877 ('', 'template', '', _('display with template'))],
2877 _('hg parents [-r REV] [FILE]')),
2878 _('hg parents [-r REV] [FILE]')),
2878 "paths": (paths, [], _('hg paths [NAME]')),
2879 "paths": (paths, [], _('hg paths [NAME]')),
2879 "^pull":
2880 "^pull":
2880 (pull,
2881 (pull,
2881 [('u', 'update', None,
2882 [('u', 'update', None,
2882 _('update to new tip if changesets were pulled')),
2883 _('update to new tip if changesets were pulled')),
2883 ('f', 'force', None,
2884 ('f', 'force', None,
2884 _('run even when remote repository is unrelated')),
2885 _('run even when remote repository is unrelated')),
2885 ('r', 'rev', [],
2886 ('r', 'rev', [],
2886 _('a specific revision up to which you would like to pull')),
2887 _('a specific revision up to which you would like to pull')),
2887 ] + remoteopts,
2888 ] + remoteopts,
2888 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2889 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2889 "^push":
2890 "^push":
2890 (push,
2891 (push,
2891 [('f', 'force', None, _('force push')),
2892 [('f', 'force', None, _('force push')),
2892 ('r', 'rev', [], _('a specific revision you would like to push')),
2893 ('r', 'rev', [], _('a specific revision you would like to push')),
2893 ] + remoteopts,
2894 ] + remoteopts,
2894 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2895 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2895 "debugrawcommit|rawcommit":
2896 "debugrawcommit|rawcommit":
2896 (rawcommit,
2897 (rawcommit,
2897 [('p', 'parent', [], _('parent')),
2898 [('p', 'parent', [], _('parent')),
2898 ('d', 'date', '', _('date code')),
2899 ('d', 'date', '', _('date code')),
2899 ('u', 'user', '', _('user')),
2900 ('u', 'user', '', _('user')),
2900 ('F', 'files', '', _('file list'))
2901 ('F', 'files', '', _('file list'))
2901 ] + commitopts,
2902 ] + commitopts,
2902 _('hg debugrawcommit [OPTION]... [FILE]...')),
2903 _('hg debugrawcommit [OPTION]... [FILE]...')),
2903 "recover": (recover, [], _('hg recover')),
2904 "recover": (recover, [], _('hg recover')),
2904 "^remove|rm":
2905 "^remove|rm":
2905 (remove,
2906 (remove,
2906 [('A', 'after', None, _('record a remove that has already occurred')),
2907 [('A', 'after', None, _('record a remove that has already occurred')),
2907 ('f', 'force', None, _('remove file even if modified')),
2908 ('f', 'force', None, _('remove file even if modified')),
2908 ] + walkopts,
2909 ] + walkopts,
2909 _('hg remove [OPTION]... FILE...')),
2910 _('hg remove [OPTION]... FILE...')),
2910 "rename|mv":
2911 "rename|mv":
2911 (rename,
2912 (rename,
2912 [('A', 'after', None, _('record a rename that has already occurred')),
2913 [('A', 'after', None, _('record a rename that has already occurred')),
2913 ('f', 'force', None,
2914 ('f', 'force', None,
2914 _('forcibly copy over an existing managed file')),
2915 _('forcibly copy over an existing managed file')),
2915 ] + walkopts + dryrunopts,
2916 ] + walkopts + dryrunopts,
2916 _('hg rename [OPTION]... SOURCE... DEST')),
2917 _('hg rename [OPTION]... SOURCE... DEST')),
2917 "^revert":
2918 "^revert":
2918 (revert,
2919 (revert,
2919 [('a', 'all', None, _('revert all changes when no arguments given')),
2920 [('a', 'all', None, _('revert all changes when no arguments given')),
2920 ('d', 'date', '', _('tipmost revision matching date')),
2921 ('d', 'date', '', _('tipmost revision matching date')),
2921 ('r', 'rev', '', _('revision to revert to')),
2922 ('r', 'rev', '', _('revision to revert to')),
2922 ('', 'no-backup', None, _('do not save backup copies of files')),
2923 ('', 'no-backup', None, _('do not save backup copies of files')),
2923 ] + walkopts + dryrunopts,
2924 ] + walkopts + dryrunopts,
2924 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2925 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2925 "rollback": (rollback, [], _('hg rollback')),
2926 "rollback": (rollback, [], _('hg rollback')),
2926 "root": (root, [], _('hg root')),
2927 "root": (root, [], _('hg root')),
2927 "showconfig|debugconfig":
2928 "showconfig|debugconfig":
2928 (showconfig,
2929 (showconfig,
2929 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2930 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2930 _('showconfig [-u] [NAME]...')),
2931 _('showconfig [-u] [NAME]...')),
2931 "^serve":
2932 "^serve":
2932 (serve,
2933 (serve,
2933 [('A', 'accesslog', '', _('name of access log file to write to')),
2934 [('A', 'accesslog', '', _('name of access log file to write to')),
2934 ('d', 'daemon', None, _('run server in background')),
2935 ('d', 'daemon', None, _('run server in background')),
2935 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2936 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2936 ('E', 'errorlog', '', _('name of error log file to write to')),
2937 ('E', 'errorlog', '', _('name of error log file to write to')),
2937 ('p', 'port', 0, _('port to use (default: 8000)')),
2938 ('p', 'port', 0, _('port to use (default: 8000)')),
2938 ('a', 'address', '', _('address to use')),
2939 ('a', 'address', '', _('address to use')),
2939 ('n', 'name', '',
2940 ('n', 'name', '',
2940 _('name to show in web pages (default: working dir)')),
2941 _('name to show in web pages (default: working dir)')),
2941 ('', 'webdir-conf', '', _('name of the webdir config file'
2942 ('', 'webdir-conf', '', _('name of the webdir config file'
2942 ' (serve more than one repo)')),
2943 ' (serve more than one repo)')),
2943 ('', 'pid-file', '', _('name of file to write process ID to')),
2944 ('', 'pid-file', '', _('name of file to write process ID to')),
2944 ('', 'stdio', None, _('for remote clients')),
2945 ('', 'stdio', None, _('for remote clients')),
2945 ('t', 'templates', '', _('web templates to use')),
2946 ('t', 'templates', '', _('web templates to use')),
2946 ('', 'style', '', _('template style to use')),
2947 ('', 'style', '', _('template style to use')),
2947 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2948 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2948 _('hg serve [OPTION]...')),
2949 _('hg serve [OPTION]...')),
2949 "^status|st":
2950 "^status|st":
2950 (status,
2951 (status,
2951 [('A', 'all', None, _('show status of all files')),
2952 [('A', 'all', None, _('show status of all files')),
2952 ('m', 'modified', None, _('show only modified files')),
2953 ('m', 'modified', None, _('show only modified files')),
2953 ('a', 'added', None, _('show only added files')),
2954 ('a', 'added', None, _('show only added files')),
2954 ('r', 'removed', None, _('show only removed files')),
2955 ('r', 'removed', None, _('show only removed files')),
2955 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2956 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2956 ('c', 'clean', None, _('show only files without changes')),
2957 ('c', 'clean', None, _('show only files without changes')),
2957 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2958 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2958 ('i', 'ignored', None, _('show only ignored files')),
2959 ('i', 'ignored', None, _('show only ignored files')),
2959 ('n', 'no-status', None, _('hide status prefix')),
2960 ('n', 'no-status', None, _('hide status prefix')),
2960 ('C', 'copies', None, _('show source of copied files')),
2961 ('C', 'copies', None, _('show source of copied files')),
2961 ('0', 'print0', None,
2962 ('0', 'print0', None,
2962 _('end filenames with NUL, for use with xargs')),
2963 _('end filenames with NUL, for use with xargs')),
2963 ('', 'rev', [], _('show difference from revision')),
2964 ('', 'rev', [], _('show difference from revision')),
2964 ] + walkopts,
2965 ] + walkopts,
2965 _('hg status [OPTION]... [FILE]...')),
2966 _('hg status [OPTION]... [FILE]...')),
2966 "tag":
2967 "tag":
2967 (tag,
2968 (tag,
2968 [('f', 'force', None, _('replace existing tag')),
2969 [('f', 'force', None, _('replace existing tag')),
2969 ('l', 'local', None, _('make the tag local')),
2970 ('l', 'local', None, _('make the tag local')),
2970 ('m', 'message', '', _('message for tag commit log entry')),
2971 ('m', 'message', '', _('message for tag commit log entry')),
2971 ('d', 'date', '', _('record datecode as commit date')),
2972 ('d', 'date', '', _('record datecode as commit date')),
2972 ('u', 'user', '', _('record user as committer')),
2973 ('u', 'user', '', _('record user as committer')),
2973 ('r', 'rev', '', _('revision to tag')),
2974 ('r', 'rev', '', _('revision to tag')),
2974 ('', 'remove', None, _('remove a tag'))],
2975 ('', 'remove', None, _('remove a tag'))],
2975 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2976 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2976 "tags": (tags, [], _('hg tags')),
2977 "tags": (tags, [], _('hg tags')),
2977 "tip":
2978 "tip":
2978 (tip,
2979 (tip,
2979 [('', 'style', '', _('display using template map file')),
2980 [('', 'style', '', _('display using template map file')),
2980 ('p', 'patch', None, _('show patch')),
2981 ('p', 'patch', None, _('show patch')),
2981 ('', 'template', '', _('display with template'))],
2982 ('', 'template', '', _('display with template'))],
2982 _('hg tip [-p]')),
2983 _('hg tip [-p]')),
2983 "unbundle":
2984 "unbundle":
2984 (unbundle,
2985 (unbundle,
2985 [('u', 'update', None,
2986 [('u', 'update', None,
2986 _('update to new tip if changesets were unbundled'))],
2987 _('update to new tip if changesets were unbundled'))],
2987 _('hg unbundle [-u] FILE')),
2988 _('hg unbundle [-u] FILE')),
2988 "^update|up|checkout|co":
2989 "^update|up|checkout|co":
2989 (update,
2990 (update,
2990 [('C', 'clean', None, _('overwrite locally modified files')),
2991 [('C', 'clean', None, _('overwrite locally modified files')),
2991 ('d', 'date', '', _('tipmost revision matching date'))],
2992 ('d', 'date', '', _('tipmost revision matching date'))],
2992 _('hg update [-C] [-d DATE] [REV]')),
2993 _('hg update [-C] [-d DATE] [REV]')),
2993 "verify": (verify, [], _('hg verify')),
2994 "verify": (verify, [], _('hg verify')),
2994 "version": (version_, [], _('hg version')),
2995 "version": (version_, [], _('hg version')),
2995 }
2996 }
2996
2997
2997 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2998 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2998 " debugindex debugindexdot debugdate debuginstall")
2999 " debugindex debugindexdot debugdate debuginstall")
2999 optionalrepo = ("paths serve showconfig")
3000 optionalrepo = ("paths serve showconfig")
3000
3001
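# Each key above is a "name|alias..." string (a leading "^" marks commands
# shown in the short help list) mapping to a (function, options, synopsis)
# triple; each option is a (shortname, longname, default, help) tuple as
# consumed by fancyopts. A hedged sketch of what one extra entry would look
# like -- the command itself is hypothetical:
#
#   def hello(ui, repo, **opts):
#       ui.write("hello from %s\n" % repo.root)
#
#   table["hello"] = (hello, [], _('hg hello'))
#
# findcmd() resolves the user's string to such an entry and dispatch() calls
# the function with the parsed command options.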
3001 def findpossible(ui, cmd):
3002 def findpossible(ui, cmd):
3002 """
3003 """
3003 Return cmd -> (aliases, command table entry)
3004 Return cmd -> (aliases, command table entry)
3004 for each matching command.
3005 for each matching command.
3005 Return debug commands (or their aliases) only if no normal command matches.
3006 Return debug commands (or their aliases) only if no normal command matches.
3006 """
3007 """
3007 choice = {}
3008 choice = {}
3008 debugchoice = {}
3009 debugchoice = {}
3009 for e in table.keys():
3010 for e in table.keys():
3010 aliases = e.lstrip("^").split("|")
3011 aliases = e.lstrip("^").split("|")
3011 found = None
3012 found = None
3012 if cmd in aliases:
3013 if cmd in aliases:
3013 found = cmd
3014 found = cmd
3014 elif not ui.config("ui", "strict"):
3015 elif not ui.config("ui", "strict"):
3015 for a in aliases:
3016 for a in aliases:
3016 if a.startswith(cmd):
3017 if a.startswith(cmd):
3017 found = a
3018 found = a
3018 break
3019 break
3019 if found is not None:
3020 if found is not None:
3020 if aliases[0].startswith("debug") or found.startswith("debug"):
3021 if aliases[0].startswith("debug") or found.startswith("debug"):
3021 debugchoice[found] = (aliases, table[e])
3022 debugchoice[found] = (aliases, table[e])
3022 else:
3023 else:
3023 choice[found] = (aliases, table[e])
3024 choice[found] = (aliases, table[e])
3024
3025
3025 if not choice and debugchoice:
3026 if not choice and debugchoice:
3026 choice = debugchoice
3027 choice = debugchoice
3027
3028
3028 return choice
3029 return choice
3029
3030
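# Abbreviations are accepted only when the prefix is unambiguous (and only
# when ui.strict is unset). A hypothetical illustration of the return value:
#
#   findpossible(ui, "stat")  ->  {"status": (aliases, entry)}    # unique
#   findpossible(ui, "re")    ->  {"recover": ..., "remove": ...,
#                                  "rename": ..., "revert": ...}  # ambiguous
#
# findcmd() below maps the ambiguous case to AmbiguousCommand.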
3030 def findcmd(ui, cmd):
3031 def findcmd(ui, cmd):
3031 """Return (aliases, command table entry) for command string."""
3032 """Return (aliases, command table entry) for command string."""
3032 choice = findpossible(ui, cmd)
3033 choice = findpossible(ui, cmd)
3033
3034
3034 if choice.has_key(cmd):
3035 if choice.has_key(cmd):
3035 return choice[cmd]
3036 return choice[cmd]
3036
3037
3037 if len(choice) > 1:
3038 if len(choice) > 1:
3038 clist = choice.keys()
3039 clist = choice.keys()
3039 clist.sort()
3040 clist.sort()
3040 raise AmbiguousCommand(cmd, clist)
3041 raise AmbiguousCommand(cmd, clist)
3041
3042
3042 if choice:
3043 if choice:
3043 return choice.values()[0]
3044 return choice.values()[0]
3044
3045
3045 raise UnknownCommand(cmd)
3046 raise UnknownCommand(cmd)
3046
3047
3047 def catchterm(*args):
3048 def catchterm(*args):
3048 raise util.SignalInterrupt
3049 raise util.SignalInterrupt
3049
3050
3050 def run():
3051 def run():
3051 sys.exit(dispatch(sys.argv[1:]))
3052 sys.exit(dispatch(sys.argv[1:]))
3052
3053
3053 class ParseError(Exception):
3054 class ParseError(Exception):
3054 """Exception raised on errors in parsing the command line."""
3055 """Exception raised on errors in parsing the command line."""
3055
3056
3056 def parse(ui, args):
3057 def parse(ui, args):
3057 options = {}
3058 options = {}
3058 cmdoptions = {}
3059 cmdoptions = {}
3059
3060
3060 try:
3061 try:
3061 args = fancyopts.fancyopts(args, globalopts, options)
3062 args = fancyopts.fancyopts(args, globalopts, options)
3062 except fancyopts.getopt.GetoptError, inst:
3063 except fancyopts.getopt.GetoptError, inst:
3063 raise ParseError(None, inst)
3064 raise ParseError(None, inst)
3064
3065
3065 if args:
3066 if args:
3066 cmd, args = args[0], args[1:]
3067 cmd, args = args[0], args[1:]
3067 aliases, i = findcmd(ui, cmd)
3068 aliases, i = findcmd(ui, cmd)
3068 cmd = aliases[0]
3069 cmd = aliases[0]
3069 defaults = ui.config("defaults", cmd)
3070 defaults = ui.config("defaults", cmd)
3070 if defaults:
3071 if defaults:
3071 args = shlex.split(defaults) + args
3072 args = shlex.split(defaults) + args
3072 c = list(i[1])
3073 c = list(i[1])
3073 else:
3074 else:
3074 cmd = None
3075 cmd = None
3075 c = []
3076 c = []
3076
3077
3077 # combine global options into local
3078 # combine global options into local
3078 for o in globalopts:
3079 for o in globalopts:
3079 c.append((o[0], o[1], options[o[1]], o[3]))
3080 c.append((o[0], o[1], options[o[1]], o[3]))
3080
3081
3081 try:
3082 try:
3082 args = fancyopts.fancyopts(args, c, cmdoptions)
3083 args = fancyopts.fancyopts(args, c, cmdoptions)
3083 except fancyopts.getopt.GetoptError, inst:
3084 except fancyopts.getopt.GetoptError, inst:
3084 raise ParseError(cmd, inst)
3085 raise ParseError(cmd, inst)
3085
3086
3086 # separate global options back out
3087 # separate global options back out
3087 for o in globalopts:
3088 for o in globalopts:
3088 n = o[1]
3089 n = o[1]
3089 options[n] = cmdoptions[n]
3090 options[n] = cmdoptions[n]
3090 del cmdoptions[n]
3091 del cmdoptions[n]
3091
3092
3092 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3093 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3093
3094
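# The ui.config("defaults", cmd) lookup above lets per-command default
# arguments be configured in hgrc; they are shlex-split and prepended to the
# command line. A hypothetical configuration:
#
#   [defaults]
#   log = -l 10 --no-merges
#
# makes a bare `hg log` run as `hg log -l 10 --no-merges`.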
3094 external = {}
3095 external = {}
3095
3096
3096 def findext(name):
3097 def findext(name):
3097 '''return module with given extension name'''
3098 '''return module with given extension name'''
3098 try:
3099 try:
3099 return sys.modules[external[name]]
3100 return sys.modules[external[name]]
3100 except KeyError:
3101 except KeyError:
3101 for k, v in external.iteritems():
3102 for k, v in external.iteritems():
3102 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3103 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3103 return sys.modules[v]
3104 return sys.modules[v]
3104 raise KeyError(name)
3105 raise KeyError(name)
3105
3106
3106 def load_extensions(ui):
3107 def load_extensions(ui):
3107 added = []
3108 added = []
3108 for ext_name, load_from_name in ui.extensions():
3109 for ext_name, load_from_name in ui.extensions():
3109 if ext_name in external:
3110 if ext_name in external:
3110 continue
3111 continue
3111 try:
3112 try:
3112 if load_from_name:
3113 if load_from_name:
3113 # the module will be loaded in sys.modules
3114 # the module will be loaded in sys.modules
3114 # choose a unique name so that it doesn't
3115 # choose a unique name so that it doesn't
3115 # conflict with other modules
3116 # conflict with other modules
3116 module_name = "hgext_%s" % ext_name.replace('.', '_')
3117 module_name = "hgext_%s" % ext_name.replace('.', '_')
3117 mod = imp.load_source(module_name, load_from_name)
3118 mod = imp.load_source(module_name, load_from_name)
3118 else:
3119 else:
3119 def importh(name):
3120 def importh(name):
3120 mod = __import__(name)
3121 mod = __import__(name)
3121 components = name.split('.')
3122 components = name.split('.')
3122 for comp in components[1:]:
3123 for comp in components[1:]:
3123 mod = getattr(mod, comp)
3124 mod = getattr(mod, comp)
3124 return mod
3125 return mod
3125 try:
3126 try:
3126 mod = importh("hgext.%s" % ext_name)
3127 mod = importh("hgext.%s" % ext_name)
3127 except ImportError:
3128 except ImportError:
3128 mod = importh(ext_name)
3129 mod = importh(ext_name)
3129 external[ext_name] = mod.__name__
3130 external[ext_name] = mod.__name__
3130 added.append((mod, ext_name))
3131 added.append((mod, ext_name))
3131 except (util.SignalInterrupt, KeyboardInterrupt):
3132 except (util.SignalInterrupt, KeyboardInterrupt):
3132 raise
3133 raise
3133 except Exception, inst:
3134 except Exception, inst:
3134 ui.warn(_("*** failed to import extension %s: %s\n") %
3135 ui.warn(_("*** failed to import extension %s: %s\n") %
3135 (ext_name, inst))
3136 (ext_name, inst))
3136 if ui.print_exc():
3137 if ui.print_exc():
3137 return 1
3138 return 1
3138
3139
3139 for mod, name in added:
3140 for mod, name in added:
3140 uisetup = getattr(mod, 'uisetup', None)
3141 uisetup = getattr(mod, 'uisetup', None)
3141 if uisetup:
3142 if uisetup:
3142 uisetup(ui)
3143 uisetup(ui)
3143 reposetup = getattr(mod, 'reposetup', None)
3144 reposetup = getattr(mod, 'reposetup', None)
3144 if reposetup:
3145 if reposetup:
3145 hg.repo_setup_hooks.append(reposetup)
3146 hg.repo_setup_hooks.append(reposetup)
3146 cmdtable = getattr(mod, 'cmdtable', {})
3147 cmdtable = getattr(mod, 'cmdtable', {})
3147 overrides = [cmd for cmd in cmdtable if cmd in table]
3148 overrides = [cmd for cmd in cmdtable if cmd in table]
3148 if overrides:
3149 if overrides:
3149 ui.warn(_("extension '%s' overrides commands: %s\n")
3150 ui.warn(_("extension '%s' overrides commands: %s\n")
3150 % (name, " ".join(overrides)))
3151 % (name, " ".join(overrides)))
3151 table.update(cmdtable)
3152 table.update(cmdtable)
3152
3153
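# A minimal extension skeleton matching the hooks load_extensions() looks
# for; the module and command names are hypothetical, and the file would be
# enabled with "myext = /path/to/myext.py" under [extensions]:
#
#   # myext.py
#   from mercurial.i18n import _
#
#   def hello(ui, repo, **opts):
#       ui.write(_("hello\n"))
#
#   def uisetup(ui):           # run once when the ui is set up
#       pass
#
#   def reposetup(ui, repo):   # appended to hg.repo_setup_hooks above
#       pass
#
#   cmdtable = {"hello": (hello, [], _("hg hello"))}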
3153 def parseconfig(config):
3154 def parseconfig(config):
3154 """parse the --config options from the command line"""
3155 """parse the --config options from the command line"""
3155 parsed = []
3156 parsed = []
3156 for cfg in config:
3157 for cfg in config:
3157 try:
3158 try:
3158 name, value = cfg.split('=', 1)
3159 name, value = cfg.split('=', 1)
3159 section, name = name.split('.', 1)
3160 section, name = name.split('.', 1)
3160 if not section or not name:
3161 if not section or not name:
3161 raise IndexError
3162 raise IndexError
3162 parsed.append((section, name, value))
3163 parsed.append((section, name, value))
3163 except (IndexError, ValueError):
3164 except (IndexError, ValueError):
3164 raise util.Abort(_('malformed --config option: %s') % cfg)
3165 raise util.Abort(_('malformed --config option: %s') % cfg)
3165 return parsed
3166 return parsed
3166
3167
3167 def dispatch(args):
3168 def dispatch(args):
3168 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3169 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3169 num = getattr(signal, name, None)
3170 num = getattr(signal, name, None)
3170 if num: signal.signal(num, catchterm)
3171 if num: signal.signal(num, catchterm)
3171
3172
3172 try:
3173 try:
3173 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3174 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3174 except util.Abort, inst:
3175 except util.Abort, inst:
3175 sys.stderr.write(_("abort: %s\n") % inst)
3176 sys.stderr.write(_("abort: %s\n") % inst)
3176 return -1
3177 return -1
3177
3178
3178 load_extensions(u)
3179 load_extensions(u)
3179 u.addreadhook(load_extensions)
3180 u.addreadhook(load_extensions)
3180
3181
3181 try:
3182 try:
3182 cmd, func, args, options, cmdoptions = parse(u, args)
3183 cmd, func, args, options, cmdoptions = parse(u, args)
3183 if options["encoding"]:
3184 if options["encoding"]:
3184 util._encoding = options["encoding"]
3185 util._encoding = options["encoding"]
3185 if options["encodingmode"]:
3186 if options["encodingmode"]:
3186 util._encodingmode = options["encodingmode"]
3187 util._encodingmode = options["encodingmode"]
3187 if options["time"]:
3188 if options["time"]:
3188 def get_times():
3189 def get_times():
3189 t = os.times()
3190 t = os.times()
3190 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3191 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3191 t = (t[0], t[1], t[2], t[3], time.clock())
3192 t = (t[0], t[1], t[2], t[3], time.clock())
3192 return t
3193 return t
3193 s = get_times()
3194 s = get_times()
3194 def print_time():
3195 def print_time():
3195 t = get_times()
3196 t = get_times()
3196 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3197 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3197 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3198 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3198 atexit.register(print_time)
3199 atexit.register(print_time)
3199
3200
3200 # enter the debugger before command execution
3201 # enter the debugger before command execution
3201 if options['debugger']:
3202 if options['debugger']:
3202 pdb.set_trace()
3203 pdb.set_trace()
3203
3204
3204 try:
3205 try:
3205 if options['cwd']:
3206 if options['cwd']:
3206 os.chdir(options['cwd'])
3207 os.chdir(options['cwd'])
3207
3208
3208 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3209 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3209 not options["noninteractive"], options["traceback"],
3210 not options["noninteractive"], options["traceback"],
3210 parseconfig(options["config"]))
3211 parseconfig(options["config"]))
3211
3212
3212 path = u.expandpath(options["repository"]) or ""
3213 path = u.expandpath(options["repository"]) or ""
3213 repo = path and hg.repository(u, path=path) or None
3214 repo = path and hg.repository(u, path=path) or None
3214 if repo and not repo.local():
3215 if repo and not repo.local():
3215 raise util.Abort(_("repository '%s' is not local") % path)
3216 raise util.Abort(_("repository '%s' is not local") % path)
3216
3217
3217 if options['help']:
3218 if options['help']:
3218 return help_(u, cmd, options['version'])
3219 return help_(u, cmd, options['version'])
3219 elif options['version']:
3220 elif options['version']:
3220 return version_(u)
3221 return version_(u)
3221 elif not cmd:
3222 elif not cmd:
3222 return help_(u, 'shortlist')
3223 return help_(u, 'shortlist')
3223
3224
3224 if cmd not in norepo.split():
3225 if cmd not in norepo.split():
3225 try:
3226 try:
3226 if not repo:
3227 if not repo:
3227 repo = hg.repository(u, path=path)
3228 repo = hg.repository(u, path=path)
3228 u = repo.ui
3229 u = repo.ui
3229 except hg.RepoError:
3230 except hg.RepoError:
3230 if cmd not in optionalrepo.split():
3231 if cmd not in optionalrepo.split():
3231 raise
3232 raise
3232 d = lambda: func(u, repo, *args, **cmdoptions)
3233 d = lambda: func(u, repo, *args, **cmdoptions)
3233 else:
3234 else:
3234 d = lambda: func(u, *args, **cmdoptions)
3235 d = lambda: func(u, *args, **cmdoptions)
3235
3236
3236 try:
3237 try:
3237 if options['profile']:
3238 if options['profile']:
3238 import hotshot, hotshot.stats
3239 import hotshot, hotshot.stats
3239 prof = hotshot.Profile("hg.prof")
3240 prof = hotshot.Profile("hg.prof")
3240 try:
3241 try:
3241 try:
3242 try:
3242 return prof.runcall(d)
3243 return prof.runcall(d)
3243 except:
3244 except:
3244 try:
3245 try:
3245 u.warn(_('exception raised - generating '
3246 u.warn(_('exception raised - generating '
3246 'profile anyway\n'))
3247 'profile anyway\n'))
3247 except:
3248 except:
3248 pass
3249 pass
3249 raise
3250 raise
3250 finally:
3251 finally:
3251 prof.close()
3252 prof.close()
3252 stats = hotshot.stats.load("hg.prof")
3253 stats = hotshot.stats.load("hg.prof")
3253 stats.strip_dirs()
3254 stats.strip_dirs()
3254 stats.sort_stats('time', 'calls')
3255 stats.sort_stats('time', 'calls')
3255 stats.print_stats(40)
3256 stats.print_stats(40)
3256 elif options['lsprof']:
3257 elif options['lsprof']:
3257 try:
3258 try:
3258 from mercurial import lsprof
3259 from mercurial import lsprof
3259 except ImportError:
3260 except ImportError:
3260 raise util.Abort(_(
3261 raise util.Abort(_(
3261 'lsprof not available - install from '
3262 'lsprof not available - install from '
3262 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3263 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3263 p = lsprof.Profiler()
3264 p = lsprof.Profiler()
3264 p.enable(subcalls=True)
3265 p.enable(subcalls=True)
3265 try:
3266 try:
3266 return d()
3267 return d()
3267 finally:
3268 finally:
3268 p.disable()
3269 p.disable()
3269 stats = lsprof.Stats(p.getstats())
3270 stats = lsprof.Stats(p.getstats())
3270 stats.sort()
3271 stats.sort()
3271 stats.pprint(top=10, file=sys.stderr, climit=5)
3272 stats.pprint(top=10, file=sys.stderr, climit=5)
3272 else:
3273 else:
3273 return d()
3274 return d()
3274 finally:
3275 finally:
3275 u.flush()
3276 u.flush()
3276 except:
3277 except:
3277 # enter the debugger when we hit an exception
3278 # enter the debugger when we hit an exception
3278 if options['debugger']:
3279 if options['debugger']:
3279 pdb.post_mortem(sys.exc_info()[2])
3280 pdb.post_mortem(sys.exc_info()[2])
3280 u.print_exc()
3281 u.print_exc()
3281 raise
3282 raise
3282 except ParseError, inst:
3283 except ParseError, inst:
3283 if inst.args[0]:
3284 if inst.args[0]:
3284 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3285 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3285 help_(u, inst.args[0])
3286 help_(u, inst.args[0])
3286 else:
3287 else:
3287 u.warn(_("hg: %s\n") % inst.args[1])
3288 u.warn(_("hg: %s\n") % inst.args[1])
3288 help_(u, 'shortlist')
3289 help_(u, 'shortlist')
3289 except AmbiguousCommand, inst:
3290 except AmbiguousCommand, inst:
3290 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3291 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3291 (inst.args[0], " ".join(inst.args[1])))
3292 (inst.args[0], " ".join(inst.args[1])))
3292 except UnknownCommand, inst:
3293 except UnknownCommand, inst:
3293 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3294 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3294 help_(u, 'shortlist')
3295 help_(u, 'shortlist')
3295 except hg.RepoError, inst:
3296 except hg.RepoError, inst:
3296 u.warn(_("abort: %s!\n") % inst)
3297 u.warn(_("abort: %s!\n") % inst)
3297 except lock.LockHeld, inst:
3298 except lock.LockHeld, inst:
3298 if inst.errno == errno.ETIMEDOUT:
3299 if inst.errno == errno.ETIMEDOUT:
3299 reason = _('timed out waiting for lock held by %s') % inst.locker
3300 reason = _('timed out waiting for lock held by %s') % inst.locker
3300 else:
3301 else:
3301 reason = _('lock held by %s') % inst.locker
3302 reason = _('lock held by %s') % inst.locker
3302 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3303 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3303 except lock.LockUnavailable, inst:
3304 except lock.LockUnavailable, inst:
3304 u.warn(_("abort: could not lock %s: %s\n") %
3305 u.warn(_("abort: could not lock %s: %s\n") %
3305 (inst.desc or inst.filename, inst.strerror))
3306 (inst.desc or inst.filename, inst.strerror))
3306 except revlog.RevlogError, inst:
3307 except revlog.RevlogError, inst:
3307 u.warn(_("abort: %s!\n") % inst)
3308 u.warn(_("abort: %s!\n") % inst)
3308 except util.SignalInterrupt:
3309 except util.SignalInterrupt:
3309 u.warn(_("killed!\n"))
3310 u.warn(_("killed!\n"))
3310 except KeyboardInterrupt:
3311 except KeyboardInterrupt:
3311 try:
3312 try:
3312 u.warn(_("interrupted!\n"))
3313 u.warn(_("interrupted!\n"))
3313 except IOError, inst:
3314 except IOError, inst:
3314 if inst.errno == errno.EPIPE:
3315 if inst.errno == errno.EPIPE:
3315 if u.debugflag:
3316 if u.debugflag:
3316 u.warn(_("\nbroken pipe\n"))
3317 u.warn(_("\nbroken pipe\n"))
3317 else:
3318 else:
3318 raise
3319 raise
3319 except socket.error, inst:
3320 except socket.error, inst:
3320 u.warn(_("abort: %s\n") % inst[1])
3321 u.warn(_("abort: %s\n") % inst[1])
3321 except IOError, inst:
3322 except IOError, inst:
3322 if hasattr(inst, "code"):
3323 if hasattr(inst, "code"):
3323 u.warn(_("abort: %s\n") % inst)
3324 u.warn(_("abort: %s\n") % inst)
3324 elif hasattr(inst, "reason"):
3325 elif hasattr(inst, "reason"):
3325 try: # usually it is in the form (errno, strerror)
3326 try: # usually it is in the form (errno, strerror)
3326 reason = inst.reason.args[1]
3327 reason = inst.reason.args[1]
3327 except: # it might be anything, for example a string
3328 except: # it might be anything, for example a string
3328 reason = inst.reason
3329 reason = inst.reason
3329 u.warn(_("abort: error: %s\n") % reason)
3330 u.warn(_("abort: error: %s\n") % reason)
3330 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3331 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3331 if u.debugflag:
3332 if u.debugflag:
3332 u.warn(_("broken pipe\n"))
3333 u.warn(_("broken pipe\n"))
3333 elif getattr(inst, "strerror", None):
3334 elif getattr(inst, "strerror", None):
3334 if getattr(inst, "filename", None):
3335 if getattr(inst, "filename", None):
3335 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3336 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3336 else:
3337 else:
3337 u.warn(_("abort: %s\n") % inst.strerror)
3338 u.warn(_("abort: %s\n") % inst.strerror)
3338 else:
3339 else:
3339 raise
3340 raise
3340 except OSError, inst:
3341 except OSError, inst:
3341 if getattr(inst, "filename", None):
3342 if getattr(inst, "filename", None):
3342 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3343 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3343 else:
3344 else:
3344 u.warn(_("abort: %s\n") % inst.strerror)
3345 u.warn(_("abort: %s\n") % inst.strerror)
3345 except util.UnexpectedOutput, inst:
3346 except util.UnexpectedOutput, inst:
3346 u.warn(_("abort: %s") % inst[0])
3347 u.warn(_("abort: %s") % inst[0])
3347 if not isinstance(inst[1], basestring):
3348 if not isinstance(inst[1], basestring):
3348 u.warn(" %r\n" % (inst[1],))
3349 u.warn(" %r\n" % (inst[1],))
3349 elif not inst[1]:
3350 elif not inst[1]:
3350 u.warn(_(" empty string\n"))
3351 u.warn(_(" empty string\n"))
3351 else:
3352 else:
3352 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3353 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3353 except util.Abort, inst:
3354 except util.Abort, inst:
3354 u.warn(_("abort: %s\n") % inst)
3355 u.warn(_("abort: %s\n") % inst)
3355 except TypeError, inst:
3356 except TypeError, inst:
3356 # was this an argument error?
3357 # was this an argument error?
3357 tb = traceback.extract_tb(sys.exc_info()[2])
3358 tb = traceback.extract_tb(sys.exc_info()[2])
3358 if len(tb) > 2: # no
3359 if len(tb) > 2: # no
3359 raise
3360 raise
3360 u.debug(inst, "\n")
3361 u.debug(inst, "\n")
3361 u.warn(_("%s: invalid arguments\n") % cmd)
3362 u.warn(_("%s: invalid arguments\n") % cmd)
3362 help_(u, cmd)
3363 help_(u, cmd)
3363 except SystemExit, inst:
3364 except SystemExit, inst:
3364 # Commands shouldn't sys.exit directly, but give a return code.
3365 # Commands shouldn't sys.exit directly, but give a return code.
3365 # Just in case, catch this and pass the exit code to the caller.
3366 # Just in case, catch this and pass the exit code to the caller.
3366 return inst.code
3367 return inst.code
3367 except:
3368 except:
3368 u.warn(_("** unknown exception encountered, details follow\n"))
3369 u.warn(_("** unknown exception encountered, details follow\n"))
3369 u.warn(_("** report bug details to "
3370 u.warn(_("** report bug details to "
3370 "http://www.selenic.com/mercurial/bts\n"))
3371 "http://www.selenic.com/mercurial/bts\n"))
3371 u.warn(_("** or mercurial@selenic.com\n"))
3372 u.warn(_("** or mercurial@selenic.com\n"))
3372 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3373 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3373 % version.get_version())
3374 % version.get_version())
3374 raise
3375 raise
3375
3376
3376 return -1
3377 return -1
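# The hunk above maps each exception type raised by a command to a user-facing
# warning and an exit status.  A minimal, self-contained sketch of that same
# centralized dispatch pattern (the names below are hypothetical, not
# Mercurial's own API):

import sys

class CommandError(Exception):
    """Raised by a command for an expected, user-visible failure."""

def dispatch(func, *args):
    try:
        return func(*args) or 0
    except CommandError as inst:
        sys.stderr.write("abort: %s\n" % inst)
        return 255
    except KeyboardInterrupt:
        sys.stderr.write("interrupted!\n")
        return 255
    except SystemExit as inst:
        # commands should return a code rather than call sys.exit(),
        # but pass the code through if they do
        return inst.code
    except Exception:
        sys.stderr.write("** unknown exception encountered\n")
        raise

if __name__ == '__main__':
    def failing():
        raise CommandError("no repository found")
    sys.exit(dispatch(failing))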
@@ -1,1934 +1,1933 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.root = os.path.realpath(path)
33 self.root = os.path.realpath(path)
34 self.path = os.path.join(self.root, ".hg")
34 self.path = os.path.join(self.root, ".hg")
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 requirements = ["revlogv1"]
44 requirements = ["revlogv1"]
45 if parentui.configbool('format', 'usestore', True):
45 if parentui.configbool('format', 'usestore', True):
46 os.mkdir(os.path.join(self.path, "store"))
46 os.mkdir(os.path.join(self.path, "store"))
47 requirements.append("store")
47 requirements.append("store")
48 # create an invalid changelog
48 # create an invalid changelog
49 self.opener("00changelog.i", "a").write(
49 self.opener("00changelog.i", "a").write(
50 '\0\0\0\2' # represents revlogv2
50 '\0\0\0\2' # represents revlogv2
51 ' dummy changelog to prevent using the old repo layout'
51 ' dummy changelog to prevent using the old repo layout'
52 )
52 )
53 reqfile = self.opener("requires", "w")
53 reqfile = self.opener("requires", "w")
54 for r in requirements:
54 for r in requirements:
55 reqfile.write("%s\n" % r)
55 reqfile.write("%s\n" % r)
56 reqfile.close()
56 reqfile.close()
57 else:
57 else:
58 raise repo.RepoError(_("repository %s not found") % path)
58 raise repo.RepoError(_("repository %s not found") % path)
59 elif create:
59 elif create:
60 raise repo.RepoError(_("repository %s already exists") % path)
60 raise repo.RepoError(_("repository %s already exists") % path)
61 else:
61 else:
62 # find requirements
62 # find requirements
63 try:
63 try:
64 requirements = self.opener("requires").read().splitlines()
64 requirements = self.opener("requires").read().splitlines()
65 except IOError, inst:
65 except IOError, inst:
66 if inst.errno != errno.ENOENT:
66 if inst.errno != errno.ENOENT:
67 raise
67 raise
68 requirements = []
68 requirements = []
69 # check them
69 # check them
70 for r in requirements:
70 for r in requirements:
71 if r not in self.supported:
71 if r not in self.supported:
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73
73
74 # setup store
74 # setup store
75 if "store" in requirements:
75 if "store" in requirements:
76 self.encodefn = util.encodefilename
76 self.encodefn = util.encodefilename
77 self.decodefn = util.decodefilename
77 self.decodefn = util.decodefilename
78 self.spath = os.path.join(self.path, "store")
78 self.spath = os.path.join(self.path, "store")
79 else:
79 else:
80 self.encodefn = lambda x: x
80 self.encodefn = lambda x: x
81 self.decodefn = lambda x: x
81 self.decodefn = lambda x: x
82 self.spath = self.path
82 self.spath = self.path
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84
84
85 self.ui = ui.ui(parentui=parentui)
85 self.ui = ui.ui(parentui=parentui)
86 try:
86 try:
87 self.ui.readconfig(self.join("hgrc"), self.root)
87 self.ui.readconfig(self.join("hgrc"), self.root)
88 except IOError:
88 except IOError:
89 pass
89 pass
90
90
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 self.manifest = manifest.manifest(self.sopener)
93 self.manifest = manifest.manifest(self.sopener)
94
94
95 fallback = self.ui.config('ui', 'fallbackencoding')
95 fallback = self.ui.config('ui', 'fallbackencoding')
96 if fallback:
96 if fallback:
97 util._fallbackencoding = fallback
97 util._fallbackencoding = fallback
98
98
99 self.tagscache = None
99 self.tagscache = None
100 self.branchcache = None
100 self.branchcache = None
101 self.nodetagscache = None
101 self.nodetagscache = None
102 self.filterpats = {}
102 self.filterpats = {}
103 self.transhandle = None
103 self.transhandle = None
104
104
105 self._link = lambda x: False
106 if util.checklink(self.root):
107 r = self.root # avoid circular reference in lambda
108 self._link = lambda x: util.is_link(os.path.join(r, x))
109
110 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
111
106
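# __init__ above records repository format requirements one per line in
# .hg/requires ("revlogv1", plus "store" when format.usestore is enabled) and
# refuses to open a repository whose requirements it does not support.  A
# small illustrative sketch of that check, independent of Mercurial's own
# classes:

SUPPORTED = ('revlogv1', 'store')

def check_requirements(path):
    """Read <path> and raise if it names a requirement we do not support."""
    try:
        with open(path) as fp:
            requirements = fp.read().splitlines()
    except FileNotFoundError:       # an older repo may have no requires file
        requirements = []
    for r in requirements:
        if r not in SUPPORTED:
            raise RuntimeError("requirement '%s' not supported" % r)
    return requirements

# e.g. a freshly created store-layout repo would contain:
#   revlogv1
#   store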
112 def url(self):
107 def url(self):
113 return 'file:' + self.root
108 return 'file:' + self.root
114
109
115 def hook(self, name, throw=False, **args):
110 def hook(self, name, throw=False, **args):
116 def callhook(hname, funcname):
111 def callhook(hname, funcname):
117 '''call python hook. hook is callable object, looked up as
112 '''call python hook. hook is callable object, looked up as
118 name in python module. if callable returns "true", hook
113 name in python module. if callable returns "true", hook
119 fails, else passes. if hook raises exception, treated as
114 fails, else passes. if hook raises exception, treated as
120 hook failure. exception propagates if throw is "true".
115 hook failure. exception propagates if throw is "true".
121
116
122 reason for "true" meaning "hook failed" is so that
117 reason for "true" meaning "hook failed" is so that
123 unmodified commands (e.g. mercurial.commands.update) can
118 unmodified commands (e.g. mercurial.commands.update) can
124 be run as hooks without wrappers to convert return values.'''
119 be run as hooks without wrappers to convert return values.'''
125
120
126 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
121 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
127 obj = funcname
122 obj = funcname
128 if not callable(obj):
123 if not callable(obj):
129 d = funcname.rfind('.')
124 d = funcname.rfind('.')
130 if d == -1:
125 if d == -1:
131 raise util.Abort(_('%s hook is invalid ("%s" not in '
126 raise util.Abort(_('%s hook is invalid ("%s" not in '
132 'a module)') % (hname, funcname))
127 'a module)') % (hname, funcname))
133 modname = funcname[:d]
128 modname = funcname[:d]
134 try:
129 try:
135 obj = __import__(modname)
130 obj = __import__(modname)
136 except ImportError:
131 except ImportError:
137 try:
132 try:
138 # extensions are loaded with hgext_ prefix
133 # extensions are loaded with hgext_ prefix
139 obj = __import__("hgext_%s" % modname)
134 obj = __import__("hgext_%s" % modname)
140 except ImportError:
135 except ImportError:
141 raise util.Abort(_('%s hook is invalid '
136 raise util.Abort(_('%s hook is invalid '
142 '(import of "%s" failed)') %
137 '(import of "%s" failed)') %
143 (hname, modname))
138 (hname, modname))
144 try:
139 try:
145 for p in funcname.split('.')[1:]:
140 for p in funcname.split('.')[1:]:
146 obj = getattr(obj, p)
141 obj = getattr(obj, p)
147 except AttributeError, err:
142 except AttributeError, err:
148 raise util.Abort(_('%s hook is invalid '
143 raise util.Abort(_('%s hook is invalid '
149 '("%s" is not defined)') %
144 '("%s" is not defined)') %
150 (hname, funcname))
145 (hname, funcname))
151 if not callable(obj):
146 if not callable(obj):
152 raise util.Abort(_('%s hook is invalid '
147 raise util.Abort(_('%s hook is invalid '
153 '("%s" is not callable)') %
148 '("%s" is not callable)') %
154 (hname, funcname))
149 (hname, funcname))
155 try:
150 try:
156 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
151 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
157 except (KeyboardInterrupt, util.SignalInterrupt):
152 except (KeyboardInterrupt, util.SignalInterrupt):
158 raise
153 raise
159 except Exception, exc:
154 except Exception, exc:
160 if isinstance(exc, util.Abort):
155 if isinstance(exc, util.Abort):
161 self.ui.warn(_('error: %s hook failed: %s\n') %
156 self.ui.warn(_('error: %s hook failed: %s\n') %
162 (hname, exc.args[0]))
157 (hname, exc.args[0]))
163 else:
158 else:
164 self.ui.warn(_('error: %s hook raised an exception: '
159 self.ui.warn(_('error: %s hook raised an exception: '
165 '%s\n') % (hname, exc))
160 '%s\n') % (hname, exc))
166 if throw:
161 if throw:
167 raise
162 raise
168 self.ui.print_exc()
163 self.ui.print_exc()
169 return True
164 return True
170 if r:
165 if r:
171 if throw:
166 if throw:
172 raise util.Abort(_('%s hook failed') % hname)
167 raise util.Abort(_('%s hook failed') % hname)
173 self.ui.warn(_('warning: %s hook failed\n') % hname)
168 self.ui.warn(_('warning: %s hook failed\n') % hname)
174 return r
169 return r
175
170
176 def runhook(name, cmd):
171 def runhook(name, cmd):
177 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
172 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
178 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
173 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
179 r = util.system(cmd, environ=env, cwd=self.root)
174 r = util.system(cmd, environ=env, cwd=self.root)
180 if r:
175 if r:
181 desc, r = util.explain_exit(r)
176 desc, r = util.explain_exit(r)
182 if throw:
177 if throw:
183 raise util.Abort(_('%s hook %s') % (name, desc))
178 raise util.Abort(_('%s hook %s') % (name, desc))
184 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
179 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
185 return r
180 return r
186
181
187 r = False
182 r = False
188 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
183 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
189 if hname.split(".", 1)[0] == name and cmd]
184 if hname.split(".", 1)[0] == name and cmd]
190 hooks.sort()
185 hooks.sort()
191 for hname, cmd in hooks:
186 for hname, cmd in hooks:
192 if callable(cmd):
187 if callable(cmd):
193 r = callhook(hname, cmd) or r
188 r = callhook(hname, cmd) or r
194 elif cmd.startswith('python:'):
189 elif cmd.startswith('python:'):
195 r = callhook(hname, cmd[7:].strip()) or r
190 r = callhook(hname, cmd[7:].strip()) or r
196 else:
191 else:
197 r = runhook(hname, cmd) or r
192 r = runhook(hname, cmd) or r
198 return r
193 return r
199
194
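# hook() above runs every [hooks] entry whose name (before any ".suffix")
# matches the event: a value starting with "python:" is resolved as a dotted
# module.function and called with ui/repo/hooktype keyword arguments, where a
# true return value means the hook failed; any other value is run as a shell
# command with the keyword arguments exported as HG_* environment variables.
# An illustrative hgrc snippet and a matching Python hook (the names
# "myhooks" and "check_whitespace" are hypothetical):
#
#   [hooks]
#   pretxncommit.whitespace = python:myhooks.check_whitespace
#   commit.notify = echo "committed $HG_NODE"
#
# and in myhooks.py, a callable in the same calling convention:

def check_whitespace(ui=None, repo=None, hooktype=None, node=None, **kwargs):
    # a true return value means "hook failed"; for a pretxn* hook that
    # aborts the pending transaction
    if ui is not None:
        ui.warn("checking %s for trailing whitespace\n" % (node or "tip"))
    return False            # report success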
200 tag_disallowed = ':\r\n'
195 tag_disallowed = ':\r\n'
201
196
202 def _tag(self, name, node, message, local, user, date, parent=None):
197 def _tag(self, name, node, message, local, user, date, parent=None):
203 use_dirstate = parent is None
198 use_dirstate = parent is None
204
199
205 for c in self.tag_disallowed:
200 for c in self.tag_disallowed:
206 if c in name:
201 if c in name:
207 raise util.Abort(_('%r cannot be used in a tag name') % c)
202 raise util.Abort(_('%r cannot be used in a tag name') % c)
208
203
209 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
204 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
210
205
211 if local:
206 if local:
212 # local tags are stored in the current charset
207 # local tags are stored in the current charset
213 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
208 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
214 self.hook('tag', node=hex(node), tag=name, local=local)
209 self.hook('tag', node=hex(node), tag=name, local=local)
215 return
210 return
216
211
217 # committed tags are stored in UTF-8
212 # committed tags are stored in UTF-8
218 line = '%s %s\n' % (hex(node), util.fromlocal(name))
213 line = '%s %s\n' % (hex(node), util.fromlocal(name))
219 if use_dirstate:
214 if use_dirstate:
220 self.wfile('.hgtags', 'ab').write(line)
215 self.wfile('.hgtags', 'ab').write(line)
221 else:
216 else:
222 ntags = self.filectx('.hgtags', parent).data()
217 ntags = self.filectx('.hgtags', parent).data()
223 self.wfile('.hgtags', 'ab').write(ntags + line)
218 self.wfile('.hgtags', 'ab').write(ntags + line)
224 if use_dirstate and self.dirstate.state('.hgtags') == '?':
219 if use_dirstate and self.dirstate.state('.hgtags') == '?':
225 self.add(['.hgtags'])
220 self.add(['.hgtags'])
226
221
227 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
222 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
228
223
229 self.hook('tag', node=hex(node), tag=name, local=local)
224 self.hook('tag', node=hex(node), tag=name, local=local)
230
225
231 return tagnode
226 return tagnode
232
227
233 def tag(self, name, node, message, local, user, date):
228 def tag(self, name, node, message, local, user, date):
234 '''tag a revision with a symbolic name.
229 '''tag a revision with a symbolic name.
235
230
236 if local is True, the tag is stored in a per-repository file.
231 if local is True, the tag is stored in a per-repository file.
237 otherwise, it is stored in the .hgtags file, and a new
232 otherwise, it is stored in the .hgtags file, and a new
238 changeset is committed with the change.
233 changeset is committed with the change.
239
234
240 keyword arguments:
235 keyword arguments:
241
236
242 local: whether to store tag in non-version-controlled file
237 local: whether to store tag in non-version-controlled file
243 (default False)
238 (default False)
244
239
245 message: commit message to use if committing
240 message: commit message to use if committing
246
241
247 user: name of user to use if committing
242 user: name of user to use if committing
248
243
249 date: date tuple to use if committing'''
244 date: date tuple to use if committing'''
250
245
251 for x in self.status()[:5]:
246 for x in self.status()[:5]:
252 if '.hgtags' in x:
247 if '.hgtags' in x:
253 raise util.Abort(_('working copy of .hgtags is changed '
248 raise util.Abort(_('working copy of .hgtags is changed '
254 '(please commit .hgtags manually)'))
249 '(please commit .hgtags manually)'))
255
250
256
251
257 self._tag(name, node, message, local, user, date)
252 self._tag(name, node, message, local, user, date)
258
253
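# _tag()/tag() above store one "<hex-node> <name>" line per tag: local tags
# are appended to .hg/localtags in the local charset, committed tags to the
# tracked .hgtags file in UTF-8, and ':', '\r' and '\n' are rejected in tag
# names.  A minimal standalone sketch of composing such an entry (not
# Mercurial API):

TAG_DISALLOWED = ':\r\n'

def tagline(hexnode, name):
    for c in TAG_DISALLOWED:
        if c in name:
            raise ValueError('%r cannot be used in a tag name' % c)
    return '%s %s\n' % (hexnode, name)

# >>> tagline('0123456789abcdef0123456789abcdef01234567', 'release-1.0')
# '0123456789abcdef0123456789abcdef01234567 release-1.0\n'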
259 def tags(self):
254 def tags(self):
260 '''return a mapping of tag to node'''
255 '''return a mapping of tag to node'''
261 if self.tagscache:
256 if self.tagscache:
262 return self.tagscache
257 return self.tagscache
263
258
264 globaltags = {}
259 globaltags = {}
265
260
266 def readtags(lines, fn):
261 def readtags(lines, fn):
267 filetags = {}
262 filetags = {}
268 count = 0
263 count = 0
269
264
270 def warn(msg):
265 def warn(msg):
271 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
266 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
272
267
273 for l in lines:
268 for l in lines:
274 count += 1
269 count += 1
275 if not l:
270 if not l:
276 continue
271 continue
277 s = l.split(" ", 1)
272 s = l.split(" ", 1)
278 if len(s) != 2:
273 if len(s) != 2:
279 warn(_("cannot parse entry"))
274 warn(_("cannot parse entry"))
280 continue
275 continue
281 node, key = s
276 node, key = s
282 key = util.tolocal(key.strip()) # stored in UTF-8
277 key = util.tolocal(key.strip()) # stored in UTF-8
283 try:
278 try:
284 bin_n = bin(node)
279 bin_n = bin(node)
285 except TypeError:
280 except TypeError:
286 warn(_("node '%s' is not well formed") % node)
281 warn(_("node '%s' is not well formed") % node)
287 continue
282 continue
288 if bin_n not in self.changelog.nodemap:
283 if bin_n not in self.changelog.nodemap:
289 warn(_("tag '%s' refers to unknown node") % key)
284 warn(_("tag '%s' refers to unknown node") % key)
290 continue
285 continue
291
286
292 h = []
287 h = []
293 if key in filetags:
288 if key in filetags:
294 n, h = filetags[key]
289 n, h = filetags[key]
295 h.append(n)
290 h.append(n)
296 filetags[key] = (bin_n, h)
291 filetags[key] = (bin_n, h)
297
292
298 for k,nh in filetags.items():
293 for k,nh in filetags.items():
299 if k not in globaltags:
294 if k not in globaltags:
300 globaltags[k] = nh
295 globaltags[k] = nh
301 continue
296 continue
302 # we prefer the global tag if:
297 # we prefer the global tag if:
303 # it supersedes us OR
298 # it supersedes us OR
304 # we supersede each other and it has a higher rank
299 # we supersede each other and it has a higher rank
305 # otherwise we win because we're tip-most
300 # otherwise we win because we're tip-most
306 an, ah = nh
301 an, ah = nh
307 bn, bh = globaltags[k]
302 bn, bh = globaltags[k]
308 if bn != an and an in bh and \
303 if bn != an and an in bh and \
309 (bn not in ah or len(bh) > len(ah)):
304 (bn not in ah or len(bh) > len(ah)):
310 an = bn
305 an = bn
311 ah.append([n for n in bh if n not in ah])
306 ah.append([n for n in bh if n not in ah])
312 globaltags[k] = an, ah
307 globaltags[k] = an, ah
313
308
314 # read the tags file from each head, ending with the tip
309 # read the tags file from each head, ending with the tip
315 f = None
310 f = None
316 for rev, node, fnode in self._hgtagsnodes():
311 for rev, node, fnode in self._hgtagsnodes():
317 f = (f and f.filectx(fnode) or
312 f = (f and f.filectx(fnode) or
318 self.filectx('.hgtags', fileid=fnode))
313 self.filectx('.hgtags', fileid=fnode))
319 readtags(f.data().splitlines(), f)
314 readtags(f.data().splitlines(), f)
320
315
321 try:
316 try:
322 data = util.fromlocal(self.opener("localtags").read())
317 data = util.fromlocal(self.opener("localtags").read())
323 # localtags are stored in the local character set
318 # localtags are stored in the local character set
324 # while the internal tag table is stored in UTF-8
319 # while the internal tag table is stored in UTF-8
325 readtags(data.splitlines(), "localtags")
320 readtags(data.splitlines(), "localtags")
326 except IOError:
321 except IOError:
327 pass
322 pass
328
323
329 self.tagscache = {}
324 self.tagscache = {}
330 for k,nh in globaltags.items():
325 for k,nh in globaltags.items():
331 n = nh[0]
326 n = nh[0]
332 if n != nullid:
327 if n != nullid:
333 self.tagscache[k] = n
328 self.tagscache[k] = n
334 self.tagscache['tip'] = self.changelog.tip()
329 self.tagscache['tip'] = self.changelog.tip()
335
330
336 return self.tagscache
331 return self.tagscache
337
332
338 def _hgtagsnodes(self):
333 def _hgtagsnodes(self):
339 heads = self.heads()
334 heads = self.heads()
340 heads.reverse()
335 heads.reverse()
341 last = {}
336 last = {}
342 ret = []
337 ret = []
343 for node in heads:
338 for node in heads:
344 c = self.changectx(node)
339 c = self.changectx(node)
345 rev = c.rev()
340 rev = c.rev()
346 try:
341 try:
347 fnode = c.filenode('.hgtags')
342 fnode = c.filenode('.hgtags')
348 except revlog.LookupError:
343 except revlog.LookupError:
349 continue
344 continue
350 ret.append((rev, node, fnode))
345 ret.append((rev, node, fnode))
351 if fnode in last:
346 if fnode in last:
352 ret[last[fnode]] = None
347 ret[last[fnode]] = None
353 last[fnode] = len(ret) - 1
348 last[fnode] = len(ret) - 1
354 return [item for item in ret if item]
349 return [item for item in ret if item]
355
350
356 def tagslist(self):
351 def tagslist(self):
357 '''return a list of tags ordered by revision'''
352 '''return a list of tags ordered by revision'''
358 l = []
353 l = []
359 for t, n in self.tags().items():
354 for t, n in self.tags().items():
360 try:
355 try:
361 r = self.changelog.rev(n)
356 r = self.changelog.rev(n)
362 except:
357 except:
363 r = -2 # sort to the beginning of the list if unknown
358 r = -2 # sort to the beginning of the list if unknown
364 l.append((r, t, n))
359 l.append((r, t, n))
365 l.sort()
360 l.sort()
366 return [(t, n) for r, t, n in l]
361 return [(t, n) for r, t, n in l]
367
362
368 def nodetags(self, node):
363 def nodetags(self, node):
369 '''return the tags associated with a node'''
364 '''return the tags associated with a node'''
370 if not self.nodetagscache:
365 if not self.nodetagscache:
371 self.nodetagscache = {}
366 self.nodetagscache = {}
372 for t, n in self.tags().items():
367 for t, n in self.tags().items():
373 self.nodetagscache.setdefault(n, []).append(t)
368 self.nodetagscache.setdefault(n, []).append(t)
374 return self.nodetagscache.get(node, [])
369 return self.nodetagscache.get(node, [])
375
370
376 def _branchtags(self):
371 def _branchtags(self):
377 partial, last, lrev = self._readbranchcache()
372 partial, last, lrev = self._readbranchcache()
378
373
379 tiprev = self.changelog.count() - 1
374 tiprev = self.changelog.count() - 1
380 if lrev != tiprev:
375 if lrev != tiprev:
381 self._updatebranchcache(partial, lrev+1, tiprev+1)
376 self._updatebranchcache(partial, lrev+1, tiprev+1)
382 self._writebranchcache(partial, self.changelog.tip(), tiprev)
377 self._writebranchcache(partial, self.changelog.tip(), tiprev)
383
378
384 return partial
379 return partial
385
380
386 def branchtags(self):
381 def branchtags(self):
387 if self.branchcache is not None:
382 if self.branchcache is not None:
388 return self.branchcache
383 return self.branchcache
389
384
390 self.branchcache = {} # avoid recursion in changectx
385 self.branchcache = {} # avoid recursion in changectx
391 partial = self._branchtags()
386 partial = self._branchtags()
392
387
393 # the branch cache is stored on disk as UTF-8, but in the local
388 # the branch cache is stored on disk as UTF-8, but in the local
394 # charset internally
389 # charset internally
395 for k, v in partial.items():
390 for k, v in partial.items():
396 self.branchcache[util.tolocal(k)] = v
391 self.branchcache[util.tolocal(k)] = v
397 return self.branchcache
392 return self.branchcache
398
393
399 def _readbranchcache(self):
394 def _readbranchcache(self):
400 partial = {}
395 partial = {}
401 try:
396 try:
402 f = self.opener("branch.cache")
397 f = self.opener("branch.cache")
403 lines = f.read().split('\n')
398 lines = f.read().split('\n')
404 f.close()
399 f.close()
405 last, lrev = lines.pop(0).split(" ", 1)
400 last, lrev = lines.pop(0).split(" ", 1)
406 last, lrev = bin(last), int(lrev)
401 last, lrev = bin(last), int(lrev)
407 if not (lrev < self.changelog.count() and
402 if not (lrev < self.changelog.count() and
408 self.changelog.node(lrev) == last): # sanity check
403 self.changelog.node(lrev) == last): # sanity check
409 # invalidate the cache
404 # invalidate the cache
410 raise ValueError('Invalid branch cache: unknown tip')
405 raise ValueError('Invalid branch cache: unknown tip')
411 for l in lines:
406 for l in lines:
412 if not l: continue
407 if not l: continue
413 node, label = l.split(" ", 1)
408 node, label = l.split(" ", 1)
414 partial[label.strip()] = bin(node)
409 partial[label.strip()] = bin(node)
415 except (KeyboardInterrupt, util.SignalInterrupt):
410 except (KeyboardInterrupt, util.SignalInterrupt):
416 raise
411 raise
417 except Exception, inst:
412 except Exception, inst:
418 if self.ui.debugflag:
413 if self.ui.debugflag:
419 self.ui.warn(str(inst), '\n')
414 self.ui.warn(str(inst), '\n')
420 partial, last, lrev = {}, nullid, nullrev
415 partial, last, lrev = {}, nullid, nullrev
421 return partial, last, lrev
416 return partial, last, lrev
422
417
423 def _writebranchcache(self, branches, tip, tiprev):
418 def _writebranchcache(self, branches, tip, tiprev):
424 try:
419 try:
425 f = self.opener("branch.cache", "w")
420 f = self.opener("branch.cache", "w")
426 f.write("%s %s\n" % (hex(tip), tiprev))
421 f.write("%s %s\n" % (hex(tip), tiprev))
427 for label, node in branches.iteritems():
422 for label, node in branches.iteritems():
428 f.write("%s %s\n" % (hex(node), label))
423 f.write("%s %s\n" % (hex(node), label))
429 except IOError:
424 except IOError:
430 pass
425 pass
431
426
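# _readbranchcache()/_writebranchcache() above use a simple text cache,
# .hg/branch.cache: the first line is "<hex-tip> <tip-rev>" (checked against
# the changelog and invalidated on mismatch), and each following line is
# "<hex-node> <branch>".  A standalone sketch of parsing that layout:

def parse_branchcache(text):
    lines = text.split('\n')
    tiphex, tiprev = lines.pop(0).split(' ', 1)
    partial = {}
    for l in lines:
        if not l:
            continue
        node, label = l.split(' ', 1)
        partial[label.strip()] = node
    return tiphex, int(tiprev), partial

# >>> parse_branchcache('ab' * 20 + ' 7\n' + 'cd' * 20 + ' default\n')
# ('abab...', 7, {'default': 'cdcd...'})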
432 def _updatebranchcache(self, partial, start, end):
427 def _updatebranchcache(self, partial, start, end):
433 for r in xrange(start, end):
428 for r in xrange(start, end):
434 c = self.changectx(r)
429 c = self.changectx(r)
435 b = c.branch()
430 b = c.branch()
436 partial[b] = c.node()
431 partial[b] = c.node()
437
432
438 def lookup(self, key):
433 def lookup(self, key):
439 if key == '.':
434 if key == '.':
440 key = self.dirstate.parents()[0]
435 key = self.dirstate.parents()[0]
441 if key == nullid:
436 if key == nullid:
442 raise repo.RepoError(_("no revision checked out"))
437 raise repo.RepoError(_("no revision checked out"))
443 elif key == 'null':
438 elif key == 'null':
444 return nullid
439 return nullid
445 n = self.changelog._match(key)
440 n = self.changelog._match(key)
446 if n:
441 if n:
447 return n
442 return n
448 if key in self.tags():
443 if key in self.tags():
449 return self.tags()[key]
444 return self.tags()[key]
450 if key in self.branchtags():
445 if key in self.branchtags():
451 return self.branchtags()[key]
446 return self.branchtags()[key]
452 n = self.changelog._partialmatch(key)
447 n = self.changelog._partialmatch(key)
453 if n:
448 if n:
454 return n
449 return n
455 raise repo.RepoError(_("unknown revision '%s'") % key)
450 raise repo.RepoError(_("unknown revision '%s'") % key)
456
451
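# lookup() above resolves a user-supplied revision key by trying a fixed
# sequence of namespaces and returning the first hit: the special names '.'
# and 'null', an exact changelog match, the tag table, the branch table, then
# a unique node-prefix match, aborting only if everything fails.  The same
# first-match-wins shape as a standalone sketch (the resolver callables are
# hypothetical and supplied by the caller):

def resolve(key, resolvers):
    """resolvers: iterable of callables returning a node or None."""
    for fn in resolvers:
        n = fn(key)
        if n is not None:
            return n
    raise LookupError("unknown revision '%s'" % key)

# e.g. resolve('tip', [tags.get, branchtags.get, prefix_match])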
457 def dev(self):
452 def dev(self):
458 return os.lstat(self.path).st_dev
453 return os.lstat(self.path).st_dev
459
454
460 def local(self):
455 def local(self):
461 return True
456 return True
462
457
463 def join(self, f):
458 def join(self, f):
464 return os.path.join(self.path, f)
459 return os.path.join(self.path, f)
465
460
466 def sjoin(self, f):
461 def sjoin(self, f):
467 f = self.encodefn(f)
462 f = self.encodefn(f)
468 return os.path.join(self.spath, f)
463 return os.path.join(self.spath, f)
469
464
470 def wjoin(self, f):
465 def wjoin(self, f):
471 return os.path.join(self.root, f)
466 return os.path.join(self.root, f)
472
467
473 def file(self, f):
468 def file(self, f):
474 if f[0] == '/':
469 if f[0] == '/':
475 f = f[1:]
470 f = f[1:]
476 return filelog.filelog(self.sopener, f)
471 return filelog.filelog(self.sopener, f)
477
472
478 def changectx(self, changeid=None):
473 def changectx(self, changeid=None):
479 return context.changectx(self, changeid)
474 return context.changectx(self, changeid)
480
475
481 def workingctx(self):
476 def workingctx(self):
482 return context.workingctx(self)
477 return context.workingctx(self)
483
478
484 def parents(self, changeid=None):
479 def parents(self, changeid=None):
485 '''
480 '''
486 get list of changectxs for parents of changeid or working directory
481 get list of changectxs for parents of changeid or working directory
487 '''
482 '''
488 if changeid is None:
483 if changeid is None:
489 pl = self.dirstate.parents()
484 pl = self.dirstate.parents()
490 else:
485 else:
491 n = self.changelog.lookup(changeid)
486 n = self.changelog.lookup(changeid)
492 pl = self.changelog.parents(n)
487 pl = self.changelog.parents(n)
493 if pl[1] == nullid:
488 if pl[1] == nullid:
494 return [self.changectx(pl[0])]
489 return [self.changectx(pl[0])]
495 return [self.changectx(pl[0]), self.changectx(pl[1])]
490 return [self.changectx(pl[0]), self.changectx(pl[1])]
496
491
497 def filectx(self, path, changeid=None, fileid=None):
492 def filectx(self, path, changeid=None, fileid=None):
498 """changeid can be a changeset revision, node, or tag.
493 """changeid can be a changeset revision, node, or tag.
499 fileid can be a file revision or node."""
494 fileid can be a file revision or node."""
500 return context.filectx(self, path, changeid, fileid)
495 return context.filectx(self, path, changeid, fileid)
501
496
502 def getcwd(self):
497 def getcwd(self):
503 return self.dirstate.getcwd()
498 return self.dirstate.getcwd()
504
499
505 def wfile(self, f, mode='r'):
500 def wfile(self, f, mode='r'):
506 return self.wopener(f, mode)
501 return self.wopener(f, mode)
507
502
503 def _link(self, f):
504 return os.path.islink(self.wjoin(f))
505
508 def _filter(self, filter, filename, data):
506 def _filter(self, filter, filename, data):
509 if filter not in self.filterpats:
507 if filter not in self.filterpats:
510 l = []
508 l = []
511 for pat, cmd in self.ui.configitems(filter):
509 for pat, cmd in self.ui.configitems(filter):
512 mf = util.matcher(self.root, "", [pat], [], [])[1]
510 mf = util.matcher(self.root, "", [pat], [], [])[1]
513 l.append((mf, cmd))
511 l.append((mf, cmd))
514 self.filterpats[filter] = l
512 self.filterpats[filter] = l
515
513
516 for mf, cmd in self.filterpats[filter]:
514 for mf, cmd in self.filterpats[filter]:
517 if mf(filename):
515 if mf(filename):
518 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
516 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
519 data = util.filter(data, cmd)
517 data = util.filter(data, cmd)
520 break
518 break
521
519
522 return data
520 return data
523
521
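# _filter() above compiles each (pattern, command) pair from the [encode] or
# [decode] config section into a matcher once, caches the list, and pipes the
# file data through the command of the first pattern that matches the
# filename.  A standalone sketch of that first-match pipeline, using plain
# fnmatch and a Python callable in place of a shell command:

import fnmatch

def apply_filter(filters, filename, data):
    """filters: list of (glob_pattern, transform_callable) pairs."""
    for pat, transform in filters:
        if fnmatch.fnmatch(filename, pat):
            return transform(data)
    return data

# e.g. apply_filter([('*.txt', str.upper)], 'notes.txt', 'hello') -> 'HELLO'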
524 def wread(self, filename):
522 def wread(self, filename):
525 if self._link(filename):
523 if self._link(filename):
526 data = os.readlink(self.wjoin(filename))
524 data = os.readlink(self.wjoin(filename))
527 else:
525 else:
528 data = self.wopener(filename, 'r').read()
526 data = self.wopener(filename, 'r').read()
529 return self._filter("encode", filename, data)
527 return self._filter("encode", filename, data)
530
528
531 def wwrite(self, filename, data, flags):
529 def wwrite(self, filename, data, flags):
532 data = self._filter("decode", filename, data)
530 data = self._filter("decode", filename, data)
533 if "l" in flags:
531 if "l" in flags:
534 f = self.wjoin(filename)
532 f = self.wjoin(filename)
535 try:
533 try:
536 os.unlink(f)
534 os.unlink(f)
537 except OSError:
535 except OSError:
538 pass
536 pass
539 d = os.path.dirname(f)
537 d = os.path.dirname(f)
540 if not os.path.exists(d):
538 if not os.path.exists(d):
541 os.makedirs(d)
539 os.makedirs(d)
542 os.symlink(data, f)
540 os.symlink(data, f)
543 else:
541 else:
544 try:
542 try:
545 if self._link(filename):
543 if self._link(filename):
546 os.unlink(self.wjoin(filename))
544 os.unlink(self.wjoin(filename))
547 except OSError:
545 except OSError:
548 pass
546 pass
549 self.wopener(filename, 'w').write(data)
547 self.wopener(filename, 'w').write(data)
550 util.set_exec(self.wjoin(filename), "x" in flags)
548 util.set_exec(self.wjoin(filename), "x" in flags)
551
549
552 def wwritedata(self, filename, data):
550 def wwritedata(self, filename, data):
553 return self._filter("decode", filename, data)
551 return self._filter("decode", filename, data)
554
552
555 def transaction(self):
553 def transaction(self):
556 tr = self.transhandle
554 tr = self.transhandle
557 if tr != None and tr.running():
555 if tr != None and tr.running():
558 return tr.nest()
556 return tr.nest()
559
557
560 # save dirstate for rollback
558 # save dirstate for rollback
561 try:
559 try:
562 ds = self.opener("dirstate").read()
560 ds = self.opener("dirstate").read()
563 except IOError:
561 except IOError:
564 ds = ""
562 ds = ""
565 self.opener("journal.dirstate", "w").write(ds)
563 self.opener("journal.dirstate", "w").write(ds)
566
564
567 renames = [(self.sjoin("journal"), self.sjoin("undo")),
565 renames = [(self.sjoin("journal"), self.sjoin("undo")),
568 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
566 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
569 tr = transaction.transaction(self.ui.warn, self.sopener,
567 tr = transaction.transaction(self.ui.warn, self.sopener,
570 self.sjoin("journal"),
568 self.sjoin("journal"),
571 aftertrans(renames))
569 aftertrans(renames))
572 self.transhandle = tr
570 self.transhandle = tr
573 return tr
571 return tr
574
572
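# transaction() above writes a journal file alongside the store and saves the
# pre-transaction dirstate as journal.dirstate; aftertrans() renames both to
# undo/undo.dirstate once the transaction closes, so recover() can roll back
# an interrupted journal and rollback() can undo the last completed
# transaction.  A toy sketch of the journal-then-rename idea (paths are
# hypothetical):

import os

def commit_journal(journal, undo):
    # once the transaction completes, the journal becomes the undo record
    if os.path.exists(undo):
        os.unlink(undo)
    os.rename(journal, undo)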
575 def recover(self):
573 def recover(self):
576 l = self.lock()
574 l = self.lock()
577 if os.path.exists(self.sjoin("journal")):
575 if os.path.exists(self.sjoin("journal")):
578 self.ui.status(_("rolling back interrupted transaction\n"))
576 self.ui.status(_("rolling back interrupted transaction\n"))
579 transaction.rollback(self.sopener, self.sjoin("journal"))
577 transaction.rollback(self.sopener, self.sjoin("journal"))
580 self.reload()
578 self.reload()
581 return True
579 return True
582 else:
580 else:
583 self.ui.warn(_("no interrupted transaction available\n"))
581 self.ui.warn(_("no interrupted transaction available\n"))
584 return False
582 return False
585
583
586 def rollback(self, wlock=None):
584 def rollback(self, wlock=None):
587 if not wlock:
585 if not wlock:
588 wlock = self.wlock()
586 wlock = self.wlock()
589 l = self.lock()
587 l = self.lock()
590 if os.path.exists(self.sjoin("undo")):
588 if os.path.exists(self.sjoin("undo")):
591 self.ui.status(_("rolling back last transaction\n"))
589 self.ui.status(_("rolling back last transaction\n"))
592 transaction.rollback(self.sopener, self.sjoin("undo"))
590 transaction.rollback(self.sopener, self.sjoin("undo"))
593 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
591 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
594 self.reload()
592 self.reload()
595 self.wreload()
593 self.wreload()
596 else:
594 else:
597 self.ui.warn(_("no rollback information available\n"))
595 self.ui.warn(_("no rollback information available\n"))
598
596
599 def wreload(self):
597 def wreload(self):
600 self.dirstate.read()
598 self.dirstate.read()
601
599
602 def reload(self):
600 def reload(self):
603 self.changelog.load()
601 self.changelog.load()
604 self.manifest.load()
602 self.manifest.load()
605 self.tagscache = None
603 self.tagscache = None
606 self.nodetagscache = None
604 self.nodetagscache = None
607
605
608 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
606 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
609 desc=None):
607 desc=None):
610 try:
608 try:
611 l = lock.lock(lockname, 0, releasefn, desc=desc)
609 l = lock.lock(lockname, 0, releasefn, desc=desc)
612 except lock.LockHeld, inst:
610 except lock.LockHeld, inst:
613 if not wait:
611 if not wait:
614 raise
612 raise
615 self.ui.warn(_("waiting for lock on %s held by %r\n") %
613 self.ui.warn(_("waiting for lock on %s held by %r\n") %
616 (desc, inst.locker))
614 (desc, inst.locker))
617 # default to 600 seconds timeout
615 # default to 600 seconds timeout
618 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
616 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
619 releasefn, desc=desc)
617 releasefn, desc=desc)
620 if acquirefn:
618 if acquirefn:
621 acquirefn()
619 acquirefn()
622 return l
620 return l
623
621
624 def lock(self, wait=1):
622 def lock(self, wait=1):
625 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
623 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
626 desc=_('repository %s') % self.origroot)
624 desc=_('repository %s') % self.origroot)
627
625
628 def wlock(self, wait=1):
626 def wlock(self, wait=1):
629 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
627 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
630 self.wreload,
628 self.wreload,
631 desc=_('working directory of %s') % self.origroot)
629 desc=_('working directory of %s') % self.origroot)
632
630
633 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
631 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
634 """
632 """
635 commit an individual file as part of a larger transaction
633 commit an individual file as part of a larger transaction
636 """
634 """
637
635
638 t = self.wread(fn)
636 t = self.wread(fn)
639 fl = self.file(fn)
637 fl = self.file(fn)
640 fp1 = manifest1.get(fn, nullid)
638 fp1 = manifest1.get(fn, nullid)
641 fp2 = manifest2.get(fn, nullid)
639 fp2 = manifest2.get(fn, nullid)
642
640
643 meta = {}
641 meta = {}
644 cp = self.dirstate.copied(fn)
642 cp = self.dirstate.copied(fn)
645 if cp:
643 if cp:
646 # Mark the new revision of this file as a copy of another
644 # Mark the new revision of this file as a copy of another
647 # file. This copy data will effectively act as a parent
645 # file. This copy data will effectively act as a parent
648 # of this new revision. If this is a merge, the first
646 # of this new revision. If this is a merge, the first
649 # parent will be the nullid (meaning "look up the copy data")
647 # parent will be the nullid (meaning "look up the copy data")
650 # and the second one will be the other parent. For example:
648 # and the second one will be the other parent. For example:
651 #
649 #
652 # 0 --- 1 --- 3 rev1 changes file foo
650 # 0 --- 1 --- 3 rev1 changes file foo
653 # \ / rev2 renames foo to bar and changes it
651 # \ / rev2 renames foo to bar and changes it
654 # \- 2 -/ rev3 should have bar with all changes and
652 # \- 2 -/ rev3 should have bar with all changes and
655 # should record that bar descends from
653 # should record that bar descends from
656 # bar in rev2 and foo in rev1
654 # bar in rev2 and foo in rev1
657 #
655 #
658 # this allows this merge to succeed:
656 # this allows this merge to succeed:
659 #
657 #
660 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
658 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
661 # \ / merging rev3 and rev4 should use bar@rev2
659 # \ / merging rev3 and rev4 should use bar@rev2
662 # \- 2 --- 4 as the merge base
660 # \- 2 --- 4 as the merge base
663 #
661 #
664 meta["copy"] = cp
662 meta["copy"] = cp
665 if not manifest2: # not a branch merge
663 if not manifest2: # not a branch merge
666 meta["copyrev"] = hex(manifest1.get(cp, nullid))
664 meta["copyrev"] = hex(manifest1.get(cp, nullid))
667 fp2 = nullid
665 fp2 = nullid
668 elif fp2 != nullid: # copied on remote side
666 elif fp2 != nullid: # copied on remote side
669 meta["copyrev"] = hex(manifest1.get(cp, nullid))
667 meta["copyrev"] = hex(manifest1.get(cp, nullid))
670 elif fp1 != nullid: # copied on local side, reversed
668 elif fp1 != nullid: # copied on local side, reversed
671 meta["copyrev"] = hex(manifest2.get(cp))
669 meta["copyrev"] = hex(manifest2.get(cp))
672 fp2 = fp1
670 fp2 = fp1
673 else: # directory rename
671 else: # directory rename
674 meta["copyrev"] = hex(manifest1.get(cp, nullid))
672 meta["copyrev"] = hex(manifest1.get(cp, nullid))
675 self.ui.debug(_(" %s: copy %s:%s\n") %
673 self.ui.debug(_(" %s: copy %s:%s\n") %
676 (fn, cp, meta["copyrev"]))
674 (fn, cp, meta["copyrev"]))
677 fp1 = nullid
675 fp1 = nullid
678 elif fp2 != nullid:
676 elif fp2 != nullid:
679 # is one parent an ancestor of the other?
677 # is one parent an ancestor of the other?
680 fpa = fl.ancestor(fp1, fp2)
678 fpa = fl.ancestor(fp1, fp2)
681 if fpa == fp1:
679 if fpa == fp1:
682 fp1, fp2 = fp2, nullid
680 fp1, fp2 = fp2, nullid
683 elif fpa == fp2:
681 elif fpa == fp2:
684 fp2 = nullid
682 fp2 = nullid
685
683
686 # is the file unmodified from the parent? report existing entry
684 # is the file unmodified from the parent? report existing entry
687 if fp2 == nullid and not fl.cmp(fp1, t):
685 if fp2 == nullid and not fl.cmp(fp1, t):
688 return fp1
686 return fp1
689
687
690 changelist.append(fn)
688 changelist.append(fn)
691 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
689 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
692
690
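# filecommit() above records a rename/copy as revlog metadata rather than as a
# real parent: meta["copy"] names the source file and meta["copyrev"] the
# source file revision, and for a non-merge commit the second parent is forced
# to nullid so "look up the copy data" stands in for it.  Illustrative values
# only (the hash below is made up):

meta = {
    "copy": "foo",                                          # renamed from foo
    "copyrev": "a3d52c4b8f0e6d1c9b7a5e3f2d1c0b9a8e7f6d5c",  # foo's file node
}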
693 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
691 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
694 if p1 is None:
692 if p1 is None:
695 p1, p2 = self.dirstate.parents()
693 p1, p2 = self.dirstate.parents()
696 return self.commit(files=files, text=text, user=user, date=date,
694 return self.commit(files=files, text=text, user=user, date=date,
697 p1=p1, p2=p2, wlock=wlock, extra=extra)
695 p1=p1, p2=p2, wlock=wlock, extra=extra)
698
696
699 def commit(self, files=None, text="", user=None, date=None,
697 def commit(self, files=None, text="", user=None, date=None,
700 match=util.always, force=False, lock=None, wlock=None,
698 match=util.always, force=False, lock=None, wlock=None,
701 force_editor=False, p1=None, p2=None, extra={}):
699 force_editor=False, p1=None, p2=None, extra={}):
702
700
703 commit = []
701 commit = []
704 remove = []
702 remove = []
705 changed = []
703 changed = []
706 use_dirstate = (p1 is None) # not rawcommit
704 use_dirstate = (p1 is None) # not rawcommit
707 extra = extra.copy()
705 extra = extra.copy()
708
706
709 if use_dirstate:
707 if use_dirstate:
710 if files:
708 if files:
711 for f in files:
709 for f in files:
712 s = self.dirstate.state(f)
710 s = self.dirstate.state(f)
713 if s in 'nmai':
711 if s in 'nmai':
714 commit.append(f)
712 commit.append(f)
715 elif s == 'r':
713 elif s == 'r':
716 remove.append(f)
714 remove.append(f)
717 else:
715 else:
718 self.ui.warn(_("%s not tracked!\n") % f)
716 self.ui.warn(_("%s not tracked!\n") % f)
719 else:
717 else:
720 changes = self.status(match=match)[:5]
718 changes = self.status(match=match)[:5]
721 modified, added, removed, deleted, unknown = changes
719 modified, added, removed, deleted, unknown = changes
722 commit = modified + added
720 commit = modified + added
723 remove = removed
721 remove = removed
724 else:
722 else:
725 commit = files
723 commit = files
726
724
727 if use_dirstate:
725 if use_dirstate:
728 p1, p2 = self.dirstate.parents()
726 p1, p2 = self.dirstate.parents()
729 update_dirstate = True
727 update_dirstate = True
730 else:
728 else:
731 p1, p2 = p1, p2 or nullid
729 p1, p2 = p1, p2 or nullid
732 update_dirstate = (self.dirstate.parents()[0] == p1)
730 update_dirstate = (self.dirstate.parents()[0] == p1)
733
731
734 c1 = self.changelog.read(p1)
732 c1 = self.changelog.read(p1)
735 c2 = self.changelog.read(p2)
733 c2 = self.changelog.read(p2)
736 m1 = self.manifest.read(c1[0]).copy()
734 m1 = self.manifest.read(c1[0]).copy()
737 m2 = self.manifest.read(c2[0])
735 m2 = self.manifest.read(c2[0])
738
736
739 if use_dirstate:
737 if use_dirstate:
740 branchname = self.workingctx().branch()
738 branchname = self.workingctx().branch()
741 try:
739 try:
742 branchname = branchname.decode('UTF-8').encode('UTF-8')
740 branchname = branchname.decode('UTF-8').encode('UTF-8')
743 except UnicodeDecodeError:
741 except UnicodeDecodeError:
744 raise util.Abort(_('branch name not in UTF-8!'))
742 raise util.Abort(_('branch name not in UTF-8!'))
745 else:
743 else:
746 branchname = ""
744 branchname = ""
747
745
748 if use_dirstate:
746 if use_dirstate:
749 oldname = c1[5].get("branch") # stored in UTF-8
747 oldname = c1[5].get("branch") # stored in UTF-8
750 if not commit and not remove and not force and p2 == nullid and \
748 if not commit and not remove and not force and p2 == nullid and \
751 branchname == oldname:
749 branchname == oldname:
752 self.ui.status(_("nothing changed\n"))
750 self.ui.status(_("nothing changed\n"))
753 return None
751 return None
754
752
755 xp1 = hex(p1)
753 xp1 = hex(p1)
756 if p2 == nullid: xp2 = ''
754 if p2 == nullid: xp2 = ''
757 else: xp2 = hex(p2)
755 else: xp2 = hex(p2)
758
756
759 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
757 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
760
758
761 if not wlock:
759 if not wlock:
762 wlock = self.wlock()
760 wlock = self.wlock()
763 if not lock:
761 if not lock:
764 lock = self.lock()
762 lock = self.lock()
765 tr = self.transaction()
763 tr = self.transaction()
766
764
767 # check in files
765 # check in files
768 new = {}
766 new = {}
769 linkrev = self.changelog.count()
767 linkrev = self.changelog.count()
770 commit.sort()
768 commit.sort()
771 is_exec = util.execfunc(self.root, m1.execf)
769 is_exec = util.execfunc(self.root, m1.execf)
772 is_link = util.linkfunc(self.root, m1.linkf)
770 is_link = util.linkfunc(self.root, m1.linkf)
773 for f in commit:
771 for f in commit:
774 self.ui.note(f + "\n")
772 self.ui.note(f + "\n")
775 try:
773 try:
776 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
774 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
777 m1.set(f, is_exec(f), is_link(f))
775 m1.set(f, is_exec(f), is_link(f))
778 except (OSError, IOError):
776 except (OSError, IOError):
779 if use_dirstate:
777 if use_dirstate:
780 self.ui.warn(_("trouble committing %s!\n") % f)
778 self.ui.warn(_("trouble committing %s!\n") % f)
781 raise
779 raise
782 else:
780 else:
783 remove.append(f)
781 remove.append(f)
784
782
785 # update manifest
783 # update manifest
786 m1.update(new)
784 m1.update(new)
787 remove.sort()
785 remove.sort()
788 removed = []
786 removed = []
789
787
790 for f in remove:
788 for f in remove:
791 if f in m1:
789 if f in m1:
792 del m1[f]
790 del m1[f]
793 removed.append(f)
791 removed.append(f)
794 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
792 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
795
793
796 # add changeset
794 # add changeset
797 new = new.keys()
795 new = new.keys()
798 new.sort()
796 new.sort()
799
797
800 user = user or self.ui.username()
798 user = user or self.ui.username()
801 if not text or force_editor:
799 if not text or force_editor:
802 edittext = []
800 edittext = []
803 if text:
801 if text:
804 edittext.append(text)
802 edittext.append(text)
805 edittext.append("")
803 edittext.append("")
806 edittext.append("HG: user: %s" % user)
804 edittext.append("HG: user: %s" % user)
807 if p2 != nullid:
805 if p2 != nullid:
808 edittext.append("HG: branch merge")
806 edittext.append("HG: branch merge")
809 if branchname:
807 if branchname:
810 edittext.append("HG: branch %s" % util.tolocal(branchname))
808 edittext.append("HG: branch %s" % util.tolocal(branchname))
811 edittext.extend(["HG: changed %s" % f for f in changed])
809 edittext.extend(["HG: changed %s" % f for f in changed])
812 edittext.extend(["HG: removed %s" % f for f in removed])
810 edittext.extend(["HG: removed %s" % f for f in removed])
813 if not changed and not remove:
811 if not changed and not remove:
814 edittext.append("HG: no files changed")
812 edittext.append("HG: no files changed")
815 edittext.append("")
813 edittext.append("")
816 # run editor in the repository root
814 # run editor in the repository root
817 olddir = os.getcwd()
815 olddir = os.getcwd()
818 os.chdir(self.root)
816 os.chdir(self.root)
819 text = self.ui.edit("\n".join(edittext), user)
817 text = self.ui.edit("\n".join(edittext), user)
820 os.chdir(olddir)
818 os.chdir(olddir)
821
819
822 lines = [line.rstrip() for line in text.rstrip().splitlines()]
820 lines = [line.rstrip() for line in text.rstrip().splitlines()]
823 while lines and not lines[0]:
821 while lines and not lines[0]:
824 del lines[0]
822 del lines[0]
825 if not lines:
823 if not lines:
826 return None
824 return None
827 text = '\n'.join(lines)
825 text = '\n'.join(lines)
828 if branchname:
826 if branchname:
829 extra["branch"] = branchname
827 extra["branch"] = branchname
830 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
828 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
831 user, date, extra)
829 user, date, extra)
832 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
830 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
833 parent2=xp2)
831 parent2=xp2)
834 tr.close()
832 tr.close()
835
833
836 if self.branchcache and "branch" in extra:
834 if self.branchcache and "branch" in extra:
837 self.branchcache[util.tolocal(extra["branch"])] = n
835 self.branchcache[util.tolocal(extra["branch"])] = n
838
836
839 if use_dirstate or update_dirstate:
837 if use_dirstate or update_dirstate:
840 self.dirstate.setparents(n)
838 self.dirstate.setparents(n)
841 if use_dirstate:
839 if use_dirstate:
842 self.dirstate.update(new, "n")
840 self.dirstate.update(new, "n")
843 self.dirstate.forget(removed)
841 self.dirstate.forget(removed)
844
842
845 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
843 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
846 return n
844 return n
847
845
848 def walk(self, node=None, files=[], match=util.always, badmatch=None):
846 def walk(self, node=None, files=[], match=util.always, badmatch=None):
849 '''
847 '''
850 walk recursively through the directory tree or a given
848 walk recursively through the directory tree or a given
851 changeset, finding all files matched by the match
849 changeset, finding all files matched by the match
852 function
850 function
853
851
854 results are yielded in a tuple (src, filename), where src
852 results are yielded in a tuple (src, filename), where src
855 is one of:
853 is one of:
856 'f' the file was found in the directory tree
854 'f' the file was found in the directory tree
857 'm' the file was only in the dirstate and not in the tree
855 'm' the file was only in the dirstate and not in the tree
858 'b' file was not found and matched badmatch
856 'b' file was not found and matched badmatch
859 '''
857 '''
860
858
861 if node:
859 if node:
862 fdict = dict.fromkeys(files)
860 fdict = dict.fromkeys(files)
863 # for dirstate.walk, files=['.'] means "walk the whole tree".
861 # for dirstate.walk, files=['.'] means "walk the whole tree".
864 # follow that here, too
862 # follow that here, too
865 fdict.pop('.', None)
863 fdict.pop('.', None)
866 mdict = self.manifest.read(self.changelog.read(node)[0])
864 mdict = self.manifest.read(self.changelog.read(node)[0])
867 mfiles = mdict.keys()
865 mfiles = mdict.keys()
868 mfiles.sort()
866 mfiles.sort()
869 for fn in mfiles:
867 for fn in mfiles:
870 for ffn in fdict:
868 for ffn in fdict:
871 # match if the file is the exact name or a directory
869 # match if the file is the exact name or a directory
872 if ffn == fn or fn.startswith("%s/" % ffn):
870 if ffn == fn or fn.startswith("%s/" % ffn):
873 del fdict[ffn]
871 del fdict[ffn]
874 break
872 break
875 if match(fn):
873 if match(fn):
876 yield 'm', fn
874 yield 'm', fn
877 ffiles = fdict.keys()
875 ffiles = fdict.keys()
878 ffiles.sort()
876 ffiles.sort()
879 for fn in ffiles:
877 for fn in ffiles:
880 if badmatch and badmatch(fn):
878 if badmatch and badmatch(fn):
881 if match(fn):
879 if match(fn):
882 yield 'b', fn
880 yield 'b', fn
883 else:
881 else:
884 self.ui.warn(_('%s: No such file in rev %s\n') % (
882 self.ui.warn(_('%s: No such file in rev %s\n') % (
885 util.pathto(self.root, self.getcwd(), fn), short(node)))
883 util.pathto(self.root, self.getcwd(), fn), short(node)))
886 else:
884 else:
887 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
885 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
888 yield src, fn
886 yield src, fn
889
887
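# walk() above yields (src, filename) pairs, where src is 'f' for a file found
# in the working directory tree, 'm' for one known only to the dirstate, and
# 'b' for a file that only matched badmatch.  A small standalone helper that
# buckets such a stream (the walker itself is assumed to come from a repo):

def bucket_walk(pairs):
    found, dirstate_only, bad = [], [], []
    for src, fn in pairs:
        {'f': found, 'm': dirstate_only, 'b': bad}[src].append(fn)
    return found, dirstate_only, bad

# >>> bucket_walk([('f', 'a.txt'), ('m', 'b.txt'), ('b', 'missing')])
# (['a.txt'], ['b.txt'], ['missing'])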
890 def status(self, node1=None, node2=None, files=[], match=util.always,
888 def status(self, node1=None, node2=None, files=[], match=util.always,
891 wlock=None, list_ignored=False, list_clean=False):
889 wlock=None, list_ignored=False, list_clean=False):
892 """return status of files between two nodes or node and working directory
890 """return status of files between two nodes or node and working directory
893
891
894 If node1 is None, use the first dirstate parent instead.
892 If node1 is None, use the first dirstate parent instead.
895 If node2 is None, compare node1 with working directory.
893 If node2 is None, compare node1 with working directory.
896 """
894 """
897
895
898 def fcmp(fn, getnode):
896 def fcmp(fn, getnode):
899 t1 = self.wread(fn)
897 t1 = self.wread(fn)
900 return self.file(fn).cmp(getnode(fn), t1)
898 return self.file(fn).cmp(getnode(fn), t1)
901
899
902 def mfmatches(node):
900 def mfmatches(node):
903 change = self.changelog.read(node)
901 change = self.changelog.read(node)
904 mf = self.manifest.read(change[0]).copy()
902 mf = self.manifest.read(change[0]).copy()
905 for fn in mf.keys():
903 for fn in mf.keys():
906 if not match(fn):
904 if not match(fn):
907 del mf[fn]
905 del mf[fn]
908 return mf
906 return mf
909
907
910 modified, added, removed, deleted, unknown = [], [], [], [], []
908 modified, added, removed, deleted, unknown = [], [], [], [], []
911 ignored, clean = [], []
909 ignored, clean = [], []
912
910
913 compareworking = False
911 compareworking = False
914 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
912 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
915 compareworking = True
913 compareworking = True
916
914
917 if not compareworking:
915 if not compareworking:
918 # read the manifest from node1 before the manifest from node2,
916 # read the manifest from node1 before the manifest from node2,
919 # so that we'll hit the manifest cache if we're going through
917 # so that we'll hit the manifest cache if we're going through
920 # all the revisions in parent->child order.
918 # all the revisions in parent->child order.
921 mf1 = mfmatches(node1)
919 mf1 = mfmatches(node1)
922
920
923 # are we comparing the working directory?
921 # are we comparing the working directory?
924 if not node2:
922 if not node2:
925 if not wlock:
923 if not wlock:
926 try:
924 try:
927 wlock = self.wlock(wait=0)
925 wlock = self.wlock(wait=0)
928 except lock.LockException:
926 except lock.LockException:
929 wlock = None
927 wlock = None
930 (lookup, modified, added, removed, deleted, unknown,
928 (lookup, modified, added, removed, deleted, unknown,
931 ignored, clean) = self.dirstate.status(files, match,
929 ignored, clean) = self.dirstate.status(files, match,
932 list_ignored, list_clean)
930 list_ignored, list_clean)
933
931
934 # are we comparing working dir against its parent?
932 # are we comparing working dir against its parent?
935 if compareworking:
933 if compareworking:
936 if lookup:
934 if lookup:
937 # do a full compare of any files that might have changed
935 # do a full compare of any files that might have changed
938 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
936 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
939 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
937 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
940 nullid)
938 nullid)
941 for f in lookup:
939 for f in lookup:
942 if fcmp(f, getnode):
940 if fcmp(f, getnode):
943 modified.append(f)
941 modified.append(f)
944 else:
942 else:
945 clean.append(f)
943 clean.append(f)
946 if wlock is not None:
944 if wlock is not None:
947 self.dirstate.update([f], "n")
945 self.dirstate.update([f], "n")
948 else:
946 else:
949 # we are comparing working dir against non-parent
947 # we are comparing working dir against non-parent
950 # generate a pseudo-manifest for the working dir
948 # generate a pseudo-manifest for the working dir
951 # XXX: create it in dirstate.py ?
949 # XXX: create it in dirstate.py ?
952 mf2 = mfmatches(self.dirstate.parents()[0])
950 mf2 = mfmatches(self.dirstate.parents()[0])
953 is_exec = util.execfunc(self.root, mf2.execf)
951 is_exec = util.execfunc(self.root, mf2.execf)
954 is_link = util.linkfunc(self.root, mf2.linkf)
952 is_link = util.linkfunc(self.root, mf2.linkf)
955 for f in lookup + modified + added:
953 for f in lookup + modified + added:
956 mf2[f] = ""
954 mf2[f] = ""
957 mf2.set(f, is_exec(f), is_link(f))
955 mf2.set(f, is_exec(f), is_link(f))
958 for f in removed:
956 for f in removed:
959 if f in mf2:
957 if f in mf2:
960 del mf2[f]
958 del mf2[f]
961 else:
959 else:
962 # we are comparing two revisions
960 # we are comparing two revisions
963 mf2 = mfmatches(node2)
961 mf2 = mfmatches(node2)
964
962
965 if not compareworking:
963 if not compareworking:
966 # flush lists from dirstate before comparing manifests
964 # flush lists from dirstate before comparing manifests
967 modified, added, clean = [], [], []
965 modified, added, clean = [], [], []
968
966
969 # make sure to sort the files so we talk to the disk in a
967 # make sure to sort the files so we talk to the disk in a
970 # reasonable order
968 # reasonable order
971 mf2keys = mf2.keys()
969 mf2keys = mf2.keys()
972 mf2keys.sort()
970 mf2keys.sort()
973 getnode = lambda fn: mf1.get(fn, nullid)
971 getnode = lambda fn: mf1.get(fn, nullid)
974 for fn in mf2keys:
972 for fn in mf2keys:
975 if mf1.has_key(fn):
973 if mf1.has_key(fn):
976 if mf1.flags(fn) != mf2.flags(fn) or \
974 if mf1.flags(fn) != mf2.flags(fn) or \
977 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
975 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
978 fcmp(fn, getnode))):
976 fcmp(fn, getnode))):
979 modified.append(fn)
977 modified.append(fn)
980 elif list_clean:
978 elif list_clean:
981 clean.append(fn)
979 clean.append(fn)
982 del mf1[fn]
980 del mf1[fn]
983 else:
981 else:
984 added.append(fn)
982 added.append(fn)
985
983
986 removed = mf1.keys()
984 removed = mf1.keys()
987
985
988 # sort and return results:
986 # sort and return results:
989 for l in modified, added, removed, deleted, unknown, ignored, clean:
987 for l in modified, added, removed, deleted, unknown, ignored, clean:
990 l.sort()
988 l.sort()
991 return (modified, added, removed, deleted, unknown, ignored, clean)
989 return (modified, added, removed, deleted, unknown, ignored, clean)
992
990
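A hedged usage sketch, not part of this changeset: status() returns seven lists which callers unpack positionally; "repo" below stands for any existing localrepository instance.

# Illustrative only -- repo is assumed to be an open repository object.
(modified, added, removed, deleted,
 unknown, ignored, clean) = repo.status(list_ignored=True, list_clean=True)
for flag, names in (('M', modified), ('A', added), ('R', removed), ('!', deleted)):
    for name in names:
        repo.ui.write("%s %s\n" % (flag, name))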
993 def add(self, list, wlock=None):
991 def add(self, list, wlock=None):
994 if not wlock:
992 if not wlock:
995 wlock = self.wlock()
993 wlock = self.wlock()
996 for f in list:
994 for f in list:
997 p = self.wjoin(f)
995 p = self.wjoin(f)
998 islink = os.path.islink(p)
996 islink = os.path.islink(p)
999 if not islink and not os.path.exists(p):
997 if not islink and not os.path.exists(p):
1000 self.ui.warn(_("%s does not exist!\n") % f)
998 self.ui.warn(_("%s does not exist!\n") % f)
1001 elif not islink and not os.path.isfile(p):
999 elif not islink and not os.path.isfile(p):
1002 self.ui.warn(_("%s not added: only files and symlinks "
1000 self.ui.warn(_("%s not added: only files and symlinks "
1003 "supported currently\n") % f)
1001 "supported currently\n") % f)
1004 elif self.dirstate.state(f) in 'an':
1002 elif self.dirstate.state(f) in 'an':
1005 self.ui.warn(_("%s already tracked!\n") % f)
1003 self.ui.warn(_("%s already tracked!\n") % f)
1006 else:
1004 else:
1007 self.dirstate.update([f], "a")
1005 self.dirstate.update([f], "a")
1008
1006
1009 def forget(self, list, wlock=None):
1007 def forget(self, list, wlock=None):
1010 if not wlock:
1008 if not wlock:
1011 wlock = self.wlock()
1009 wlock = self.wlock()
1012 for f in list:
1010 for f in list:
1013 if self.dirstate.state(f) not in 'ai':
1011 if self.dirstate.state(f) not in 'ai':
1014 self.ui.warn(_("%s not added!\n") % f)
1012 self.ui.warn(_("%s not added!\n") % f)
1015 else:
1013 else:
1016 self.dirstate.forget([f])
1014 self.dirstate.forget([f])
1017
1015
1018 def remove(self, list, unlink=False, wlock=None):
1016 def remove(self, list, unlink=False, wlock=None):
1019 if unlink:
1017 if unlink:
1020 for f in list:
1018 for f in list:
1021 try:
1019 try:
1022 util.unlink(self.wjoin(f))
1020 util.unlink(self.wjoin(f))
1023 except OSError, inst:
1021 except OSError, inst:
1024 if inst.errno != errno.ENOENT:
1022 if inst.errno != errno.ENOENT:
1025 raise
1023 raise
1026 if not wlock:
1024 if not wlock:
1027 wlock = self.wlock()
1025 wlock = self.wlock()
1028 for f in list:
1026 for f in list:
1029 p = self.wjoin(f)
1027 p = self.wjoin(f)
1030 if os.path.exists(p):
1028 if os.path.exists(p):
1031 self.ui.warn(_("%s still exists!\n") % f)
1029 self.ui.warn(_("%s still exists!\n") % f)
1032 elif self.dirstate.state(f) == 'a':
1030 elif self.dirstate.state(f) == 'a':
1033 self.dirstate.forget([f])
1031 self.dirstate.forget([f])
1034 elif f not in self.dirstate:
1032 elif f not in self.dirstate:
1035 self.ui.warn(_("%s not tracked!\n") % f)
1033 self.ui.warn(_("%s not tracked!\n") % f)
1036 else:
1034 else:
1037 self.dirstate.update([f], "r")
1035 self.dirstate.update([f], "r")
1038
1036
1039 def undelete(self, list, wlock=None):
1037 def undelete(self, list, wlock=None):
1040 p = self.dirstate.parents()[0]
1038 p = self.dirstate.parents()[0]
1041 mn = self.changelog.read(p)[0]
1039 mn = self.changelog.read(p)[0]
1042 m = self.manifest.read(mn)
1040 m = self.manifest.read(mn)
1043 if not wlock:
1041 if not wlock:
1044 wlock = self.wlock()
1042 wlock = self.wlock()
1045 for f in list:
1043 for f in list:
1046 if self.dirstate.state(f) not in "r":
1044 if self.dirstate.state(f) not in "r":
1047 self.ui.warn("%s not removed!\n" % f)
1045 self.ui.warn("%s not removed!\n" % f)
1048 else:
1046 else:
1049 t = self.file(f).read(m[f])
1047 t = self.file(f).read(m[f])
1050 self.wwrite(f, t, m.flags(f))
1048 self.wwrite(f, t, m.flags(f))
1051 self.dirstate.update([f], "n")
1049 self.dirstate.update([f], "n")
1052
1050
1053 def copy(self, source, dest, wlock=None):
1051 def copy(self, source, dest, wlock=None):
1054 p = self.wjoin(dest)
1052 p = self.wjoin(dest)
1055 if not os.path.exists(p):
1053 if not (os.path.exists(p) or os.path.islink(p)):
1056 self.ui.warn(_("%s does not exist!\n") % dest)
1054 self.ui.warn(_("%s does not exist!\n") % dest)
1057 elif not os.path.isfile(p):
1055 elif not (os.path.isfile(p) or os.path.islink(p)):
1058 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1056 self.ui.warn(_("copy failed: %s is not a file or a "
1057 "symbolic link\n") % dest)
1059 else:
1058 else:
1060 if not wlock:
1059 if not wlock:
1061 wlock = self.wlock()
1060 wlock = self.wlock()
1062 if self.dirstate.state(dest) == '?':
1061 if self.dirstate.state(dest) == '?':
1063 self.dirstate.update([dest], "a")
1062 self.dirstate.update([dest], "a")
1064 self.dirstate.copy(source, dest)
1063 self.dirstate.copy(source, dest)
1065
1064
1066 def heads(self, start=None):
1065 def heads(self, start=None):
1067 heads = self.changelog.heads(start)
1066 heads = self.changelog.heads(start)
1068 # sort the output in rev descending order
1067 # sort the output in rev descending order
1069 heads = [(-self.changelog.rev(h), h) for h in heads]
1068 heads = [(-self.changelog.rev(h), h) for h in heads]
1070 heads.sort()
1069 heads.sort()
1071 return [n for (r, n) in heads]
1070 return [n for (r, n) in heads]
1072
1071
1073 def branches(self, nodes):
1072 def branches(self, nodes):
1074 if not nodes:
1073 if not nodes:
1075 nodes = [self.changelog.tip()]
1074 nodes = [self.changelog.tip()]
1076 b = []
1075 b = []
1077 for n in nodes:
1076 for n in nodes:
1078 t = n
1077 t = n
1079 while 1:
1078 while 1:
1080 p = self.changelog.parents(n)
1079 p = self.changelog.parents(n)
1081 if p[1] != nullid or p[0] == nullid:
1080 if p[1] != nullid or p[0] == nullid:
1082 b.append((t, n, p[0], p[1]))
1081 b.append((t, n, p[0], p[1]))
1083 break
1082 break
1084 n = p[0]
1083 n = p[0]
1085 return b
1084 return b
1086
1085
1087 def between(self, pairs):
1086 def between(self, pairs):
1088 r = []
1087 r = []
1089
1088
1090 for top, bottom in pairs:
1089 for top, bottom in pairs:
1091 n, l, i = top, [], 0
1090 n, l, i = top, [], 0
1092 f = 1
1091 f = 1
1093
1092
1094 while n != bottom:
1093 while n != bottom:
1095 p = self.changelog.parents(n)[0]
1094 p = self.changelog.parents(n)[0]
1096 if i == f:
1095 if i == f:
1097 l.append(n)
1096 l.append(n)
1098 f = f * 2
1097 f = f * 2
1099 n = p
1098 n = p
1100 i += 1
1099 i += 1
1101
1100
1102 r.append(l)
1101 r.append(l)
1103
1102
1104 return r
1103 return r
1105
1104
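between() reports nodes along each top-to-bottom chain at exponentially growing distances (it records position i whenever i == f and then doubles f). A minimal standalone sketch of that index pattern, not taken from the changeset:

def sample_positions(length):
    # positions along a linear chain that the loop above would report
    picked, f, i = [], 1, 0
    while i < length:
        if i == f:
            picked.append(i)
            f *= 2
        i += 1
    return picked

assert sample_positions(20) == [1, 2, 4, 8, 16]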
1106 def findincoming(self, remote, base=None, heads=None, force=False):
1105 def findincoming(self, remote, base=None, heads=None, force=False):
1107 """Return list of roots of the subsets of missing nodes from remote
1106 """Return list of roots of the subsets of missing nodes from remote
1108
1107
1109 If base dict is specified, assume that these nodes and their parents
1108 If base dict is specified, assume that these nodes and their parents
1110 exist on the remote side and that no child of a node of base exists
1109 exist on the remote side and that no child of a node of base exists
1111 in both remote and self.
1110 in both remote and self.
1112 Furthermore, base will be updated to include the nodes that exist
1111 Furthermore, base will be updated to include the nodes that exist
1113 in both self and remote but none of whose children exist in both.
1112 in both self and remote but none of whose children exist in both.
1114 If a list of heads is specified, return only nodes which are heads
1113 If a list of heads is specified, return only nodes which are heads
1115 or ancestors of these heads.
1114 or ancestors of these heads.
1116
1115
1117 All the ancestors of base are in self and in remote.
1116 All the ancestors of base are in self and in remote.
1118 All the descendants of the list returned are missing in self.
1117 All the descendants of the list returned are missing in self.
1119 (and so we know that the rest of the nodes are missing in remote, see
1118 (and so we know that the rest of the nodes are missing in remote, see
1120 outgoing)
1119 outgoing)
1121 """
1120 """
1122 m = self.changelog.nodemap
1121 m = self.changelog.nodemap
1123 search = []
1122 search = []
1124 fetch = {}
1123 fetch = {}
1125 seen = {}
1124 seen = {}
1126 seenbranch = {}
1125 seenbranch = {}
1127 if base == None:
1126 if base == None:
1128 base = {}
1127 base = {}
1129
1128
1130 if not heads:
1129 if not heads:
1131 heads = remote.heads()
1130 heads = remote.heads()
1132
1131
1133 if self.changelog.tip() == nullid:
1132 if self.changelog.tip() == nullid:
1134 base[nullid] = 1
1133 base[nullid] = 1
1135 if heads != [nullid]:
1134 if heads != [nullid]:
1136 return [nullid]
1135 return [nullid]
1137 return []
1136 return []
1138
1137
1139 # assume we're closer to the tip than the root
1138 # assume we're closer to the tip than the root
1140 # and start by examining the heads
1139 # and start by examining the heads
1141 self.ui.status(_("searching for changes\n"))
1140 self.ui.status(_("searching for changes\n"))
1142
1141
1143 unknown = []
1142 unknown = []
1144 for h in heads:
1143 for h in heads:
1145 if h not in m:
1144 if h not in m:
1146 unknown.append(h)
1145 unknown.append(h)
1147 else:
1146 else:
1148 base[h] = 1
1147 base[h] = 1
1149
1148
1150 if not unknown:
1149 if not unknown:
1151 return []
1150 return []
1152
1151
1153 req = dict.fromkeys(unknown)
1152 req = dict.fromkeys(unknown)
1154 reqcnt = 0
1153 reqcnt = 0
1155
1154
1156 # search through remote branches
1155 # search through remote branches
1157 # a 'branch' here is a linear segment of history, with four parts:
1156 # a 'branch' here is a linear segment of history, with four parts:
1158 # head, root, first parent, second parent
1157 # head, root, first parent, second parent
1159 # (a branch always has two parents (or none) by definition)
1158 # (a branch always has two parents (or none) by definition)
1160 unknown = remote.branches(unknown)
1159 unknown = remote.branches(unknown)
1161 while unknown:
1160 while unknown:
1162 r = []
1161 r = []
1163 while unknown:
1162 while unknown:
1164 n = unknown.pop(0)
1163 n = unknown.pop(0)
1165 if n[0] in seen:
1164 if n[0] in seen:
1166 continue
1165 continue
1167
1166
1168 self.ui.debug(_("examining %s:%s\n")
1167 self.ui.debug(_("examining %s:%s\n")
1169 % (short(n[0]), short(n[1])))
1168 % (short(n[0]), short(n[1])))
1170 if n[0] == nullid: # found the end of the branch
1169 if n[0] == nullid: # found the end of the branch
1171 pass
1170 pass
1172 elif n in seenbranch:
1171 elif n in seenbranch:
1173 self.ui.debug(_("branch already found\n"))
1172 self.ui.debug(_("branch already found\n"))
1174 continue
1173 continue
1175 elif n[1] and n[1] in m: # do we know the base?
1174 elif n[1] and n[1] in m: # do we know the base?
1176 self.ui.debug(_("found incomplete branch %s:%s\n")
1175 self.ui.debug(_("found incomplete branch %s:%s\n")
1177 % (short(n[0]), short(n[1])))
1176 % (short(n[0]), short(n[1])))
1178 search.append(n) # schedule branch range for scanning
1177 search.append(n) # schedule branch range for scanning
1179 seenbranch[n] = 1
1178 seenbranch[n] = 1
1180 else:
1179 else:
1181 if n[1] not in seen and n[1] not in fetch:
1180 if n[1] not in seen and n[1] not in fetch:
1182 if n[2] in m and n[3] in m:
1181 if n[2] in m and n[3] in m:
1183 self.ui.debug(_("found new changeset %s\n") %
1182 self.ui.debug(_("found new changeset %s\n") %
1184 short(n[1]))
1183 short(n[1]))
1185 fetch[n[1]] = 1 # earliest unknown
1184 fetch[n[1]] = 1 # earliest unknown
1186 for p in n[2:4]:
1185 for p in n[2:4]:
1187 if p in m:
1186 if p in m:
1188 base[p] = 1 # latest known
1187 base[p] = 1 # latest known
1189
1188
1190 for p in n[2:4]:
1189 for p in n[2:4]:
1191 if p not in req and p not in m:
1190 if p not in req and p not in m:
1192 r.append(p)
1191 r.append(p)
1193 req[p] = 1
1192 req[p] = 1
1194 seen[n[0]] = 1
1193 seen[n[0]] = 1
1195
1194
1196 if r:
1195 if r:
1197 reqcnt += 1
1196 reqcnt += 1
1198 self.ui.debug(_("request %d: %s\n") %
1197 self.ui.debug(_("request %d: %s\n") %
1199 (reqcnt, " ".join(map(short, r))))
1198 (reqcnt, " ".join(map(short, r))))
1200 for p in xrange(0, len(r), 10):
1199 for p in xrange(0, len(r), 10):
1201 for b in remote.branches(r[p:p+10]):
1200 for b in remote.branches(r[p:p+10]):
1202 self.ui.debug(_("received %s:%s\n") %
1201 self.ui.debug(_("received %s:%s\n") %
1203 (short(b[0]), short(b[1])))
1202 (short(b[0]), short(b[1])))
1204 unknown.append(b)
1203 unknown.append(b)
1205
1204
1206 # do binary search on the branches we found
1205 # do binary search on the branches we found
1207 while search:
1206 while search:
1208 n = search.pop(0)
1207 n = search.pop(0)
1209 reqcnt += 1
1208 reqcnt += 1
1210 l = remote.between([(n[0], n[1])])[0]
1209 l = remote.between([(n[0], n[1])])[0]
1211 l.append(n[1])
1210 l.append(n[1])
1212 p = n[0]
1211 p = n[0]
1213 f = 1
1212 f = 1
1214 for i in l:
1213 for i in l:
1215 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1214 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1216 if i in m:
1215 if i in m:
1217 if f <= 2:
1216 if f <= 2:
1218 self.ui.debug(_("found new branch changeset %s\n") %
1217 self.ui.debug(_("found new branch changeset %s\n") %
1219 short(p))
1218 short(p))
1220 fetch[p] = 1
1219 fetch[p] = 1
1221 base[i] = 1
1220 base[i] = 1
1222 else:
1221 else:
1223 self.ui.debug(_("narrowed branch search to %s:%s\n")
1222 self.ui.debug(_("narrowed branch search to %s:%s\n")
1224 % (short(p), short(i)))
1223 % (short(p), short(i)))
1225 search.append((p, i))
1224 search.append((p, i))
1226 break
1225 break
1227 p, f = i, f * 2
1226 p, f = i, f * 2
1228
1227
1229 # sanity check our fetch list
1228 # sanity check our fetch list
1230 for f in fetch.keys():
1229 for f in fetch.keys():
1231 if f in m:
1230 if f in m:
1232 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1231 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1233
1232
1234 if base.keys() == [nullid]:
1233 if base.keys() == [nullid]:
1235 if force:
1234 if force:
1236 self.ui.warn(_("warning: repository is unrelated\n"))
1235 self.ui.warn(_("warning: repository is unrelated\n"))
1237 else:
1236 else:
1238 raise util.Abort(_("repository is unrelated"))
1237 raise util.Abort(_("repository is unrelated"))
1239
1238
1240 self.ui.debug(_("found new changesets starting at ") +
1239 self.ui.debug(_("found new changesets starting at ") +
1241 " ".join([short(f) for f in fetch]) + "\n")
1240 " ".join([short(f) for f in fetch]) + "\n")
1242
1241
1243 self.ui.debug(_("%d total queries\n") % reqcnt)
1242 self.ui.debug(_("%d total queries\n") % reqcnt)
1244
1243
1245 return fetch.keys()
1244 return fetch.keys()
1246
1245
1247 def findoutgoing(self, remote, base=None, heads=None, force=False):
1246 def findoutgoing(self, remote, base=None, heads=None, force=False):
1248 """Return list of nodes that are roots of subsets not in remote
1247 """Return list of nodes that are roots of subsets not in remote
1249
1248
1250 If base dict is specified, assume that these nodes and their parents
1249 If base dict is specified, assume that these nodes and their parents
1251 exist on the remote side.
1250 exist on the remote side.
1252 If a list of heads is specified, return only nodes which are heads
1251 If a list of heads is specified, return only nodes which are heads
1253 or ancestors of these heads, and return a second element which
1252 or ancestors of these heads, and return a second element which
1254 contains all remote heads which get new children.
1253 contains all remote heads which get new children.
1255 """
1254 """
1256 if base == None:
1255 if base == None:
1257 base = {}
1256 base = {}
1258 self.findincoming(remote, base, heads, force=force)
1257 self.findincoming(remote, base, heads, force=force)
1259
1258
1260 self.ui.debug(_("common changesets up to ")
1259 self.ui.debug(_("common changesets up to ")
1261 + " ".join(map(short, base.keys())) + "\n")
1260 + " ".join(map(short, base.keys())) + "\n")
1262
1261
1263 remain = dict.fromkeys(self.changelog.nodemap)
1262 remain = dict.fromkeys(self.changelog.nodemap)
1264
1263
1265 # prune everything remote has from the tree
1264 # prune everything remote has from the tree
1266 del remain[nullid]
1265 del remain[nullid]
1267 remove = base.keys()
1266 remove = base.keys()
1268 while remove:
1267 while remove:
1269 n = remove.pop(0)
1268 n = remove.pop(0)
1270 if n in remain:
1269 if n in remain:
1271 del remain[n]
1270 del remain[n]
1272 for p in self.changelog.parents(n):
1271 for p in self.changelog.parents(n):
1273 remove.append(p)
1272 remove.append(p)
1274
1273
1275 # find every node whose parents have been pruned
1274 # find every node whose parents have been pruned
1276 subset = []
1275 subset = []
1277 # find every remote head that will get new children
1276 # find every remote head that will get new children
1278 updated_heads = {}
1277 updated_heads = {}
1279 for n in remain:
1278 for n in remain:
1280 p1, p2 = self.changelog.parents(n)
1279 p1, p2 = self.changelog.parents(n)
1281 if p1 not in remain and p2 not in remain:
1280 if p1 not in remain and p2 not in remain:
1282 subset.append(n)
1281 subset.append(n)
1283 if heads:
1282 if heads:
1284 if p1 in heads:
1283 if p1 in heads:
1285 updated_heads[p1] = True
1284 updated_heads[p1] = True
1286 if p2 in heads:
1285 if p2 in heads:
1287 updated_heads[p2] = True
1286 updated_heads[p2] = True
1288
1287
1289 # this is the set of all roots we have to push
1288 # this is the set of all roots we have to push
1290 if heads:
1289 if heads:
1291 return subset, updated_heads.keys()
1290 return subset, updated_heads.keys()
1292 else:
1291 else:
1293 return subset
1292 return subset
1294
1293
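A hedged sketch of how findincoming() and findoutgoing() are typically driven together, reusing one base dict the way prepush() does below; "repo" and "other" are assumed, illustrative repository objects.

# Illustrative only.
base = {}
incoming = repo.findincoming(other, base=base, force=False)
outgoing = repo.findoutgoing(other, base=base)
repo.ui.status("%d incoming root(s), %d outgoing root(s)\n"
               % (len(incoming), len(outgoing)))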
1295 def pull(self, remote, heads=None, force=False, lock=None):
1294 def pull(self, remote, heads=None, force=False, lock=None):
1296 mylock = False
1295 mylock = False
1297 if not lock:
1296 if not lock:
1298 lock = self.lock()
1297 lock = self.lock()
1299 mylock = True
1298 mylock = True
1300
1299
1301 try:
1300 try:
1302 fetch = self.findincoming(remote, force=force)
1301 fetch = self.findincoming(remote, force=force)
1303 if fetch == [nullid]:
1302 if fetch == [nullid]:
1304 self.ui.status(_("requesting all changes\n"))
1303 self.ui.status(_("requesting all changes\n"))
1305
1304
1306 if not fetch:
1305 if not fetch:
1307 self.ui.status(_("no changes found\n"))
1306 self.ui.status(_("no changes found\n"))
1308 return 0
1307 return 0
1309
1308
1310 if heads is None:
1309 if heads is None:
1311 cg = remote.changegroup(fetch, 'pull')
1310 cg = remote.changegroup(fetch, 'pull')
1312 else:
1311 else:
1313 if 'changegroupsubset' not in remote.capabilities:
1312 if 'changegroupsubset' not in remote.capabilities:
1314 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1313 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1315 cg = remote.changegroupsubset(fetch, heads, 'pull')
1314 cg = remote.changegroupsubset(fetch, heads, 'pull')
1316 return self.addchangegroup(cg, 'pull', remote.url())
1315 return self.addchangegroup(cg, 'pull', remote.url())
1317 finally:
1316 finally:
1318 if mylock:
1317 if mylock:
1319 lock.release()
1318 lock.release()
1320
1319
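pull() returns whatever addchangegroup() returns (see its docstring further down), so a caller can report head changes. A hedged sketch with assumed "repo" and "other" objects:

# Illustrative only.
modheads = repo.pull(other)
if modheads == 0:
    repo.ui.status("nothing was pulled\n")
elif modheads > 1:
    repo.ui.status("%d new head(s) -- consider merging\n" % (modheads - 1))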
1321 def push(self, remote, force=False, revs=None):
1320 def push(self, remote, force=False, revs=None):
1322 # there are two ways to push to remote repo:
1321 # there are two ways to push to remote repo:
1323 #
1322 #
1324 # addchangegroup assumes local user can lock remote
1323 # addchangegroup assumes local user can lock remote
1325 # repo (local filesystem, old ssh servers).
1324 # repo (local filesystem, old ssh servers).
1326 #
1325 #
1327 # unbundle assumes local user cannot lock remote repo (new ssh
1326 # unbundle assumes local user cannot lock remote repo (new ssh
1328 # servers, http servers).
1327 # servers, http servers).
1329
1328
1330 if remote.capable('unbundle'):
1329 if remote.capable('unbundle'):
1331 return self.push_unbundle(remote, force, revs)
1330 return self.push_unbundle(remote, force, revs)
1332 return self.push_addchangegroup(remote, force, revs)
1331 return self.push_addchangegroup(remote, force, revs)
1333
1332
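A minimal sketch of the capability-based dispatch push() performs, using only the capable() check already shown above:

def choose_push_path(remote):
    # mirrors the decision in push(): prefer unbundle when the peer offers it
    if remote.capable('unbundle'):
        return 'unbundle'        # peer applies the bundle under its own lock
    return 'addchangegroup'      # we must be able to lock the peer ourselves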
1334 def prepush(self, remote, force, revs):
1333 def prepush(self, remote, force, revs):
1335 base = {}
1334 base = {}
1336 remote_heads = remote.heads()
1335 remote_heads = remote.heads()
1337 inc = self.findincoming(remote, base, remote_heads, force=force)
1336 inc = self.findincoming(remote, base, remote_heads, force=force)
1338
1337
1339 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1338 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1340 if revs is not None:
1339 if revs is not None:
1341 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1340 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1342 else:
1341 else:
1343 bases, heads = update, self.changelog.heads()
1342 bases, heads = update, self.changelog.heads()
1344
1343
1345 if not bases:
1344 if not bases:
1346 self.ui.status(_("no changes found\n"))
1345 self.ui.status(_("no changes found\n"))
1347 return None, 1
1346 return None, 1
1348 elif not force:
1347 elif not force:
1349 # check if we're creating new remote heads
1348 # check if we're creating new remote heads
1350 # to be a remote head after push, node must be either
1349 # to be a remote head after push, node must be either
1351 # - unknown locally
1350 # - unknown locally
1352 # - a local outgoing head descended from update
1351 # - a local outgoing head descended from update
1353 # - a remote head that's known locally and not
1352 # - a remote head that's known locally and not
1354 # ancestral to an outgoing head
1353 # ancestral to an outgoing head
1355
1354
1356 warn = 0
1355 warn = 0
1357
1356
1358 if remote_heads == [nullid]:
1357 if remote_heads == [nullid]:
1359 warn = 0
1358 warn = 0
1360 elif not revs and len(heads) > len(remote_heads):
1359 elif not revs and len(heads) > len(remote_heads):
1361 warn = 1
1360 warn = 1
1362 else:
1361 else:
1363 newheads = list(heads)
1362 newheads = list(heads)
1364 for r in remote_heads:
1363 for r in remote_heads:
1365 if r in self.changelog.nodemap:
1364 if r in self.changelog.nodemap:
1366 desc = self.changelog.heads(r, heads)
1365 desc = self.changelog.heads(r, heads)
1367 l = [h for h in heads if h in desc]
1366 l = [h for h in heads if h in desc]
1368 if not l:
1367 if not l:
1369 newheads.append(r)
1368 newheads.append(r)
1370 else:
1369 else:
1371 newheads.append(r)
1370 newheads.append(r)
1372 if len(newheads) > len(remote_heads):
1371 if len(newheads) > len(remote_heads):
1373 warn = 1
1372 warn = 1
1374
1373
1375 if warn:
1374 if warn:
1376 self.ui.warn(_("abort: push creates new remote branches!\n"))
1375 self.ui.warn(_("abort: push creates new remote branches!\n"))
1377 self.ui.status(_("(did you forget to merge?"
1376 self.ui.status(_("(did you forget to merge?"
1378 " use push -f to force)\n"))
1377 " use push -f to force)\n"))
1379 return None, 1
1378 return None, 1
1380 elif inc:
1379 elif inc:
1381 self.ui.warn(_("note: unsynced remote changes!\n"))
1380 self.ui.warn(_("note: unsynced remote changes!\n"))
1382
1381
1383
1382
1384 if revs is None:
1383 if revs is None:
1385 cg = self.changegroup(update, 'push')
1384 cg = self.changegroup(update, 'push')
1386 else:
1385 else:
1387 cg = self.changegroupsubset(update, revs, 'push')
1386 cg = self.changegroupsubset(update, revs, 'push')
1388 return cg, remote_heads
1387 return cg, remote_heads
1389
1388
1390 def push_addchangegroup(self, remote, force, revs):
1389 def push_addchangegroup(self, remote, force, revs):
1391 lock = remote.lock()
1390 lock = remote.lock()
1392
1391
1393 ret = self.prepush(remote, force, revs)
1392 ret = self.prepush(remote, force, revs)
1394 if ret[0] is not None:
1393 if ret[0] is not None:
1395 cg, remote_heads = ret
1394 cg, remote_heads = ret
1396 return remote.addchangegroup(cg, 'push', self.url())
1395 return remote.addchangegroup(cg, 'push', self.url())
1397 return ret[1]
1396 return ret[1]
1398
1397
1399 def push_unbundle(self, remote, force, revs):
1398 def push_unbundle(self, remote, force, revs):
1400 # local repo finds heads on server, finds out what revs it
1399 # local repo finds heads on server, finds out what revs it
1401 # must push. once revs transferred, if server finds it has
1400 # must push. once revs transferred, if server finds it has
1402 # different heads (someone else won commit/push race), server
1401 # different heads (someone else won commit/push race), server
1403 # aborts.
1402 # aborts.
1404
1403
1405 ret = self.prepush(remote, force, revs)
1404 ret = self.prepush(remote, force, revs)
1406 if ret[0] is not None:
1405 if ret[0] is not None:
1407 cg, remote_heads = ret
1406 cg, remote_heads = ret
1408 if force: remote_heads = ['force']
1407 if force: remote_heads = ['force']
1409 return remote.unbundle(cg, remote_heads, 'push')
1408 return remote.unbundle(cg, remote_heads, 'push')
1410 return ret[1]
1409 return ret[1]
1411
1410
1412 def changegroupinfo(self, nodes):
1411 def changegroupinfo(self, nodes):
1413 self.ui.note(_("%d changesets found\n") % len(nodes))
1412 self.ui.note(_("%d changesets found\n") % len(nodes))
1414 if self.ui.debugflag:
1413 if self.ui.debugflag:
1415 self.ui.debug(_("List of changesets:\n"))
1414 self.ui.debug(_("List of changesets:\n"))
1416 for node in nodes:
1415 for node in nodes:
1417 self.ui.debug("%s\n" % hex(node))
1416 self.ui.debug("%s\n" % hex(node))
1418
1417
1419 def changegroupsubset(self, bases, heads, source):
1418 def changegroupsubset(self, bases, heads, source):
1420 """This function generates a changegroup consisting of all the nodes
1419 """This function generates a changegroup consisting of all the nodes
1421 that are descendants of any of the bases, and ancestors of any of
1420 that are descendants of any of the bases, and ancestors of any of
1422 the heads.
1421 the heads.
1423
1422
1424 It is fairly complex as determining which filenodes and which
1423 It is fairly complex as determining which filenodes and which
1425 manifest nodes need to be included for the changeset to be complete
1424 manifest nodes need to be included for the changeset to be complete
1426 is non-trivial.
1425 is non-trivial.
1427
1426
1428 Another wrinkle is doing the reverse, figuring out which changeset in
1427 Another wrinkle is doing the reverse, figuring out which changeset in
1429 the changegroup a particular filenode or manifestnode belongs to."""
1428 the changegroup a particular filenode or manifestnode belongs to."""
1430
1429
1431 self.hook('preoutgoing', throw=True, source=source)
1430 self.hook('preoutgoing', throw=True, source=source)
1432
1431
1433 # Set up some initial variables
1432 # Set up some initial variables
1434 # Make it easy to refer to self.changelog
1433 # Make it easy to refer to self.changelog
1435 cl = self.changelog
1434 cl = self.changelog
1436 # msng is short for missing - compute the list of changesets in this
1435 # msng is short for missing - compute the list of changesets in this
1437 # changegroup.
1436 # changegroup.
1438 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1437 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1439 self.changegroupinfo(msng_cl_lst)
1438 self.changegroupinfo(msng_cl_lst)
1440 # Some bases may turn out to be superfluous, and some heads may be
1439 # Some bases may turn out to be superfluous, and some heads may be
1441 # too. nodesbetween will return the minimal set of bases and heads
1440 # too. nodesbetween will return the minimal set of bases and heads
1442 # necessary to re-create the changegroup.
1441 # necessary to re-create the changegroup.
1443
1442
1444 # Known heads are the list of heads that it is assumed the recipient
1443 # Known heads are the list of heads that it is assumed the recipient
1445 # of this changegroup will know about.
1444 # of this changegroup will know about.
1446 knownheads = {}
1445 knownheads = {}
1447 # We assume that all parents of bases are known heads.
1446 # We assume that all parents of bases are known heads.
1448 for n in bases:
1447 for n in bases:
1449 for p in cl.parents(n):
1448 for p in cl.parents(n):
1450 if p != nullid:
1449 if p != nullid:
1451 knownheads[p] = 1
1450 knownheads[p] = 1
1452 knownheads = knownheads.keys()
1451 knownheads = knownheads.keys()
1453 if knownheads:
1452 if knownheads:
1454 # Now that we know what heads are known, we can compute which
1453 # Now that we know what heads are known, we can compute which
1455 # changesets are known. The recipient must know about all
1454 # changesets are known. The recipient must know about all
1456 # changesets required to reach the known heads from the null
1455 # changesets required to reach the known heads from the null
1457 # changeset.
1456 # changeset.
1458 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1457 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1459 junk = None
1458 junk = None
1460 # Transform the list into an ersatz set.
1459 # Transform the list into an ersatz set.
1461 has_cl_set = dict.fromkeys(has_cl_set)
1460 has_cl_set = dict.fromkeys(has_cl_set)
1462 else:
1461 else:
1463 # If there were no known heads, the recipient cannot be assumed to
1462 # If there were no known heads, the recipient cannot be assumed to
1464 # know about any changesets.
1463 # know about any changesets.
1465 has_cl_set = {}
1464 has_cl_set = {}
1466
1465
1467 # Make it easy to refer to self.manifest
1466 # Make it easy to refer to self.manifest
1468 mnfst = self.manifest
1467 mnfst = self.manifest
1469 # We don't know which manifests are missing yet
1468 # We don't know which manifests are missing yet
1470 msng_mnfst_set = {}
1469 msng_mnfst_set = {}
1471 # Nor do we know which filenodes are missing.
1470 # Nor do we know which filenodes are missing.
1472 msng_filenode_set = {}
1471 msng_filenode_set = {}
1473
1472
1474 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1473 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1475 junk = None
1474 junk = None
1476
1475
1477 # A changeset always belongs to itself, so the changenode lookup
1476 # A changeset always belongs to itself, so the changenode lookup
1478 # function for a changenode is identity.
1477 # function for a changenode is identity.
1479 def identity(x):
1478 def identity(x):
1480 return x
1479 return x
1481
1480
1482 # A function generating function. Sets up an environment for the
1481 # A function generating function. Sets up an environment for the
1483 # inner function.
1482 # inner function.
1484 def cmp_by_rev_func(revlog):
1483 def cmp_by_rev_func(revlog):
1485 # Compare two nodes by their revision number in the environment's
1484 # Compare two nodes by their revision number in the environment's
1486 # revision history. Since the revision number both represents the
1485 # revision history. Since the revision number both represents the
1487 # most efficient order to read the nodes in, and represents a
1486 # most efficient order to read the nodes in, and represents a
1488 # topological sorting of the nodes, this function is often useful.
1487 # topological sorting of the nodes, this function is often useful.
1489 def cmp_by_rev(a, b):
1488 def cmp_by_rev(a, b):
1490 return cmp(revlog.rev(a), revlog.rev(b))
1489 return cmp(revlog.rev(a), revlog.rev(b))
1491 return cmp_by_rev
1490 return cmp_by_rev
1492
1491
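A standalone illustration of the comparator pattern above, using a stand-in object instead of a real revlog (Python 2 list.sort() accepts a comparison function); everything here is hypothetical:

class FakeRevlog(object):
    """Stand-in exposing only rev(node), which is all the comparator needs."""
    def __init__(self, order):
        self._rev = dict((n, i) for i, n in enumerate(order))
    def rev(self, node):
        return self._rev[node]

rl = FakeRevlog(['n0', 'n1', 'n2', 'n3'])
def cmp_by_rev(a, b):
    return cmp(rl.rev(a), rl.rev(b))
nodes = ['n3', 'n0', 'n2']
nodes.sort(cmp_by_rev)
assert nodes == ['n0', 'n2', 'n3']   # topological (revision) order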
1493 # If we determine that a particular file or manifest node must be a
1492 # If we determine that a particular file or manifest node must be a
1494 # node that the recipient of the changegroup will already have, we can
1493 # node that the recipient of the changegroup will already have, we can
1495 # also assume the recipient will have all the parents. This function
1494 # also assume the recipient will have all the parents. This function
1496 # prunes them from the set of missing nodes.
1495 # prunes them from the set of missing nodes.
1497 def prune_parents(revlog, hasset, msngset):
1496 def prune_parents(revlog, hasset, msngset):
1498 haslst = hasset.keys()
1497 haslst = hasset.keys()
1499 haslst.sort(cmp_by_rev_func(revlog))
1498 haslst.sort(cmp_by_rev_func(revlog))
1500 for node in haslst:
1499 for node in haslst:
1501 parentlst = [p for p in revlog.parents(node) if p != nullid]
1500 parentlst = [p for p in revlog.parents(node) if p != nullid]
1502 while parentlst:
1501 while parentlst:
1503 n = parentlst.pop()
1502 n = parentlst.pop()
1504 if n not in hasset:
1503 if n not in hasset:
1505 hasset[n] = 1
1504 hasset[n] = 1
1506 p = [p for p in revlog.parents(n) if p != nullid]
1505 p = [p for p in revlog.parents(n) if p != nullid]
1507 parentlst.extend(p)
1506 parentlst.extend(p)
1508 for n in hasset:
1507 for n in hasset:
1509 msngset.pop(n, None)
1508 msngset.pop(n, None)
1510
1509
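A small self-contained illustration of the pruning idea: once a node is known to be on the recipient, all of its ancestors can be dropped from the missing set. The toy parent table is hypothetical; '' plays the role of nullid.

parents = {'d': ('c',), 'c': ('b',), 'b': ('a',), 'a': ('',)}
missing = {'a': 1, 'b': 1, 'c': 1, 'd': 1}
hasset = {'c': 1}                      # recipient is known to have 'c'
stack = list(hasset)
while stack:
    n = stack.pop()
    for p in parents.get(n, ()):
        if p and p not in hasset:
            hasset[p] = 1
            stack.append(p)
for n in hasset:
    missing.pop(n, None)
assert sorted(missing) == ['d']        # only the descendant of 'c' remains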
1511 # This is a function generating function used to set up an environment
1510 # This is a function generating function used to set up an environment
1512 # for the inner function to execute in.
1511 # for the inner function to execute in.
1513 def manifest_and_file_collector(changedfileset):
1512 def manifest_and_file_collector(changedfileset):
1514 # This is an information gathering function that gathers
1513 # This is an information gathering function that gathers
1515 # information from each changeset node that goes out as part of
1514 # information from each changeset node that goes out as part of
1516 # the changegroup. The information gathered is a list of which
1515 # the changegroup. The information gathered is a list of which
1517 # manifest nodes are potentially required (the recipient may
1516 # manifest nodes are potentially required (the recipient may
1518 # already have them) and a total list of all files which were
1517 # already have them) and a total list of all files which were
1519 # changed in any changeset in the changegroup.
1518 # changed in any changeset in the changegroup.
1520 #
1519 #
1521 # We also remember the first changenode we saw each manifest
1520 # We also remember the first changenode we saw each manifest
1522 # referenced by, so we can later determine which changenode 'owns'
1521 # referenced by, so we can later determine which changenode 'owns'
1523 # the manifest.
1522 # the manifest.
1524 def collect_manifests_and_files(clnode):
1523 def collect_manifests_and_files(clnode):
1525 c = cl.read(clnode)
1524 c = cl.read(clnode)
1526 for f in c[3]:
1525 for f in c[3]:
1527 # This is to make sure we only have one instance of each
1526 # This is to make sure we only have one instance of each
1528 # filename string for each filename.
1527 # filename string for each filename.
1529 changedfileset.setdefault(f, f)
1528 changedfileset.setdefault(f, f)
1530 msng_mnfst_set.setdefault(c[0], clnode)
1529 msng_mnfst_set.setdefault(c[0], clnode)
1531 return collect_manifests_and_files
1530 return collect_manifests_and_files
1532
1531
1533 # Figure out which manifest nodes (of the ones we think might be part
1532 # Figure out which manifest nodes (of the ones we think might be part
1534 # of the changegroup) the recipient must know about and remove them
1533 # of the changegroup) the recipient must know about and remove them
1535 # from the changegroup.
1534 # from the changegroup.
1536 def prune_manifests():
1535 def prune_manifests():
1537 has_mnfst_set = {}
1536 has_mnfst_set = {}
1538 for n in msng_mnfst_set:
1537 for n in msng_mnfst_set:
1539 # If a 'missing' manifest thinks it belongs to a changenode
1538 # If a 'missing' manifest thinks it belongs to a changenode
1540 # the recipient is assumed to have, obviously the recipient
1539 # the recipient is assumed to have, obviously the recipient
1541 # must have that manifest.
1540 # must have that manifest.
1542 linknode = cl.node(mnfst.linkrev(n))
1541 linknode = cl.node(mnfst.linkrev(n))
1543 if linknode in has_cl_set:
1542 if linknode in has_cl_set:
1544 has_mnfst_set[n] = 1
1543 has_mnfst_set[n] = 1
1545 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1544 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1546
1545
1547 # Use the information collected in collect_manifests_and_files to say
1546 # Use the information collected in collect_manifests_and_files to say
1548 # which changenode any manifestnode belongs to.
1547 # which changenode any manifestnode belongs to.
1549 def lookup_manifest_link(mnfstnode):
1548 def lookup_manifest_link(mnfstnode):
1550 return msng_mnfst_set[mnfstnode]
1549 return msng_mnfst_set[mnfstnode]
1551
1550
1552 # A function generating function that sets up the initial environment
1551 # A function generating function that sets up the initial environment
1553 # for the inner function.
1552 # for the inner function.
1554 def filenode_collector(changedfiles):
1553 def filenode_collector(changedfiles):
1555 next_rev = [0]
1554 next_rev = [0]
1556 # This gathers information from each manifestnode included in the
1555 # This gathers information from each manifestnode included in the
1557 # changegroup about which filenodes the manifest node references
1556 # changegroup about which filenodes the manifest node references
1558 # so we can include those in the changegroup too.
1557 # so we can include those in the changegroup too.
1559 #
1558 #
1560 # It also remembers which changenode each filenode belongs to. It
1559 # It also remembers which changenode each filenode belongs to. It
1561 # does this by assuming that a filenode belongs to the changenode
1560 # does this by assuming that a filenode belongs to the changenode
1562 # the first manifest that references it belongs to.
1561 # the first manifest that references it belongs to.
1563 def collect_msng_filenodes(mnfstnode):
1562 def collect_msng_filenodes(mnfstnode):
1564 r = mnfst.rev(mnfstnode)
1563 r = mnfst.rev(mnfstnode)
1565 if r == next_rev[0]:
1564 if r == next_rev[0]:
1566 # If the last rev we looked at was the one just previous,
1565 # If the last rev we looked at was the one just previous,
1567 # we only need to see a diff.
1566 # we only need to see a diff.
1568 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1567 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1569 # For each line in the delta
1568 # For each line in the delta
1570 for dline in delta.splitlines():
1569 for dline in delta.splitlines():
1571 # get the filename and filenode for that line
1570 # get the filename and filenode for that line
1572 f, fnode = dline.split('\0')
1571 f, fnode = dline.split('\0')
1573 fnode = bin(fnode[:40])
1572 fnode = bin(fnode[:40])
1574 f = changedfiles.get(f, None)
1573 f = changedfiles.get(f, None)
1575 # And if the file is in the list of files we care
1574 # And if the file is in the list of files we care
1576 # about.
1575 # about.
1577 if f is not None:
1576 if f is not None:
1578 # Get the changenode this manifest belongs to
1577 # Get the changenode this manifest belongs to
1579 clnode = msng_mnfst_set[mnfstnode]
1578 clnode = msng_mnfst_set[mnfstnode]
1580 # Create the set of filenodes for the file if
1579 # Create the set of filenodes for the file if
1581 # there isn't one already.
1580 # there isn't one already.
1582 ndset = msng_filenode_set.setdefault(f, {})
1581 ndset = msng_filenode_set.setdefault(f, {})
1583 # And set the filenode's changelog node to the
1582 # And set the filenode's changelog node to the
1584 # manifest's if it hasn't been set already.
1583 # manifest's if it hasn't been set already.
1585 ndset.setdefault(fnode, clnode)
1584 ndset.setdefault(fnode, clnode)
1586 else:
1585 else:
1587 # Otherwise we need a full manifest.
1586 # Otherwise we need a full manifest.
1588 m = mnfst.read(mnfstnode)
1587 m = mnfst.read(mnfstnode)
1589 # For every file we care about.
1588 # For every file we care about.
1590 for f in changedfiles:
1589 for f in changedfiles:
1591 fnode = m.get(f, None)
1590 fnode = m.get(f, None)
1592 # If it's in the manifest
1591 # If it's in the manifest
1593 if fnode is not None:
1592 if fnode is not None:
1594 # See comments above.
1593 # See comments above.
1595 clnode = msng_mnfst_set[mnfstnode]
1594 clnode = msng_mnfst_set[mnfstnode]
1596 ndset = msng_filenode_set.setdefault(f, {})
1595 ndset = msng_filenode_set.setdefault(f, {})
1597 ndset.setdefault(fnode, clnode)
1596 ndset.setdefault(fnode, clnode)
1598 # Remember the revision we hope to see next.
1597 # Remember the revision we hope to see next.
1599 next_rev[0] = r + 1
1598 next_rev[0] = r + 1
1600 return collect_msng_filenodes
1599 return collect_msng_filenodes
1601
1600
1602 # We have a list of filenodes we think we need for a file; let's remove
1601 # We have a list of filenodes we think we need for a file; let's remove
1603 # all those we know the recipient must have.
1602 # all those we know the recipient must have.
1604 def prune_filenodes(f, filerevlog):
1603 def prune_filenodes(f, filerevlog):
1605 msngset = msng_filenode_set[f]
1604 msngset = msng_filenode_set[f]
1606 hasset = {}
1605 hasset = {}
1607 # If a 'missing' filenode thinks it belongs to a changenode we
1606 # If a 'missing' filenode thinks it belongs to a changenode we
1608 # assume the recipient must have, then the recipient must have
1607 # assume the recipient must have, then the recipient must have
1609 # that filenode.
1608 # that filenode.
1610 for n in msngset:
1609 for n in msngset:
1611 clnode = cl.node(filerevlog.linkrev(n))
1610 clnode = cl.node(filerevlog.linkrev(n))
1612 if clnode in has_cl_set:
1611 if clnode in has_cl_set:
1613 hasset[n] = 1
1612 hasset[n] = 1
1614 prune_parents(filerevlog, hasset, msngset)
1613 prune_parents(filerevlog, hasset, msngset)
1615
1614
1616 # A function generating function that sets up a context for the
1615 # A function generating function that sets up a context for the
1617 # inner function.
1616 # inner function.
1618 def lookup_filenode_link_func(fname):
1617 def lookup_filenode_link_func(fname):
1619 msngset = msng_filenode_set[fname]
1618 msngset = msng_filenode_set[fname]
1620 # Lookup the changenode the filenode belongs to.
1619 # Lookup the changenode the filenode belongs to.
1621 def lookup_filenode_link(fnode):
1620 def lookup_filenode_link(fnode):
1622 return msngset[fnode]
1621 return msngset[fnode]
1623 return lookup_filenode_link
1622 return lookup_filenode_link
1624
1623
1625 # Now that we have all these utility functions to help out and
1624 # Now that we have all these utility functions to help out and
1626 # logically divide up the task, generate the group.
1625 # logically divide up the task, generate the group.
1627 def gengroup():
1626 def gengroup():
1628 # The set of changed files starts empty.
1627 # The set of changed files starts empty.
1629 changedfiles = {}
1628 changedfiles = {}
1630 # Create a changenode group generator that will call our functions
1629 # Create a changenode group generator that will call our functions
1631 # back to lookup the owning changenode and collect information.
1630 # back to lookup the owning changenode and collect information.
1632 group = cl.group(msng_cl_lst, identity,
1631 group = cl.group(msng_cl_lst, identity,
1633 manifest_and_file_collector(changedfiles))
1632 manifest_and_file_collector(changedfiles))
1634 for chnk in group:
1633 for chnk in group:
1635 yield chnk
1634 yield chnk
1636
1635
1637 # The list of manifests has been collected by the generator
1636 # The list of manifests has been collected by the generator
1638 # calling our functions back.
1637 # calling our functions back.
1639 prune_manifests()
1638 prune_manifests()
1640 msng_mnfst_lst = msng_mnfst_set.keys()
1639 msng_mnfst_lst = msng_mnfst_set.keys()
1641 # Sort the manifestnodes by revision number.
1640 # Sort the manifestnodes by revision number.
1642 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1641 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1643 # Create a generator for the manifestnodes that calls our lookup
1642 # Create a generator for the manifestnodes that calls our lookup
1644 # and data collection functions back.
1643 # and data collection functions back.
1645 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1644 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1646 filenode_collector(changedfiles))
1645 filenode_collector(changedfiles))
1647 for chnk in group:
1646 for chnk in group:
1648 yield chnk
1647 yield chnk
1649
1648
1650 # These are no longer needed, dereference and toss the memory for
1649 # These are no longer needed, dereference and toss the memory for
1651 # them.
1650 # them.
1652 msng_mnfst_lst = None
1651 msng_mnfst_lst = None
1653 msng_mnfst_set.clear()
1652 msng_mnfst_set.clear()
1654
1653
1655 changedfiles = changedfiles.keys()
1654 changedfiles = changedfiles.keys()
1656 changedfiles.sort()
1655 changedfiles.sort()
1657 # Go through all our files in order sorted by name.
1656 # Go through all our files in order sorted by name.
1658 for fname in changedfiles:
1657 for fname in changedfiles:
1659 filerevlog = self.file(fname)
1658 filerevlog = self.file(fname)
1660 # Toss out the filenodes that the recipient isn't really
1659 # Toss out the filenodes that the recipient isn't really
1661 # missing.
1660 # missing.
1662 if msng_filenode_set.has_key(fname):
1661 if msng_filenode_set.has_key(fname):
1663 prune_filenodes(fname, filerevlog)
1662 prune_filenodes(fname, filerevlog)
1664 msng_filenode_lst = msng_filenode_set[fname].keys()
1663 msng_filenode_lst = msng_filenode_set[fname].keys()
1665 else:
1664 else:
1666 msng_filenode_lst = []
1665 msng_filenode_lst = []
1667 # If any filenodes are left, generate the group for them,
1666 # If any filenodes are left, generate the group for them,
1668 # otherwise don't bother.
1667 # otherwise don't bother.
1669 if len(msng_filenode_lst) > 0:
1668 if len(msng_filenode_lst) > 0:
1670 yield changegroup.genchunk(fname)
1669 yield changegroup.genchunk(fname)
1671 # Sort the filenodes by their revision #
1670 # Sort the filenodes by their revision #
1672 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1671 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1673 # Create a group generator and only pass in a changenode
1672 # Create a group generator and only pass in a changenode
1674 # lookup function as we need to collect no information
1673 # lookup function as we need to collect no information
1675 # from filenodes.
1674 # from filenodes.
1676 group = filerevlog.group(msng_filenode_lst,
1675 group = filerevlog.group(msng_filenode_lst,
1677 lookup_filenode_link_func(fname))
1676 lookup_filenode_link_func(fname))
1678 for chnk in group:
1677 for chnk in group:
1679 yield chnk
1678 yield chnk
1680 if msng_filenode_set.has_key(fname):
1679 if msng_filenode_set.has_key(fname):
1681 # Don't need this anymore, toss it to free memory.
1680 # Don't need this anymore, toss it to free memory.
1682 del msng_filenode_set[fname]
1681 del msng_filenode_set[fname]
1683 # Signal that no more groups are left.
1682 # Signal that no more groups are left.
1684 yield changegroup.closechunk()
1683 yield changegroup.closechunk()
1685
1684
1686 if msng_cl_lst:
1685 if msng_cl_lst:
1687 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1686 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1688
1687
1689 return util.chunkbuffer(gengroup())
1688 return util.chunkbuffer(gengroup())
1690
1689
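The generator is wrapped in util.chunkbuffer() so the result reads like a file; a hedged sketch of walking the front of such a stream with the changegroup helper already used in addchangegroup() below (this consumes the stream, so it is for illustration only):

def count_changelog_chunks(cg):
    # the changelog group comes first and ends at an empty chunk
    count = 0
    for chunk in changegroup.chunkiter(cg):
        count += 1
    return count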
1691 def changegroup(self, basenodes, source):
1690 def changegroup(self, basenodes, source):
1692 """Generate a changegroup of all nodes that we have that a recipient
1691 """Generate a changegroup of all nodes that we have that a recipient
1693 doesn't.
1692 doesn't.
1694
1693
1695 This is much easier than the previous function as we can assume that
1694 This is much easier than the previous function as we can assume that
1696 the recipient has any changenode we aren't sending them."""
1695 the recipient has any changenode we aren't sending them."""
1697
1696
1698 self.hook('preoutgoing', throw=True, source=source)
1697 self.hook('preoutgoing', throw=True, source=source)
1699
1698
1700 cl = self.changelog
1699 cl = self.changelog
1701 nodes = cl.nodesbetween(basenodes, None)[0]
1700 nodes = cl.nodesbetween(basenodes, None)[0]
1702 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1701 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1703 self.changegroupinfo(nodes)
1702 self.changegroupinfo(nodes)
1704
1703
1705 def identity(x):
1704 def identity(x):
1706 return x
1705 return x
1707
1706
1708 def gennodelst(revlog):
1707 def gennodelst(revlog):
1709 for r in xrange(0, revlog.count()):
1708 for r in xrange(0, revlog.count()):
1710 n = revlog.node(r)
1709 n = revlog.node(r)
1711 if revlog.linkrev(n) in revset:
1710 if revlog.linkrev(n) in revset:
1712 yield n
1711 yield n
1713
1712
1714 def changed_file_collector(changedfileset):
1713 def changed_file_collector(changedfileset):
1715 def collect_changed_files(clnode):
1714 def collect_changed_files(clnode):
1716 c = cl.read(clnode)
1715 c = cl.read(clnode)
1717 for fname in c[3]:
1716 for fname in c[3]:
1718 changedfileset[fname] = 1
1717 changedfileset[fname] = 1
1719 return collect_changed_files
1718 return collect_changed_files
1720
1719
1721 def lookuprevlink_func(revlog):
1720 def lookuprevlink_func(revlog):
1722 def lookuprevlink(n):
1721 def lookuprevlink(n):
1723 return cl.node(revlog.linkrev(n))
1722 return cl.node(revlog.linkrev(n))
1724 return lookuprevlink
1723 return lookuprevlink
1725
1724
1726 def gengroup():
1725 def gengroup():
1727 # construct a list of all changed files
1726 # construct a list of all changed files
1728 changedfiles = {}
1727 changedfiles = {}
1729
1728
1730 for chnk in cl.group(nodes, identity,
1729 for chnk in cl.group(nodes, identity,
1731 changed_file_collector(changedfiles)):
1730 changed_file_collector(changedfiles)):
1732 yield chnk
1731 yield chnk
1733 changedfiles = changedfiles.keys()
1732 changedfiles = changedfiles.keys()
1734 changedfiles.sort()
1733 changedfiles.sort()
1735
1734
1736 mnfst = self.manifest
1735 mnfst = self.manifest
1737 nodeiter = gennodelst(mnfst)
1736 nodeiter = gennodelst(mnfst)
1738 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1737 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1739 yield chnk
1738 yield chnk
1740
1739
1741 for fname in changedfiles:
1740 for fname in changedfiles:
1742 filerevlog = self.file(fname)
1741 filerevlog = self.file(fname)
1743 nodeiter = gennodelst(filerevlog)
1742 nodeiter = gennodelst(filerevlog)
1744 nodeiter = list(nodeiter)
1743 nodeiter = list(nodeiter)
1745 if nodeiter:
1744 if nodeiter:
1746 yield changegroup.genchunk(fname)
1745 yield changegroup.genchunk(fname)
1747 lookup = lookuprevlink_func(filerevlog)
1746 lookup = lookuprevlink_func(filerevlog)
1748 for chnk in filerevlog.group(nodeiter, lookup):
1747 for chnk in filerevlog.group(nodeiter, lookup):
1749 yield chnk
1748 yield chnk
1750
1749
1751 yield changegroup.closechunk()
1750 yield changegroup.closechunk()
1752
1751
1753 if nodes:
1752 if nodes:
1754 self.hook('outgoing', node=hex(nodes[0]), source=source)
1753 self.hook('outgoing', node=hex(nodes[0]), source=source)
1755
1754
1756 return util.chunkbuffer(gengroup())
1755 return util.chunkbuffer(gengroup())
1757
1756
1758 def addchangegroup(self, source, srctype, url):
1757 def addchangegroup(self, source, srctype, url):
1759 """add changegroup to repo.
1758 """add changegroup to repo.
1760
1759
1761 return values:
1760 return values:
1762 - nothing changed or no source: 0
1761 - nothing changed or no source: 0
1763 - more heads than before: 1+added heads (2..n)
1762 - more heads than before: 1+added heads (2..n)
1764 - fewer heads than before: -1-removed heads (-2..-n)
1763 - fewer heads than before: -1-removed heads (-2..-n)
1765 - number of heads stays the same: 1
1764 - number of heads stays the same: 1
1766 """
1765 """
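A hedged sketch of decoding the return value documented above (a hypothetical helper, not part of this changeset):

def describe_result(ret):
    # follows the return-value convention in the docstring above
    if ret == 0:
        return "nothing changed"
    if ret > 1:
        return "%d new head(s)" % (ret - 1)
    if ret < 0:
        return "%d head(s) removed" % (-ret - 1)
    return "head count unchanged"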
1767 def csmap(x):
1766 def csmap(x):
1768 self.ui.debug(_("add changeset %s\n") % short(x))
1767 self.ui.debug(_("add changeset %s\n") % short(x))
1769 return cl.count()
1768 return cl.count()
1770
1769
1771 def revmap(x):
1770 def revmap(x):
1772 return cl.rev(x)
1771 return cl.rev(x)
1773
1772
1774 if not source:
1773 if not source:
1775 return 0
1774 return 0
1776
1775
1777 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1776 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1778
1777
1779 changesets = files = revisions = 0
1778 changesets = files = revisions = 0
1780
1779
1781 tr = self.transaction()
1780 tr = self.transaction()
1782
1781
1783 # write changelog data to temp files so concurrent readers will not see
1782 # write changelog data to temp files so concurrent readers will not see
1784 # inconsistent view
1783 # inconsistent view
1785 cl = self.changelog
1784 cl = self.changelog
1786 cl.delayupdate()
1785 cl.delayupdate()
1787 oldheads = len(cl.heads())
1786 oldheads = len(cl.heads())
1788
1787
1789 # pull off the changeset group
1788 # pull off the changeset group
1790 self.ui.status(_("adding changesets\n"))
1789 self.ui.status(_("adding changesets\n"))
1791 cor = cl.count() - 1
1790 cor = cl.count() - 1
1792 chunkiter = changegroup.chunkiter(source)
1791 chunkiter = changegroup.chunkiter(source)
1793 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1792 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1794 raise util.Abort(_("received changelog group is empty"))
1793 raise util.Abort(_("received changelog group is empty"))
1795 cnr = cl.count() - 1
1794 cnr = cl.count() - 1
1796 changesets = cnr - cor
1795 changesets = cnr - cor
1797
1796
1798 # pull off the manifest group
1797 # pull off the manifest group
1799 self.ui.status(_("adding manifests\n"))
1798 self.ui.status(_("adding manifests\n"))
1800 chunkiter = changegroup.chunkiter(source)
1799 chunkiter = changegroup.chunkiter(source)
1801 # no need to check for empty manifest group here:
1800 # no need to check for empty manifest group here:
1802 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1801 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1803 # no new manifest will be created and the manifest group will
1802 # no new manifest will be created and the manifest group will
1804 # be empty during the pull
1803 # be empty during the pull
1805 self.manifest.addgroup(chunkiter, revmap, tr)
1804 self.manifest.addgroup(chunkiter, revmap, tr)
1806
1805
1807 # process the files
1806 # process the files
1808 self.ui.status(_("adding file changes\n"))
1807 self.ui.status(_("adding file changes\n"))
1809 while 1:
1808 while 1:
1810 f = changegroup.getchunk(source)
1809 f = changegroup.getchunk(source)
1811 if not f:
1810 if not f:
1812 break
1811 break
1813 self.ui.debug(_("adding %s revisions\n") % f)
1812 self.ui.debug(_("adding %s revisions\n") % f)
1814 fl = self.file(f)
1813 fl = self.file(f)
1815 o = fl.count()
1814 o = fl.count()
1816 chunkiter = changegroup.chunkiter(source)
1815 chunkiter = changegroup.chunkiter(source)
1817 if fl.addgroup(chunkiter, revmap, tr) is None:
1816 if fl.addgroup(chunkiter, revmap, tr) is None:
1818 raise util.Abort(_("received file revlog group is empty"))
1817 raise util.Abort(_("received file revlog group is empty"))
1819 revisions += fl.count() - o
1818 revisions += fl.count() - o
1820 files += 1
1819 files += 1
1821
1820
1822 # make changelog see real files again
1821 # make changelog see real files again
1823 cl.finalize(tr)
1822 cl.finalize(tr)
1824
1823
1825 newheads = len(self.changelog.heads())
1824 newheads = len(self.changelog.heads())
1826 heads = ""
1825 heads = ""
1827 if oldheads and newheads != oldheads:
1826 if oldheads and newheads != oldheads:
1828 heads = _(" (%+d heads)") % (newheads - oldheads)
1827 heads = _(" (%+d heads)") % (newheads - oldheads)
1829
1828
1830 self.ui.status(_("added %d changesets"
1829 self.ui.status(_("added %d changesets"
1831 " with %d changes to %d files%s\n")
1830 " with %d changes to %d files%s\n")
1832 % (changesets, revisions, files, heads))
1831 % (changesets, revisions, files, heads))
1833
1832
1834 if changesets > 0:
1833 if changesets > 0:
1835 self.hook('pretxnchangegroup', throw=True,
1834 self.hook('pretxnchangegroup', throw=True,
1836 node=hex(self.changelog.node(cor+1)), source=srctype,
1835 node=hex(self.changelog.node(cor+1)), source=srctype,
1837 url=url)
1836 url=url)
1838
1837
1839 tr.close()
1838 tr.close()
1840
1839
1841 if changesets > 0:
1840 if changesets > 0:
1842 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1841 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1843 source=srctype, url=url)
1842 source=srctype, url=url)
1844
1843
1845 for i in xrange(cor + 1, cnr + 1):
1844 for i in xrange(cor + 1, cnr + 1):
1846 self.hook("incoming", node=hex(self.changelog.node(i)),
1845 self.hook("incoming", node=hex(self.changelog.node(i)),
1847 source=srctype, url=url)
1846 source=srctype, url=url)
1848
1847
1849 # never return 0 here:
1848 # never return 0 here:
1850 if newheads < oldheads:
1849 if newheads < oldheads:
1851 return newheads - oldheads - 1
1850 return newheads - oldheads - 1
1852 else:
1851 else:
1853 return newheads - oldheads + 1
1852 return newheads - oldheads + 1
1854
1853
1855
1854
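# Illustrative sketch, not part of this changeset: decoding the return
# value convention documented in addchangegroup() above.  Only the
# arithmetic follows from that docstring; the helper name is invented.
def describe_addchangegroup_result(ret):
    if ret == 0:
        return "no changes found"
    if ret > 1:
        return "%d new heads" % (ret - 1)      # 1 + added heads
    if ret == 1:
        return "changesets added, head count unchanged"
    return "%d heads removed" % (-ret - 1)     # -1 - removed heads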
1856 def stream_in(self, remote):
1855 def stream_in(self, remote):
1857 fp = remote.stream_out()
1856 fp = remote.stream_out()
1858 l = fp.readline()
1857 l = fp.readline()
1859 try:
1858 try:
1860 resp = int(l)
1859 resp = int(l)
1861 except ValueError:
1860 except ValueError:
1862 raise util.UnexpectedOutput(
1861 raise util.UnexpectedOutput(
1863 _('Unexpected response from remote server:'), l)
1862 _('Unexpected response from remote server:'), l)
1864 if resp == 1:
1863 if resp == 1:
1865 raise util.Abort(_('operation forbidden by server'))
1864 raise util.Abort(_('operation forbidden by server'))
1866 elif resp == 2:
1865 elif resp == 2:
1867 raise util.Abort(_('locking the remote repository failed'))
1866 raise util.Abort(_('locking the remote repository failed'))
1868 elif resp != 0:
1867 elif resp != 0:
1869 raise util.Abort(_('the server sent an unknown error code'))
1868 raise util.Abort(_('the server sent an unknown error code'))
1870 self.ui.status(_('streaming all changes\n'))
1869 self.ui.status(_('streaming all changes\n'))
1871 l = fp.readline()
1870 l = fp.readline()
1872 try:
1871 try:
1873 total_files, total_bytes = map(int, l.split(' ', 1))
1872 total_files, total_bytes = map(int, l.split(' ', 1))
1874 except (ValueError, TypeError):
1873 except (ValueError, TypeError):
1875 raise util.UnexpectedOutput(
1874 raise util.UnexpectedOutput(
1876 _('Unexpected response from remote server:'), l)
1875 _('Unexpected response from remote server:'), l)
1877 self.ui.status(_('%d files to transfer, %s of data\n') %
1876 self.ui.status(_('%d files to transfer, %s of data\n') %
1878 (total_files, util.bytecount(total_bytes)))
1877 (total_files, util.bytecount(total_bytes)))
1879 start = time.time()
1878 start = time.time()
1880 for i in xrange(total_files):
1879 for i in xrange(total_files):
1881 # XXX doesn't support '\n' or '\r' in filenames
1880 # XXX doesn't support '\n' or '\r' in filenames
1882 l = fp.readline()
1881 l = fp.readline()
1883 try:
1882 try:
1884 name, size = l.split('\0', 1)
1883 name, size = l.split('\0', 1)
1885 size = int(size)
1884 size = int(size)
1886 except (ValueError, TypeError):
1885 except (ValueError, TypeError):
1887 raise util.UnexpectedOutput(
1886 raise util.UnexpectedOutput(
1888 _('Unexpected response from remote server:'), l)
1887 _('Unexpected response from remote server:'), l)
1889 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1888 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1890 ofp = self.sopener(name, 'w')
1889 ofp = self.sopener(name, 'w')
1891 for chunk in util.filechunkiter(fp, limit=size):
1890 for chunk in util.filechunkiter(fp, limit=size):
1892 ofp.write(chunk)
1891 ofp.write(chunk)
1893 ofp.close()
1892 ofp.close()
1894 elapsed = time.time() - start
1893 elapsed = time.time() - start
1895 if elapsed <= 0:
1894 if elapsed <= 0:
1896 elapsed = 0.001
1895 elapsed = 0.001
1897 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1896 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1898 (util.bytecount(total_bytes), elapsed,
1897 (util.bytecount(total_bytes), elapsed,
1899 util.bytecount(total_bytes / elapsed)))
1898 util.bytecount(total_bytes / elapsed)))
1900 self.reload()
1899 self.reload()
1901 return len(self.heads()) + 1
1900 return len(self.heads()) + 1
1902
1901
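# Illustrative sketch, not part of this changeset: the wire format that
# stream_in() above consumes, produced here from the sending side.  This
# is a simplified stand-in for the real server code; `entries` is assumed
# to be an iterable of (name, data) pairs.
def stream_out_sketch(entries):
    entries = list(entries)
    total_bytes = sum(len(data) for name, data in entries)
    yield '0\n'                                    # response code 0: allowed
    yield '%d %d\n' % (len(entries), total_bytes)  # file count and total size
    for name, data in entries:
        yield '%s\0%d\n' % (name, len(data))       # per-file header: name NUL size
        yield data                                 # exactly `size` bytes of content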
1903 def clone(self, remote, heads=[], stream=False):
1902 def clone(self, remote, heads=[], stream=False):
1904 '''clone remote repository.
1903 '''clone remote repository.
1905
1904
1906 keyword arguments:
1905 keyword arguments:
1907 heads: list of revs to clone (forces use of pull)
1906 heads: list of revs to clone (forces use of pull)
1908 stream: use streaming clone if possible'''
1907 stream: use streaming clone if possible'''
1909
1908
1910 # now, all clients that can request uncompressed clones can
1909 # now, all clients that can request uncompressed clones can
1911 # read repo formats supported by all servers that can serve
1910 # read repo formats supported by all servers that can serve
1912 # them.
1911 # them.
1913
1912
1914 # if revlog format changes, client will have to check version
1913 # if revlog format changes, client will have to check version
1915 # and format flags on "stream" capability, and use
1914 # and format flags on "stream" capability, and use
1916 # uncompressed only if compatible.
1915 # uncompressed only if compatible.
1917
1916
1918 if stream and not heads and remote.capable('stream'):
1917 if stream and not heads and remote.capable('stream'):
1919 return self.stream_in(remote)
1918 return self.stream_in(remote)
1920 return self.pull(remote, heads)
1919 return self.pull(remote, heads)
1921
1920
1922 # used to avoid circular references so destructors work
1921 # used to avoid circular references so destructors work
1923 def aftertrans(files):
1922 def aftertrans(files):
1924 renamefiles = [tuple(t) for t in files]
1923 renamefiles = [tuple(t) for t in files]
1925 def a():
1924 def a():
1926 for src, dest in renamefiles:
1925 for src, dest in renamefiles:
1927 util.rename(src, dest)
1926 util.rename(src, dest)
1928 return a
1927 return a
1929
1928
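# Illustrative sketch, not part of this changeset: aftertrans() hands back a
# plain closure, so the transaction machinery can run it after closing
# without keeping a reference into the repository object.  The file names
# below are invented for the example.
post = aftertrans([('journal.example', 'undo.example')])
# ... transaction commits ...
post()   # each (src, dest) pair is renamed via util.rename()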
1930 def instance(ui, path, create):
1929 def instance(ui, path, create):
1931 return localrepository(ui, util.drop_scheme('file', path), create)
1930 return localrepository(ui, util.drop_scheme('file', path), create)
1932
1931
1933 def islocal(path):
1932 def islocal(path):
1934 return True
1933 return True
@@ -1,1472 +1,1475 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile
17 import os, threading, time, calendar, ConfigParser, locale, glob
17 import os, threading, time, calendar, ConfigParser, locale, glob
18
18
19 try:
19 try:
20 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
20 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
21 or "ascii"
21 or "ascii"
22 except locale.Error:
22 except locale.Error:
23 _encoding = 'ascii'
23 _encoding = 'ascii'
24 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
24 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
25 _fallbackencoding = 'ISO-8859-1'
25 _fallbackencoding = 'ISO-8859-1'
26
26
27 def tolocal(s):
27 def tolocal(s):
28 """
28 """
29 Convert a string from internal UTF-8 to local encoding
29 Convert a string from internal UTF-8 to local encoding
30
30
31 All internal strings should be UTF-8 but some repos before the
31 All internal strings should be UTF-8 but some repos before the
32 implementation of locale support may contain latin1 or possibly
32 implementation of locale support may contain latin1 or possibly
33 other character sets. We attempt to decode everything strictly
33 other character sets. We attempt to decode everything strictly
34 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
34 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
35 replace unknown characters.
35 replace unknown characters.
36 """
36 """
37 for e in ('UTF-8', _fallbackencoding):
37 for e in ('UTF-8', _fallbackencoding):
38 try:
38 try:
39 u = s.decode(e) # attempt strict decoding
39 u = s.decode(e) # attempt strict decoding
40 return u.encode(_encoding, "replace")
40 return u.encode(_encoding, "replace")
41 except LookupError, k:
41 except LookupError, k:
42 raise Abort(_("%s, please check your locale settings") % k)
42 raise Abort(_("%s, please check your locale settings") % k)
43 except UnicodeDecodeError:
43 except UnicodeDecodeError:
44 pass
44 pass
45 u = s.decode("utf-8", "replace") # last ditch
45 u = s.decode("utf-8", "replace") # last ditch
46 return u.encode(_encoding, "replace")
46 return u.encode(_encoding, "replace")
47
47
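# Illustrative sketch, not part of this changeset: the decode order that
# tolocal() tries, restated as a standalone function so it can be run in
# isolation.  LookupError handling is omitted and the local encoding is
# passed explicitly instead of coming from HGENCODING.
def tolocal_sketch(s, encoding='ascii', fallback='ISO-8859-1'):
    for e in ('UTF-8', fallback):
        try:
            return s.decode(e).encode(encoding, 'replace')  # strict decode first
        except UnicodeDecodeError:
            pass
    return s.decode('utf-8', 'replace').encode(encoding, 'replace')  # last ditch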
48 def fromlocal(s):
48 def fromlocal(s):
49 """
49 """
50 Convert a string from the local character encoding to UTF-8
50 Convert a string from the local character encoding to UTF-8
51
51
52 We attempt to decode strings using the encoding mode set by
52 We attempt to decode strings using the encoding mode set by
53 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
53 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
54 characters will cause an error message. Other modes include
54 characters will cause an error message. Other modes include
55 'replace', which replaces unknown characters with a special
55 'replace', which replaces unknown characters with a special
56 Unicode character, and 'ignore', which drops the character.
56 Unicode character, and 'ignore', which drops the character.
57 """
57 """
58 try:
58 try:
59 return s.decode(_encoding, _encodingmode).encode("utf-8")
59 return s.decode(_encoding, _encodingmode).encode("utf-8")
60 except UnicodeDecodeError, inst:
60 except UnicodeDecodeError, inst:
61 sub = s[max(0, inst.start-10):inst.start+10]
61 sub = s[max(0, inst.start-10):inst.start+10]
62 raise Abort("decoding near '%s': %s!" % (sub, inst))
62 raise Abort("decoding near '%s': %s!" % (sub, inst))
63 except LookupError, k:
63 except LookupError, k:
64 raise Abort(_("%s, please check your locale settings") % k)
64 raise Abort(_("%s, please check your locale settings") % k)
65
65
66 def locallen(s):
66 def locallen(s):
67 """Find the length in characters of a local string"""
67 """Find the length in characters of a local string"""
68 return len(s.decode(_encoding, "replace"))
68 return len(s.decode(_encoding, "replace"))
69
69
70 def localsub(s, a, b=None):
70 def localsub(s, a, b=None):
71 try:
71 try:
72 u = s.decode(_encoding, _encodingmode)
72 u = s.decode(_encoding, _encodingmode)
73 if b is not None:
73 if b is not None:
74 u = u[a:b]
74 u = u[a:b]
75 else:
75 else:
76 u = u[:a]
76 u = u[:a]
77 return u.encode(_encoding, _encodingmode)
77 return u.encode(_encoding, _encodingmode)
78 except UnicodeDecodeError, inst:
78 except UnicodeDecodeError, inst:
79 sub = s[max(0, inst.start-10):inst.start+10]
79 sub = s[max(0, inst.start-10):inst.start+10]
80 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
80 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
81
81
82 # used by parsedate
82 # used by parsedate
83 defaultdateformats = (
83 defaultdateformats = (
84 '%Y-%m-%d %H:%M:%S',
84 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %I:%M:%S%p',
85 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %H:%M',
86 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %I:%M%p',
87 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d',
88 '%Y-%m-%d',
89 '%m-%d',
89 '%m-%d',
90 '%m/%d',
90 '%m/%d',
91 '%m/%d/%y',
91 '%m/%d/%y',
92 '%m/%d/%Y',
92 '%m/%d/%Y',
93 '%a %b %d %H:%M:%S %Y',
93 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %I:%M:%S%p %Y',
94 '%a %b %d %I:%M:%S%p %Y',
95 '%b %d %H:%M:%S %Y',
95 '%b %d %H:%M:%S %Y',
96 '%b %d %I:%M:%S%p %Y',
96 '%b %d %I:%M:%S%p %Y',
97 '%b %d %H:%M:%S',
97 '%b %d %H:%M:%S',
98 '%b %d %I:%M:%S%p',
98 '%b %d %I:%M:%S%p',
99 '%b %d %H:%M',
99 '%b %d %H:%M',
100 '%b %d %I:%M%p',
100 '%b %d %I:%M%p',
101 '%b %d %Y',
101 '%b %d %Y',
102 '%b %d',
102 '%b %d',
103 '%H:%M:%S',
103 '%H:%M:%S',
104 '%I:%M:%S%p',
104 '%I:%M:%S%p',
105 '%H:%M',
105 '%H:%M',
106 '%I:%M%p',
106 '%I:%M%p',
107 )
107 )
108
108
109 extendeddateformats = defaultdateformats + (
109 extendeddateformats = defaultdateformats + (
110 "%Y",
110 "%Y",
111 "%Y-%m",
111 "%Y-%m",
112 "%b",
112 "%b",
113 "%b %Y",
113 "%b %Y",
114 )
114 )
115
115
116 class SignalInterrupt(Exception):
116 class SignalInterrupt(Exception):
117 """Exception raised on SIGTERM and SIGHUP."""
117 """Exception raised on SIGTERM and SIGHUP."""
118
118
119 # differences from SafeConfigParser:
119 # differences from SafeConfigParser:
120 # - case-sensitive keys
120 # - case-sensitive keys
121 # - allows values that are not strings (this means that you may not
121 # - allows values that are not strings (this means that you may not
122 # be able to save the configuration to a file)
122 # be able to save the configuration to a file)
123 class configparser(ConfigParser.SafeConfigParser):
123 class configparser(ConfigParser.SafeConfigParser):
124 def optionxform(self, optionstr):
124 def optionxform(self, optionstr):
125 return optionstr
125 return optionstr
126
126
127 def set(self, section, option, value):
127 def set(self, section, option, value):
128 return ConfigParser.ConfigParser.set(self, section, option, value)
128 return ConfigParser.ConfigParser.set(self, section, option, value)
129
129
130 def _interpolate(self, section, option, rawval, vars):
130 def _interpolate(self, section, option, rawval, vars):
131 if not isinstance(rawval, basestring):
131 if not isinstance(rawval, basestring):
132 return rawval
132 return rawval
133 return ConfigParser.SafeConfigParser._interpolate(self, section,
133 return ConfigParser.SafeConfigParser._interpolate(self, section,
134 option, rawval, vars)
134 option, rawval, vars)
135
135
136 def cachefunc(func):
136 def cachefunc(func):
137 '''cache the result of function calls'''
137 '''cache the result of function calls'''
138 # XXX doesn't handle keyword args
138 # XXX doesn't handle keyword args
139 cache = {}
139 cache = {}
140 if func.func_code.co_argcount == 1:
140 if func.func_code.co_argcount == 1:
141 # we gain a small amount of time because
141 # we gain a small amount of time because
142 # we don't need to pack/unpack the list
142 # we don't need to pack/unpack the list
143 def f(arg):
143 def f(arg):
144 if arg not in cache:
144 if arg not in cache:
145 cache[arg] = func(arg)
145 cache[arg] = func(arg)
146 return cache[arg]
146 return cache[arg]
147 else:
147 else:
148 def f(*args):
148 def f(*args):
149 if args not in cache:
149 if args not in cache:
150 cache[args] = func(*args)
150 cache[args] = func(*args)
151 return cache[args]
151 return cache[args]
152
152
153 return f
153 return f
154
154
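# Illustrative sketch, not part of this changeset: memoizing a one-argument
# function with cachefunc().  The wrapped function runs once per distinct
# argument; keyword arguments are not handled, as the XXX note above says.
calls = []
def slow_double(x):
    calls.append(x)
    return x * 2
fast_double = cachefunc(slow_double)
assert fast_double(21) == 42
assert fast_double(21) == 42
assert calls == [21]            # the underlying function ran only once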
155 def pipefilter(s, cmd):
155 def pipefilter(s, cmd):
156 '''filter string S through command CMD, returning its output'''
156 '''filter string S through command CMD, returning its output'''
157 (pout, pin) = popen2.popen2(cmd, -1, 'b')
157 (pout, pin) = popen2.popen2(cmd, -1, 'b')
158 def writer():
158 def writer():
159 try:
159 try:
160 pin.write(s)
160 pin.write(s)
161 pin.close()
161 pin.close()
162 except IOError, inst:
162 except IOError, inst:
163 if inst.errno != errno.EPIPE:
163 if inst.errno != errno.EPIPE:
164 raise
164 raise
165
165
166 # we should use select instead on UNIX, but this will work on most
166 # we should use select instead on UNIX, but this will work on most
167 # systems, including Windows
167 # systems, including Windows
168 w = threading.Thread(target=writer)
168 w = threading.Thread(target=writer)
169 w.start()
169 w.start()
170 f = pout.read()
170 f = pout.read()
171 pout.close()
171 pout.close()
172 w.join()
172 w.join()
173 return f
173 return f
174
174
175 def tempfilter(s, cmd):
175 def tempfilter(s, cmd):
176 '''filter string S through a pair of temporary files with CMD.
176 '''filter string S through a pair of temporary files with CMD.
177 CMD is used as a template to create the real command to be run,
177 CMD is used as a template to create the real command to be run,
178 with the strings INFILE and OUTFILE replaced by the real names of
178 with the strings INFILE and OUTFILE replaced by the real names of
179 the temporary files generated.'''
179 the temporary files generated.'''
180 inname, outname = None, None
180 inname, outname = None, None
181 try:
181 try:
182 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
182 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
183 fp = os.fdopen(infd, 'wb')
183 fp = os.fdopen(infd, 'wb')
184 fp.write(s)
184 fp.write(s)
185 fp.close()
185 fp.close()
186 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
186 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
187 os.close(outfd)
187 os.close(outfd)
188 cmd = cmd.replace('INFILE', inname)
188 cmd = cmd.replace('INFILE', inname)
189 cmd = cmd.replace('OUTFILE', outname)
189 cmd = cmd.replace('OUTFILE', outname)
190 code = os.system(cmd)
190 code = os.system(cmd)
191 if code: raise Abort(_("command '%s' failed: %s") %
191 if code: raise Abort(_("command '%s' failed: %s") %
192 (cmd, explain_exit(code)))
192 (cmd, explain_exit(code)))
193 return open(outname, 'rb').read()
193 return open(outname, 'rb').read()
194 finally:
194 finally:
195 try:
195 try:
196 if inname: os.unlink(inname)
196 if inname: os.unlink(inname)
197 except: pass
197 except: pass
198 try:
198 try:
199 if outname: os.unlink(outname)
199 if outname: os.unlink(outname)
200 except: pass
200 except: pass
201
201
202 filtertable = {
202 filtertable = {
203 'tempfile:': tempfilter,
203 'tempfile:': tempfilter,
204 'pipe:': pipefilter,
204 'pipe:': pipefilter,
205 }
205 }
206
206
207 def filter(s, cmd):
207 def filter(s, cmd):
208 "filter a string through a command that transforms its input to its output"
208 "filter a string through a command that transforms its input to its output"
209 for name, fn in filtertable.iteritems():
209 for name, fn in filtertable.iteritems():
210 if cmd.startswith(name):
210 if cmd.startswith(name):
211 return fn(s, cmd[len(name):].lstrip())
211 return fn(s, cmd[len(name):].lstrip())
212 return pipefilter(s, cmd)
212 return pipefilter(s, cmd)
213
213
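# Illustrative sketch, not part of this changeset: how filter() dispatches on
# the command prefix.  The shell commands are examples only and assume a
# POSIX system with tr and sort available.
upper = filter("abc\n", "pipe: tr a-z A-Z")                        # via pipefilter()
sorted_text = filter("b\na\n", "tempfile: sort INFILE > OUTFILE")  # via tempfilter()
# upper == "ABC\n" and sorted_text == "a\nb\n" on a typical POSIX system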
214 def find_in_path(name, path, default=None):
214 def find_in_path(name, path, default=None):
215 '''find name in search path. path can be string (will be split
215 '''find name in search path. path can be string (will be split
216 with os.pathsep), or iterable thing that returns strings. if name
216 with os.pathsep), or iterable thing that returns strings. if name
217 found, return path to name. else return default.'''
217 found, return path to name. else return default.'''
218 if isinstance(path, str):
218 if isinstance(path, str):
219 path = path.split(os.pathsep)
219 path = path.split(os.pathsep)
220 for p in path:
220 for p in path:
221 p_name = os.path.join(p, name)
221 p_name = os.path.join(p, name)
222 if os.path.exists(p_name):
222 if os.path.exists(p_name):
223 return p_name
223 return p_name
224 return default
224 return default
225
225
226 def binary(s):
226 def binary(s):
227 """return true if a string is binary data using diff's heuristic"""
227 """return true if a string is binary data using diff's heuristic"""
228 if s and '\0' in s[:4096]:
228 if s and '\0' in s[:4096]:
229 return True
229 return True
230 return False
230 return False
231
231
232 def unique(g):
232 def unique(g):
233 """return the uniq elements of iterable g"""
233 """return the uniq elements of iterable g"""
234 seen = {}
234 seen = {}
235 l = []
235 l = []
236 for f in g:
236 for f in g:
237 if f not in seen:
237 if f not in seen:
238 seen[f] = 1
238 seen[f] = 1
239 l.append(f)
239 l.append(f)
240 return l
240 return l
241
241
242 class Abort(Exception):
242 class Abort(Exception):
243 """Raised if a command needs to print an error and exit."""
243 """Raised if a command needs to print an error and exit."""
244
244
245 class UnexpectedOutput(Abort):
245 class UnexpectedOutput(Abort):
246 """Raised to print an error with part of output and exit."""
246 """Raised to print an error with part of output and exit."""
247
247
248 def always(fn): return True
248 def always(fn): return True
249 def never(fn): return False
249 def never(fn): return False
250
250
251 def expand_glob(pats):
251 def expand_glob(pats):
252 '''On Windows, expand the implicit globs in a list of patterns'''
252 '''On Windows, expand the implicit globs in a list of patterns'''
253 if os.name != 'nt':
253 if os.name != 'nt':
254 return list(pats)
254 return list(pats)
255 ret = []
255 ret = []
256 for p in pats:
256 for p in pats:
257 kind, name = patkind(p, None)
257 kind, name = patkind(p, None)
258 if kind is None:
258 if kind is None:
259 globbed = glob.glob(name)
259 globbed = glob.glob(name)
260 if globbed:
260 if globbed:
261 ret.extend(globbed)
261 ret.extend(globbed)
262 continue
262 continue
263 # if we couldn't expand the glob, just keep it around
263 # if we couldn't expand the glob, just keep it around
264 ret.append(p)
264 ret.append(p)
265 return ret
265 return ret
266
266
267 def patkind(name, dflt_pat='glob'):
267 def patkind(name, dflt_pat='glob'):
268 """Split a string into an optional pattern kind prefix and the
268 """Split a string into an optional pattern kind prefix and the
269 actual pattern."""
269 actual pattern."""
270 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
270 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
271 if name.startswith(prefix + ':'): return name.split(':', 1)
271 if name.startswith(prefix + ':'): return name.split(':', 1)
272 return dflt_pat, name
272 return dflt_pat, name
273
273
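# Illustrative sketch, not part of this changeset: how patkind() splits
# pattern strings.  Note the small asymmetry: a recognised prefix yields a
# list (from str.split), while the default branch yields a tuple.
assert patkind('re:.*\\.py$') == ['re', '.*\\.py$']
assert patkind('path:src/module') == ['path', 'src/module']
assert patkind('*.c') == ('glob', '*.c')        # no prefix: default kind
assert patkind('*.c', None) == (None, '*.c')    # as expand_glob() calls it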
274 def globre(pat, head='^', tail='$'):
274 def globre(pat, head='^', tail='$'):
275 "convert a glob pattern into a regexp"
275 "convert a glob pattern into a regexp"
276 i, n = 0, len(pat)
276 i, n = 0, len(pat)
277 res = ''
277 res = ''
278 group = False
278 group = False
279 def peek(): return i < n and pat[i]
279 def peek(): return i < n and pat[i]
280 while i < n:
280 while i < n:
281 c = pat[i]
281 c = pat[i]
282 i = i+1
282 i = i+1
283 if c == '*':
283 if c == '*':
284 if peek() == '*':
284 if peek() == '*':
285 i += 1
285 i += 1
286 res += '.*'
286 res += '.*'
287 else:
287 else:
288 res += '[^/]*'
288 res += '[^/]*'
289 elif c == '?':
289 elif c == '?':
290 res += '.'
290 res += '.'
291 elif c == '[':
291 elif c == '[':
292 j = i
292 j = i
293 if j < n and pat[j] in '!]':
293 if j < n and pat[j] in '!]':
294 j += 1
294 j += 1
295 while j < n and pat[j] != ']':
295 while j < n and pat[j] != ']':
296 j += 1
296 j += 1
297 if j >= n:
297 if j >= n:
298 res += '\\['
298 res += '\\['
299 else:
299 else:
300 stuff = pat[i:j].replace('\\','\\\\')
300 stuff = pat[i:j].replace('\\','\\\\')
301 i = j + 1
301 i = j + 1
302 if stuff[0] == '!':
302 if stuff[0] == '!':
303 stuff = '^' + stuff[1:]
303 stuff = '^' + stuff[1:]
304 elif stuff[0] == '^':
304 elif stuff[0] == '^':
305 stuff = '\\' + stuff
305 stuff = '\\' + stuff
306 res = '%s[%s]' % (res, stuff)
306 res = '%s[%s]' % (res, stuff)
307 elif c == '{':
307 elif c == '{':
308 group = True
308 group = True
309 res += '(?:'
309 res += '(?:'
310 elif c == '}' and group:
310 elif c == '}' and group:
311 res += ')'
311 res += ')'
312 group = False
312 group = False
313 elif c == ',' and group:
313 elif c == ',' and group:
314 res += '|'
314 res += '|'
315 elif c == '\\':
315 elif c == '\\':
316 p = peek()
316 p = peek()
317 if p:
317 if p:
318 i += 1
318 i += 1
319 res += re.escape(p)
319 res += re.escape(p)
320 else:
320 else:
321 res += re.escape(c)
321 res += re.escape(c)
322 else:
322 else:
323 res += re.escape(c)
323 res += re.escape(c)
324 return head + res + tail
324 return head + res + tail
325
325
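# Illustrative sketch, not part of this changeset: a few translations
# produced by globre() above.
import re
assert globre('*.py') == '^[^/]*\.py$'                      # '*' stops at '/'
assert globre('f?o') == '^f.o$'                             # '?' matches one char
assert re.match(globre('**/*.py', '', '$'), 'a/b/c.py')     # '**' crosses directories
assert not re.match(globre('*.py', '', '$'), 'a/b.py')
assert re.match(globre('{a,b}.txt'), 'b.txt')               # braces become (?:a|b)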
326 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
326 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
327
327
328 def pathto(root, n1, n2):
328 def pathto(root, n1, n2):
329 '''return the relative path from one place to another.
329 '''return the relative path from one place to another.
330 root should use os.sep to separate directories
330 root should use os.sep to separate directories
331 n1 should use os.sep to separate directories
331 n1 should use os.sep to separate directories
332 n2 should use "/" to separate directories
332 n2 should use "/" to separate directories
333 returns an os.sep-separated path.
333 returns an os.sep-separated path.
334
334
335 If n1 is a relative path, it's assumed it's
335 If n1 is a relative path, it's assumed it's
336 relative to root.
336 relative to root.
337 n2 should always be relative to root.
337 n2 should always be relative to root.
338 '''
338 '''
339 if not n1: return localpath(n2)
339 if not n1: return localpath(n2)
340 if os.path.isabs(n1):
340 if os.path.isabs(n1):
341 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
341 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
342 return os.path.join(root, localpath(n2))
342 return os.path.join(root, localpath(n2))
343 n2 = '/'.join((pconvert(root), n2))
343 n2 = '/'.join((pconvert(root), n2))
344 a, b = n1.split(os.sep), n2.split('/')
344 a, b = n1.split(os.sep), n2.split('/')
345 a.reverse()
345 a.reverse()
346 b.reverse()
346 b.reverse()
347 while a and b and a[-1] == b[-1]:
347 while a and b and a[-1] == b[-1]:
348 a.pop()
348 a.pop()
349 b.pop()
349 b.pop()
350 b.reverse()
350 b.reverse()
351 return os.sep.join((['..'] * len(a)) + b)
351 return os.sep.join((['..'] * len(a)) + b)
352
352
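# Illustrative sketch, not part of this changeset: pathto() on a POSIX
# system where os.sep is '/'.  n1 is the directory being stood in
# (relative to root) and n2 is the root-relative target.
assert pathto('/repo', 'a/b', 'c/d') == '../../c/d'
assert pathto('/repo', '', 'c/d') == 'c/d'      # empty n1: just localpath(n2)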
353 def canonpath(root, cwd, myname):
353 def canonpath(root, cwd, myname):
354 """return the canonical path of myname, given cwd and root"""
354 """return the canonical path of myname, given cwd and root"""
355 if root == os.sep:
355 if root == os.sep:
356 rootsep = os.sep
356 rootsep = os.sep
357 elif root.endswith(os.sep):
357 elif root.endswith(os.sep):
358 rootsep = root
358 rootsep = root
359 else:
359 else:
360 rootsep = root + os.sep
360 rootsep = root + os.sep
361 name = myname
361 name = myname
362 if not os.path.isabs(name):
362 if not os.path.isabs(name):
363 name = os.path.join(root, cwd, name)
363 name = os.path.join(root, cwd, name)
364 name = os.path.normpath(name)
364 name = os.path.normpath(name)
365 if name != rootsep and name.startswith(rootsep):
365 if name != rootsep and name.startswith(rootsep):
366 name = name[len(rootsep):]
366 name = name[len(rootsep):]
367 audit_path(name)
367 audit_path(name)
368 return pconvert(name)
368 return pconvert(name)
369 elif name == root:
369 elif name == root:
370 return ''
370 return ''
371 else:
371 else:
372 # Determine whether `name' is in the hierarchy at or beneath `root',
372 # Determine whether `name' is in the hierarchy at or beneath `root',
373 # by iterating name=dirname(name) until that causes no change (can't
373 # by iterating name=dirname(name) until that causes no change (can't
374 # check name == '/', because that doesn't work on windows). For each
374 # check name == '/', because that doesn't work on windows). For each
375 # `name', compare dev/inode numbers. If they match, the list `rel'
375 # `name', compare dev/inode numbers. If they match, the list `rel'
376 # holds the reversed list of components making up the relative file
376 # holds the reversed list of components making up the relative file
377 # name we want.
377 # name we want.
378 root_st = os.stat(root)
378 root_st = os.stat(root)
379 rel = []
379 rel = []
380 while True:
380 while True:
381 try:
381 try:
382 name_st = os.stat(name)
382 name_st = os.stat(name)
383 except OSError:
383 except OSError:
384 break
384 break
385 if samestat(name_st, root_st):
385 if samestat(name_st, root_st):
386 if not rel:
386 if not rel:
387 # name was actually the same as root (maybe a symlink)
387 # name was actually the same as root (maybe a symlink)
388 return ''
388 return ''
389 rel.reverse()
389 rel.reverse()
390 name = os.path.join(*rel)
390 name = os.path.join(*rel)
391 audit_path(name)
391 audit_path(name)
392 return pconvert(name)
392 return pconvert(name)
393 dirname, basename = os.path.split(name)
393 dirname, basename = os.path.split(name)
394 rel.append(basename)
394 rel.append(basename)
395 if dirname == name:
395 if dirname == name:
396 break
396 break
397 name = dirname
397 name = dirname
398
398
399 raise Abort('%s not under root' % myname)
399 raise Abort('%s not under root' % myname)
400
400
401 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
401 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
402 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
402 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
403
403
404 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
404 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
405 globbed=False, default=None):
405 globbed=False, default=None):
406 default = default or 'relpath'
406 default = default or 'relpath'
407 if default == 'relpath' and not globbed:
407 if default == 'relpath' and not globbed:
408 names = expand_glob(names)
408 names = expand_glob(names)
409 return _matcher(canonroot, cwd, names, inc, exc, default, src)
409 return _matcher(canonroot, cwd, names, inc, exc, default, src)
410
410
411 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
411 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
412 """build a function to match a set of file patterns
412 """build a function to match a set of file patterns
413
413
414 arguments:
414 arguments:
415 canonroot - the canonical root of the tree you're matching against
415 canonroot - the canonical root of the tree you're matching against
416 cwd - the current working directory, if relevant
416 cwd - the current working directory, if relevant
417 names - patterns to find
417 names - patterns to find
418 inc - patterns to include
418 inc - patterns to include
419 exc - patterns to exclude
419 exc - patterns to exclude
420 dflt_pat - if a pattern in names has no explicit type, assume this one
420 dflt_pat - if a pattern in names has no explicit type, assume this one
421 src - where these patterns came from (e.g. .hgignore)
421 src - where these patterns came from (e.g. .hgignore)
422
422
423 a pattern is one of:
423 a pattern is one of:
424 'glob:<glob>' - a glob relative to cwd
424 'glob:<glob>' - a glob relative to cwd
425 're:<regexp>' - a regular expression
425 're:<regexp>' - a regular expression
426 'path:<path>' - a path relative to canonroot
426 'path:<path>' - a path relative to canonroot
427 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
427 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
428 'relpath:<path>' - a path relative to cwd
428 'relpath:<path>' - a path relative to cwd
429 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
429 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
430 '<something>' - one of the cases above, selected by the dflt_pat argument
430 '<something>' - one of the cases above, selected by the dflt_pat argument
431
431
432 returns:
432 returns:
433 a 3-tuple containing
433 a 3-tuple containing
434 - list of roots (places where one should start a recursive walk of the fs);
434 - list of roots (places where one should start a recursive walk of the fs);
435 this often matches the explicit non-pattern names passed in, but also
435 this often matches the explicit non-pattern names passed in, but also
436 includes the initial part of glob: patterns that has no glob characters
436 includes the initial part of glob: patterns that has no glob characters
437 - a bool match(filename) function
437 - a bool match(filename) function
438 - a bool indicating if any patterns were passed in
438 - a bool indicating if any patterns were passed in
439 """
439 """
440
440
441 # a common case: no patterns at all
441 # a common case: no patterns at all
442 if not names and not inc and not exc:
442 if not names and not inc and not exc:
443 return [], always, False
443 return [], always, False
444
444
445 def contains_glob(name):
445 def contains_glob(name):
446 for c in name:
446 for c in name:
447 if c in _globchars: return True
447 if c in _globchars: return True
448 return False
448 return False
449
449
450 def regex(kind, name):
450 def regex(kind, name):
451 '''convert a pattern into a regular expression'''
451 '''convert a pattern into a regular expression'''
452 if not name:
452 if not name:
453 return ''
453 return ''
454 if kind == 're':
454 if kind == 're':
455 return name
455 return name
456 elif kind == 'path':
456 elif kind == 'path':
457 return '^' + re.escape(name) + '(?:/|$)'
457 return '^' + re.escape(name) + '(?:/|$)'
458 elif kind == 'relglob':
458 elif kind == 'relglob':
459 return globre(name, '(?:|.*/)', '(?:/|$)')
459 return globre(name, '(?:|.*/)', '(?:/|$)')
460 elif kind == 'relpath':
460 elif kind == 'relpath':
461 return re.escape(name) + '(?:/|$)'
461 return re.escape(name) + '(?:/|$)'
462 elif kind == 'relre':
462 elif kind == 'relre':
463 if name.startswith('^'):
463 if name.startswith('^'):
464 return name
464 return name
465 return '.*' + name
465 return '.*' + name
466 return globre(name, '', '(?:/|$)')
466 return globre(name, '', '(?:/|$)')
467
467
468 def matchfn(pats):
468 def matchfn(pats):
469 """build a matching function from a set of patterns"""
469 """build a matching function from a set of patterns"""
470 if not pats:
470 if not pats:
471 return
471 return
472 matches = []
472 matches = []
473 for k, p in pats:
473 for k, p in pats:
474 try:
474 try:
475 pat = '(?:%s)' % regex(k, p)
475 pat = '(?:%s)' % regex(k, p)
476 matches.append(re.compile(pat).match)
476 matches.append(re.compile(pat).match)
477 except re.error:
477 except re.error:
478 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
478 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
479 else: raise Abort("invalid pattern (%s): %s" % (k, p))
479 else: raise Abort("invalid pattern (%s): %s" % (k, p))
480
480
481 def buildfn(text):
481 def buildfn(text):
482 for m in matches:
482 for m in matches:
483 r = m(text)
483 r = m(text)
484 if r:
484 if r:
485 return r
485 return r
486
486
487 return buildfn
487 return buildfn
488
488
489 def globprefix(pat):
489 def globprefix(pat):
490 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
490 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
491 root = []
491 root = []
492 for p in pat.split('/'):
492 for p in pat.split('/'):
493 if contains_glob(p): break
493 if contains_glob(p): break
494 root.append(p)
494 root.append(p)
495 return '/'.join(root) or '.'
495 return '/'.join(root) or '.'
496
496
497 def normalizepats(names, default):
497 def normalizepats(names, default):
498 pats = []
498 pats = []
499 roots = []
499 roots = []
500 anypats = False
500 anypats = False
501 for kind, name in [patkind(p, default) for p in names]:
501 for kind, name in [patkind(p, default) for p in names]:
502 if kind in ('glob', 'relpath'):
502 if kind in ('glob', 'relpath'):
503 name = canonpath(canonroot, cwd, name)
503 name = canonpath(canonroot, cwd, name)
504 elif kind in ('relglob', 'path'):
504 elif kind in ('relglob', 'path'):
505 name = normpath(name)
505 name = normpath(name)
506
506
507 pats.append((kind, name))
507 pats.append((kind, name))
508
508
509 if kind in ('glob', 're', 'relglob', 'relre'):
509 if kind in ('glob', 're', 'relglob', 'relre'):
510 anypats = True
510 anypats = True
511
511
512 if kind == 'glob':
512 if kind == 'glob':
513 root = globprefix(name)
513 root = globprefix(name)
514 roots.append(root)
514 roots.append(root)
515 elif kind in ('relpath', 'path'):
515 elif kind in ('relpath', 'path'):
516 roots.append(name or '.')
516 roots.append(name or '.')
517 elif kind == 'relglob':
517 elif kind == 'relglob':
518 roots.append('.')
518 roots.append('.')
519 return roots, pats, anypats
519 return roots, pats, anypats
520
520
521 roots, pats, anypats = normalizepats(names, dflt_pat)
521 roots, pats, anypats = normalizepats(names, dflt_pat)
522
522
523 patmatch = matchfn(pats) or always
523 patmatch = matchfn(pats) or always
524 incmatch = always
524 incmatch = always
525 if inc:
525 if inc:
526 dummy, inckinds, dummy = normalizepats(inc, 'glob')
526 dummy, inckinds, dummy = normalizepats(inc, 'glob')
527 incmatch = matchfn(inckinds)
527 incmatch = matchfn(inckinds)
528 excmatch = lambda fn: False
528 excmatch = lambda fn: False
529 if exc:
529 if exc:
530 dummy, exckinds, dummy = normalizepats(exc, 'glob')
530 dummy, exckinds, dummy = normalizepats(exc, 'glob')
531 excmatch = matchfn(exckinds)
531 excmatch = matchfn(exckinds)
532
532
533 if not names and inc and not exc:
533 if not names and inc and not exc:
534 # common case: hgignore patterns
534 # common case: hgignore patterns
535 match = incmatch
535 match = incmatch
536 else:
536 else:
537 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
537 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
538
538
539 return (roots, match, (inc or exc or anypats) and True)
539 return (roots, match, (inc or exc or anypats) and True)
540
540
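# Illustrative sketch, not part of this changeset: driving the matcher built
# by _matcher() above.  Paths are POSIX-style and the repository root is
# invented; 'src' comes back as a walk root because it is the glob-free
# prefix of the pattern.
roots, match, anypats = matcher('/repo', names=['glob:src/*.c'])
assert roots == ['src']
assert match('src/main.c')
assert not match('doc/main.c')
assert anypats                      # a glob pattern was supplied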
541 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
541 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
542 '''enhanced shell command execution.
542 '''enhanced shell command execution.
543 run with environment maybe modified, maybe in different dir.
543 run with environment maybe modified, maybe in different dir.
544
544
545 if command fails and onerr is None, return status. if ui object,
545 if command fails and onerr is None, return status. if ui object,
546 print error message and return status, else raise onerr object as
546 print error message and return status, else raise onerr object as
547 exception.'''
547 exception.'''
548 def py2shell(val):
548 def py2shell(val):
549 'convert python object into string that is useful to shell'
549 'convert python object into string that is useful to shell'
550 if val in (None, False):
550 if val in (None, False):
551 return '0'
551 return '0'
552 if val == True:
552 if val == True:
553 return '1'
553 return '1'
554 return str(val)
554 return str(val)
555 oldenv = {}
555 oldenv = {}
556 for k in environ:
556 for k in environ:
557 oldenv[k] = os.environ.get(k)
557 oldenv[k] = os.environ.get(k)
558 if cwd is not None:
558 if cwd is not None:
559 oldcwd = os.getcwd()
559 oldcwd = os.getcwd()
560 origcmd = cmd
560 origcmd = cmd
561 if os.name == 'nt':
561 if os.name == 'nt':
562 cmd = '"%s"' % cmd
562 cmd = '"%s"' % cmd
563 try:
563 try:
564 for k, v in environ.iteritems():
564 for k, v in environ.iteritems():
565 os.environ[k] = py2shell(v)
565 os.environ[k] = py2shell(v)
566 if cwd is not None and oldcwd != cwd:
566 if cwd is not None and oldcwd != cwd:
567 os.chdir(cwd)
567 os.chdir(cwd)
568 rc = os.system(cmd)
568 rc = os.system(cmd)
569 if rc and onerr:
569 if rc and onerr:
570 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
570 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
571 explain_exit(rc)[0])
571 explain_exit(rc)[0])
572 if errprefix:
572 if errprefix:
573 errmsg = '%s: %s' % (errprefix, errmsg)
573 errmsg = '%s: %s' % (errprefix, errmsg)
574 try:
574 try:
575 onerr.warn(errmsg + '\n')
575 onerr.warn(errmsg + '\n')
576 except AttributeError:
576 except AttributeError:
577 raise onerr(errmsg)
577 raise onerr(errmsg)
578 return rc
578 return rc
579 finally:
579 finally:
580 for k, v in oldenv.iteritems():
580 for k, v in oldenv.iteritems():
581 if v is None:
581 if v is None:
582 del os.environ[k]
582 del os.environ[k]
583 else:
583 else:
584 os.environ[k] = v
584 os.environ[k] = v
585 if cwd is not None and oldcwd != cwd:
585 if cwd is not None and oldcwd != cwd:
586 os.chdir(oldcwd)
586 os.chdir(oldcwd)
587
587
588 def rename(src, dst):
588 def rename(src, dst):
589 """forcibly rename a file"""
589 """forcibly rename a file"""
590 try:
590 try:
591 os.rename(src, dst)
591 os.rename(src, dst)
592 except OSError, err:
592 except OSError, err:
593 # on windows, rename to existing file is not allowed, so we
593 # on windows, rename to existing file is not allowed, so we
594 # must delete destination first. but if file is open, unlink
594 # must delete destination first. but if file is open, unlink
595 # schedules it for delete but does not delete it. rename
595 # schedules it for delete but does not delete it. rename
596 # happens immediately even for open files, so we create
596 # happens immediately even for open files, so we create
597 # temporary file, delete it, rename destination to that name,
597 # temporary file, delete it, rename destination to that name,
598 # then delete that. then rename is safe to do.
598 # then delete that. then rename is safe to do.
599 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
599 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
600 os.close(fd)
600 os.close(fd)
601 os.unlink(temp)
601 os.unlink(temp)
602 os.rename(dst, temp)
602 os.rename(dst, temp)
603 os.unlink(temp)
603 os.unlink(temp)
604 os.rename(src, dst)
604 os.rename(src, dst)
605
605
606 def unlink(f):
606 def unlink(f):
607 """unlink and remove the directory if it is empty"""
607 """unlink and remove the directory if it is empty"""
608 os.unlink(f)
608 os.unlink(f)
609 # try removing directories that might now be empty
609 # try removing directories that might now be empty
610 try:
610 try:
611 os.removedirs(os.path.dirname(f))
611 os.removedirs(os.path.dirname(f))
612 except OSError:
612 except OSError:
613 pass
613 pass
614
614
615 def copyfile(src, dest):
615 def copyfile(src, dest):
616 "copy a file, preserving mode"
616 "copy a file, preserving mode"
617 try:
617 if os.path.islink(src):
618 shutil.copyfile(src, dest)
618 try:
619 shutil.copymode(src, dest)
619 os.unlink(dest)
620 except shutil.Error, inst:
620 except:
621 raise Abort(str(inst))
621 pass
622 os.symlink(os.readlink(src), dest)
623 else:
624 try:
625 shutil.copyfile(src, dest)
626 shutil.copymode(src, dest)
627 except shutil.Error, inst:
628 raise Abort(str(inst))
622
629
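# Illustrative sketch, not part of this changeset: the effect of the new
# symlink branch in copyfile() above.  On a symlink-capable POSIX
# filesystem the copy is itself a symlink to the same target rather than a
# copy of the target's contents; regular files still go through shutil.
import os, tempfile
d = tempfile.mkdtemp()
open(os.path.join(d, 'target'), 'w').write('data')
os.symlink('target', os.path.join(d, 'link'))
copyfile(os.path.join(d, 'link'), os.path.join(d, 'copy'))
assert os.path.islink(os.path.join(d, 'copy'))
assert os.readlink(os.path.join(d, 'copy')) == 'target'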
623 def copyfiles(src, dst, hardlink=None):
630 def copyfiles(src, dst, hardlink=None):
624 """Copy a directory tree using hardlinks if possible"""
631 """Copy a directory tree using hardlinks if possible"""
625
632
626 if hardlink is None:
633 if hardlink is None:
627 hardlink = (os.stat(src).st_dev ==
634 hardlink = (os.stat(src).st_dev ==
628 os.stat(os.path.dirname(dst)).st_dev)
635 os.stat(os.path.dirname(dst)).st_dev)
629
636
630 if os.path.isdir(src):
637 if os.path.isdir(src):
631 os.mkdir(dst)
638 os.mkdir(dst)
632 for name in os.listdir(src):
639 for name in os.listdir(src):
633 srcname = os.path.join(src, name)
640 srcname = os.path.join(src, name)
634 dstname = os.path.join(dst, name)
641 dstname = os.path.join(dst, name)
635 copyfiles(srcname, dstname, hardlink)
642 copyfiles(srcname, dstname, hardlink)
636 else:
643 else:
637 if hardlink:
644 if hardlink:
638 try:
645 try:
639 os_link(src, dst)
646 os_link(src, dst)
640 except (IOError, OSError):
647 except (IOError, OSError):
641 hardlink = False
648 hardlink = False
642 shutil.copy(src, dst)
649 shutil.copy(src, dst)
643 else:
650 else:
644 shutil.copy(src, dst)
651 shutil.copy(src, dst)
645
652
646 def audit_path(path):
653 def audit_path(path):
647 """Abort if path contains dangerous components"""
654 """Abort if path contains dangerous components"""
648 parts = os.path.normcase(path).split(os.sep)
655 parts = os.path.normcase(path).split(os.sep)
649 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
656 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
650 or os.pardir in parts):
657 or os.pardir in parts):
651 raise Abort(_("path contains illegal component: %s\n") % path)
658 raise Abort(_("path contains illegal component: %s\n") % path)
652
659
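# Illustrative sketch, not part of this changeset: the kinds of paths
# audit_path() rejects on a POSIX system -- anything reaching into .hg,
# escaping through '..', or starting at the filesystem root.
for bad in ('.hg/hgrc', '../escape', '/etc/passwd'):
    try:
        audit_path(bad)
    except Abort:
        pass
    else:
        raise AssertionError('%s should have been rejected' % bad)
audit_path('src/module.c')      # an ordinary relative path passes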
653 def _makelock_file(info, pathname):
660 def _makelock_file(info, pathname):
654 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
661 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
655 os.write(ld, info)
662 os.write(ld, info)
656 os.close(ld)
663 os.close(ld)
657
664
658 def _readlock_file(pathname):
665 def _readlock_file(pathname):
659 return posixfile(pathname).read()
666 return posixfile(pathname).read()
660
667
661 def nlinks(pathname):
668 def nlinks(pathname):
662 """Return number of hardlinks for the given file."""
669 """Return number of hardlinks for the given file."""
663 return os.lstat(pathname).st_nlink
670 return os.lstat(pathname).st_nlink
664
671
665 if hasattr(os, 'link'):
672 if hasattr(os, 'link'):
666 os_link = os.link
673 os_link = os.link
667 else:
674 else:
668 def os_link(src, dst):
675 def os_link(src, dst):
669 raise OSError(0, _("Hardlinks not supported"))
676 raise OSError(0, _("Hardlinks not supported"))
670
677
671 def fstat(fp):
678 def fstat(fp):
672 '''stat file object that may not have fileno method.'''
679 '''stat file object that may not have fileno method.'''
673 try:
680 try:
674 return os.fstat(fp.fileno())
681 return os.fstat(fp.fileno())
675 except AttributeError:
682 except AttributeError:
676 return os.stat(fp.name)
683 return os.stat(fp.name)
677
684
678 posixfile = file
685 posixfile = file
679
686
680 def is_win_9x():
687 def is_win_9x():
681 '''return true if run on windows 95, 98 or me.'''
688 '''return true if run on windows 95, 98 or me.'''
682 try:
689 try:
683 return sys.getwindowsversion()[3] == 1
690 return sys.getwindowsversion()[3] == 1
684 except AttributeError:
691 except AttributeError:
685 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
692 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
686
693
687 getuser_fallback = None
694 getuser_fallback = None
688
695
689 def getuser():
696 def getuser():
690 '''return name of current user'''
697 '''return name of current user'''
691 try:
698 try:
692 return getpass.getuser()
699 return getpass.getuser()
693 except ImportError:
700 except ImportError:
694 # import of pwd will fail on windows - try fallback
701 # import of pwd will fail on windows - try fallback
695 if getuser_fallback:
702 if getuser_fallback:
696 return getuser_fallback()
703 return getuser_fallback()
697 # raised if win32api not available
704 # raised if win32api not available
698 raise Abort(_('user name not available - set USERNAME '
705 raise Abort(_('user name not available - set USERNAME '
699 'environment variable'))
706 'environment variable'))
700
707
701 def username(uid=None):
708 def username(uid=None):
702 """Return the name of the user with the given uid.
709 """Return the name of the user with the given uid.
703
710
704 If uid is None, return the name of the current user."""
711 If uid is None, return the name of the current user."""
705 try:
712 try:
706 import pwd
713 import pwd
707 if uid is None:
714 if uid is None:
708 uid = os.getuid()
715 uid = os.getuid()
709 try:
716 try:
710 return pwd.getpwuid(uid)[0]
717 return pwd.getpwuid(uid)[0]
711 except KeyError:
718 except KeyError:
712 return str(uid)
719 return str(uid)
713 except ImportError:
720 except ImportError:
714 return None
721 return None
715
722
716 def groupname(gid=None):
723 def groupname(gid=None):
717 """Return the name of the group with the given gid.
724 """Return the name of the group with the given gid.
718
725
719 If gid is None, return the name of the current group."""
726 If gid is None, return the name of the current group."""
720 try:
727 try:
721 import grp
728 import grp
722 if gid is None:
729 if gid is None:
723 gid = os.getgid()
730 gid = os.getgid()
724 try:
731 try:
725 return grp.getgrgid(gid)[0]
732 return grp.getgrgid(gid)[0]
726 except KeyError:
733 except KeyError:
727 return str(gid)
734 return str(gid)
728 except ImportError:
735 except ImportError:
729 return None
736 return None
730
737
731 # File system features
738 # File system features
732
739
733 def checkfolding(path):
740 def checkfolding(path):
734 """
741 """
735 Check whether the given path is on a case-sensitive filesystem
742 Check whether the given path is on a case-sensitive filesystem
736
743
737 Requires a path (like /foo/.hg) ending with a foldable final
744 Requires a path (like /foo/.hg) ending with a foldable final
738 directory component.
745 directory component.
739 """
746 """
740 s1 = os.stat(path)
747 s1 = os.stat(path)
741 d, b = os.path.split(path)
748 d, b = os.path.split(path)
742 p2 = os.path.join(d, b.upper())
749 p2 = os.path.join(d, b.upper())
743 if path == p2:
750 if path == p2:
744 p2 = os.path.join(d, b.lower())
751 p2 = os.path.join(d, b.lower())
745 try:
752 try:
746 s2 = os.stat(p2)
753 s2 = os.stat(p2)
747 if s2 == s1:
754 if s2 == s1:
748 return False
755 return False
749 return True
756 return True
750 except:
757 except:
751 return True
758 return True
752
759
753 def checkexec(path):
760 def checkexec(path):
754 """
761 """
755 Check whether the given path is on a filesystem with UNIX-like exec flags
762 Check whether the given path is on a filesystem with UNIX-like exec flags
756
763
757 Requires a directory (like /foo/.hg)
764 Requires a directory (like /foo/.hg)
758 """
765 """
759 fh, fn = tempfile.mkstemp("", "", path)
766 fh, fn = tempfile.mkstemp("", "", path)
760 os.close(fh)
767 os.close(fh)
761 m = os.stat(fn).st_mode
768 m = os.stat(fn).st_mode
762 os.chmod(fn, m ^ 0111)
769 os.chmod(fn, m ^ 0111)
763 r = (os.stat(fn).st_mode != m)
770 r = (os.stat(fn).st_mode != m)
764 os.unlink(fn)
771 os.unlink(fn)
765 return r
772 return r
766
773
767 def execfunc(path, fallback):
774 def execfunc(path, fallback):
768 '''return an is_exec() function with default to fallback'''
775 '''return an is_exec() function with default to fallback'''
769 if checkexec(path):
776 if checkexec(path):
770 return lambda x: is_exec(os.path.join(path, x))
777 return lambda x: is_exec(os.path.join(path, x))
771 return fallback
778 return fallback
772
779
773 def checklink(path):
780 def checklink(path):
774 """check whether the given path is on a symlink-capable filesystem"""
781 """check whether the given path is on a symlink-capable filesystem"""
775 # mktemp is not racy because symlink creation will fail if the
782 # mktemp is not racy because symlink creation will fail if the
776 # file already exists
783 # file already exists
777 name = tempfile.mktemp(dir=path)
784 name = tempfile.mktemp(dir=path)
778 try:
785 try:
779 os.symlink(".", name)
786 os.symlink(".", name)
780 os.unlink(name)
787 os.unlink(name)
781 return True
788 return True
782 except (OSError, AttributeError):
789 except (OSError, AttributeError):
783 return False
790 return False
784
791
785 def linkfunc(path, fallback):
792 def linkfunc(path, fallback):
786 '''return an is_link() function with default to fallback'''
793 '''return an is_link() function with default to fallback'''
787 if checklink(path):
794 if checklink(path):
788 return lambda x: is_link(os.path.join(path, x))
795 return lambda x: os.path.islink(os.path.join(path, x))
789 return fallback
796 return fallback
790
797
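# Illustrative sketch, not part of this changeset: composing the capability
# probes with their fallbacks.  The directory is a throwaway stand-in for a
# repository's .hg directory, and the fallback functions are invented.
import tempfile
probe_dir = tempfile.mkdtemp()
is_exec_here = execfunc(probe_dir, lambda f: False)
is_link_here = linkfunc(probe_dir, lambda f: False)
# When checkexec()/checklink() report that the filesystem cannot represent
# exec bits or symlinks, the returned functions are just the fallbacks;
# otherwise they consult the real file modes under probe_dir.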
791 # Platform specific variants
798 # Platform specific variants
792 if os.name == 'nt':
799 if os.name == 'nt':
793 import msvcrt
800 import msvcrt
794 nulldev = 'NUL:'
801 nulldev = 'NUL:'
795
802
796 class winstdout:
803 class winstdout:
797 '''stdout on windows misbehaves if sent through a pipe'''
804 '''stdout on windows misbehaves if sent through a pipe'''
798
805
799 def __init__(self, fp):
806 def __init__(self, fp):
800 self.fp = fp
807 self.fp = fp
801
808
802 def __getattr__(self, key):
809 def __getattr__(self, key):
803 return getattr(self.fp, key)
810 return getattr(self.fp, key)
804
811
805 def close(self):
812 def close(self):
806 try:
813 try:
807 self.fp.close()
814 self.fp.close()
808 except: pass
815 except: pass
809
816
810 def write(self, s):
817 def write(self, s):
811 try:
818 try:
812 return self.fp.write(s)
819 return self.fp.write(s)
813 except IOError, inst:
820 except IOError, inst:
814 if inst.errno != 0: raise
821 if inst.errno != 0: raise
815 self.close()
822 self.close()
816 raise IOError(errno.EPIPE, 'Broken pipe')
823 raise IOError(errno.EPIPE, 'Broken pipe')
817
824
818 def flush(self):
825 def flush(self):
819 try:
826 try:
820 return self.fp.flush()
827 return self.fp.flush()
821 except IOError, inst:
828 except IOError, inst:
822 if inst.errno != errno.EINVAL: raise
829 if inst.errno != errno.EINVAL: raise
823 self.close()
830 self.close()
824 raise IOError(errno.EPIPE, 'Broken pipe')
831 raise IOError(errno.EPIPE, 'Broken pipe')
825
832
826 sys.stdout = winstdout(sys.stdout)
833 sys.stdout = winstdout(sys.stdout)
827
834
828 def system_rcpath():
835 def system_rcpath():
829 try:
836 try:
830 return system_rcpath_win32()
837 return system_rcpath_win32()
831 except:
838 except:
832 return [r'c:\mercurial\mercurial.ini']
839 return [r'c:\mercurial\mercurial.ini']
833
840
834 def user_rcpath():
841 def user_rcpath():
835 '''return os-specific hgrc search path to the user dir'''
842 '''return os-specific hgrc search path to the user dir'''
836 try:
843 try:
837 userrc = user_rcpath_win32()
844 userrc = user_rcpath_win32()
838 except:
845 except:
839 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
846 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
840 path = [userrc]
847 path = [userrc]
841 userprofile = os.environ.get('USERPROFILE')
848 userprofile = os.environ.get('USERPROFILE')
842 if userprofile:
849 if userprofile:
843 path.append(os.path.join(userprofile, 'mercurial.ini'))
850 path.append(os.path.join(userprofile, 'mercurial.ini'))
844 return path
851 return path
845
852
846 def parse_patch_output(output_line):
853 def parse_patch_output(output_line):
847 """parses the output produced by patch and returns the file name"""
854 """parses the output produced by patch and returns the file name"""
848 pf = output_line[14:]
855 pf = output_line[14:]
849 if pf[0] == '`':
856 if pf[0] == '`':
850 pf = pf[1:-1] # Remove the quotes
857 pf = pf[1:-1] # Remove the quotes
851 return pf
858 return pf
852
859
853 def testpid(pid):
860 def testpid(pid):
854 '''return False if pid dead, True if running or not known'''
861 '''return False if pid dead, True if running or not known'''
855 return True
862 return True
856
863
857 def set_exec(f, mode):
864 def set_exec(f, mode):
858 pass
865 pass
859
866
860 def set_link(f, mode):
867 def set_link(f, mode):
861 pass
868 pass
862
869
863 def set_binary(fd):
870 def set_binary(fd):
864 msvcrt.setmode(fd.fileno(), os.O_BINARY)
871 msvcrt.setmode(fd.fileno(), os.O_BINARY)
865
872
866 def pconvert(path):
873 def pconvert(path):
867 return path.replace("\\", "/")
874 return path.replace("\\", "/")
868
875
869 def localpath(path):
876 def localpath(path):
870 return path.replace('/', '\\')
877 return path.replace('/', '\\')
871
878
872 def normpath(path):
879 def normpath(path):
873 return pconvert(os.path.normpath(path))
880 return pconvert(os.path.normpath(path))
874
881
875 makelock = _makelock_file
882 makelock = _makelock_file
876 readlock = _readlock_file
883 readlock = _readlock_file
877
884
878 def samestat(s1, s2):
885 def samestat(s1, s2):
879 return False
886 return False
880
887
881 # A sequence of backslashes is special iff it precedes a double quote:
888 # A sequence of backslashes is special iff it precedes a double quote:
882 # - if there's an even number of backslashes, the double quote is not
889 # - if there's an even number of backslashes, the double quote is not
883 # quoted (i.e. it ends the quoted region)
890 # quoted (i.e. it ends the quoted region)
884 # - if there's an odd number of backslashes, the double quote is quoted
891 # - if there's an odd number of backslashes, the double quote is quoted
885 # - in both cases, every pair of backslashes is unquoted into a single
892 # - in both cases, every pair of backslashes is unquoted into a single
886 # backslash
893 # backslash
887 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
894 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
888 # So, to quote a string, we must surround it in double quotes, double
895 # So, to quote a string, we must surround it in double quotes, double
889 # the number of backslashes that precede double quotes and add another
896 # the number of backslashes that precede double quotes and add another
890 # backslash before every double quote (being careful with the double
897 # backslash before every double quote (being careful with the double
891 # quote we've appended to the end)
898 # quote we've appended to the end)
892 _quotere = None
899 _quotere = None
893 def shellquote(s):
900 def shellquote(s):
894 global _quotere
901 global _quotere
895 if _quotere is None:
902 if _quotere is None:
896 _quotere = re.compile(r'(\\*)("|\\$)')
903 _quotere = re.compile(r'(\\*)("|\\$)')
897 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
904 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
898
905
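A worked example of the quoting rule described in the comment above, as a standalone sketch that reuses the same regular expression (illustrative only):

import re

_q = re.compile(r'(\\*)("|\\$)')
def quote(s):
    return '"%s"' % _q.sub(r'\1\1\\\2', s)

print(quote('say "hi"'))     # "say \"hi\"" - each double quote gains a backslash
print(quote('C:\\dir\\'))    # "C:\dir\\"   - the trailing backslash is doubled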
899 def explain_exit(code):
906 def explain_exit(code):
900 return _("exited with status %d") % code, code
907 return _("exited with status %d") % code, code
901
908
902 # if you change this stub into a real check, please try to implement the
909 # if you change this stub into a real check, please try to implement the
903 # username and groupname functions above, too.
910 # username and groupname functions above, too.
904 def isowner(fp, st=None):
911 def isowner(fp, st=None):
905 return True
912 return True
906
913
907 try:
914 try:
908 # override functions with win32 versions if possible
915 # override functions with win32 versions if possible
909 from util_win32 import *
916 from util_win32 import *
910 if not is_win_9x():
917 if not is_win_9x():
911 posixfile = posixfile_nt
918 posixfile = posixfile_nt
912 except ImportError:
919 except ImportError:
913 pass
920 pass
914
921
915 else:
922 else:
916 nulldev = '/dev/null'
923 nulldev = '/dev/null'
917 _umask = os.umask(0)
924 _umask = os.umask(0)
918 os.umask(_umask)
925 os.umask(_umask)
919
926
920 def rcfiles(path):
927 def rcfiles(path):
921 rcs = [os.path.join(path, 'hgrc')]
928 rcs = [os.path.join(path, 'hgrc')]
922 rcdir = os.path.join(path, 'hgrc.d')
929 rcdir = os.path.join(path, 'hgrc.d')
923 try:
930 try:
924 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
931 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
925 if f.endswith(".rc")])
932 if f.endswith(".rc")])
926 except OSError:
933 except OSError:
927 pass
934 pass
928 return rcs
935 return rcs
929
936
930 def system_rcpath():
937 def system_rcpath():
931 path = []
938 path = []
932 # old mod_python does not set sys.argv
939 # old mod_python does not set sys.argv
933 if len(getattr(sys, 'argv', [])) > 0:
940 if len(getattr(sys, 'argv', [])) > 0:
934 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
941 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
935 '/../etc/mercurial'))
942 '/../etc/mercurial'))
936 path.extend(rcfiles('/etc/mercurial'))
943 path.extend(rcfiles('/etc/mercurial'))
937 return path
944 return path
938
945
939 def user_rcpath():
946 def user_rcpath():
940 return [os.path.expanduser('~/.hgrc')]
947 return [os.path.expanduser('~/.hgrc')]
941
948
942 def parse_patch_output(output_line):
949 def parse_patch_output(output_line):
943 """parses the output produced by patch and returns the file name"""
950 """parses the output produced by patch and returns the file name"""
944 pf = output_line[14:]
951 pf = output_line[14:]
945 if pf.startswith("'") and pf.endswith("'") and " " in pf:
952 if pf.startswith("'") and pf.endswith("'") and " " in pf:
946 pf = pf[1:-1] # Remove the quotes
953 pf = pf[1:-1] # Remove the quotes
947 return pf
954 return pf
948
955
949 def is_exec(f):
956 def is_exec(f):
950 """check whether a file is executable"""
957 """check whether a file is executable"""
951 return (os.lstat(f).st_mode & 0100 != 0)
958 return (os.lstat(f).st_mode & 0100 != 0)
952
959
953 def set_exec(f, mode):
960 def set_exec(f, mode):
954 s = os.lstat(f).st_mode
961 s = os.lstat(f).st_mode
955 if (s & 0100 != 0) == mode:
962 if (s & 0100 != 0) == mode:
956 return
963 return
957 if mode:
964 if mode:
958 # Turn on +x for every +r bit when making a file executable
965 # Turn on +x for every +r bit when making a file executable
959 # and obey umask.
966 # and obey umask.
960 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
967 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
961 else:
968 else:
962 os.chmod(f, s & 0666)
969 os.chmod(f, s & 0666)
963
970
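A worked example of the bit rule in the comment above (illustrative values; octal literals as in the surrounding Python 2 code):

s = 0644
umask = 022
print(oct(s | (s & 0444) >> 2 & ~umask))   # 0755 - every +r bit gains +x
umask = 027
print(oct(s | (s & 0444) >> 2 & ~umask))   # 0754 - the umask strips other-x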
964 def is_link(f):
965 """check whether a file is a symlink"""
966 return (os.lstat(f).st_mode & 0120000 == 0120000)
967
968 def set_link(f, mode):
971 def set_link(f, mode):
969 """make a file a symbolic link/regular file
972 """make a file a symbolic link/regular file
970
973
971 if a file is changed to a link, its contents become the link data
974 if a file is changed to a link, its contents become the link data
972 if a link is changed to a file, its link data become its contents
975 if a link is changed to a file, its link data become its contents
973 """
976 """
974
977
975 m = is_link(f)
978 m = os.path.islink(f)
976 if m == bool(mode):
979 if m == bool(mode):
977 return
980 return
978
981
979 if mode: # switch file to link
982 if mode: # switch file to link
980 data = file(f).read()
983 data = file(f).read()
981 os.unlink(f)
984 os.unlink(f)
982 os.symlink(data, f)
985 os.symlink(data, f)
983 else:
986 else:
984 data = os.readlink(f)
987 data = os.readlink(f)
985 os.unlink(f)
988 os.unlink(f)
986 file(f, "w").write(data)
989 file(f, "w").write(data)
987
990
988 def set_binary(fd):
991 def set_binary(fd):
989 pass
992 pass
990
993
991 def pconvert(path):
994 def pconvert(path):
992 return path
995 return path
993
996
994 def localpath(path):
997 def localpath(path):
995 return path
998 return path
996
999
997 normpath = os.path.normpath
1000 normpath = os.path.normpath
998 samestat = os.path.samestat
1001 samestat = os.path.samestat
999
1002
1000 def makelock(info, pathname):
1003 def makelock(info, pathname):
1001 try:
1004 try:
1002 os.symlink(info, pathname)
1005 os.symlink(info, pathname)
1003 except OSError, why:
1006 except OSError, why:
1004 if why.errno == errno.EEXIST:
1007 if why.errno == errno.EEXIST:
1005 raise
1008 raise
1006 else:
1009 else:
1007 _makelock_file(info, pathname)
1010 _makelock_file(info, pathname)
1008
1011
1009 def readlock(pathname):
1012 def readlock(pathname):
1010 try:
1013 try:
1011 return os.readlink(pathname)
1014 return os.readlink(pathname)
1012 except OSError, why:
1015 except OSError, why:
1013 if why.errno == errno.EINVAL:
1016 if why.errno == errno.EINVAL:
1014 return _readlock_file(pathname)
1017 return _readlock_file(pathname)
1015 else:
1018 else:
1016 raise
1019 raise
1017
1020
1018 def shellquote(s):
1021 def shellquote(s):
1019 return "'%s'" % s.replace("'", "'\\''")
1022 return "'%s'" % s.replace("'", "'\\''")
1020
1023
1021 def testpid(pid):
1024 def testpid(pid):
1022 '''return False if pid dead, True if running or not sure'''
1025 '''return False if pid dead, True if running or not sure'''
1023 try:
1026 try:
1024 os.kill(pid, 0)
1027 os.kill(pid, 0)
1025 return True
1028 return True
1026 except OSError, inst:
1029 except OSError, inst:
1027 return inst.errno != errno.ESRCH
1030 return inst.errno != errno.ESRCH
1028
1031
1029 def explain_exit(code):
1032 def explain_exit(code):
1030 """return a 2-tuple (desc, code) describing a process's status"""
1033 """return a 2-tuple (desc, code) describing a process's status"""
1031 if os.WIFEXITED(code):
1034 if os.WIFEXITED(code):
1032 val = os.WEXITSTATUS(code)
1035 val = os.WEXITSTATUS(code)
1033 return _("exited with status %d") % val, val
1036 return _("exited with status %d") % val, val
1034 elif os.WIFSIGNALED(code):
1037 elif os.WIFSIGNALED(code):
1035 val = os.WTERMSIG(code)
1038 val = os.WTERMSIG(code)
1036 return _("killed by signal %d") % val, val
1039 return _("killed by signal %d") % val, val
1037 elif os.WIFSTOPPED(code):
1040 elif os.WIFSTOPPED(code):
1038 val = os.WSTOPSIG(code)
1041 val = os.WSTOPSIG(code)
1039 return _("stopped by signal %d") % val, val
1042 return _("stopped by signal %d") % val, val
1040 raise ValueError(_("invalid exit code"))
1043 raise ValueError(_("invalid exit code"))
1041
1044
1042 def isowner(fp, st=None):
1045 def isowner(fp, st=None):
1043 """Return True if the file object f belongs to the current user.
1046 """Return True if the file object f belongs to the current user.
1044
1047
1045 The return value of a util.fstat(f) may be passed as the st argument.
1048 The return value of a util.fstat(f) may be passed as the st argument.
1046 """
1049 """
1047 if st is None:
1050 if st is None:
1048 st = fstat(fp)
1051 st = fstat(fp)
1049 return st.st_uid == os.getuid()
1052 return st.st_uid == os.getuid()
1050
1053
1051 def _buildencodefun():
1054 def _buildencodefun():
1052 e = '_'
1055 e = '_'
1053 win_reserved = [ord(x) for x in '\\:*?"<>|']
1056 win_reserved = [ord(x) for x in '\\:*?"<>|']
1054 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1057 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
1055 for x in (range(32) + range(126, 256) + win_reserved):
1058 for x in (range(32) + range(126, 256) + win_reserved):
1056 cmap[chr(x)] = "~%02x" % x
1059 cmap[chr(x)] = "~%02x" % x
1057 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1060 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
1058 cmap[chr(x)] = e + chr(x).lower()
1061 cmap[chr(x)] = e + chr(x).lower()
1059 dmap = {}
1062 dmap = {}
1060 for k, v in cmap.iteritems():
1063 for k, v in cmap.iteritems():
1061 dmap[v] = k
1064 dmap[v] = k
1062 def decode(s):
1065 def decode(s):
1063 i = 0
1066 i = 0
1064 while i < len(s):
1067 while i < len(s):
1065 for l in xrange(1, 4):
1068 for l in xrange(1, 4):
1066 try:
1069 try:
1067 yield dmap[s[i:i+l]]
1070 yield dmap[s[i:i+l]]
1068 i += l
1071 i += l
1069 break
1072 break
1070 except KeyError:
1073 except KeyError:
1071 pass
1074 pass
1072 else:
1075 else:
1073 raise KeyError
1076 raise KeyError
1074 return (lambda s: "".join([cmap[c] for c in s]),
1077 return (lambda s: "".join([cmap[c] for c in s]),
1075 lambda s: "".join(list(decode(s))))
1078 lambda s: "".join(list(decode(s))))
1076
1079
1077 encodefilename, decodefilename = _buildencodefun()
1080 encodefilename, decodefilename = _buildencodefun()
1078
1081
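A quick illustration of the scheme built above, assuming encodefilename/decodefilename are in scope (e.g. imported from this module): bytes outside 32-125 and Windows-reserved characters become "~XX", upper-case letters become "_" plus their lower-case form, and "_" itself becomes "__".

print(encodefilename('data/Foo:Bar_baz.txt'))        # data/_foo~3a_bar__baz.txt
print(decodefilename('data/_foo~3a_bar__baz.txt'))   # data/Foo:Bar_baz.txt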
1079 def encodedopener(openerfn, fn):
1082 def encodedopener(openerfn, fn):
1080 def o(path, *args, **kw):
1083 def o(path, *args, **kw):
1081 return openerfn(fn(path), *args, **kw)
1084 return openerfn(fn(path), *args, **kw)
1082 return o
1085 return o
1083
1086
1084 def opener(base, audit=True):
1087 def opener(base, audit=True):
1085 """
1088 """
1086 return a function that opens files relative to base
1089 return a function that opens files relative to base
1087
1090
1088 this function is used to hide the details of COW semantics and
1091 this function is used to hide the details of COW semantics and
1089 remote file access from higher level code.
1092 remote file access from higher level code.
1090 """
1093 """
1091 p = base
1094 p = base
1092 audit_p = audit
1095 audit_p = audit
1093
1096
1094 def mktempcopy(name):
1097 def mktempcopy(name):
1095 d, fn = os.path.split(name)
1098 d, fn = os.path.split(name)
1096 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1099 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1097 os.close(fd)
1100 os.close(fd)
1098 ofp = posixfile(temp, "wb")
1101 ofp = posixfile(temp, "wb")
1099 try:
1102 try:
1100 try:
1103 try:
1101 ifp = posixfile(name, "rb")
1104 ifp = posixfile(name, "rb")
1102 except IOError, inst:
1105 except IOError, inst:
1103 if not getattr(inst, 'filename', None):
1106 if not getattr(inst, 'filename', None):
1104 inst.filename = name
1107 inst.filename = name
1105 raise
1108 raise
1106 for chunk in filechunkiter(ifp):
1109 for chunk in filechunkiter(ifp):
1107 ofp.write(chunk)
1110 ofp.write(chunk)
1108 ifp.close()
1111 ifp.close()
1109 ofp.close()
1112 ofp.close()
1110 except:
1113 except:
1111 try: os.unlink(temp)
1114 try: os.unlink(temp)
1112 except: pass
1115 except: pass
1113 raise
1116 raise
1114 st = os.lstat(name)
1117 st = os.lstat(name)
1115 os.chmod(temp, st.st_mode)
1118 os.chmod(temp, st.st_mode)
1116 return temp
1119 return temp
1117
1120
1118 class atomictempfile(posixfile):
1121 class atomictempfile(posixfile):
1119 """the file will only be copied when rename is called"""
1122 """the file will only be copied when rename is called"""
1120 def __init__(self, name, mode):
1123 def __init__(self, name, mode):
1121 self.__name = name
1124 self.__name = name
1122 self.temp = mktempcopy(name)
1125 self.temp = mktempcopy(name)
1123 posixfile.__init__(self, self.temp, mode)
1126 posixfile.__init__(self, self.temp, mode)
1124 def rename(self):
1127 def rename(self):
1125 if not self.closed:
1128 if not self.closed:
1126 posixfile.close(self)
1129 posixfile.close(self)
1127 rename(self.temp, localpath(self.__name))
1130 rename(self.temp, localpath(self.__name))
1128 def __del__(self):
1131 def __del__(self):
1129 if not self.closed:
1132 if not self.closed:
1130 try:
1133 try:
1131 os.unlink(self.temp)
1134 os.unlink(self.temp)
1132 except: pass
1135 except: pass
1133 posixfile.close(self)
1136 posixfile.close(self)
1134
1137
1135 class atomicfile(atomictempfile):
1138 class atomicfile(atomictempfile):
1136 """the file will only be copied on close"""
1139 """the file will only be copied on close"""
1137 def __init__(self, name, mode):
1140 def __init__(self, name, mode):
1138 atomictempfile.__init__(self, name, mode)
1141 atomictempfile.__init__(self, name, mode)
1139 def close(self):
1142 def close(self):
1140 self.rename()
1143 self.rename()
1141 def __del__(self):
1144 def __del__(self):
1142 self.rename()
1145 self.rename()
1143
1146
1144 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1147 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1145 if audit_p:
1148 if audit_p:
1146 audit_path(path)
1149 audit_path(path)
1147 f = os.path.join(p, path)
1150 f = os.path.join(p, path)
1148
1151
1149 if not text:
1152 if not text:
1150 mode += "b" # for that other OS
1153 mode += "b" # for that other OS
1151
1154
1152 if mode[0] != "r":
1155 if mode[0] != "r":
1153 try:
1156 try:
1154 nlink = nlinks(f)
1157 nlink = nlinks(f)
1155 except OSError:
1158 except OSError:
1156 d = os.path.dirname(f)
1159 d = os.path.dirname(f)
1157 if not os.path.isdir(d):
1160 if not os.path.isdir(d):
1158 os.makedirs(d)
1161 os.makedirs(d)
1159 else:
1162 else:
1160 if atomic:
1163 if atomic:
1161 return atomicfile(f, mode)
1164 return atomicfile(f, mode)
1162 elif atomictemp:
1165 elif atomictemp:
1163 return atomictempfile(f, mode)
1166 return atomictempfile(f, mode)
1164 if nlink > 1:
1167 if nlink > 1:
1165 rename(mktempcopy(f), f)
1168 rename(mktempcopy(f), f)
1166 return posixfile(f, mode)
1169 return posixfile(f, mode)
1167
1170
1168 return o
1171 return o
1169
1172
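A sketch of how calling code might use an opener (made-up paths and data; it assumes the target file already exists, since the atomictemp branch starts from a copy of it). With atomictemp=True the data goes to a temporary copy and only replaces the real file when rename() is called, so readers never see a half-written file.

op = opener('/path/to/repo/.hg')
f = op('store/somefile', 'w', atomictemp=True)
f.write('new contents')
f.rename()        # move the temp copy into place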
1170 class chunkbuffer(object):
1173 class chunkbuffer(object):
1171 """Allow arbitrary sized chunks of data to be efficiently read from an
1174 """Allow arbitrary sized chunks of data to be efficiently read from an
1172 iterator over chunks of arbitrary size."""
1175 iterator over chunks of arbitrary size."""
1173
1176
1174 def __init__(self, in_iter, targetsize = 2**16):
1177 def __init__(self, in_iter, targetsize = 2**16):
1175 """in_iter is the iterator that's iterating over the input chunks.
1178 """in_iter is the iterator that's iterating over the input chunks.
1176 targetsize is how big a buffer to try to maintain."""
1179 targetsize is how big a buffer to try to maintain."""
1177 self.in_iter = iter(in_iter)
1180 self.in_iter = iter(in_iter)
1178 self.buf = ''
1181 self.buf = ''
1179 self.targetsize = int(targetsize)
1182 self.targetsize = int(targetsize)
1180 if self.targetsize <= 0:
1183 if self.targetsize <= 0:
1181 raise ValueError(_("targetsize must be greater than 0, was %d") %
1184 raise ValueError(_("targetsize must be greater than 0, was %d") %
1182 targetsize)
1185 targetsize)
1183 self.iterempty = False
1186 self.iterempty = False
1184
1187
1185 def fillbuf(self):
1188 def fillbuf(self):
1186 """Ignore target size; read every chunk from iterator until empty."""
1189 """Ignore target size; read every chunk from iterator until empty."""
1187 if not self.iterempty:
1190 if not self.iterempty:
1188 collector = cStringIO.StringIO()
1191 collector = cStringIO.StringIO()
1189 collector.write(self.buf)
1192 collector.write(self.buf)
1190 for ch in self.in_iter:
1193 for ch in self.in_iter:
1191 collector.write(ch)
1194 collector.write(ch)
1192 self.buf = collector.getvalue()
1195 self.buf = collector.getvalue()
1193 self.iterempty = True
1196 self.iterempty = True
1194
1197
1195 def read(self, l):
1198 def read(self, l):
1196 """Read L bytes of data from the iterator of chunks of data.
1199 """Read L bytes of data from the iterator of chunks of data.
1197 Returns less than L bytes if the iterator runs dry."""
1200 Returns less than L bytes if the iterator runs dry."""
1198 if l > len(self.buf) and not self.iterempty:
1201 if l > len(self.buf) and not self.iterempty:
1199 # Clamp to a multiple of self.targetsize
1202 # Clamp to a multiple of self.targetsize
1200 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1203 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1201 collector = cStringIO.StringIO()
1204 collector = cStringIO.StringIO()
1202 collector.write(self.buf)
1205 collector.write(self.buf)
1203 collected = len(self.buf)
1206 collected = len(self.buf)
1204 for chunk in self.in_iter:
1207 for chunk in self.in_iter:
1205 collector.write(chunk)
1208 collector.write(chunk)
1206 collected += len(chunk)
1209 collected += len(chunk)
1207 if collected >= targetsize:
1210 if collected >= targetsize:
1208 break
1211 break
1209 if collected < targetsize:
1212 if collected < targetsize:
1210 self.iterempty = True
1213 self.iterempty = True
1211 self.buf = collector.getvalue()
1214 self.buf = collector.getvalue()
1212 s, self.buf = self.buf[:l], buffer(self.buf, l)
1215 s, self.buf = self.buf[:l], buffer(self.buf, l)
1213 return s
1216 return s
1214
1217
1215 def filechunkiter(f, size=65536, limit=None):
1218 def filechunkiter(f, size=65536, limit=None):
1216 """Create a generator that produces the data in the file size
1219 """Create a generator that produces the data in the file size
1217 (default 65536) bytes at a time, up to optional limit (default is
1220 (default 65536) bytes at a time, up to optional limit (default is
1218 to read all data). Chunks may be less than size bytes if the
1221 to read all data). Chunks may be less than size bytes if the
1219 chunk is the last chunk in the file, or the file is a socket or
1222 chunk is the last chunk in the file, or the file is a socket or
1220 some other type of file that sometimes reads less data than is
1223 some other type of file that sometimes reads less data than is
1221 requested."""
1224 requested."""
1222 assert size >= 0
1225 assert size >= 0
1223 assert limit is None or limit >= 0
1226 assert limit is None or limit >= 0
1224 while True:
1227 while True:
1225 if limit is None: nbytes = size
1228 if limit is None: nbytes = size
1226 else: nbytes = min(limit, size)
1229 else: nbytes = min(limit, size)
1227 s = nbytes and f.read(nbytes)
1230 s = nbytes and f.read(nbytes)
1228 if not s: break
1231 if not s: break
1229 if limit: limit -= len(s)
1232 if limit: limit -= len(s)
1230 yield s
1233 yield s
1231
1234
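A tiny illustration with made-up data: filechunkiter yields fixed-size pieces of a file-like object, and chunkbuffer puts a read(n) interface back on top of an iterator of unevenly sized chunks.

import cStringIO

print(list(filechunkiter(cStringIO.StringIO('abcdefghij'), size=4)))
# ['abcd', 'efgh', 'ij']

cb = chunkbuffer(iter(['abc', 'de', 'fghij']), targetsize=4)
print(cb.read(4))      # 'abcd'
print(cb.read(100))    # 'efghij' - shorter than asked once the iterator runs dry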
1232 def makedate():
1235 def makedate():
1233 lt = time.localtime()
1236 lt = time.localtime()
1234 if lt[8] == 1 and time.daylight:
1237 if lt[8] == 1 and time.daylight:
1235 tz = time.altzone
1238 tz = time.altzone
1236 else:
1239 else:
1237 tz = time.timezone
1240 tz = time.timezone
1238 return time.mktime(lt), tz
1241 return time.mktime(lt), tz
1239
1242
1240 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1243 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1241 """represent a (unixtime, offset) tuple as a localized time.
1244 """represent a (unixtime, offset) tuple as a localized time.
1242 unixtime is seconds since the epoch, and offset is the time zone's
1245 unixtime is seconds since the epoch, and offset is the time zone's
1243 number of seconds away from UTC. if timezone is false, do not
1246 number of seconds away from UTC. if timezone is false, do not
1244 append time zone to string."""
1247 append time zone to string."""
1245 t, tz = date or makedate()
1248 t, tz = date or makedate()
1246 s = time.strftime(format, time.gmtime(float(t) - tz))
1249 s = time.strftime(format, time.gmtime(float(t) - tz))
1247 if timezone:
1250 if timezone:
1248 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1251 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1249 return s
1252 return s
1250
1253
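For example, with the epoch as the (unixtime, offset) tuple:

print(datestr((0, 0)))        # Thu Jan 01 00:00:00 1970 +0000
print(datestr((0, -3600)))    # Thu Jan 01 01:00:00 1970 +0100  (one hour east of UTC)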
1251 def strdate(string, format, defaults):
1254 def strdate(string, format, defaults):
1252 """parse a localized time string and return a (unixtime, offset) tuple.
1255 """parse a localized time string and return a (unixtime, offset) tuple.
1253 if the string cannot be parsed, ValueError is raised."""
1256 if the string cannot be parsed, ValueError is raised."""
1254 def timezone(string):
1257 def timezone(string):
1255 tz = string.split()[-1]
1258 tz = string.split()[-1]
1256 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1259 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1257 tz = int(tz)
1260 tz = int(tz)
1258 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1261 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1259 return offset
1262 return offset
1260 if tz == "GMT" or tz == "UTC":
1263 if tz == "GMT" or tz == "UTC":
1261 return 0
1264 return 0
1262 return None
1265 return None
1263
1266
1264 # NOTE: unixtime = localunixtime + offset
1267 # NOTE: unixtime = localunixtime + offset
1265 offset, date = timezone(string), string
1268 offset, date = timezone(string), string
1266 if offset != None:
1269 if offset != None:
1267 date = " ".join(string.split()[:-1])
1270 date = " ".join(string.split()[:-1])
1268
1271
1269 # add missing elements from defaults
1272 # add missing elements from defaults
1270 for part in defaults:
1273 for part in defaults:
1271 found = [True for p in part if ("%"+p) in format]
1274 found = [True for p in part if ("%"+p) in format]
1272 if not found:
1275 if not found:
1273 date += "@" + defaults[part]
1276 date += "@" + defaults[part]
1274 format += "@%" + part[0]
1277 format += "@%" + part[0]
1275
1278
1276 timetuple = time.strptime(date, format)
1279 timetuple = time.strptime(date, format)
1277 localunixtime = int(calendar.timegm(timetuple))
1280 localunixtime = int(calendar.timegm(timetuple))
1278 if offset is None:
1281 if offset is None:
1279 # local timezone
1282 # local timezone
1280 unixtime = int(time.mktime(timetuple))
1283 unixtime = int(time.mktime(timetuple))
1281 offset = unixtime - localunixtime
1284 offset = unixtime - localunixtime
1282 else:
1285 else:
1283 unixtime = localunixtime + offset
1286 unixtime = localunixtime + offset
1284 return unixtime, offset
1287 return unixtime, offset
1285
1288
1286 def parsedate(string, formats=None, defaults=None):
1289 def parsedate(string, formats=None, defaults=None):
1287 """parse a localized time string and return a (unixtime, offset) tuple.
1290 """parse a localized time string and return a (unixtime, offset) tuple.
1288 The date may be a "unixtime offset" string or in one of the specified
1291 The date may be a "unixtime offset" string or in one of the specified
1289 formats."""
1292 formats."""
1290 if not string:
1293 if not string:
1291 return 0, 0
1294 return 0, 0
1292 if not formats:
1295 if not formats:
1293 formats = defaultdateformats
1296 formats = defaultdateformats
1294 string = string.strip()
1297 string = string.strip()
1295 try:
1298 try:
1296 when, offset = map(int, string.split(' '))
1299 when, offset = map(int, string.split(' '))
1297 except ValueError:
1300 except ValueError:
1298 # fill out defaults
1301 # fill out defaults
1299 if not defaults:
1302 if not defaults:
1300 defaults = {}
1303 defaults = {}
1301 now = makedate()
1304 now = makedate()
1302 for part in "d mb yY HI M S".split():
1305 for part in "d mb yY HI M S".split():
1303 if part not in defaults:
1306 if part not in defaults:
1304 if part[0] in "HMS":
1307 if part[0] in "HMS":
1305 defaults[part] = "00"
1308 defaults[part] = "00"
1306 elif part[0] in "dm":
1309 elif part[0] in "dm":
1307 defaults[part] = "1"
1310 defaults[part] = "1"
1308 else:
1311 else:
1309 defaults[part] = datestr(now, "%" + part[0], False)
1312 defaults[part] = datestr(now, "%" + part[0], False)
1310
1313
1311 for format in formats:
1314 for format in formats:
1312 try:
1315 try:
1313 when, offset = strdate(string, format, defaults)
1316 when, offset = strdate(string, format, defaults)
1314 except ValueError:
1317 except ValueError:
1315 pass
1318 pass
1316 else:
1319 else:
1317 break
1320 break
1318 else:
1321 else:
1319 raise Abort(_('invalid date: %r ') % string)
1322 raise Abort(_('invalid date: %r ') % string)
1320 # validate explicit (probably user-specified) date and
1323 # validate explicit (probably user-specified) date and
1321 # time zone offset. values must fit in signed 32 bits for
1324 # time zone offset. values must fit in signed 32 bits for
1322 # current 32-bit linux runtimes. timezones go from UTC-12
1325 # current 32-bit linux runtimes. timezones go from UTC-12
1323 # to UTC+14
1326 # to UTC+14
1324 if abs(when) > 0x7fffffff:
1327 if abs(when) > 0x7fffffff:
1325 raise Abort(_('date exceeds 32 bits: %d') % when)
1328 raise Abort(_('date exceeds 32 bits: %d') % when)
1326 if offset < -50400 or offset > 43200:
1329 if offset < -50400 or offset > 43200:
1327 raise Abort(_('impossible time zone offset: %d') % offset)
1330 raise Abort(_('impossible time zone offset: %d') % offset)
1328 return when, offset
1331 return when, offset
1329
1332
1330 def matchdate(date):
1333 def matchdate(date):
1331 """Return a function that matches a given date match specifier
1334 """Return a function that matches a given date match specifier
1332
1335
1333 Formats include:
1336 Formats include:
1334
1337
1335 '{date}' match a given date to the accuracy provided
1338 '{date}' match a given date to the accuracy provided
1336
1339
1337 '<{date}' on or before a given date
1340 '<{date}' on or before a given date
1338
1341
1339 '>{date}' on or after a given date
1342 '>{date}' on or after a given date
1340
1343
1341 """
1344 """
1342
1345
1343 def lower(date):
1346 def lower(date):
1344 return parsedate(date, extendeddateformats)[0]
1347 return parsedate(date, extendeddateformats)[0]
1345
1348
1346 def upper(date):
1349 def upper(date):
1347 d = dict(mb="12", HI="23", M="59", S="59")
1350 d = dict(mb="12", HI="23", M="59", S="59")
1348 for days in "31 30 29".split():
1351 for days in "31 30 29".split():
1349 try:
1352 try:
1350 d["d"] = days
1353 d["d"] = days
1351 return parsedate(date, extendeddateformats, d)[0]
1354 return parsedate(date, extendeddateformats, d)[0]
1352 except:
1355 except:
1353 pass
1356 pass
1354 d["d"] = "28"
1357 d["d"] = "28"
1355 return parsedate(date, extendeddateformats, d)[0]
1358 return parsedate(date, extendeddateformats, d)[0]
1356
1359
1357 if date[0] == "<":
1360 if date[0] == "<":
1358 when = upper(date[1:])
1361 when = upper(date[1:])
1359 return lambda x: x <= when
1362 return lambda x: x <= when
1360 elif date[0] == ">":
1363 elif date[0] == ">":
1361 when = lower(date[1:])
1364 when = lower(date[1:])
1362 return lambda x: x >= when
1365 return lambda x: x >= when
1363 elif date[0] == "-":
1366 elif date[0] == "-":
1364 try:
1367 try:
1365 days = int(date[1:])
1368 days = int(date[1:])
1366 except ValueError:
1369 except ValueError:
1367 raise Abort(_("invalid day spec: %s") % date[1:])
1370 raise Abort(_("invalid day spec: %s") % date[1:])
1368 when = makedate()[0] - days * 3600 * 24
1371 when = makedate()[0] - days * 3600 * 24
1369 return lambda x: x >= when
1372 return lambda x: x >= when
1370 elif " to " in date:
1373 elif " to " in date:
1371 a, b = date.split(" to ")
1374 a, b = date.split(" to ")
1372 start, stop = lower(a), upper(b)
1375 start, stop = lower(a), upper(b)
1373 return lambda x: x >= start and x <= stop
1376 return lambda x: x >= start and x <= stop
1374 else:
1377 else:
1375 start, stop = lower(date), upper(date)
1378 start, stop = lower(date), upper(date)
1376 return lambda x: x >= start and x <= stop
1379 return lambda x: x >= start and x <= stop
1377
1380
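Besides the forms listed in the docstring, the code above also accepts '-{days}' (within the last N days) and '{date} to {date}' ranges. The '-{days}' form needs only makedate(), so it works as a standalone example:

pred = matchdate('-30')                        # changesets from the last 30 days
print(pred(makedate()[0]))                     # True  - "now" is inside the window
print(pred(makedate()[0] - 31 * 24 * 3600))    # False - 31 days ago is not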
1378 def shortuser(user):
1381 def shortuser(user):
1379 """Return a short representation of a user name or email address."""
1382 """Return a short representation of a user name or email address."""
1380 f = user.find('@')
1383 f = user.find('@')
1381 if f >= 0:
1384 if f >= 0:
1382 user = user[:f]
1385 user = user[:f]
1383 f = user.find('<')
1386 f = user.find('<')
1384 if f >= 0:
1387 if f >= 0:
1385 user = user[f+1:]
1388 user = user[f+1:]
1386 f = user.find(' ')
1389 f = user.find(' ')
1387 if f >= 0:
1390 if f >= 0:
1388 user = user[:f]
1391 user = user[:f]
1389 f = user.find('.')
1392 f = user.find('.')
1390 if f >= 0:
1393 if f >= 0:
1391 user = user[:f]
1394 user = user[:f]
1392 return user
1395 return user
1393
1396
1394 def ellipsis(text, maxlength=400):
1397 def ellipsis(text, maxlength=400):
1395 """Trim string to at most maxlength (default: 400) characters."""
1398 """Trim string to at most maxlength (default: 400) characters."""
1396 if len(text) <= maxlength:
1399 if len(text) <= maxlength:
1397 return text
1400 return text
1398 else:
1401 else:
1399 return "%s..." % (text[:maxlength-3])
1402 return "%s..." % (text[:maxlength-3])
1400
1403
1401 def walkrepos(path):
1404 def walkrepos(path):
1402 '''yield every hg repository under path, recursively.'''
1405 '''yield every hg repository under path, recursively.'''
1403 def errhandler(err):
1406 def errhandler(err):
1404 if err.filename == path:
1407 if err.filename == path:
1405 raise err
1408 raise err
1406
1409
1407 for root, dirs, files in os.walk(path, onerror=errhandler):
1410 for root, dirs, files in os.walk(path, onerror=errhandler):
1408 for d in dirs:
1411 for d in dirs:
1409 if d == '.hg':
1412 if d == '.hg':
1410 yield root
1413 yield root
1411 dirs[:] = []
1414 dirs[:] = []
1412 break
1415 break
1413
1416
1414 _rcpath = None
1417 _rcpath = None
1415
1418
1416 def os_rcpath():
1419 def os_rcpath():
1417 '''return default os-specific hgrc search path'''
1420 '''return default os-specific hgrc search path'''
1418 path = system_rcpath()
1421 path = system_rcpath()
1419 path.extend(user_rcpath())
1422 path.extend(user_rcpath())
1420 path = [os.path.normpath(f) for f in path]
1423 path = [os.path.normpath(f) for f in path]
1421 return path
1424 return path
1422
1425
1423 def rcpath():
1426 def rcpath():
1424 '''return hgrc search path. if env var HGRCPATH is set, use it.
1427 '''return hgrc search path. if env var HGRCPATH is set, use it.
1425 for each item in path, if directory, use files ending in .rc,
1428 for each item in path, if directory, use files ending in .rc,
1426 else use item.
1429 else use item.
1427 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1430 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1428 if no HGRCPATH, use default os-specific path.'''
1431 if no HGRCPATH, use default os-specific path.'''
1429 global _rcpath
1432 global _rcpath
1430 if _rcpath is None:
1433 if _rcpath is None:
1431 if 'HGRCPATH' in os.environ:
1434 if 'HGRCPATH' in os.environ:
1432 _rcpath = []
1435 _rcpath = []
1433 for p in os.environ['HGRCPATH'].split(os.pathsep):
1436 for p in os.environ['HGRCPATH'].split(os.pathsep):
1434 if not p: continue
1437 if not p: continue
1435 if os.path.isdir(p):
1438 if os.path.isdir(p):
1436 for f in os.listdir(p):
1439 for f in os.listdir(p):
1437 if f.endswith('.rc'):
1440 if f.endswith('.rc'):
1438 _rcpath.append(os.path.join(p, f))
1441 _rcpath.append(os.path.join(p, f))
1439 else:
1442 else:
1440 _rcpath.append(p)
1443 _rcpath.append(p)
1441 else:
1444 else:
1442 _rcpath = os_rcpath()
1445 _rcpath = os_rcpath()
1443 return _rcpath
1446 return _rcpath
1444
1447
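A small sketch (assuming rcpath() has not been called yet, since it caches its result): setting HGRCPATH to an empty string limits configuration to the repository's own .hg/hgrc.

os.environ['HGRCPATH'] = ''
print(rcpath())    # []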
1445 def bytecount(nbytes):
1448 def bytecount(nbytes):
1446 '''return byte count formatted as readable string, with units'''
1449 '''return byte count formatted as readable string, with units'''
1447
1450
1448 units = (
1451 units = (
1449 (100, 1<<30, _('%.0f GB')),
1452 (100, 1<<30, _('%.0f GB')),
1450 (10, 1<<30, _('%.1f GB')),
1453 (10, 1<<30, _('%.1f GB')),
1451 (1, 1<<30, _('%.2f GB')),
1454 (1, 1<<30, _('%.2f GB')),
1452 (100, 1<<20, _('%.0f MB')),
1455 (100, 1<<20, _('%.0f MB')),
1453 (10, 1<<20, _('%.1f MB')),
1456 (10, 1<<20, _('%.1f MB')),
1454 (1, 1<<20, _('%.2f MB')),
1457 (1, 1<<20, _('%.2f MB')),
1455 (100, 1<<10, _('%.0f KB')),
1458 (100, 1<<10, _('%.0f KB')),
1456 (10, 1<<10, _('%.1f KB')),
1459 (10, 1<<10, _('%.1f KB')),
1457 (1, 1<<10, _('%.2f KB')),
1460 (1, 1<<10, _('%.2f KB')),
1458 (1, 1, _('%.0f bytes')),
1461 (1, 1, _('%.0f bytes')),
1459 )
1462 )
1460
1463
1461 for multiplier, divisor, format in units:
1464 for multiplier, divisor, format in units:
1462 if nbytes >= divisor * multiplier:
1465 if nbytes >= divisor * multiplier:
1463 return format % (nbytes / float(divisor))
1466 return format % (nbytes / float(divisor))
1464 return units[-1][2] % nbytes
1467 return units[-1][2] % nbytes
1465
1468
1466 def drop_scheme(scheme, path):
1469 def drop_scheme(scheme, path):
1467 sc = scheme + ':'
1470 sc = scheme + ':'
1468 if path.startswith(sc):
1471 if path.startswith(sc):
1469 path = path[len(sc):]
1472 path = path[len(sc):]
1470 if path.startswith('//'):
1473 if path.startswith('//'):
1471 path = path[2:]
1474 path = path[2:]
1472 return path
1475 return path