fix coding style (reported by pylint)
Benoit Boissinot
r10394:4612cded default
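The changes in the hunks below are pylint-driven style cleanups: statements lose their trailing semicolons (shown as -/+ pairs in the listings), for example in bzr.py's _gettreechanges():

    revid = current._revision_id;   # before
    revid = current._revision_id    # after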
@@ -1,260 +1,260 @@
# bzr.py - bzr support for the convert extension
#
# Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# This module is for handling 'bzr', which was formerly known as Bazaar-NG;
# it cannot access 'baz' repositories, but they were never used very much

import os
from mercurial import demandimport
# these do not work with demandimport, blacklist
demandimport.ignore.extend([
    'bzrlib.transactions',
    'bzrlib.urlutils',
])

from mercurial.i18n import _
from mercurial import util
from common import NoRepo, commit, converter_source

try:
    # bazaar imports
    from bzrlib import branch, revision, errors
    from bzrlib.revisionspec import RevisionSpec
except ImportError:
    pass

supportedkinds = ('file', 'symlink')

class bzr_source(converter_source):
    """Reads Bazaar repositories by using the Bazaar Python libraries"""

    def __init__(self, ui, path, rev=None):
        super(bzr_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '.bzr')):
            raise NoRepo('%s does not look like a Bazaar repo' % path)

        try:
            # access bzrlib stuff
            branch
        except NameError:
            raise NoRepo('Bazaar modules could not be loaded')

        path = os.path.abspath(path)
        self._checkrepotype(path)
        self.branch = branch.Branch.open(path)
        self.sourcerepo = self.branch.repository
        self._parentids = {}

    def _checkrepotype(self, path):
        # Lightweight checkouts detection is informational but probably
        # fragile at API level. It should not terminate the conversion.
        try:
            from bzrlib import bzrdir
            dir = bzrdir.BzrDir.open_containing(path)[0]
            try:
                tree = dir.open_workingtree(recommend_upgrade=False)
                branch = tree.branch
            except (errors.NoWorkingTree, errors.NotLocalUrl), e:
                tree = None
                branch = dir.open_branch()
            if (tree is not None and tree.bzrdir.root_transport.base !=
                branch.bzrdir.root_transport.base):
                self.ui.warn(_('warning: lightweight checkouts may cause '
                               'conversion failures, try with a regular '
                               'branch instead.\n'))
        except:
            self.ui.note(_('bzr source type could not be determined\n'))

    def before(self):
        """Before the conversion begins, acquire a read lock
        for all the operations that might need it. Fortunately
        read locks don't block other reads or writes to the
        repository, so this shouldn't have any impact on the usage of
        the source repository.

        The alternative would be locking on every operation that
        needs locks (there are currently two: getting the file and
        getting the parent map) and releasing immediately after,
        but this approach can take up to 40% longer."""
        self.sourcerepo.lock_read()

    def after(self):
        self.sourcerepo.unlock()

    def getheads(self):
        if not self.rev:
            return [self.branch.last_revision()]
        try:
            r = RevisionSpec.from_string(self.rev)
            info = r.in_history(self.branch)
        except errors.BzrError:
            raise util.Abort(_('%s is not a valid revision in current branch')
                             % self.rev)
        return [info.rev_id]

    def getfile(self, name, rev):
        revtree = self.sourcerepo.revision_tree(rev)
        fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
        kind = None
        if fileid is not None:
            kind = revtree.kind(fileid)
        if kind not in supportedkinds:
            # the file is not available anymore - was deleted
            raise IOError(_('%s is not available in %s anymore') %
                          (name, rev))
        if kind == 'symlink':
            target = revtree.get_symlink_target(fileid)
            if target is None:
                raise util.Abort(_('%s.%s symlink has no target')
                                 % (name, rev))
            return target
        else:
            sio = revtree.get_file(fileid)
            return sio.read()

    def getmode(self, name, rev):
        return self._modecache[(name, rev)]

    def getchanges(self, version):
        # set up caches: modecache and revtree
        self._modecache = {}
        self._revtree = self.sourcerepo.revision_tree(version)
        # get the parentids from the cache
        parentids = self._parentids.pop(version)
        # only diff against first parent id
        prevtree = self.sourcerepo.revision_tree(parentids[0])
        return self._gettreechanges(self._revtree, prevtree)

    def getcommit(self, version):
        rev = self.sourcerepo.get_revision(version)
        # populate parent id cache
        if not rev.parent_ids:
            parents = []
            self._parentids[version] = (revision.NULL_REVISION,)
        else:
            parents = self._filterghosts(rev.parent_ids)
            self._parentids[version] = parents

        return commit(parents=parents,
                      date='%d %d' % (rev.timestamp, -rev.timezone),
                      author=self.recode(rev.committer),
                      # bzr returns bytestrings or unicode, depending on the content
                      desc=self.recode(rev.message),
                      rev=version)

    def gettags(self):
        if not self.branch.supports_tags():
            return {}
        tagdict = self.branch.tags.get_tag_dict()
        bytetags = {}
        for name, rev in tagdict.iteritems():
            bytetags[self.recode(name)] = rev
        return bytetags

    def getchangedfiles(self, rev, i):
        self._modecache = {}
        curtree = self.sourcerepo.revision_tree(rev)
        if i is not None:
            parentid = self._parentids[rev][i]
        else:
            # no parent id, get the empty revision
            parentid = revision.NULL_REVISION

        prevtree = self.sourcerepo.revision_tree(parentid)
        changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
        return changes

    def _gettreechanges(self, current, origin):
-        revid = current._revision_id;
+        revid = current._revision_id
        changes = []
        renames = {}
        for (fileid, paths, changed_content, versioned, parent, name,
             kind, executable) in current.iter_changes(origin):

            if paths[0] == u'' or paths[1] == u'':
                # ignore changes to tree root
                continue

            # bazaar tracks directories, mercurial does not, so
            # we have to rename the directory contents
            if kind[1] == 'directory':
                if kind[0] not in (None, 'directory'):
                    # Replacing 'something' with a directory, record it
                    # so it can be removed.
                    changes.append((self.recode(paths[0]), revid))

                if None not in paths and paths[0] != paths[1]:
                    # neither an add nor a delete - a move
                    # rename all directory contents manually
                    subdir = origin.inventory.path2id(paths[0])
                    # get all child-entries of the directory
                    for name, entry in origin.inventory.iter_entries(subdir):
                        # hg does not track directory renames
                        if entry.kind == 'directory':
                            continue
                        frompath = self.recode(paths[0] + '/' + name)
                        topath = self.recode(paths[1] + '/' + name)
                        # register the files as changed
                        changes.append((frompath, revid))
                        changes.append((topath, revid))
                        # add to mode cache
                        mode = ((entry.executable and 'x')
                                or (entry.kind == 'symlink' and 's')
                                or '')
                        self._modecache[(topath, revid)] = mode
                        # register the change as move
                        renames[topath] = frompath

                # no further changes, go to the next change
                continue

            # we got unicode paths, need to convert them
            path, topath = [self.recode(part) for part in paths]

            if topath is None:
                # file deleted
                changes.append((path, revid))
                continue

            # renamed
            if path and path != topath:
                renames[topath] = path
                changes.append((path, revid))

            # populate the mode cache
            kind, executable = [e[1] for e in (kind, executable)]
            mode = ((executable and 'x') or (kind == 'symlink' and 'l')
                    or '')
            self._modecache[(topath, revid)] = mode
            changes.append((topath, revid))

        return changes, renames

    def _filterghosts(self, ids):
        """Filters out ghost revisions which hg does not support, see
        <http://bazaar-vcs.org/GhostRevision>
        """
        parentmap = self.sourcerepo.get_parent_map(ids)
        parents = tuple([parent for parent in ids if parent in parentmap])
        return parents

    def recode(self, s, encoding=None):
247 """This version of recode tries to encode unicode to bytecode,
247 """This version of recode tries to encode unicode to bytecode,
248 and preferably using the UTF-8 codec.
248 and preferably using the UTF-8 codec.
249 Other types than Unicode are silently returned, this is by
249 Other types than Unicode are silently returned, this is by
250 intention, e.g. the None-type is not going to be encoded but instead
250 intention, e.g. the None-type is not going to be encoded but instead
251 just passed through
251 just passed through
252 """
252 """
253 if not encoding:
253 if not encoding:
254 encoding = self.encoding or 'utf-8'
254 encoding = self.encoding or 'utf-8'
255
255
256 if isinstance(s, unicode):
256 if isinstance(s, unicode):
257 return s.encode(encoding)
257 return s.encode(encoding)
258 else:
258 else:
259 # leave it alone
259 # leave it alone
260 return s
260 return s
@@ -1,345 +1,345 @@
# gnuarch.py - GNU Arch support for the convert extension
#
# Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
# and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from common import NoRepo, commandline, commit, converter_source
from mercurial.i18n import _
from mercurial import util
import os, shutil, tempfile, stat, locale
from email.Parser import Parser

class gnuarch_source(converter_source, commandline):

    class gnuarch_rev(object):
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            self.continuationof = None
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            self.ren_files = {}
            self.ren_dirs = {}

    def __init__(self, ui, path, rev=None):
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repo") % path)

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.find_exe('baz'):
            self.execmd = 'baz'
        else:
            if util.find_exe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}
        self.parents = {}
        self.tags = {}
        self.modecache = {}
        self.catlogparser = Parser()
        self.locale = locale.getpreferredencoding()
        self.archives = []

    def before(self):
        # Get registered archives
        self.archives = [i.rstrip('\n')
                         for i in self.runlines0('archives', '-n')]

        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        self.parents[None] = []
        treeversion = self.treeversion
        child = None
        while treeversion:
            self.ui.status(_('analyzing tree version %s...\n') % treeversion)

            archive = treeversion.split('/')[0]
            if archive not in self.archives:
                self.ui.status(_('tree analysis stopped because it points to '
                                 'an unregistered archive %s...\n') % archive)
                break

            # Get the complete list of revisions for that tree version
            output, status = self.runlines('revisions', '-r', '-f', treeversion)
            self.checkexit(status, 'failed retrieving revisions for %s'
                           % treeversion)

            # No new iteration unless a revision has a continuation-of header
            treeversion = None

            for l in output:
                rev = l.strip()
                self.changes[rev] = self.gnuarch_rev(rev)
                self.parents[rev] = []

                # Read author, date and summary
                catlog, status = self.run('cat-log', '-d', self.path, rev)
                if status:
                    catlog = self.run0('cat-archive-log', rev)
                self._parsecatlog(catlog, rev)

                # Populate the parents map
                self.parents[child].append(rev)

                # Keep track of the current revision as the child of the next
                # revision scanned
                child = rev

                # Check if we have to follow the usual incremental history
                # or if we have to 'jump' to a different treeversion given
                # by the continuation-of header.
                if self.changes[rev].continuationof:
                    treeversion = '--'.join(
                        self.changes[rev].continuationof.split('--')[:-1])
                    break

                # If we reached a base-0 revision w/o any continuation-of
                # header, it means the tree history ends here.
                if rev[-6:] == 'base-0':
                    break

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def getheads(self):
        return self.parents[None]

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        # Raise IOError if necessary (i.e. deleted files).
        if not os.path.exists(os.path.join(self.tmppath, name)):
            raise IOError

        data, mode = self._getfile(name, rev)
        self.modecache[(name, rev)] = mode

        return data

    def getmode(self, name, rev):
        return self.modecache[(name, rev)]

    def getchanges(self, rev):
        self.modecache = {}
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[to] = src

        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
-            chgs, cps = self._rendirchanges(src, to);
+            chgs, cps = self._rendirchanges(src, to)
            changes += [(f, rev) for f in chgs]
            copies.update(cps)

        self.lastrev = rev
        return sorted(set(changes)), copies

    def getcommit(self, rev):
        changes = self.changes[rev]
        return commit(author=changes.author, date=changes.date,
                      desc=changes.summary, parents=self.parents[rev], rev=rev)

    def gettags(self):
        return self.tags

    def _execute(self, cmd, *args, **kwargs):
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        cmdline += ['>', util.nulldev, '2>', util.nulldev]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)

    def _update(self, rev):
        self.ui.debug('applying revision %s...\n' % rev)
        changeset, status = self.runlines('replay', '-d', self.tmppath,
                                          rev)
        if status:
            # Something went wrong while merging (baz or tla
            # issue?), get latest revision and try from there
            shutil.rmtree(self.tmppath, ignore_errors=True)
            self._obtainrevision(rev)
        else:
            old_rev = self.parents[rev][0]
            self.ui.debug('computing changeset between %s and %s...\n'
                          % (old_rev, rev))
            self._parsechangeset(changeset, rev)

    def _getfile(self, name, rev):
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            mode = mode and 'l' or ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            mode = (mode & 0111) and 'x' or ''
        return data, mode

    def _exclude(self, name):
        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
        for exc in exclude:
            if name.find(exc) != -1:
                return True
        return False

    def _readcontents(self, path):
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot of time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files

    def _rendirchanges(self, src, dest):
        changes = []
        copies = {}
        files = self._readcontents(os.path.join(self.tmppath, dest))
        for f in files:
            s = os.path.join(src, f)
            d = os.path.join(dest, f)
            changes.append(s)
            changes.append(d)
            copies[d] = s
        return changes, copies

    def _obtainrevision(self, rev):
        self.ui.debug('obtaining revision %s...\n' % rev)
        output = self._execute('get', rev, self.tmppath)
        self.checkexit(output)
        self.ui.debug('analyzing revision %s...\n' % rev)
        files = self._readcontents(self.tmppath)
        self.changes[rev].add_files += files

    def _stripbasepath(self, path):
        if path.startswith('./'):
            return path[2:]
        return path

    def _parsecatlog(self, data, rev):
        try:
            catlog = self.catlogparser.parsestr(data)

            # Commit date
            self.changes[rev].date = util.datestr(
                util.strdate(catlog['Standard-date'],
                             '%Y-%m-%d %H:%M:%S'))

            # Commit author
            self.changes[rev].author = self.recode(catlog['Creator'])

            # Commit description
            self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
                                                     catlog.get_payload()))
            self.changes[rev].summary = self.recode(self.changes[rev].summary)

            # Commit revision origin when dealing with a branch or tag
            if 'Continuation-of' in catlog:
                self.changes[rev].continuationof = self.recode(
                    catlog['Continuation-of'])
        except Exception:
            raise util.Abort(_('could not parse cat-log of %s') % rev)

    def _parsechangeset(self, data, rev):
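        # Illustrative shapes of the lines handled below (assumed from the
        # prefixes this parser checks; exact tla/baz output may differ):
        #   'A  path/new.c'        added file
        #   'D  path/old.c'        deleted file
        #   'M  path/changed.c'    modified file
        #   '=> old.c    new.c'    renamed file
        #   '/> olddir   newdir'   renamed directory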
        for l in data:
            l = l.strip()
            # Added file (ignore added directory)
            if l.startswith('A') and not l.startswith('A/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].add_files.append(file)
            # Deleted file (ignore deleted directory)
            elif l.startswith('D') and not l.startswith('D/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].del_files.append(file)
            # Modified binary file
            elif l.startswith('Mb'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified link
            elif l.startswith('M->'):
                file = self._stripbasepath(l[3:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified file
            elif l.startswith('M'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed file (or link)
            elif l.startswith('=>'):
                files = l[2:].strip().split(' ')
                if len(files) == 1:
                    files = l[2:].strip().split('\t')
                src = self._stripbasepath(files[0])
                dst = self._stripbasepath(files[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_files[src] = dst
            # Conversion from file to link or from link to file (modified)
            elif l.startswith('ch'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed directory
            elif l.startswith('/>'):
                dirs = l[2:].strip().split(' ')
                if len(dirs) == 1:
                    dirs = l[2:].strip().split('\t')
                src = self._stripbasepath(dirs[0])
                dst = self._stripbasepath(dirs[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_dirs[src] = dst
@@ -1,283 +1,283 @@
# extdiff.py - external diff program support for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''command to allow external programs to compare revisions

The extdiff Mercurial extension allows you to use external programs
to compare revisions, or a revision with the working directory. The
external diff programs are called with a configurable set of options
and two non-option arguments: paths to directories containing
snapshots of files to compare.

The extdiff extension also allows you to configure new diff commands,
so you do not need to type "hg extdiff -p kdiff3" every time. ::

  [extdiff]
  # add new command that runs GNU diff(1) in 'context diff' mode
  cdiff = gdiff -Nprc5
  ## or the old way:
  #cmd.cdiff = gdiff
  #opts.cdiff = -Nprc5

  # add new command called vdiff, runs kdiff3
  vdiff = kdiff3

  # add new command called meld, runs meld (no need to name twice)
  meld =

  # add new command called vimdiff, runs gvimdiff with DirDiff plugin
  # (see http://www.vim.org/scripts/script.php?script_id=102) Non-English
  # users, be sure to put "let g:DirDiffDynamicDiffText = 1" in
  # your .vimrc
  vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'

You can use -I/-X and a list of file or directory names as with the
normal "hg diff" command. The extdiff extension makes snapshots of only
the needed files, so running the external diff program will actually be
pretty fast (at least faster than having to compare the entire tree).
'''

from mercurial.i18n import _
from mercurial.node import short, nullid
from mercurial import cmdutil, util, commands, encoding
import os, shlex, shutil, tempfile, re

def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.'''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
                (len(files)))
    wopener = util.opener(base)
    fns_and_mtime = []
    ctx = repo[node]
    for fn in files:
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        fctx = ctx[wfn]
        data = repo.wwritedata(wfn, fctx.data())
        if 'l' in fctx.flags():
            wopener.symlink(data, wfn)
        else:
            wopener(wfn, 'w').write(data)
            if 'x' in fctx.flags():
                util.set_flags(dest, False, True)
        if node is None:
            fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest)))
    return dirname, fns_and_mtime

def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
    '''Do the actual diff:

    - copy to a temp structure if diffing 2 internal revisions
    - copy to a temp structure if diffing working revision with
      another one and more than 1 file is changed
    - just invoke the diff for a single file in the working dir
    '''

    revs = opts.get('rev')
    change = opts.get('change')
    args = ' '.join(diffopts)
    do3way = '$parent2' in args

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = repo.lookup(change)
        node1a, node1b = repo.changelog.parents(node2)
    else:
        node1a, node2 = cmdutil.revpair(repo, revs)
        if not revs:
            node1b = repo.dirstate.parents()[1]
        else:
            node1b = nullid

    # Disable 3-way merge if there is only one parent
    if do3way:
        if node1b == nullid:
            do3way = False

    matcher = cmdutil.match(repo, pats, opts)
    mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
    if do3way:
        mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
    else:
        mod_b, add_b, rem_b = set(), set(), set()
    modadd = mod_a | add_a | mod_b | add_b
    common = modadd | rem_a | rem_b
    if not common:
        return 0

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # Always make a copy of node1a (and node1b, if applicable)
        dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
        dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
        if do3way:
            dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
            dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
        else:
            dir1b = None

        fns_and_mtime = []

        # If node2 is not the wc or there is >1 change, copy it
        dir2root = ''
        if node2:
            dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
        elif len(common) > 1:
            #we only actually need to get the files to copy back to
            #the working dir in this case (because the other cases
            #are: diffing 2 revisions or single file -- in which case
            #the file is already directly passed to the diff tool).
            dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
        else:
            # This lets the diff tool open the changed file directly
            dir2 = ''
            dir2root = repo.root

        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(common) == 1:
            common_file = util.localpath(common.pop())
            dir1a = os.path.join(dir1a, common_file)
            if not os.path.isfile(os.path.join(tmproot, dir1a)):
                dir1a = os.devnull
            if do3way:
                dir1b = os.path.join(dir1b, common_file)
                if not os.path.isfile(os.path.join(tmproot, dir1b)):
                    dir1b = os.devnull
            dir2 = os.path.join(dir2root, dir2, common_file)

        # Function to quote file/dir names in the argument string.
        # When not operating in 3-way mode, an empty string is
        # returned for parent2
        replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b, child=dir2)
        def quote(match):
            key = match.group()[1:]
            if not do3way and key == 'parent2':
                return ''
            return util.shellquote(replace[key])

        # Match parent2 first, so 'parent1?' will match both parent1 and parent
        regex = '\$(parent2|parent1?|child)'
        if not do3way and not re.search(regex, args):
            args += ' $parent1 $child'
        args = re.sub(regex, quote, args)
        cmdline = util.shellquote(diffcmd) + ' ' + args

        ui.debug('running %r in %s\n' % (cmdline, tmproot))
        util.system(cmdline, cwd=tmproot)

        for copy_fn, working_fn, mtime in fns_and_mtime:
            if os.path.getmtime(copy_fn) != mtime:
                ui.debug('file changed while diffing. '
                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
                util.copyfile(copy_fn, working_fn)

        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)

def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is diff, with
    default options "-Npru".

    To select a different program, use the -p/--program option. The
    program will be passed the names of two directories to compare. To
    pass additional options to the program, use -o/--option. These
    will be passed before the names of the directories to compare.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.'''
    program = opts.get('program')
    option = opts.get('option')
    if not program:
        program = 'diff'
        option = option or ['-Npru']
    return dodiff(ui, repo, program, option, pats, opts)

cmdtable = {
    "extdiff":
        (extdiff,
         [('p', 'program', '', _('comparison program to run')),
          ('o', 'option', [], _('pass option to comparison program')),
          ('r', 'rev', [], _('revision')),
          ('c', 'change', '', _('change made by revision')),
         ] + commands.walkopts,
         _('hg extdiff [OPT]... [FILE]...')),
    }

def uisetup(ui):
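    # Per the module docstring, an [extdiff] entry may be either the short
    # form "cmd = program [options]" or the older split form
    # "cmd.name = program" plus "opts.name = options"; both are handled below.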
239 for cmd, path in ui.configitems('extdiff'):
239 for cmd, path in ui.configitems('extdiff'):
240 if cmd.startswith('cmd.'):
240 if cmd.startswith('cmd.'):
241 cmd = cmd[4:]
241 cmd = cmd[4:]
242 if not path:
242 if not path:
243 path = cmd
243 path = cmd
244 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
244 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
245 diffopts = diffopts and [diffopts] or []
245 diffopts = diffopts and [diffopts] or []
246 elif cmd.startswith('opts.'):
246 elif cmd.startswith('opts.'):
247 continue
247 continue
248 else:
248 else:
249 # command = path opts
249 # command = path opts
250 if path:
250 if path:
251 diffopts = shlex.split(path)
251 diffopts = shlex.split(path)
252 path = diffopts.pop(0)
252 path = diffopts.pop(0)
253 else:
253 else:
254 path, diffopts = cmd, []
254 path, diffopts = cmd, []
255 def save(cmd, path, diffopts):
255 def save(cmd, path, diffopts):
256 '''use closure to save diff command to use'''
256 '''use closure to save diff command to use'''
257 def mydiff(ui, repo, *pats, **opts):
257 def mydiff(ui, repo, *pats, **opts):
258 return dodiff(ui, repo, path, diffopts + opts['option'],
258 return dodiff(ui, repo, path, diffopts + opts['option'],
259 pats, opts)
259 pats, opts)
260 doc = _('''\
260 doc = _('''\
261 use %(path)s to diff repository (or selected files)
261 use %(path)s to diff repository (or selected files)
262
262
263 Show differences between revisions for the specified files, using
263 Show differences between revisions for the specified files, using
264 the %(path)s program.
264 the %(path)s program.
265
265
266 When two revision arguments are given, then changes are shown
266 When two revision arguments are given, then changes are shown
267 between those revisions. If only one revision is specified then
267 between those revisions. If only one revision is specified then
268 that revision is compared to the working directory, and, when no
268 that revision is compared to the working directory, and, when no
269 revisions are specified, the working directory is compared
269 revisions are specified, the working directory is compared
270 to its parent.\
270 to its parent.\
271 ''') % dict(path=util.uirepr(path))
271 ''') % dict(path=util.uirepr(path))
272
272
273 # We must translate the docstring right away since it is
273 # We must translate the docstring right away since it is
274 # used as a format string. The string will unfortunately
274 # used as a format string. The string will unfortunately
275 # be translated again in commands.helpcmd and this will
275 # be translated again in commands.helpcmd and this will
276 # fail when the docstring contains non-ASCII characters.
276 # fail when the docstring contains non-ASCII characters.
277 # Decoding the string to a Unicode string here (using the
277 # Decoding the string to a Unicode string here (using the
278 # right encoding) prevents that.
278 # right encoding) prevents that.
279 mydiff.__doc__ = doc.decode(encoding.encoding)
279 mydiff.__doc__ = doc.decode(encoding.encoding)
280 return mydiff
280 return mydiff
281 cmdtable[cmd] = (save(cmd, path, diffopts),
281 cmdtable[cmd] = (save(cmd, path, diffopts),
282 cmdtable['extdiff'][1][1:],
282 cmdtable['extdiff'][1][1:],
283 _('hg %s [OPTION]... [FILE]...') % cmd)
283 _('hg %s [OPTION]... [FILE]...') % cmd)
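The uisetup() hook above turns every entry in the [extdiff] section of the configuration into a new command: 'cmd.NAME'/'opts.NAME' pairs name the external program and its options explicitly, while a bare 'NAME = program options' value is split with shlex and its first token becomes the program. A minimal sketch of such a configuration, using the hypothetical command names mydiff and gdiff (only diff and its -Npru default are taken from the code above):

[extdiff]
# shorthand form: the value is the program followed by its options
mydiff = diff -Npru
# explicit form: separate cmd./opts. keys for the same command
cmd.gdiff = diff
opts.gdiff = -Npru

With either form enabled, an invocation such as 'hg mydiff -r REV1 -r REV2' should compare the two revisions with the configured program, since the generated commands reuse extdiff's own -r/-c/-o options.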
@@ -1,286 +1,286 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match
9 from mercurial import util, commands, match
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 class gpg(object):
13 class gpg(object):
14 def __init__(self, path, key=None):
14 def __init__(self, path, key=None):
15 self.path = path
15 self.path = path
16 self.key = (key and " --local-user \"%s\"" % key) or ""
16 self.key = (key and " --local-user \"%s\"" % key) or ""
17
17
18 def sign(self, data):
18 def sign(self, data):
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 return util.filter(data, gpgcmd)
20 return util.filter(data, gpgcmd)
21
21
22 def verify(self, data, sig):
22 def verify(self, data, sig):
23 """ returns of the good and bad signatures"""
23 """ returns of the good and bad signatures"""
24 sigfile = datafile = None
24 sigfile = datafile = None
25 try:
25 try:
26 # create temporary files
26 # create temporary files
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 fp = os.fdopen(fd, 'wb')
28 fp = os.fdopen(fd, 'wb')
29 fp.write(sig)
29 fp.write(sig)
30 fp.close()
30 fp.close()
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(data)
33 fp.write(data)
34 fp.close()
34 fp.close()
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 ret = util.filter("", gpgcmd)
37 ret = util.filter("", gpgcmd)
38 finally:
38 finally:
39 for f in (sigfile, datafile):
39 for f in (sigfile, datafile):
40 try:
40 try:
41 if f:
41 if f:
42 os.unlink(f)
42 os.unlink(f)
43 except:
43 except:
44 pass
44 pass
45 keys = []
45 keys = []
46 key, fingerprint = None, None
46 key, fingerprint = None, None
47 err = ""
47 err = ""
48 for l in ret.splitlines():
48 for l in ret.splitlines():
49 # see DETAILS in the gnupg documentation
49 # see DETAILS in the gnupg documentation
50 # filter the logger output
50 # filter the logger output
51 if not l.startswith("[GNUPG:]"):
51 if not l.startswith("[GNUPG:]"):
52 continue
52 continue
53 l = l[9:]
53 l = l[9:]
54 if l.startswith("ERRSIG"):
54 if l.startswith("ERRSIG"):
55 err = _("error while verifying signature")
55 err = _("error while verifying signature")
56 break
56 break
57 elif l.startswith("VALIDSIG"):
57 elif l.startswith("VALIDSIG"):
58 # fingerprint of the primary key
58 # fingerprint of the primary key
59 fingerprint = l.split()[10]
59 fingerprint = l.split()[10]
60 elif (l.startswith("GOODSIG") or
60 elif (l.startswith("GOODSIG") or
61 l.startswith("EXPSIG") or
61 l.startswith("EXPSIG") or
62 l.startswith("EXPKEYSIG") or
62 l.startswith("EXPKEYSIG") or
63 l.startswith("BADSIG")):
63 l.startswith("BADSIG")):
64 if key is not None:
64 if key is not None:
65 keys.append(key + [fingerprint])
65 keys.append(key + [fingerprint])
66 key = l.split(" ", 2)
66 key = l.split(" ", 2)
67 fingerprint = None
67 fingerprint = None
68 if err:
68 if err:
69 return err, []
69 return err, []
70 if key is not None:
70 if key is not None:
71 keys.append(key + [fingerprint])
71 keys.append(key + [fingerprint])
72 return err, keys
72 return err, keys
73
73
74 def newgpg(ui, **opts):
74 def newgpg(ui, **opts):
75 """create a new gpg instance"""
75 """create a new gpg instance"""
76 gpgpath = ui.config("gpg", "cmd", "gpg")
76 gpgpath = ui.config("gpg", "cmd", "gpg")
77 gpgkey = opts.get('key')
77 gpgkey = opts.get('key')
78 if not gpgkey:
78 if not gpgkey:
79 gpgkey = ui.config("gpg", "key", None)
79 gpgkey = ui.config("gpg", "key", None)
80 return gpg(gpgpath, gpgkey)
80 return gpg(gpgpath, gpgkey)
81
81
82 def sigwalk(repo):
82 def sigwalk(repo):
83 """
83 """
84 walk over every sig, yielding a pair
84 walk over every sig, yielding a pair
85 ((node, version, sig), (filename, linenumber))
85 ((node, version, sig), (filename, linenumber))
86 """
86 """
87 def parsefile(fileiter, context):
87 def parsefile(fileiter, context):
88 ln = 1
88 ln = 1
89 for l in fileiter:
89 for l in fileiter:
90 if not l:
90 if not l:
91 continue
91 continue
92 yield (l.split(" ", 2), (context, ln))
92 yield (l.split(" ", 2), (context, ln))
93 ln +=1
93 ln += 1
94
94
95 # read the heads
95 # read the heads
96 fl = repo.file(".hgsigs")
96 fl = repo.file(".hgsigs")
97 for r in reversed(fl.heads()):
97 for r in reversed(fl.heads()):
98 fn = ".hgsigs|%s" % hgnode.short(r)
98 fn = ".hgsigs|%s" % hgnode.short(r)
99 for item in parsefile(fl.read(r).splitlines(), fn):
99 for item in parsefile(fl.read(r).splitlines(), fn):
100 yield item
100 yield item
101 try:
101 try:
102 # read local signatures
102 # read local signatures
103 fn = "localsigs"
103 fn = "localsigs"
104 for item in parsefile(repo.opener(fn), fn):
104 for item in parsefile(repo.opener(fn), fn):
105 yield item
105 yield item
106 except IOError:
106 except IOError:
107 pass
107 pass
108
108
109 def getkeys(ui, repo, mygpg, sigdata, context):
109 def getkeys(ui, repo, mygpg, sigdata, context):
110 """get the keys who signed a data"""
110 """get the keys who signed a data"""
111 fn, ln = context
111 fn, ln = context
112 node, version, sig = sigdata
112 node, version, sig = sigdata
113 prefix = "%s:%d" % (fn, ln)
113 prefix = "%s:%d" % (fn, ln)
114 node = hgnode.bin(node)
114 node = hgnode.bin(node)
115
115
116 data = node2txt(repo, node, version)
116 data = node2txt(repo, node, version)
117 sig = binascii.a2b_base64(sig)
117 sig = binascii.a2b_base64(sig)
118 err, keys = mygpg.verify(data, sig)
118 err, keys = mygpg.verify(data, sig)
119 if err:
119 if err:
120 ui.warn("%s:%d %s\n" % (fn, ln , err))
120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 return None
121 return None
122
122
123 validkeys = []
123 validkeys = []
124 # warn for expired key and/or sigs
124 # warn for expired key and/or sigs
125 for key in keys:
125 for key in keys:
126 if key[0] == "BADSIG":
126 if key[0] == "BADSIG":
127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 continue
128 continue
129 if key[0] == "EXPSIG":
129 if key[0] == "EXPSIG":
130 ui.write(_("%s Note: Signature has expired"
130 ui.write(_("%s Note: Signature has expired"
131 " (signed by: \"%s\")\n") % (prefix, key[2]))
131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 elif key[0] == "EXPKEYSIG":
132 elif key[0] == "EXPKEYSIG":
133 ui.write(_("%s Note: This key has expired"
133 ui.write(_("%s Note: This key has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 validkeys.append((key[1], key[2], key[3]))
135 validkeys.append((key[1], key[2], key[3]))
136 return validkeys
136 return validkeys
137
137
138 def sigs(ui, repo):
138 def sigs(ui, repo):
139 """list signed changesets"""
139 """list signed changesets"""
140 mygpg = newgpg(ui)
140 mygpg = newgpg(ui)
141 revs = {}
141 revs = {}
142
142
143 for data, context in sigwalk(repo):
143 for data, context in sigwalk(repo):
144 node, version, sig = data
144 node, version, sig = data
145 fn, ln = context
145 fn, ln = context
146 try:
146 try:
147 n = repo.lookup(node)
147 n = repo.lookup(node)
148 except KeyError:
148 except KeyError:
149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 continue
150 continue
151 r = repo.changelog.rev(n)
151 r = repo.changelog.rev(n)
152 keys = getkeys(ui, repo, mygpg, data, context)
152 keys = getkeys(ui, repo, mygpg, data, context)
153 if not keys:
153 if not keys:
154 continue
154 continue
155 revs.setdefault(r, [])
155 revs.setdefault(r, [])
156 revs[r].extend(keys)
156 revs[r].extend(keys)
157 for rev in sorted(revs, reverse=True):
157 for rev in sorted(revs, reverse=True):
158 for k in revs[rev]:
158 for k in revs[rev]:
159 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
159 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
160 ui.write("%-30s %s\n" % (keystr(ui, k), r))
160 ui.write("%-30s %s\n" % (keystr(ui, k), r))
161
161
162 def check(ui, repo, rev):
162 def check(ui, repo, rev):
163 """verify all the signatures there may be for a particular revision"""
163 """verify all the signatures there may be for a particular revision"""
164 mygpg = newgpg(ui)
164 mygpg = newgpg(ui)
165 rev = repo.lookup(rev)
165 rev = repo.lookup(rev)
166 hexrev = hgnode.hex(rev)
166 hexrev = hgnode.hex(rev)
167 keys = []
167 keys = []
168
168
169 for data, context in sigwalk(repo):
169 for data, context in sigwalk(repo):
170 node, version, sig = data
170 node, version, sig = data
171 if node == hexrev:
171 if node == hexrev:
172 k = getkeys(ui, repo, mygpg, data, context)
172 k = getkeys(ui, repo, mygpg, data, context)
173 if k:
173 if k:
174 keys.extend(k)
174 keys.extend(k)
175
175
176 if not keys:
176 if not keys:
177 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
177 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
178 return
178 return
179
179
180 # print summary
180 # print summary
181 ui.write("%s is signed by:\n" % hgnode.short(rev))
181 ui.write("%s is signed by:\n" % hgnode.short(rev))
182 for key in keys:
182 for key in keys:
183 ui.write(" %s\n" % keystr(ui, key))
183 ui.write(" %s\n" % keystr(ui, key))
184
184
185 def keystr(ui, key):
185 def keystr(ui, key):
186 """associate a string to a key (username, comment)"""
186 """associate a string to a key (username, comment)"""
187 keyid, user, fingerprint = key
187 keyid, user, fingerprint = key
188 comment = ui.config("gpg", fingerprint, None)
188 comment = ui.config("gpg", fingerprint, None)
189 if comment:
189 if comment:
190 return "%s (%s)" % (user, comment)
190 return "%s (%s)" % (user, comment)
191 else:
191 else:
192 return user
192 return user
193
193
194 def sign(ui, repo, *revs, **opts):
194 def sign(ui, repo, *revs, **opts):
195 """add a signature for the current or given revision
195 """add a signature for the current or given revision
196
196
197 If no revision is given, the parent of the working directory is used,
197 If no revision is given, the parent of the working directory is used,
198 or tip if no revision is checked out.
198 or tip if no revision is checked out.
199
199
200 See 'hg help dates' for a list of formats valid for -d/--date.
200 See 'hg help dates' for a list of formats valid for -d/--date.
201 """
201 """
202
202
203 mygpg = newgpg(ui, **opts)
203 mygpg = newgpg(ui, **opts)
204 sigver = "0"
204 sigver = "0"
205 sigmessage = ""
205 sigmessage = ""
206
206
207 date = opts.get('date')
207 date = opts.get('date')
208 if date:
208 if date:
209 opts['date'] = util.parsedate(date)
209 opts['date'] = util.parsedate(date)
210
210
211 if revs:
211 if revs:
212 nodes = [repo.lookup(n) for n in revs]
212 nodes = [repo.lookup(n) for n in revs]
213 else:
213 else:
214 nodes = [node for node in repo.dirstate.parents()
214 nodes = [node for node in repo.dirstate.parents()
215 if node != hgnode.nullid]
215 if node != hgnode.nullid]
216 if len(nodes) > 1:
216 if len(nodes) > 1:
217 raise util.Abort(_('uncommitted merge - please provide a '
217 raise util.Abort(_('uncommitted merge - please provide a '
218 'specific revision'))
218 'specific revision'))
219 if not nodes:
219 if not nodes:
220 nodes = [repo.changelog.tip()]
220 nodes = [repo.changelog.tip()]
221
221
222 for n in nodes:
222 for n in nodes:
223 hexnode = hgnode.hex(n)
223 hexnode = hgnode.hex(n)
224 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
224 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
225 hgnode.short(n)))
225 hgnode.short(n)))
226 # build data
226 # build data
227 data = node2txt(repo, n, sigver)
227 data = node2txt(repo, n, sigver)
228 sig = mygpg.sign(data)
228 sig = mygpg.sign(data)
229 if not sig:
229 if not sig:
230 raise util.Abort(_("Error while signing"))
230 raise util.Abort(_("Error while signing"))
231 sig = binascii.b2a_base64(sig)
231 sig = binascii.b2a_base64(sig)
232 sig = sig.replace("\n", "")
232 sig = sig.replace("\n", "")
233 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
233 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
234
234
235 # write it
235 # write it
236 if opts['local']:
236 if opts['local']:
237 repo.opener("localsigs", "ab").write(sigmessage)
237 repo.opener("localsigs", "ab").write(sigmessage)
238 return
238 return
239
239
240 for x in repo.status(unknown=True)[:5]:
240 for x in repo.status(unknown=True)[:5]:
241 if ".hgsigs" in x and not opts["force"]:
241 if ".hgsigs" in x and not opts["force"]:
242 raise util.Abort(_("working copy of .hgsigs is changed "
242 raise util.Abort(_("working copy of .hgsigs is changed "
243 "(please commit .hgsigs manually "
243 "(please commit .hgsigs manually "
244 "or use --force)"))
244 "or use --force)"))
245
245
246 repo.wfile(".hgsigs", "ab").write(sigmessage)
246 repo.wfile(".hgsigs", "ab").write(sigmessage)
247
247
248 if '.hgsigs' not in repo.dirstate:
248 if '.hgsigs' not in repo.dirstate:
249 repo.add([".hgsigs"])
249 repo.add([".hgsigs"])
250
250
251 if opts["no_commit"]:
251 if opts["no_commit"]:
252 return
252 return
253
253
254 message = opts['message']
254 message = opts['message']
255 if not message:
255 if not message:
256 # we don't translate commit messages
256 # we don't translate commit messages
257 message = "\n".join(["Added signature for changeset %s"
257 message = "\n".join(["Added signature for changeset %s"
258 % hgnode.short(n)
258 % hgnode.short(n)
259 for n in nodes])
259 for n in nodes])
260 try:
260 try:
261 m = match.exact(repo.root, '', ['.hgsigs'])
261 m = match.exact(repo.root, '', ['.hgsigs'])
262 repo.commit(message, opts['user'], opts['date'], match=m)
262 repo.commit(message, opts['user'], opts['date'], match=m)
263 except ValueError, inst:
263 except ValueError, inst:
264 raise util.Abort(str(inst))
264 raise util.Abort(str(inst))
265
265
266 def node2txt(repo, node, ver):
266 def node2txt(repo, node, ver):
267 """map a manifest into some text"""
267 """map a manifest into some text"""
268 if ver == "0":
268 if ver == "0":
269 return "%s\n" % hgnode.hex(node)
269 return "%s\n" % hgnode.hex(node)
270 else:
270 else:
271 raise util.Abort(_("unknown signature version"))
271 raise util.Abort(_("unknown signature version"))
272
272
273 cmdtable = {
273 cmdtable = {
274 "sign":
274 "sign":
275 (sign,
275 (sign,
276 [('l', 'local', None, _('make the signature local')),
276 [('l', 'local', None, _('make the signature local')),
277 ('f', 'force', None, _('sign even if the sigfile is modified')),
277 ('f', 'force', None, _('sign even if the sigfile is modified')),
278 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
278 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
279 ('k', 'key', '', _('the key id to sign with')),
279 ('k', 'key', '', _('the key id to sign with')),
280 ('m', 'message', '', _('commit message')),
280 ('m', 'message', '', _('commit message')),
281 ] + commands.commitopts2,
281 ] + commands.commitopts2,
282 _('hg sign [OPTION]... [REVISION]...')),
282 _('hg sign [OPTION]... [REVISION]...')),
283 "sigcheck": (check, [], _('hg sigcheck REVISION')),
283 "sigcheck": (check, [], _('hg sigcheck REVISION')),
284 "sigs": (sigs, [], _('hg sigs')),
284 "sigs": (sigs, [], _('hg sigs')),
285 }
285 }
286
286
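As a side note on the record format the code above reads and writes: sign() appends one line per signature to .hgsigs (or to localsigs with --local), and sigwalk()/getkeys() split it back on the first two spaces. A small standalone sketch, with made-up values standing in for the 40-character hex node and the much longer base64 detached signature:

# hypothetical record; real signatures come from "gpg --sign --detach-sign"
line = "0123456789abcdef0123456789abcdef01234567 0 aGVsbG8="
node, version, sig = line.split(" ", 2)   # same split as parsefile() above
payload = node + "\n"                     # for version "0", node2txt() verifies the hex node plus newline
print("%s version=%s siglen=%d" % (node, version, len(sig)))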
@@ -1,347 +1,347 b''
1 # Minimal support for git commands on an hg repository
1 # Minimal support for git commands on an hg repository
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''browse the repository in a graphical way
8 '''browse the repository in a graphical way
9
9
10 The hgk extension allows browsing the history of a repository in a
10 The hgk extension allows browsing the history of a repository in a
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
11 graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
12 distributed with Mercurial.)
12 distributed with Mercurial.)
13
13
14 hgk consists of two parts: a Tcl script that does the displaying and
14 hgk consists of two parts: a Tcl script that does the displaying and
15 querying of information, and an extension to Mercurial named hgk.py,
15 querying of information, and an extension to Mercurial named hgk.py,
16 which provides hooks for hgk to get information. hgk can be found in
16 which provides hooks for hgk to get information. hgk can be found in
17 the contrib directory, and the extension is shipped in the hgext
17 the contrib directory, and the extension is shipped in the hgext
18 repository and needs to be enabled.
18 repository and needs to be enabled.
19
19
20 The hg view command will launch the hgk Tcl script. For this command
20 The hg view command will launch the hgk Tcl script. For this command
21 to work, hgk must be in your search path. Alternately, you can specify
21 to work, hgk must be in your search path. Alternately, you can specify
22 the path to hgk in your .hgrc file::
22 the path to hgk in your .hgrc file::
23
23
24 [hgk]
24 [hgk]
25 path=/location/of/hgk
25 path=/location/of/hgk
26
26
27 hgk can make use of the extdiff extension to visualize revisions.
27 hgk can make use of the extdiff extension to visualize revisions.
28 Assuming you have already configured an extdiff vdiff command, just add::
28 Assuming you have already configured an extdiff vdiff command, just add::
29
29
30 [hgk]
30 [hgk]
31 vdiff=vdiff
31 vdiff=vdiff
32
32
33 The revisions context menu will now display additional entries to fire
33 The revisions context menu will now display additional entries to fire
34 vdiff on the hovered and selected revisions.
34 vdiff on the hovered and selected revisions.
35 '''
35 '''
36
36
37 import os
37 import os
38 from mercurial import commands, util, patch, revlog, cmdutil
38 from mercurial import commands, util, patch, revlog, cmdutil
39 from mercurial.node import nullid, nullrev, short
39 from mercurial.node import nullid, nullrev, short
40 from mercurial.i18n import _
40 from mercurial.i18n import _
41
41
42 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
42 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
43 """diff trees from two commits"""
43 """diff trees from two commits"""
44 def __difftree(repo, node1, node2, files=[]):
44 def __difftree(repo, node1, node2, files=[]):
45 assert node2 is not None
45 assert node2 is not None
46 mmap = repo[node1].manifest()
46 mmap = repo[node1].manifest()
47 mmap2 = repo[node2].manifest()
47 mmap2 = repo[node2].manifest()
48 m = cmdutil.match(repo, files)
48 m = cmdutil.match(repo, files)
49 modified, added, removed = repo.status(node1, node2, m)[:3]
49 modified, added, removed = repo.status(node1, node2, m)[:3]
50 empty = short(nullid)
50 empty = short(nullid)
51
51
52 for f in modified:
52 for f in modified:
53 # TODO get file permissions
53 # TODO get file permissions
54 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
54 ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
55 (short(mmap[f]), short(mmap2[f]), f, f))
55 (short(mmap[f]), short(mmap2[f]), f, f))
56 for f in added:
56 for f in added:
57 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
57 ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
58 (empty, short(mmap2[f]), f, f))
58 (empty, short(mmap2[f]), f, f))
59 for f in removed:
59 for f in removed:
60 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
60 ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
61 (short(mmap[f]), empty, f, f))
61 (short(mmap[f]), empty, f, f))
62 ##
62 ##
63
63
64 while True:
64 while True:
65 if opts['stdin']:
65 if opts['stdin']:
66 try:
66 try:
67 line = raw_input().split(' ')
67 line = raw_input().split(' ')
68 node1 = line[0]
68 node1 = line[0]
69 if len(line) > 1:
69 if len(line) > 1:
70 node2 = line[1]
70 node2 = line[1]
71 else:
71 else:
72 node2 = None
72 node2 = None
73 except EOFError:
73 except EOFError:
74 break
74 break
75 node1 = repo.lookup(node1)
75 node1 = repo.lookup(node1)
76 if node2:
76 if node2:
77 node2 = repo.lookup(node2)
77 node2 = repo.lookup(node2)
78 else:
78 else:
79 node2 = node1
79 node2 = node1
80 node1 = repo.changelog.parents(node1)[0]
80 node1 = repo.changelog.parents(node1)[0]
81 if opts['patch']:
81 if opts['patch']:
82 if opts['pretty']:
82 if opts['pretty']:
83 catcommit(ui, repo, node2, "")
83 catcommit(ui, repo, node2, "")
84 m = cmdutil.match(repo, files)
84 m = cmdutil.match(repo, files)
85 chunks = patch.diff(repo, node1, node2, match=m,
85 chunks = patch.diff(repo, node1, node2, match=m,
86 opts=patch.diffopts(ui, {'git': True}))
86 opts=patch.diffopts(ui, {'git': True}))
87 for chunk in chunks:
87 for chunk in chunks:
88 ui.write(chunk)
88 ui.write(chunk)
89 else:
89 else:
90 __difftree(repo, node1, node2, files=files)
90 __difftree(repo, node1, node2, files=files)
91 if not opts['stdin']:
91 if not opts['stdin']:
92 break
92 break
93
93
94 def catcommit(ui, repo, n, prefix, ctx=None):
94 def catcommit(ui, repo, n, prefix, ctx=None):
95 nlprefix = '\n' + prefix;
95 nlprefix = '\n' + prefix
96 if ctx is None:
96 if ctx is None:
97 ctx = repo[n]
97 ctx = repo[n]
98 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
98 ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
99 for p in ctx.parents():
99 for p in ctx.parents():
100 ui.write("parent %s\n" % p)
100 ui.write("parent %s\n" % p)
101
101
102 date = ctx.date()
102 date = ctx.date()
103 description = ctx.description().replace("\0", "")
103 description = ctx.description().replace("\0", "")
104 lines = description.splitlines()
104 lines = description.splitlines()
105 if lines and lines[-1].startswith('committer:'):
105 if lines and lines[-1].startswith('committer:'):
106 committer = lines[-1].split(': ')[1].rstrip()
106 committer = lines[-1].split(': ')[1].rstrip()
107 else:
107 else:
108 committer = ctx.user()
108 committer = ctx.user()
109
109
110 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
110 ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
111 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
111 ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
112 ui.write("revision %d\n" % ctx.rev())
112 ui.write("revision %d\n" % ctx.rev())
113 ui.write("branch %s\n\n" % ctx.branch())
113 ui.write("branch %s\n\n" % ctx.branch())
114
114
115 if prefix != "":
115 if prefix != "":
116 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
116 ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
117 else:
117 else:
118 ui.write(description + "\n")
118 ui.write(description + "\n")
119 if prefix:
119 if prefix:
120 ui.write('\0')
120 ui.write('\0')
121
121
122 def base(ui, repo, node1, node2):
122 def base(ui, repo, node1, node2):
123 """output common ancestor information"""
123 """output common ancestor information"""
124 node1 = repo.lookup(node1)
124 node1 = repo.lookup(node1)
125 node2 = repo.lookup(node2)
125 node2 = repo.lookup(node2)
126 n = repo.changelog.ancestor(node1, node2)
126 n = repo.changelog.ancestor(node1, node2)
127 ui.write(short(n) + "\n")
127 ui.write(short(n) + "\n")
128
128
129 def catfile(ui, repo, type=None, r=None, **opts):
129 def catfile(ui, repo, type=None, r=None, **opts):
130 """cat a specific revision"""
130 """cat a specific revision"""
131 # in stdin mode, every line except the commit is prefixed with two
131 # in stdin mode, every line except the commit is prefixed with two
132 spaces. This way our caller can find the commit without magic
132 spaces. This way our caller can find the commit without magic
133 # strings
133 # strings
134 #
134 #
135 prefix = ""
135 prefix = ""
136 if opts['stdin']:
136 if opts['stdin']:
137 try:
137 try:
138 (type, r) = raw_input().split(' ');
138 (type, r) = raw_input().split(' ')
139 prefix = " "
139 prefix = " "
140 except EOFError:
140 except EOFError:
141 return
141 return
142
142
143 else:
143 else:
144 if not type or not r:
144 if not type or not r:
145 ui.warn(_("cat-file: type or revision not supplied\n"))
145 ui.warn(_("cat-file: type or revision not supplied\n"))
146 commands.help_(ui, 'cat-file')
146 commands.help_(ui, 'cat-file')
147
147
148 while r:
148 while r:
149 if type != "commit":
149 if type != "commit":
150 ui.warn(_("aborting hg cat-file only understands commits\n"))
150 ui.warn(_("aborting hg cat-file only understands commits\n"))
151 return 1;
151 return 1
152 n = repo.lookup(r)
152 n = repo.lookup(r)
153 catcommit(ui, repo, n, prefix)
153 catcommit(ui, repo, n, prefix)
154 if opts['stdin']:
154 if opts['stdin']:
155 try:
155 try:
156 (type, r) = raw_input().split(' ');
156 (type, r) = raw_input().split(' ')
157 except EOFError:
157 except EOFError:
158 break
158 break
159 else:
159 else:
160 break
160 break
161
161
162 # git rev-tree is a confusing thing. You can supply a number of
162 # git rev-tree is a confusing thing. You can supply a number of
163 # commit sha1s on the command line, and it walks the commit history
163 # commit sha1s on the command line, and it walks the commit history
164 # telling you which commits are reachable from the supplied ones via
164 # telling you which commits are reachable from the supplied ones via
165 # a bitmask based on arg position.
165 # a bitmask based on arg position.
166 # you can specify a commit to stop at by starting the sha1 with ^
166 # you can specify a commit to stop at by starting the sha1 with ^
167 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
167 def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
168 def chlogwalk():
168 def chlogwalk():
169 count = len(repo)
169 count = len(repo)
170 i = count
170 i = count
171 l = [0] * 100
171 l = [0] * 100
172 chunk = 100
172 chunk = 100
173 while True:
173 while True:
174 if chunk > i:
174 if chunk > i:
175 chunk = i
175 chunk = i
176 i = 0
176 i = 0
177 else:
177 else:
178 i -= chunk
178 i -= chunk
179
179
180 for x in xrange(chunk):
180 for x in xrange(chunk):
181 if i + x >= count:
181 if i + x >= count:
182 l[chunk - x:] = [0] * (chunk - x)
182 l[chunk - x:] = [0] * (chunk - x)
183 break
183 break
184 if full != None:
184 if full != None:
185 l[x] = repo[i + x]
185 l[x] = repo[i + x]
186 l[x].changeset() # force reading
186 l[x].changeset() # force reading
187 else:
187 else:
188 l[x] = 1
188 l[x] = 1
189 for x in xrange(chunk - 1, -1, -1):
189 for x in xrange(chunk - 1, -1, -1):
190 if l[x] != 0:
190 if l[x] != 0:
191 yield (i + x, full != None and l[x] or None)
191 yield (i + x, full != None and l[x] or None)
192 if i == 0:
192 if i == 0:
193 break
193 break
194
194
195 # calculate and return the reachability bitmask for sha
195 # calculate and return the reachability bitmask for sha
196 def is_reachable(ar, reachable, sha):
196 def is_reachable(ar, reachable, sha):
197 if len(ar) == 0:
197 if len(ar) == 0:
198 return 1
198 return 1
199 mask = 0
199 mask = 0
200 for i in xrange(len(ar)):
200 for i in xrange(len(ar)):
201 if sha in reachable[i]:
201 if sha in reachable[i]:
202 mask |= 1 << i
202 mask |= 1 << i
203
203
204 return mask
204 return mask
205
205
206 reachable = []
206 reachable = []
207 stop_sha1 = []
207 stop_sha1 = []
208 want_sha1 = []
208 want_sha1 = []
209 count = 0
209 count = 0
210
210
211 # figure out which commits they are asking for and which ones they
211 # figure out which commits they are asking for and which ones they
212 # want us to stop on
212 # want us to stop on
213 for i, arg in enumerate(args):
213 for i, arg in enumerate(args):
214 if arg.startswith('^'):
214 if arg.startswith('^'):
215 s = repo.lookup(arg[1:])
215 s = repo.lookup(arg[1:])
216 stop_sha1.append(s)
216 stop_sha1.append(s)
217 want_sha1.append(s)
217 want_sha1.append(s)
218 elif arg != 'HEAD':
218 elif arg != 'HEAD':
219 want_sha1.append(repo.lookup(arg))
219 want_sha1.append(repo.lookup(arg))
220
220
221 # calculate the graph for the supplied commits
221 # calculate the graph for the supplied commits
222 for i, n in enumerate(want_sha1):
222 for i, n in enumerate(want_sha1):
223 reachable.append(set());
223 reachable.append(set())
224 visit = [n];
224 visit = [n]
225 reachable[i].add(n)
225 reachable[i].add(n)
226 while visit:
226 while visit:
227 n = visit.pop(0)
227 n = visit.pop(0)
228 if n in stop_sha1:
228 if n in stop_sha1:
229 continue
229 continue
230 for p in repo.changelog.parents(n):
230 for p in repo.changelog.parents(n):
231 if p not in reachable[i]:
231 if p not in reachable[i]:
232 reachable[i].add(p)
232 reachable[i].add(p)
233 visit.append(p)
233 visit.append(p)
234 if p in stop_sha1:
234 if p in stop_sha1:
235 continue
235 continue
236
236
237 # walk the repository looking for commits that are in our
237 # walk the repository looking for commits that are in our
238 # reachability graph
238 # reachability graph
239 for i, ctx in chlogwalk():
239 for i, ctx in chlogwalk():
240 n = repo.changelog.node(i)
240 n = repo.changelog.node(i)
241 mask = is_reachable(want_sha1, reachable, n)
241 mask = is_reachable(want_sha1, reachable, n)
242 if mask:
242 if mask:
243 parentstr = ""
243 parentstr = ""
244 if parents:
244 if parents:
245 pp = repo.changelog.parents(n)
245 pp = repo.changelog.parents(n)
246 if pp[0] != nullid:
246 if pp[0] != nullid:
247 parentstr += " " + short(pp[0])
247 parentstr += " " + short(pp[0])
248 if pp[1] != nullid:
248 if pp[1] != nullid:
249 parentstr += " " + short(pp[1])
249 parentstr += " " + short(pp[1])
250 if not full:
250 if not full:
251 ui.write("%s%s\n" % (short(n), parentstr))
251 ui.write("%s%s\n" % (short(n), parentstr))
252 elif full == "commit":
252 elif full == "commit":
253 ui.write("%s%s\n" % (short(n), parentstr))
253 ui.write("%s%s\n" % (short(n), parentstr))
254 catcommit(ui, repo, n, ' ', ctx)
254 catcommit(ui, repo, n, ' ', ctx)
255 else:
255 else:
256 (p1, p2) = repo.changelog.parents(n)
256 (p1, p2) = repo.changelog.parents(n)
257 (h, h1, h2) = map(short, (n, p1, p2))
257 (h, h1, h2) = map(short, (n, p1, p2))
258 (i1, i2) = map(repo.changelog.rev, (p1, p2))
258 (i1, i2) = map(repo.changelog.rev, (p1, p2))
259
259
260 date = ctx.date()[0]
260 date = ctx.date()[0]
261 ui.write("%s %s:%s" % (date, h, mask))
261 ui.write("%s %s:%s" % (date, h, mask))
262 mask = is_reachable(want_sha1, reachable, p1)
262 mask = is_reachable(want_sha1, reachable, p1)
263 if i1 != nullrev and mask > 0:
263 if i1 != nullrev and mask > 0:
264 ui.write("%s:%s " % (h1, mask)),
264 ui.write("%s:%s " % (h1, mask)),
265 mask = is_reachable(want_sha1, reachable, p2)
265 mask = is_reachable(want_sha1, reachable, p2)
266 if i2 != nullrev and mask > 0:
266 if i2 != nullrev and mask > 0:
267 ui.write("%s:%s " % (h2, mask))
267 ui.write("%s:%s " % (h2, mask))
268 ui.write("\n")
268 ui.write("\n")
269 if maxnr and count >= maxnr:
269 if maxnr and count >= maxnr:
270 break
270 break
271 count += 1
271 count += 1
272
272
273 def revparse(ui, repo, *revs, **opts):
273 def revparse(ui, repo, *revs, **opts):
274 """parse given revisions"""
274 """parse given revisions"""
275 def revstr(rev):
275 def revstr(rev):
276 if rev == 'HEAD':
276 if rev == 'HEAD':
277 rev = 'tip'
277 rev = 'tip'
278 return revlog.hex(repo.lookup(rev))
278 return revlog.hex(repo.lookup(rev))
279
279
280 for r in revs:
280 for r in revs:
281 revrange = r.split(':', 1)
281 revrange = r.split(':', 1)
282 ui.write('%s\n' % revstr(revrange[0]))
282 ui.write('%s\n' % revstr(revrange[0]))
283 if len(revrange) == 2:
283 if len(revrange) == 2:
284 ui.write('^%s\n' % revstr(revrange[1]))
284 ui.write('^%s\n' % revstr(revrange[1]))
285
285
286 # git rev-list tries to order things by date, and has the ability to stop
286 # git rev-list tries to order things by date, and has the ability to stop
287 # at a given commit without walking the whole repo. TODO add the stop
287 # at a given commit without walking the whole repo. TODO add the stop
288 # parameter
288 # parameter
289 def revlist(ui, repo, *revs, **opts):
289 def revlist(ui, repo, *revs, **opts):
290 """print revisions"""
290 """print revisions"""
291 if opts['header']:
291 if opts['header']:
292 full = "commit"
292 full = "commit"
293 else:
293 else:
294 full = None
294 full = None
295 copy = [x for x in revs]
295 copy = [x for x in revs]
296 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
296 revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
297
297
298 def config(ui, repo, **opts):
298 def config(ui, repo, **opts):
299 """print extension options"""
299 """print extension options"""
300 def writeopt(name, value):
300 def writeopt(name, value):
301 ui.write('k=%s\nv=%s\n' % (name, value))
301 ui.write('k=%s\nv=%s\n' % (name, value))
302
302
303 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
303 writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
304
304
305
305
306 def view(ui, repo, *etc, **opts):
306 def view(ui, repo, *etc, **opts):
307 "start interactive history viewer"
307 "start interactive history viewer"
308 os.chdir(repo.root)
308 os.chdir(repo.root)
309 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
309 optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
310 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
310 cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
311 ui.debug("running %s\n" % cmd)
311 ui.debug("running %s\n" % cmd)
312 util.system(cmd)
312 util.system(cmd)
313
313
314 cmdtable = {
314 cmdtable = {
315 "^view":
315 "^view":
316 (view,
316 (view,
317 [('l', 'limit', '', _('limit number of changes displayed'))],
317 [('l', 'limit', '', _('limit number of changes displayed'))],
318 _('hg view [-l LIMIT] [REVRANGE]')),
318 _('hg view [-l LIMIT] [REVRANGE]')),
319 "debug-diff-tree":
319 "debug-diff-tree":
320 (difftree,
320 (difftree,
321 [('p', 'patch', None, _('generate patch')),
321 [('p', 'patch', None, _('generate patch')),
322 ('r', 'recursive', None, _('recursive')),
322 ('r', 'recursive', None, _('recursive')),
323 ('P', 'pretty', None, _('pretty')),
323 ('P', 'pretty', None, _('pretty')),
324 ('s', 'stdin', None, _('stdin')),
324 ('s', 'stdin', None, _('stdin')),
325 ('C', 'copy', None, _('detect copies')),
325 ('C', 'copy', None, _('detect copies')),
326 ('S', 'search', "", _('search'))],
326 ('S', 'search', "", _('search'))],
327 _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
327 _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
328 "debug-cat-file":
328 "debug-cat-file":
329 (catfile,
329 (catfile,
330 [('s', 'stdin', None, _('stdin'))],
330 [('s', 'stdin', None, _('stdin'))],
331 _('hg debug-cat-file [OPTION]... TYPE FILE')),
331 _('hg debug-cat-file [OPTION]... TYPE FILE')),
332 "debug-config":
332 "debug-config":
333 (config, [], _('hg debug-config')),
333 (config, [], _('hg debug-config')),
334 "debug-merge-base":
334 "debug-merge-base":
335 (base, [], _('hg debug-merge-base REV REV')),
335 (base, [], _('hg debug-merge-base REV REV')),
336 "debug-rev-parse":
336 "debug-rev-parse":
337 (revparse,
337 (revparse,
338 [('', 'default', '', _('ignored'))],
338 [('', 'default', '', _('ignored'))],
339 _('hg debug-rev-parse REV')),
339 _('hg debug-rev-parse REV')),
340 "debug-rev-list":
340 "debug-rev-list":
341 (revlist,
341 (revlist,
342 [('H', 'header', None, _('header')),
342 [('H', 'header', None, _('header')),
343 ('t', 'topo-order', None, _('topo-order')),
343 ('t', 'topo-order', None, _('topo-order')),
344 ('p', 'parents', None, _('parents')),
344 ('p', 'parents', None, _('parents')),
345 ('n', 'max-count', 0, _('max-count'))],
345 ('n', 'max-count', 0, _('max-count'))],
346 _('hg debug-rev-list [OPTION]... REV...')),
346 _('hg debug-rev-list [OPTION]... REV...')),
347 }
347 }
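The reachability bitmask that revtree() prints is easiest to see on toy data. A minimal standalone sketch, with plain sets standing in for the ancestor sets built from repo.changelog.parents() and made-up commit names; as in is_reachable(), bit i of the mask is set when a commit is reachable from the i-th command-line argument:

reachable = [{"a", "b", "c"}, {"c", "d"}]      # ancestor sets for two arguments
def mask_for(sha):
    mask = 0
    for i, ancestors in enumerate(reachable):  # same loop shape as is_reachable()
        if sha in ancestors:
            mask |= 1 << i
    return mask
print(mask_for("c"))   # 3: reachable from both arguments
print(mask_for("d"))   # 2: reachable from the second argument only
print(mask_for("z"))   # 0: revtree() skips commits with an empty mask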
@@ -1,61 +1,61 b''
1 # highlight.py - highlight extension implementation file
1 # highlight.py - highlight extension implementation file
2 #
2 #
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
3 # Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # The original module was split into an interface and an implementation
8 # The original module was split into an interface and an implementation
9 # file to defer pygments loading and speed up extension setup.
9 # file to defer pygments loading and speed up extension setup.
10
10
11 from mercurial import demandimport
11 from mercurial import demandimport
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__',])
12 demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
13 from mercurial import util, encoding
13 from mercurial import util, encoding
14
14
15 from pygments import highlight
15 from pygments import highlight
16 from pygments.util import ClassNotFound
16 from pygments.util import ClassNotFound
17 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
17 from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
18 from pygments.formatters import HtmlFormatter
18 from pygments.formatters import HtmlFormatter
19
19
20 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
20 SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
21 'type="text/css" />')
21 'type="text/css" />')
22
22
23 def pygmentize(field, fctx, style, tmpl):
23 def pygmentize(field, fctx, style, tmpl):
24
24
25 # append a <link ...> to the syntax highlighting css
25 # append a <link ...> to the syntax highlighting css
26 old_header = ''.join(tmpl('header'))
26 old_header = ''.join(tmpl('header'))
27 if SYNTAX_CSS not in old_header:
27 if SYNTAX_CSS not in old_header:
28 new_header = old_header + SYNTAX_CSS
28 new_header = old_header + SYNTAX_CSS
29 tmpl.cache['header'] = new_header
29 tmpl.cache['header'] = new_header
30
30
31 text = fctx.data()
31 text = fctx.data()
32 if util.binary(text):
32 if util.binary(text):
33 return
33 return
34
34
35 # Pygments is best used with Unicode strings:
35 # Pygments is best used with Unicode strings:
36 # <http://pygments.org/docs/unicode/>
36 # <http://pygments.org/docs/unicode/>
37 text = text.decode(encoding.encoding, 'replace')
37 text = text.decode(encoding.encoding, 'replace')
38
38
39 # To get multi-line strings right, we can't format line-by-line
39 # To get multi-line strings right, we can't format line-by-line
40 try:
40 try:
41 lexer = guess_lexer_for_filename(fctx.path(), text[:1024])
41 lexer = guess_lexer_for_filename(fctx.path(), text[:1024])
42 except (ClassNotFound, ValueError):
42 except (ClassNotFound, ValueError):
43 try:
43 try:
44 lexer = guess_lexer(text[:1024])
44 lexer = guess_lexer(text[:1024])
45 except (ClassNotFound, ValueError):
45 except (ClassNotFound, ValueError):
46 lexer = TextLexer()
46 lexer = TextLexer()
47
47
48 formatter = HtmlFormatter(style=style)
48 formatter = HtmlFormatter(style=style)
49
49
50 colorized = highlight(text, lexer, formatter)
50 colorized = highlight(text, lexer, formatter)
51 # strip wrapping div
51 # strip wrapping div
52 colorized = colorized[:colorized.find('\n</pre>')]
52 colorized = colorized[:colorized.find('\n</pre>')]
53 colorized = colorized[colorized.find('<pre>')+5:]
53 colorized = colorized[colorized.find('<pre>')+5:]
54 coloriter = (s.encode(encoding.encoding, 'replace')
54 coloriter = (s.encode(encoding.encoding, 'replace')
55 for s in colorized.splitlines())
55 for s in colorized.splitlines())
56
56
57 tmpl.filters['colorize'] = lambda x: coloriter.next()
57 tmpl.filters['colorize'] = lambda x: coloriter.next()
58
58
59 oldl = tmpl.cache[field]
59 oldl = tmpl.cache[field]
60 newl = oldl.replace('line|escape', 'line|colorize')
60 newl = oldl.replace('line|escape', 'line|colorize')
61 tmpl.cache[field] = newl
61 tmpl.cache[field] = newl
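The Pygments calls above are the whole highlighting pipeline; here is a standalone sketch of the same flow on a throwaway snippet (the filename and style are arbitrary, and outside hgweb there is no template cache to patch):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import guess_lexer_for_filename, TextLexer
from pygments.util import ClassNotFound

text = 'def hello():\n    return "hi"\n'
try:
    lexer = guess_lexer_for_filename("example.py", text)   # hypothetical filename
except (ClassNotFound, ValueError):
    lexer = TextLexer()          # pygmentize() also tries guess_lexer() before this
html = highlight(text, lexer, HtmlFormatter(style="colorful"))
# highlight() wraps the markup in <div class="highlight"><pre>...</pre></div>,
# which is why pygmentize() trims that wrapper before splitting into lines
print(html.splitlines()[0])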
@@ -1,605 +1,605 b''
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to transplant changesets from another branch
8 '''command to transplant changesets from another branch
9
9
10 This extension allows you to transplant patches from another branch.
10 This extension allows you to transplant patches from another branch.
11
11
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 map from a changeset hash to its hash in the source repository.
13 map from a changeset hash to its hash in the source repository.
14 '''
14 '''
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 import os, tempfile
17 import os, tempfile
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
19 from mercurial import patch, revlog, util, error
19 from mercurial import patch, revlog, util, error
20
20
21 class transplantentry(object):
21 class transplantentry(object):
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants(object):
26 class transplants(object):
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
67 class transplanter(object):
67 class transplanter(object):
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants',
72 self.transplants = transplants(self.path, 'transplants',
73 opener=self.opener)
73 opener=self.opener)
74
74
75 def applied(self, repo, node, parent):
75 def applied(self, repo, node, parent):
76 '''returns True if a node is already an ancestor of parent
76 '''returns True if a node is already an ancestor of parent
77 or has already been transplanted'''
77 or has already been transplanted'''
78 if hasnode(repo, node):
78 if hasnode(repo, node):
79 if node in repo.changelog.reachable(parent, stop=node):
79 if node in repo.changelog.reachable(parent, stop=node):
80 return True
80 return True
81 for t in self.transplants.get(node):
81 for t in self.transplants.get(node):
82 # it might have been stripped
82 # it might have been stripped
83 if not hasnode(repo, t.lnode):
83 if not hasnode(repo, t.lnode):
84 self.transplants.remove(t)
84 self.transplants.remove(t)
85 return False
85 return False
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
87 return True
87 return True
88 return False
88 return False
89
89
90 def apply(self, repo, source, revmap, merges, opts={}):
90 def apply(self, repo, source, revmap, merges, opts={}):
91 '''apply the revisions in revmap one by one in revision order'''
91 '''apply the revisions in revmap one by one in revision order'''
92 revs = sorted(revmap)
92 revs = sorted(revmap)
93 p1, p2 = repo.dirstate.parents()
93 p1, p2 = repo.dirstate.parents()
94 pulls = []
94 pulls = []
95 diffopts = patch.diffopts(self.ui, opts)
95 diffopts = patch.diffopts(self.ui, opts)
96 diffopts.git = True
96 diffopts.git = True
97
97
98 lock = wlock = None
98 lock = wlock = None
99 try:
99 try:
100 wlock = repo.wlock()
100 wlock = repo.wlock()
101 lock = repo.lock()
101 lock = repo.lock()
102 for rev in revs:
102 for rev in revs:
103 node = revmap[rev]
103 node = revmap[rev]
104 revstr = '%s:%s' % (rev, revlog.short(node))
104 revstr = '%s:%s' % (rev, revlog.short(node))
105
105
106 if self.applied(repo, node, p1):
106 if self.applied(repo, node, p1):
107 self.ui.warn(_('skipping already applied revision %s\n') %
107 self.ui.warn(_('skipping already applied revision %s\n') %
108 revstr)
108 revstr)
109 continue
109 continue
110
110
111 parents = source.changelog.parents(node)
111 parents = source.changelog.parents(node)
112 if not opts.get('filter'):
112 if not opts.get('filter'):
113 # If the changeset parent is the same as the
113 # If the changeset parent is the same as the
114 # wdir's parent, just pull it.
114 # wdir's parent, just pull it.
115 if parents[0] == p1:
115 if parents[0] == p1:
116 pulls.append(node)
116 pulls.append(node)
117 p1 = node
117 p1 = node
118 continue
118 continue
119 if pulls:
119 if pulls:
120 if source != repo:
120 if source != repo:
121 repo.pull(source, heads=pulls)
121 repo.pull(source, heads=pulls)
122 merge.update(repo, pulls[-1], False, False, None)
122 merge.update(repo, pulls[-1], False, False, None)
123 p1, p2 = repo.dirstate.parents()
123 p1, p2 = repo.dirstate.parents()
124 pulls = []
124 pulls = []
125
125
126 domerge = False
126 domerge = False
127 if node in merges:
127 if node in merges:
128 # pulling all the merge revs at once would mean we
128 # pulling all the merge revs at once would mean we
129 # couldn't transplant after the latest even if
129 # couldn't transplant after the latest even if
130 # transplants before them fail.
130 # transplants before them fail.
131 domerge = True
131 domerge = True
132 if not hasnode(repo, node):
132 if not hasnode(repo, node):
133 repo.pull(source, heads=[node])
133 repo.pull(source, heads=[node])
134
134
135 if parents[1] != revlog.nullid:
135 if parents[1] != revlog.nullid:
136 self.ui.note(_('skipping merge changeset %s:%s\n')
136 self.ui.note(_('skipping merge changeset %s:%s\n')
137 % (rev, revlog.short(node)))
137 % (rev, revlog.short(node)))
138 patchfile = None
138 patchfile = None
139 else:
139 else:
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
141 fp = os.fdopen(fd, 'w')
141 fp = os.fdopen(fd, 'w')
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
143 for chunk in gen:
143 for chunk in gen:
144 fp.write(chunk)
144 fp.write(chunk)
145 fp.close()
145 fp.close()
146
146
147 del revmap[rev]
147 del revmap[rev]
148 if patchfile or domerge:
148 if patchfile or domerge:
149 try:
149 try:
150 n = self.applyone(repo, node,
150 n = self.applyone(repo, node,
151 source.changelog.read(node),
151 source.changelog.read(node),
152 patchfile, merge=domerge,
152 patchfile, merge=domerge,
153 log=opts.get('log'),
153 log=opts.get('log'),
154 filter=opts.get('filter'))
154 filter=opts.get('filter'))
155 if n and domerge:
155 if n and domerge:
156 self.ui.status(_('%s merged at %s\n') % (revstr,
156 self.ui.status(_('%s merged at %s\n') % (revstr,
157 revlog.short(n)))
157 revlog.short(n)))
158 elif n:
158 elif n:
159 self.ui.status(_('%s transplanted to %s\n')
159 self.ui.status(_('%s transplanted to %s\n')
160 % (revlog.short(node),
160 % (revlog.short(node),
161 revlog.short(n)))
161 revlog.short(n)))
162 finally:
162 finally:
163 if patchfile:
163 if patchfile:
164 os.unlink(patchfile)
164 os.unlink(patchfile)
165 if pulls:
165 if pulls:
166 repo.pull(source, heads=pulls)
166 repo.pull(source, heads=pulls)
167 merge.update(repo, pulls[-1], False, False, None)
167 merge.update(repo, pulls[-1], False, False, None)
168 finally:
168 finally:
169 self.saveseries(revmap, merges)
169 self.saveseries(revmap, merges)
170 self.transplants.write()
170 self.transplants.write()
171 lock.release()
171 lock.release()
172 wlock.release()
172 wlock.release()
173
173
174 def filter(self, filter, changelog, patchfile):
174 def filter(self, filter, changelog, patchfile):
175 '''arbitrarily rewrite changeset before applying it'''
175 '''arbitrarily rewrite changeset before applying it'''
176
176
177 self.ui.status(_('filtering %s\n') % patchfile)
177 self.ui.status(_('filtering %s\n') % patchfile)
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
179
179
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
181 fp = os.fdopen(fd, 'w')
181 fp = os.fdopen(fd, 'w')
182 fp.write("# HG changeset patch\n")
182 fp.write("# HG changeset patch\n")
183 fp.write("# User %s\n" % user)
183 fp.write("# User %s\n" % user)
184 fp.write("# Date %d %d\n" % date)
184 fp.write("# Date %d %d\n" % date)
185 fp.write(msg + '\n')
185 fp.write(msg + '\n')
186 fp.close()
186 fp.close()
187
187
188 try:
188 try:
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
190 util.shellquote(patchfile)),
190 util.shellquote(patchfile)),
191 environ={'HGUSER': changelog[1]},
191 environ={'HGUSER': changelog[1]},
192 onerr=util.Abort, errprefix=_('filter failed'))
192 onerr=util.Abort, errprefix=_('filter failed'))
193 user, date, msg = self.parselog(file(headerfile))[1:4]
193 user, date, msg = self.parselog(file(headerfile))[1:4]
194 finally:
194 finally:
195 os.unlink(headerfile)
195 os.unlink(headerfile)
196
196
197 return (user, date, msg)
197 return (user, date, msg)
198
198
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
200 filter=None):
200 filter=None):
201 '''apply the patch in patchfile to the repository as a transplant'''
201 '''apply the patch in patchfile to the repository as a transplant'''
202 (manifest, user, (time, timezone), files, message) = cl[:5]
202 (manifest, user, (time, timezone), files, message) = cl[:5]
203 date = "%d %d" % (time, timezone)
203 date = "%d %d" % (time, timezone)
204 extra = {'transplant_source': node}
204 extra = {'transplant_source': node}
205 if filter:
205 if filter:
206 (user, date, message) = self.filter(filter, cl, patchfile)
206 (user, date, message) = self.filter(filter, cl, patchfile)
207
207
208 if log:
208 if log:
209 # we don't translate messages inserted into commits
209 # we don't translate messages inserted into commits
210 message += '\n(transplanted from %s)' % revlog.hex(node)
210 message += '\n(transplanted from %s)' % revlog.hex(node)
211
211
212 self.ui.status(_('applying %s\n') % revlog.short(node))
212 self.ui.status(_('applying %s\n') % revlog.short(node))
213 self.ui.note('%s %s\n%s\n' % (user, date, message))
213 self.ui.note('%s %s\n%s\n' % (user, date, message))
214
214
215 if not patchfile and not merge:
215 if not patchfile and not merge:
216 raise util.Abort(_('can only omit patchfile if merging'))
216 raise util.Abort(_('can only omit patchfile if merging'))
217 if patchfile:
217 if patchfile:
218 try:
218 try:
219 files = {}
219 files = {}
220 try:
220 try:
221 patch.patch(patchfile, self.ui, cwd=repo.root,
221 patch.patch(patchfile, self.ui, cwd=repo.root,
222 files=files, eolmode=None)
222 files=files, eolmode=None)
223 if not files:
223 if not files:
224 self.ui.warn(_('%s: empty changeset')
224 self.ui.warn(_('%s: empty changeset')
225 % revlog.hex(node))
225 % revlog.hex(node))
226 return None
226 return None
227 finally:
227 finally:
228 files = patch.updatedir(self.ui, repo, files)
228 files = patch.updatedir(self.ui, repo, files)
229 except Exception, inst:
229 except Exception, inst:
230 seriespath = os.path.join(self.path, 'series')
230 seriespath = os.path.join(self.path, 'series')
231 if os.path.exists(seriespath):
231 if os.path.exists(seriespath):
232 os.unlink(seriespath)
232 os.unlink(seriespath)
233 p1 = repo.dirstate.parents()[0]
233 p1 = repo.dirstate.parents()[0]
234 p2 = node
234 p2 = node
235 self.log(user, date, message, p1, p2, merge=merge)
235 self.log(user, date, message, p1, p2, merge=merge)
236 self.ui.write(str(inst) + '\n')
236 self.ui.write(str(inst) + '\n')
237 raise util.Abort(_('Fix up the merge and run '
237 raise util.Abort(_('Fix up the merge and run '
238 'hg transplant --continue'))
238 'hg transplant --continue'))
239 else:
239 else:
240 files = None
240 files = None
241 if merge:
241 if merge:
242 p1, p2 = repo.dirstate.parents()
242 p1, p2 = repo.dirstate.parents()
243 repo.dirstate.setparents(p1, node)
243 repo.dirstate.setparents(p1, node)
244 m = match.always(repo.root, '')
244 m = match.always(repo.root, '')
245 else:
245 else:
246 m = match.exact(repo.root, '', files)
246 m = match.exact(repo.root, '', files)
247
247
248 n = repo.commit(message, user, date, extra=extra, match=m)
248 n = repo.commit(message, user, date, extra=extra, match=m)
249 if not merge:
249 if not merge:
250 self.transplants.set(n, node)
250 self.transplants.set(n, node)
251
251
252 return n
252 return n
253
253
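Because applyone() stores the origin node in the 'transplant_source' extra field, a transplanted changeset can later be traced back to its source. A minimal sketch, using the same changelog API that transplantfilter() uses further down (n is assumed to be a node returned by applyone, with repo, ui and revlog in scope):

    extra = repo.changelog.read(n)[5]            # extra dict of the new changeset
    src = extra.get('transplant_source')
    if src:
        ui.write('transplanted from %s\n' % revlog.hex(src))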
254 def resume(self, repo, source, opts=None):
254 def resume(self, repo, source, opts=None):
255 '''recover last transaction and apply remaining changesets'''
255 '''recover last transaction and apply remaining changesets'''
256 if os.path.exists(os.path.join(self.path, 'journal')):
256 if os.path.exists(os.path.join(self.path, 'journal')):
257 n, node = self.recover(repo)
257 n, node = self.recover(repo)
258 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
258 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
259 revlog.short(n)))
259 revlog.short(n)))
260 seriespath = os.path.join(self.path, 'series')
260 seriespath = os.path.join(self.path, 'series')
261 if not os.path.exists(seriespath):
261 if not os.path.exists(seriespath):
262 self.transplants.write()
262 self.transplants.write()
263 return
263 return
264 nodes, merges = self.readseries()
264 nodes, merges = self.readseries()
265 revmap = {}
265 revmap = {}
266 for n in nodes:
266 for n in nodes:
267 revmap[source.changelog.rev(n)] = n
267 revmap[source.changelog.rev(n)] = n
268 os.unlink(seriespath)
268 os.unlink(seriespath)
269
269
270 self.apply(repo, source, revmap, merges, opts)
270 self.apply(repo, source, revmap, merges, opts)
271
271
272 def recover(self, repo):
272 def recover(self, repo):
273 '''commit working directory using journal metadata'''
273 '''commit working directory using journal metadata'''
274 node, user, date, message, parents = self.readlog()
274 node, user, date, message, parents = self.readlog()
275 merge = len(parents) == 2
275 merge = len(parents) == 2
276
276
277 if not user or not date or not message or not parents[0]:
277 if not user or not date or not message or not parents[0]:
278 raise util.Abort(_('transplant log file is corrupt'))
278 raise util.Abort(_('transplant log file is corrupt'))
279
279
280 extra = {'transplant_source': node}
280 extra = {'transplant_source': node}
281 wlock = repo.wlock()
281 wlock = repo.wlock()
282 try:
282 try:
283 p1, p2 = repo.dirstate.parents()
283 p1, p2 = repo.dirstate.parents()
284 if p1 != parents[0]:
284 if p1 != parents[0]:
285 raise util.Abort(
285 raise util.Abort(
286 _('working dir not at transplant parent %s') %
286 _('working dir not at transplant parent %s') %
287 revlog.hex(parents[0]))
287 revlog.hex(parents[0]))
288 if merge:
288 if merge:
289 repo.dirstate.setparents(p1, parents[1])
289 repo.dirstate.setparents(p1, parents[1])
290 n = repo.commit(message, user, date, extra=extra)
290 n = repo.commit(message, user, date, extra=extra)
291 if not n:
291 if not n:
292 raise util.Abort(_('commit failed'))
292 raise util.Abort(_('commit failed'))
293 if not merge:
293 if not merge:
294 self.transplants.set(n, node)
294 self.transplants.set(n, node)
295 self.unlog()
295 self.unlog()
296
296
297 return n, node
297 return n, node
298 finally:
298 finally:
299 wlock.release()
299 wlock.release()
300
300
301 def readseries(self):
301 def readseries(self):
302 nodes = []
302 nodes = []
303 merges = []
303 merges = []
304 cur = nodes
304 cur = nodes
305 for line in self.opener('series').read().splitlines():
305 for line in self.opener('series').read().splitlines():
306 if line.startswith('# Merges'):
306 if line.startswith('# Merges'):
307 cur = merges
307 cur = merges
308 continue
308 continue
309 cur.append(revlog.bin(line))
309 cur.append(revlog.bin(line))
310
310
311 return (nodes, merges)
311 return (nodes, merges)
312
312
313 def saveseries(self, revmap, merges):
313 def saveseries(self, revmap, merges):
314 if not revmap:
314 if not revmap:
315 return
315 return
316
316
317 if not os.path.isdir(self.path):
317 if not os.path.isdir(self.path):
318 os.mkdir(self.path)
318 os.mkdir(self.path)
319 series = self.opener('series', 'w')
319 series = self.opener('series', 'w')
320 for rev in sorted(revmap):
320 for rev in sorted(revmap):
321 series.write(revlog.hex(revmap[rev]) + '\n')
321 series.write(revlog.hex(revmap[rev]) + '\n')
322 if merges:
322 if merges:
323 series.write('# Merges\n')
323 series.write('# Merges\n')
324 for m in merges:
324 for m in merges:
325 series.write(revlog.hex(m) + '\n')
325 series.write(revlog.hex(m) + '\n')
326 series.close()
326 series.close()
327
327
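The series file written above is a plain list of full 40-character hex nodes, one per line, with pending merge revisions listed after a marker line:

    <hex node of a revision still to be transplanted>
    <hex node of a revision still to be transplanted>
    # Merges
    <hex node of a revision to be merged>

readseries() above simply appends to the nodes list until it sees the '# Merges' line, then switches to the merges list.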
328 def parselog(self, fp):
328 def parselog(self, fp):
329 parents = []
329 parents = []
330 message = []
330 message = []
331 node = revlog.nullid
331 node = revlog.nullid
332 inmsg = False
332 inmsg = False
333 for line in fp.read().splitlines():
333 for line in fp.read().splitlines():
334 if inmsg:
334 if inmsg:
335 message.append(line)
335 message.append(line)
336 elif line.startswith('# User '):
336 elif line.startswith('# User '):
337 user = line[7:]
337 user = line[7:]
338 elif line.startswith('# Date '):
338 elif line.startswith('# Date '):
339 date = line[7:]
339 date = line[7:]
340 elif line.startswith('# Node ID '):
340 elif line.startswith('# Node ID '):
341 node = revlog.bin(line[10:])
341 node = revlog.bin(line[10:])
342 elif line.startswith('# Parent '):
342 elif line.startswith('# Parent '):
343 parents.append(revlog.bin(line[9:]))
343 parents.append(revlog.bin(line[9:]))
344 elif not line.startswith('#'):
344 elif not line.startswith('#'):
345 inmsg = True
345 inmsg = True
346 message.append(line)
346 message.append(line)
347 return (node, user, date, '\n'.join(message), parents)
347 return (node, user, date, '\n'.join(message), parents)
348
348
349 def log(self, user, date, message, p1, p2, merge=False):
349 def log(self, user, date, message, p1, p2, merge=False):
350 '''journal changelog metadata for later recover'''
350 '''journal changelog metadata for later recover'''
351
351
352 if not os.path.isdir(self.path):
352 if not os.path.isdir(self.path):
353 os.mkdir(self.path)
353 os.mkdir(self.path)
354 fp = self.opener('journal', 'w')
354 fp = self.opener('journal', 'w')
355 fp.write('# User %s\n' % user)
355 fp.write('# User %s\n' % user)
356 fp.write('# Date %s\n' % date)
356 fp.write('# Date %s\n' % date)
357 fp.write('# Node ID %s\n' % revlog.hex(p2))
357 fp.write('# Node ID %s\n' % revlog.hex(p2))
358 fp.write('# Parent ' + revlog.hex(p1) + '\n')
358 fp.write('# Parent ' + revlog.hex(p1) + '\n')
359 if merge:
359 if merge:
360 fp.write('# Parent ' + revlog.hex(p2) + '\n')
360 fp.write('# Parent ' + revlog.hex(p2) + '\n')
361 fp.write(message.rstrip() + '\n')
361 fp.write(message.rstrip() + '\n')
362 fp.close()
362 fp.close()
363
363
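The journal written here is what parselog() reads back through readlog() when a failed transplant is resumed; its layout mirrors the header file used by filter():

    # User <committer of the changeset being transplanted>
    # Date <unix timestamp> <timezone offset>
    # Node ID <hex of the transplanted node, p2>
    # Parent <hex of the working directory parent, p1>
    # Parent <hex of p2, written only for merge transplants>
    <commit message>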
364 def readlog(self):
364 def readlog(self):
365 return self.parselog(self.opener('journal'))
365 return self.parselog(self.opener('journal'))
366
366
367 def unlog(self):
367 def unlog(self):
368 '''remove changelog journal'''
368 '''remove changelog journal'''
369 absdst = os.path.join(self.path, 'journal')
369 absdst = os.path.join(self.path, 'journal')
370 if os.path.exists(absdst):
370 if os.path.exists(absdst):
371 os.unlink(absdst)
371 os.unlink(absdst)
372
372
373 def transplantfilter(self, repo, source, root):
373 def transplantfilter(self, repo, source, root):
374 def matchfn(node):
374 def matchfn(node):
375 if self.applied(repo, node, root):
375 if self.applied(repo, node, root):
376 return False
376 return False
377 if source.changelog.parents(node)[1] != revlog.nullid:
377 if source.changelog.parents(node)[1] != revlog.nullid:
378 return False
378 return False
379 extra = source.changelog.read(node)[5]
379 extra = source.changelog.read(node)[5]
380 cnode = extra.get('transplant_source')
380 cnode = extra.get('transplant_source')
381 if cnode and self.applied(repo, cnode, root):
381 if cnode and self.applied(repo, cnode, root):
382 return False
382 return False
383 return True
383 return True
384
384
385 return matchfn
385 return matchfn
386
386
387 def hasnode(repo, node):
387 def hasnode(repo, node):
388 try:
388 try:
389 return repo.changelog.rev(node) != None
389 return repo.changelog.rev(node) != None
390 except error.RevlogError:
390 except error.RevlogError:
391 return False
391 return False
392
392
393 def browserevs(ui, repo, nodes, opts):
393 def browserevs(ui, repo, nodes, opts):
394 '''interactively transplant changesets'''
394 '''interactively transplant changesets'''
395 def browsehelp(ui):
395 def browsehelp(ui):
396 ui.write('y: transplant this changeset\n'
396 ui.write('y: transplant this changeset\n'
397 'n: skip this changeset\n'
397 'n: skip this changeset\n'
398 'm: merge at this changeset\n'
398 'm: merge at this changeset\n'
399 'p: show patch\n'
399 'p: show patch\n'
400 'c: commit selected changesets\n'
400 'c: commit selected changesets\n'
401 'q: cancel transplant\n'
401 'q: cancel transplant\n'
402 '?: show this help\n')
402 '?: show this help\n')
403
403
404 displayer = cmdutil.show_changeset(ui, repo, opts)
404 displayer = cmdutil.show_changeset(ui, repo, opts)
405 transplants = []
405 transplants = []
406 merges = []
406 merges = []
407 for node in nodes:
407 for node in nodes:
408 displayer.show(repo[node])
408 displayer.show(repo[node])
409 action = None
409 action = None
410 while not action:
410 while not action:
411 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
411 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
412 if action == '?':
412 if action == '?':
413 browsehelp(ui)
413 browsehelp(ui)
414 action = None
414 action = None
415 elif action == 'p':
415 elif action == 'p':
416 parent = repo.changelog.parents(node)[0]
416 parent = repo.changelog.parents(node)[0]
417 for chunk in patch.diff(repo, parent, node):
417 for chunk in patch.diff(repo, parent, node):
418 ui.write(chunk)
418 ui.write(chunk)
419 action = None
419 action = None
420 elif action not in ('y', 'n', 'm', 'c', 'q'):
420 elif action not in ('y', 'n', 'm', 'c', 'q'):
421 ui.write('no such option\n')
421 ui.write('no such option\n')
422 action = None
422 action = None
423 if action == 'y':
423 if action == 'y':
424 transplants.append(node)
424 transplants.append(node)
425 elif action == 'm':
425 elif action == 'm':
426 merges.append(node)
426 merges.append(node)
427 elif action == 'c':
427 elif action == 'c':
428 break
428 break
429 elif action == 'q':
429 elif action == 'q':
430 transplants = ()
430 transplants = ()
431 merges = ()
431 merges = ()
432 break
432 break
433 displayer.close()
433 displayer.close()
434 return (transplants, merges)
434 return (transplants, merges)
435
435
436 def transplant(ui, repo, *revs, **opts):
436 def transplant(ui, repo, *revs, **opts):
437 '''transplant changesets from another branch
437 '''transplant changesets from another branch
438
438
439 Selected changesets will be applied on top of the current working
439 Selected changesets will be applied on top of the current working
440 directory with the log of the original changeset. If --log is
440 directory with the log of the original changeset. If --log is
441 specified, log messages will have a comment appended of the form::
441 specified, log messages will have a comment appended of the form::
442
442
443 (transplanted from CHANGESETHASH)
443 (transplanted from CHANGESETHASH)
444
444
445 You can rewrite the changelog message with the --filter option.
445 You can rewrite the changelog message with the --filter option.
446 Its argument will be invoked with the current changelog message as
446 Its argument will be invoked with the current changelog message as
447 $1 and the patch as $2.
447 $1 and the patch as $2.
448
448
449 If --source/-s is specified, selects changesets from the named
449 If --source/-s is specified, selects changesets from the named
450 repository. If --branch/-b is specified, selects changesets from
450 repository. If --branch/-b is specified, selects changesets from
451 the branch holding the named revision, up to that revision. If
451 the branch holding the named revision, up to that revision. If
452 --all/-a is specified, all changesets on the branch will be
452 --all/-a is specified, all changesets on the branch will be
453 transplanted, otherwise you will be prompted to select the
453 transplanted, otherwise you will be prompted to select the
454 changesets you want.
454 changesets you want.
455
455
456 hg transplant --branch REVISION --all will rebase the selected
456 hg transplant --branch REVISION --all will rebase the selected
457 branch (up to the named revision) onto your current working
457 branch (up to the named revision) onto your current working
458 directory.
458 directory.
459
459
460 You can optionally mark selected transplanted changesets as merge
460 You can optionally mark selected transplanted changesets as merge
461 changesets. You will not be prompted to transplant any ancestors
461 changesets. You will not be prompted to transplant any ancestors
462 of a merged transplant, and you can merge descendants of them
462 of a merged transplant, and you can merge descendants of them
463 normally instead of transplanting them.
463 normally instead of transplanting them.
464
464
465 If no merges or revisions are provided, hg transplant will start
465 If no merges or revisions are provided, hg transplant will start
466 an interactive changeset browser.
466 an interactive changeset browser.
467
467
468 If a changeset application fails, you can fix the merge by hand
468 If a changeset application fails, you can fix the merge by hand
469 and then resume where you left off by calling hg transplant
469 and then resume where you left off by calling hg transplant
470 --continue/-c.
470 --continue/-c.
471 '''
471 '''
472 def getremotechanges(repo, url):
472 def getremotechanges(repo, url):
473 sourcerepo = ui.expandpath(url)
473 sourcerepo = ui.expandpath(url)
474 source = hg.repository(ui, sourcerepo)
474 source = hg.repository(ui, sourcerepo)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
476 if not incoming:
476 if not incoming:
477 return (source, None, None)
477 return (source, None, None)
478
478
479 bundle = None
479 bundle = None
480 if not source.local():
480 if not source.local():
481 if source.capable('changegroupsubset'):
481 if source.capable('changegroupsubset'):
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
483 else:
483 else:
484 cg = source.changegroup(incoming, 'incoming')
484 cg = source.changegroup(incoming, 'incoming')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
487
487
488 return (source, incoming, bundle)
488 return (source, incoming, bundle)
489
489
490 def incwalk(repo, incoming, branches, match=util.always):
490 def incwalk(repo, incoming, branches, match=util.always):
491 if not branches:
491 if not branches:
492 branches = None
492 branches = None
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
494 if match(node):
494 if match(node):
495 yield node
495 yield node
496
496
497 def transplantwalk(repo, root, branches, match=util.always):
497 def transplantwalk(repo, root, branches, match=util.always):
498 if not branches:
498 if not branches:
499 branches = repo.heads()
499 branches = repo.heads()
500 ancestors = []
500 ancestors = []
501 for branch in branches:
501 for branch in branches:
502 ancestors.append(repo.changelog.ancestor(root, branch))
502 ancestors.append(repo.changelog.ancestor(root, branch))
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
504 if match(node):
504 if match(node):
505 yield node
505 yield node
506
506
507 def checkopts(opts, revs):
507 def checkopts(opts, revs):
508 if opts.get('continue'):
508 if opts.get('continue'):
509 if opts.get('branch') or opts.get('all') or opts.get('merge'):
509 if opts.get('branch') or opts.get('all') or opts.get('merge'):
510 raise util.Abort(_('--continue is incompatible with '
510 raise util.Abort(_('--continue is incompatible with '
511 'branch, all or merge'))
511 'branch, all or merge'))
512 return
512 return
513 if not (opts.get('source') or revs or
513 if not (opts.get('source') or revs or
514 opts.get('merge') or opts.get('branch')):
514 opts.get('merge') or opts.get('branch')):
515 raise util.Abort(_('no source URL, branch tag or revision '
515 raise util.Abort(_('no source URL, branch tag or revision '
516 'list provided'))
516 'list provided'))
517 if opts.get('all'):
517 if opts.get('all'):
518 if not opts.get('branch'):
518 if not opts.get('branch'):
519 raise util.Abort(_('--all requires a branch revision'))
519 raise util.Abort(_('--all requires a branch revision'))
520 if revs:
520 if revs:
521 raise util.Abort(_('--all is incompatible with a '
521 raise util.Abort(_('--all is incompatible with a '
522 'revision list'))
522 'revision list'))
523
523
524 checkopts(opts, revs)
524 checkopts(opts, revs)
525
525
526 if not opts.get('log'):
526 if not opts.get('log'):
527 opts['log'] = ui.config('transplant', 'log')
527 opts['log'] = ui.config('transplant', 'log')
528 if not opts.get('filter'):
528 if not opts.get('filter'):
529 opts['filter'] = ui.config('transplant', 'filter')
529 opts['filter'] = ui.config('transplant', 'filter')
530
530
531 tp = transplanter(ui, repo)
531 tp = transplanter(ui, repo)
532
532
533 p1, p2 = repo.dirstate.parents()
533 p1, p2 = repo.dirstate.parents()
534 if len(repo) > 0 and p1 == revlog.nullid:
534 if len(repo) > 0 and p1 == revlog.nullid:
535 raise util.Abort(_('no revision checked out'))
535 raise util.Abort(_('no revision checked out'))
536 if not opts.get('continue'):
536 if not opts.get('continue'):
537 if p2 != revlog.nullid:
537 if p2 != revlog.nullid:
538 raise util.Abort(_('outstanding uncommitted merges'))
538 raise util.Abort(_('outstanding uncommitted merges'))
539 m, a, r, d = repo.status()[:4]
539 m, a, r, d = repo.status()[:4]
540 if m or a or r or d:
540 if m or a or r or d:
541 raise util.Abort(_('outstanding local changes'))
541 raise util.Abort(_('outstanding local changes'))
542
542
543 bundle = None
543 bundle = None
544 source = opts.get('source')
544 source = opts.get('source')
545 if source:
545 if source:
546 (source, incoming, bundle) = getremotechanges(repo, source)
546 (source, incoming, bundle) = getremotechanges(repo, source)
547 else:
547 else:
548 source = repo
548 source = repo
549
549
550 try:
550 try:
551 if opts.get('continue'):
551 if opts.get('continue'):
552 tp.resume(repo, source, opts)
552 tp.resume(repo, source, opts)
553 return
553 return
554
554
555 tf=tp.transplantfilter(repo, source, p1)
555 tf = tp.transplantfilter(repo, source, p1)
556 if opts.get('prune'):
556 if opts.get('prune'):
557 prune = [source.lookup(r)
557 prune = [source.lookup(r)
558 for r in cmdutil.revrange(source, opts.get('prune'))]
558 for r in cmdutil.revrange(source, opts.get('prune'))]
559 matchfn = lambda x: tf(x) and x not in prune
559 matchfn = lambda x: tf(x) and x not in prune
560 else:
560 else:
561 matchfn = tf
561 matchfn = tf
562 branches = map(source.lookup, opts.get('branch', ()))
562 branches = map(source.lookup, opts.get('branch', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
564 revmap = {}
564 revmap = {}
565 if revs:
565 if revs:
566 for r in cmdutil.revrange(source, revs):
566 for r in cmdutil.revrange(source, revs):
567 revmap[int(r)] = source.lookup(r)
567 revmap[int(r)] = source.lookup(r)
568 elif opts.get('all') or not merges:
568 elif opts.get('all') or not merges:
569 if source != repo:
569 if source != repo:
570 alltransplants = incwalk(source, incoming, branches,
570 alltransplants = incwalk(source, incoming, branches,
571 match=matchfn)
571 match=matchfn)
572 else:
572 else:
573 alltransplants = transplantwalk(source, p1, branches,
573 alltransplants = transplantwalk(source, p1, branches,
574 match=matchfn)
574 match=matchfn)
575 if opts.get('all'):
575 if opts.get('all'):
576 revs = alltransplants
576 revs = alltransplants
577 else:
577 else:
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
579 merges.extend(newmerges)
579 merges.extend(newmerges)
580 for r in revs:
580 for r in revs:
581 revmap[source.changelog.rev(r)] = r
581 revmap[source.changelog.rev(r)] = r
582 for r in merges:
582 for r in merges:
583 revmap[source.changelog.rev(r)] = r
583 revmap[source.changelog.rev(r)] = r
584
584
585 tp.apply(repo, source, revmap, merges, opts)
585 tp.apply(repo, source, revmap, merges, opts)
586 finally:
586 finally:
587 if bundle:
587 if bundle:
588 source.close()
588 source.close()
589 os.unlink(bundle)
589 os.unlink(bundle)
590
590
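As the transplant docstring above notes, the --filter command is invoked with the changelog header file as $1 and the patch as $2. A minimal sketch of such a filter as a standalone Python script (the script name and the rewriting rule are only examples):

    #!/usr/bin/env python
    # prefix-msg.py: prepend a marker to the first line of the commit message
    import sys
    headerfile = sys.argv[1]              # $1: '# User'/'# Date' lines plus the message
    lines = open(headerfile).readlines()
    for i, line in enumerate(lines):
        if not line.startswith('#'):      # first non-header line starts the message
            lines[i] = '[transplanted] ' + line
            break
    open(headerfile, 'w').writelines(lines)

Run as hg transplant --filter 'python prefix-msg.py' REV; the filter only needs to edit the header file in place, since transplant re-reads it with parselog() afterwards.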
591 cmdtable = {
591 cmdtable = {
592 "transplant":
592 "transplant":
593 (transplant,
593 (transplant,
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
597 ('p', 'prune', [], _('skip over REV')),
597 ('p', 'prune', [], _('skip over REV')),
598 ('m', 'merge', [], _('merge at REV')),
598 ('m', 'merge', [], _('merge at REV')),
599 ('', 'log', None, _('append transplant info to log message')),
599 ('', 'log', None, _('append transplant info to log message')),
600 ('c', 'continue', None, _('continue last transplant session '
600 ('c', 'continue', None, _('continue last transplant session '
601 'after repair')),
601 'after repair')),
602 ('', 'filter', '', _('filter changesets through FILTER'))],
602 ('', 'filter', '', _('filter changesets through FILTER'))],
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
604 '[-m REV] [REV]...'))
604 '[-m REV] [REV]...'))
605 }
605 }
@@ -1,3808 +1,3808 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, util, revlog, bundlerepo, extensions, copies, error
12 import hg, util, revlog, bundlerepo, extensions, copies, error
13 import patch, help, mdiff, url, encoding, templatekw
13 import patch, help, mdiff, url, encoding, templatekw
14 import archival, changegroup, cmdutil, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 from hgweb import server
16 import merge as merge_
16 import merge as merge_
17 import minirst
17 import minirst
18
18
19 # Commands start here, listed alphabetically
19 # Commands start here, listed alphabetically
20
20
21 def add(ui, repo, *pats, **opts):
21 def add(ui, repo, *pats, **opts):
22 """add the specified files on the next commit
22 """add the specified files on the next commit
23
23
24 Schedule files to be version controlled and added to the
24 Schedule files to be version controlled and added to the
25 repository.
25 repository.
26
26
27 The files will be added to the repository at the next commit. To
27 The files will be added to the repository at the next commit. To
28 undo an add before that, see hg forget.
28 undo an add before that, see hg forget.
29
29
30 If no names are given, add all files to the repository.
30 If no names are given, add all files to the repository.
31 """
31 """
32
32
33 bad = []
33 bad = []
34 names = []
34 names = []
35 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
36 oldbad = m.bad
36 oldbad = m.bad
37 m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
37 m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
38
38
39 for f in repo.walk(m):
39 for f in repo.walk(m):
40 exact = m.exact(f)
40 exact = m.exact(f)
41 if exact or f not in repo.dirstate:
41 if exact or f not in repo.dirstate:
42 names.append(f)
42 names.append(f)
43 if ui.verbose or not exact:
43 if ui.verbose or not exact:
44 ui.status(_('adding %s\n') % m.rel(f))
44 ui.status(_('adding %s\n') % m.rel(f))
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 bad += [f for f in repo.add(names) if f in m.files()]
46 bad += [f for f in repo.add(names) if f in m.files()]
47 return bad and 1 or 0
47 return bad and 1 or 0
48
48
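A minimal sketch of calling this command from Python instead of the hg command line (assuming a repository in the current directory; the dispatcher normally fills in the full option dict, and an empty one works here only because add() reads every option with opts.get):

    from mercurial import ui as uimod, hg, commands
    u = uimod.ui()
    repo = hg.repository(u, '.')
    commands.add(u, repo)                 # schedule every untracked file, like plain 'hg add'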
49 def addremove(ui, repo, *pats, **opts):
49 def addremove(ui, repo, *pats, **opts):
50 """add all new files, delete all missing files
50 """add all new files, delete all missing files
51
51
52 Add all new files and remove all missing files from the
52 Add all new files and remove all missing files from the
53 repository.
53 repository.
54
54
55 New files are ignored if they match any of the patterns in
55 New files are ignored if they match any of the patterns in
56 .hgignore. As with add, these changes take effect at the next
56 .hgignore. As with add, these changes take effect at the next
57 commit.
57 commit.
58
58
59 Use the -s/--similarity option to detect renamed files. With a
59 Use the -s/--similarity option to detect renamed files. With a
60 parameter greater than 0, this compares every removed file with
60 parameter greater than 0, this compares every removed file with
61 every added file and records those similar enough as renames. This
61 every added file and records those similar enough as renames. This
62 option takes a percentage between 0 (disabled) and 100 (files must
62 option takes a percentage between 0 (disabled) and 100 (files must
63 be identical) as its parameter. Detecting renamed files this way
63 be identical) as its parameter. Detecting renamed files this way
64 can be expensive.
64 can be expensive.
65 """
65 """
66 try:
66 try:
67 sim = float(opts.get('similarity') or 0)
67 sim = float(opts.get('similarity') or 0)
68 except ValueError:
68 except ValueError:
69 raise util.Abort(_('similarity must be a number'))
69 raise util.Abort(_('similarity must be a number'))
70 if sim < 0 or sim > 100:
70 if sim < 0 or sim > 100:
71 raise util.Abort(_('similarity must be between 0 and 100'))
71 raise util.Abort(_('similarity must be between 0 and 100'))
72 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
72 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
73
73
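Continuing the direct-call sketch given after add() (u and repo set up the same way), the similarity threshold is given as a percentage and divided by 100 before being handed to cmdutil.addremove:

    # record removed/added pairs that are at least 75% similar as renames
    commands.addremove(u, repo, similarity='75')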
74 def annotate(ui, repo, *pats, **opts):
74 def annotate(ui, repo, *pats, **opts):
75 """show changeset information by line for each file
75 """show changeset information by line for each file
76
76
77 List changes in files, showing the revision id responsible for
77 List changes in files, showing the revision id responsible for
78 each line
78 each line
79
79
80 This command is useful for discovering when a change was made and
80 This command is useful for discovering when a change was made and
81 by whom.
81 by whom.
82
82
83 Without the -a/--text option, annotate will avoid processing files
83 Without the -a/--text option, annotate will avoid processing files
84 it detects as binary. With -a, annotate will annotate the file
84 it detects as binary. With -a, annotate will annotate the file
85 anyway, although the results will probably be neither useful
85 anyway, although the results will probably be neither useful
86 nor desirable.
86 nor desirable.
87 """
87 """
88 datefunc = ui.quiet and util.shortdate or util.datestr
88 datefunc = ui.quiet and util.shortdate or util.datestr
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90
90
91 if not pats:
91 if not pats:
92 raise util.Abort(_('at least one filename or pattern is required'))
92 raise util.Abort(_('at least one filename or pattern is required'))
93
93
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 ('number', lambda x: str(x[0].rev())),
95 ('number', lambda x: str(x[0].rev())),
96 ('changeset', lambda x: short(x[0].node())),
96 ('changeset', lambda x: short(x[0].node())),
97 ('date', getdate),
97 ('date', getdate),
98 ('file', lambda x: x[0].path()),
98 ('file', lambda x: x[0].path()),
99 ]
99 ]
100
100
101 if (not opts.get('user') and not opts.get('changeset')
101 if (not opts.get('user') and not opts.get('changeset')
102 and not opts.get('date') and not opts.get('file')):
102 and not opts.get('date') and not opts.get('file')):
103 opts['number'] = 1
103 opts['number'] = 1
104
104
105 linenumber = opts.get('line_number') is not None
105 linenumber = opts.get('line_number') is not None
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
106 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
108
108
109 funcmap = [func for op, func in opmap if opts.get(op)]
109 funcmap = [func for op, func in opmap if opts.get(op)]
110 if linenumber:
110 if linenumber:
111 lastfunc = funcmap[-1]
111 lastfunc = funcmap[-1]
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113
113
114 ctx = repo[opts.get('rev')]
114 ctx = repo[opts.get('rev')]
115 m = cmdutil.match(repo, pats, opts)
115 m = cmdutil.match(repo, pats, opts)
116 follow = not opts.get('no_follow')
116 follow = not opts.get('no_follow')
117 for abs in ctx.walk(m):
117 for abs in ctx.walk(m):
118 fctx = ctx[abs]
118 fctx = ctx[abs]
119 if not opts.get('text') and util.binary(fctx.data()):
119 if not opts.get('text') and util.binary(fctx.data()):
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 continue
121 continue
122
122
123 lines = fctx.annotate(follow=follow, linenumber=linenumber)
123 lines = fctx.annotate(follow=follow, linenumber=linenumber)
124 pieces = []
124 pieces = []
125
125
126 for f in funcmap:
126 for f in funcmap:
127 l = [f(n) for n, dummy in lines]
127 l = [f(n) for n, dummy in lines]
128 if l:
128 if l:
129 ml = max(map(len, l))
129 ml = max(map(len, l))
130 pieces.append(["%*s" % (ml, x) for x in l])
130 pieces.append(["%*s" % (ml, x) for x in l])
131
131
132 if pieces:
132 if pieces:
133 for p, l in zip(zip(*pieces), lines):
133 for p, l in zip(zip(*pieces), lines):
134 ui.write("%s: %s" % (" ".join(p), l[1]))
134 ui.write("%s: %s" % (" ".join(p), l[1]))
135
135
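The same direct-call style works for annotate; at least one pattern is required, and when no column option is given the revision number (-n) is implied (a sketch; 'setup.py' is just a placeholder file name):

    # show committer and revision number for each line of setup.py at tip
    commands.annotate(u, repo, 'setup.py', rev='tip', user=True, number=True)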
136 def archive(ui, repo, dest, **opts):
136 def archive(ui, repo, dest, **opts):
137 '''create an unversioned archive of a repository revision
137 '''create an unversioned archive of a repository revision
138
138
139 By default, the revision used is the parent of the working
139 By default, the revision used is the parent of the working
140 directory; use -r/--rev to specify a different revision.
140 directory; use -r/--rev to specify a different revision.
141
141
142 To specify the type of archive to create, use -t/--type. Valid
142 To specify the type of archive to create, use -t/--type. Valid
143 types are:
143 types are:
144
144
145 :``files``: a directory full of files (default)
145 :``files``: a directory full of files (default)
146 :``tar``: tar archive, uncompressed
146 :``tar``: tar archive, uncompressed
147 :``tbz2``: tar archive, compressed using bzip2
147 :``tbz2``: tar archive, compressed using bzip2
148 :``tgz``: tar archive, compressed using gzip
148 :``tgz``: tar archive, compressed using gzip
149 :``uzip``: zip archive, uncompressed
149 :``uzip``: zip archive, uncompressed
150 :``zip``: zip archive, compressed using deflate
150 :``zip``: zip archive, compressed using deflate
151
151
152 The exact name of the destination archive or directory is given
152 The exact name of the destination archive or directory is given
153 using a format string; see 'hg help export' for details.
153 using a format string; see 'hg help export' for details.
154
154
155 Each member added to an archive file has a directory prefix
155 Each member added to an archive file has a directory prefix
156 prepended. Use -p/--prefix to specify a format string for the
156 prepended. Use -p/--prefix to specify a format string for the
157 prefix. The default is the basename of the archive, with suffixes
157 prefix. The default is the basename of the archive, with suffixes
158 removed.
158 removed.
159 '''
159 '''
160
160
161 ctx = repo[opts.get('rev')]
161 ctx = repo[opts.get('rev')]
162 if not ctx:
162 if not ctx:
163 raise util.Abort(_('no working directory: please specify a revision'))
163 raise util.Abort(_('no working directory: please specify a revision'))
164 node = ctx.node()
164 node = ctx.node()
165 dest = cmdutil.make_filename(repo, dest, node)
165 dest = cmdutil.make_filename(repo, dest, node)
166 if os.path.realpath(dest) == repo.root:
166 if os.path.realpath(dest) == repo.root:
167 raise util.Abort(_('repository root cannot be destination'))
167 raise util.Abort(_('repository root cannot be destination'))
168 matchfn = cmdutil.match(repo, [], opts)
168 matchfn = cmdutil.match(repo, [], opts)
169 kind = opts.get('type') or 'files'
169 kind = opts.get('type') or 'files'
170 prefix = opts.get('prefix')
170 prefix = opts.get('prefix')
171 if dest == '-':
171 if dest == '-':
172 if kind == 'files':
172 if kind == 'files':
173 raise util.Abort(_('cannot archive plain files to stdout'))
173 raise util.Abort(_('cannot archive plain files to stdout'))
174 dest = sys.stdout
174 dest = sys.stdout
175 if not prefix:
175 if not prefix:
176 prefix = os.path.basename(repo.root) + '-%h'
176 prefix = os.path.basename(repo.root) + '-%h'
177 prefix = cmdutil.make_filename(repo, prefix, node)
177 prefix = cmdutil.make_filename(repo, prefix, node)
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 matchfn, prefix)
179 matchfn, prefix)
180
180
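A sketch of the corresponding call for archive, relying on the format-string expansion mentioned above (cmdutil.make_filename expands %h to the short changeset hash):

    # write a gzipped tarball of tip; members get the default <reponame>-<shorthash>/ prefix
    commands.archive(u, repo, '../%h.tar.gz', type='tgz', rev='tip')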
181 def backout(ui, repo, node=None, rev=None, **opts):
181 def backout(ui, repo, node=None, rev=None, **opts):
182 '''reverse effect of earlier changeset
182 '''reverse effect of earlier changeset
183
183
184 Commit the backed out changes as a new changeset. The new
184 Commit the backed out changes as a new changeset. The new
185 changeset is a child of the backed out changeset.
185 changeset is a child of the backed out changeset.
186
186
187 If you backout a changeset other than the tip, a new head is
187 If you backout a changeset other than the tip, a new head is
188 created. This head will be the new tip and you should merge this
188 created. This head will be the new tip and you should merge this
189 backout changeset with another head.
189 backout changeset with another head.
190
190
191 The --merge option remembers the parent of the working directory
191 The --merge option remembers the parent of the working directory
192 before starting the backout, then merges the new head with that
192 before starting the backout, then merges the new head with that
193 changeset afterwards. This saves you from doing the merge by hand.
193 changeset afterwards. This saves you from doing the merge by hand.
194 The result of this merge is not committed, as with a normal merge.
194 The result of this merge is not committed, as with a normal merge.
195
195
196 See 'hg help dates' for a list of formats valid for -d/--date.
196 See 'hg help dates' for a list of formats valid for -d/--date.
197 '''
197 '''
198 if rev and node:
198 if rev and node:
199 raise util.Abort(_("please specify just one revision"))
199 raise util.Abort(_("please specify just one revision"))
200
200
201 if not rev:
201 if not rev:
202 rev = node
202 rev = node
203
203
204 if not rev:
204 if not rev:
205 raise util.Abort(_("please specify a revision to backout"))
205 raise util.Abort(_("please specify a revision to backout"))
206
206
207 date = opts.get('date')
207 date = opts.get('date')
208 if date:
208 if date:
209 opts['date'] = util.parsedate(date)
209 opts['date'] = util.parsedate(date)
210
210
211 cmdutil.bail_if_changed(repo)
211 cmdutil.bail_if_changed(repo)
212 node = repo.lookup(rev)
212 node = repo.lookup(rev)
213
213
214 op1, op2 = repo.dirstate.parents()
214 op1, op2 = repo.dirstate.parents()
215 a = repo.changelog.ancestor(op1, node)
215 a = repo.changelog.ancestor(op1, node)
216 if a != node:
216 if a != node:
217 raise util.Abort(_('cannot backout change on a different branch'))
217 raise util.Abort(_('cannot backout change on a different branch'))
218
218
219 p1, p2 = repo.changelog.parents(node)
219 p1, p2 = repo.changelog.parents(node)
220 if p1 == nullid:
220 if p1 == nullid:
221 raise util.Abort(_('cannot backout a change with no parents'))
221 raise util.Abort(_('cannot backout a change with no parents'))
222 if p2 != nullid:
222 if p2 != nullid:
223 if not opts.get('parent'):
223 if not opts.get('parent'):
224 raise util.Abort(_('cannot backout a merge changeset without '
224 raise util.Abort(_('cannot backout a merge changeset without '
225 '--parent'))
225 '--parent'))
226 p = repo.lookup(opts['parent'])
226 p = repo.lookup(opts['parent'])
227 if p not in (p1, p2):
227 if p not in (p1, p2):
228 raise util.Abort(_('%s is not a parent of %s') %
228 raise util.Abort(_('%s is not a parent of %s') %
229 (short(p), short(node)))
229 (short(p), short(node)))
230 parent = p
230 parent = p
231 else:
231 else:
232 if opts.get('parent'):
232 if opts.get('parent'):
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 parent = p1
234 parent = p1
235
235
236 # the backout should appear on the same branch
236 # the backout should appear on the same branch
237 branch = repo.dirstate.branch()
237 branch = repo.dirstate.branch()
238 hg.clean(repo, node, show_stats=False)
238 hg.clean(repo, node, show_stats=False)
239 repo.dirstate.setbranch(branch)
239 repo.dirstate.setbranch(branch)
240 revert_opts = opts.copy()
240 revert_opts = opts.copy()
241 revert_opts['date'] = None
241 revert_opts['date'] = None
242 revert_opts['all'] = True
242 revert_opts['all'] = True
243 revert_opts['rev'] = hex(parent)
243 revert_opts['rev'] = hex(parent)
244 revert_opts['no_backup'] = None
244 revert_opts['no_backup'] = None
245 revert(ui, repo, **revert_opts)
245 revert(ui, repo, **revert_opts)
246 commit_opts = opts.copy()
246 commit_opts = opts.copy()
247 commit_opts['addremove'] = False
247 commit_opts['addremove'] = False
248 if not commit_opts['message'] and not commit_opts['logfile']:
248 if not commit_opts['message'] and not commit_opts['logfile']:
249 # we don't translate commit messages
249 # we don't translate commit messages
250 commit_opts['message'] = "Backed out changeset %s" % short(node)
250 commit_opts['message'] = "Backed out changeset %s" % short(node)
251 commit_opts['force_editor'] = True
251 commit_opts['force_editor'] = True
252 commit(ui, repo, **commit_opts)
252 commit(ui, repo, **commit_opts)
253 def nice(node):
253 def nice(node):
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 return '%d:%s' % (repo.changelog.rev(node), short(node))
255 ui.status(_('changeset %s backs out changeset %s\n') %
255 ui.status(_('changeset %s backs out changeset %s\n') %
256 (nice(repo.changelog.tip()), nice(node)))
256 (nice(repo.changelog.tip()), nice(node)))
257 if op1 != node:
257 if op1 != node:
258 hg.clean(repo, op1, show_stats=False)
258 hg.clean(repo, op1, show_stats=False)
259 if opts.get('merge'):
259 if opts.get('merge'):
260 ui.status(_('merging with changeset %s\n')
260 ui.status(_('merging with changeset %s\n')
261 % nice(repo.changelog.tip()))
261 % nice(repo.changelog.tip()))
262 hg.merge(repo, hex(repo.changelog.tip()))
262 hg.merge(repo, hex(repo.changelog.tip()))
263 else:
263 else:
264 ui.status(_('the backout changeset is a new head - '
264 ui.status(_('the backout changeset is a new head - '
265 'do not forget to merge\n'))
265 'do not forget to merge\n'))
266 ui.status(_('(use "backout --merge" '
266 ui.status(_('(use "backout --merge" '
267 'if you want to auto-merge)\n'))
267 'if you want to auto-merge)\n'))
268
268
269 def bisect(ui, repo, rev=None, extra=None, command=None,
269 def bisect(ui, repo, rev=None, extra=None, command=None,
270 reset=None, good=None, bad=None, skip=None, noupdate=None):
270 reset=None, good=None, bad=None, skip=None, noupdate=None):
271 """subdivision search of changesets
271 """subdivision search of changesets
272
272
273 This command helps to find changesets which introduce problems. To
273 This command helps to find changesets which introduce problems. To
274 use, mark the earliest changeset you know exhibits the problem as
274 use, mark the earliest changeset you know exhibits the problem as
275 bad, then mark the latest changeset which is free from the problem
275 bad, then mark the latest changeset which is free from the problem
276 as good. Bisect will update your working directory to a revision
276 as good. Bisect will update your working directory to a revision
277 for testing (unless the -U/--noupdate option is specified). Once
277 for testing (unless the -U/--noupdate option is specified). Once
278 you have performed tests, mark the working directory as good or
278 you have performed tests, mark the working directory as good or
279 bad, and bisect will either update to another candidate changeset
279 bad, and bisect will either update to another candidate changeset
280 or announce that it has found the bad revision.
280 or announce that it has found the bad revision.
281
281
282 As a shortcut, you can also use the revision argument to mark a
282 As a shortcut, you can also use the revision argument to mark a
283 revision as good or bad without checking it out first.
283 revision as good or bad without checking it out first.
284
284
285 If you supply a command, it will be used for automatic bisection.
285 If you supply a command, it will be used for automatic bisection.
286 Its exit status will be used to mark revisions as good or bad:
286 Its exit status will be used to mark revisions as good or bad:
287 status 0 means good, 125 means to skip the revision, 127
287 status 0 means good, 125 means to skip the revision, 127
288 (command not found) will abort the bisection, and any other
288 (command not found) will abort the bisection, and any other
289 non-zero exit status means the revision is bad.
289 non-zero exit status means the revision is bad.
290 """
290 """
291 def print_result(nodes, good):
291 def print_result(nodes, good):
292 displayer = cmdutil.show_changeset(ui, repo, {})
292 displayer = cmdutil.show_changeset(ui, repo, {})
293 if len(nodes) == 1:
293 if len(nodes) == 1:
294 # narrowed it down to a single revision
294 # narrowed it down to a single revision
295 if good:
295 if good:
296 ui.write(_("The first good revision is:\n"))
296 ui.write(_("The first good revision is:\n"))
297 else:
297 else:
298 ui.write(_("The first bad revision is:\n"))
298 ui.write(_("The first bad revision is:\n"))
299 displayer.show(repo[nodes[0]])
299 displayer.show(repo[nodes[0]])
300 else:
300 else:
301 # multiple possible revisions
301 # multiple possible revisions
302 if good:
302 if good:
303 ui.write(_("Due to skipped revisions, the first "
303 ui.write(_("Due to skipped revisions, the first "
304 "good revision could be any of:\n"))
304 "good revision could be any of:\n"))
305 else:
305 else:
306 ui.write(_("Due to skipped revisions, the first "
306 ui.write(_("Due to skipped revisions, the first "
307 "bad revision could be any of:\n"))
307 "bad revision could be any of:\n"))
308 for n in nodes:
308 for n in nodes:
309 displayer.show(repo[n])
309 displayer.show(repo[n])
310 displayer.close()
310 displayer.close()
311
311
312 def check_state(state, interactive=True):
312 def check_state(state, interactive=True):
313 if not state['good'] or not state['bad']:
313 if not state['good'] or not state['bad']:
314 if (good or bad or skip or reset) and interactive:
314 if (good or bad or skip or reset) and interactive:
315 return
315 return
316 if not state['good']:
316 if not state['good']:
317 raise util.Abort(_('cannot bisect (no known good revisions)'))
317 raise util.Abort(_('cannot bisect (no known good revisions)'))
318 else:
318 else:
319 raise util.Abort(_('cannot bisect (no known bad revisions)'))
319 raise util.Abort(_('cannot bisect (no known bad revisions)'))
320 return True
320 return True
321
321
322 # backward compatibility
322 # backward compatibility
323 if rev in "good bad reset init".split():
323 if rev in "good bad reset init".split():
324 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
324 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
325 cmd, rev, extra = rev, extra, None
325 cmd, rev, extra = rev, extra, None
326 if cmd == "good":
326 if cmd == "good":
327 good = True
327 good = True
328 elif cmd == "bad":
328 elif cmd == "bad":
329 bad = True
329 bad = True
330 else:
330 else:
331 reset = True
331 reset = True
332 elif extra or good + bad + skip + reset + bool(command) > 1:
332 elif extra or good + bad + skip + reset + bool(command) > 1:
333 raise util.Abort(_('incompatible arguments'))
333 raise util.Abort(_('incompatible arguments'))
334
334
335 if reset:
335 if reset:
336 p = repo.join("bisect.state")
336 p = repo.join("bisect.state")
337 if os.path.exists(p):
337 if os.path.exists(p):
338 os.unlink(p)
338 os.unlink(p)
339 return
339 return
340
340
341 state = hbisect.load_state(repo)
341 state = hbisect.load_state(repo)
342
342
343 if command:
343 if command:
344 changesets = 1
344 changesets = 1
345 try:
345 try:
346 while changesets:
346 while changesets:
347 # update state
347 # update state
348 status = util.system(command)
348 status = util.system(command)
349 if status == 125:
349 if status == 125:
350 transition = "skip"
350 transition = "skip"
351 elif status == 0:
351 elif status == 0:
352 transition = "good"
352 transition = "good"
353 # status < 0 means process was killed
353 # status < 0 means process was killed
354 elif status == 127:
354 elif status == 127:
355 raise util.Abort(_("failed to execute %s") % command)
355 raise util.Abort(_("failed to execute %s") % command)
356 elif status < 0:
356 elif status < 0:
357 raise util.Abort(_("%s killed") % command)
357 raise util.Abort(_("%s killed") % command)
358 else:
358 else:
359 transition = "bad"
359 transition = "bad"
360 ctx = repo[rev or '.']
360 ctx = repo[rev or '.']
361 state[transition].append(ctx.node())
361 state[transition].append(ctx.node())
362 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
362 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
363 check_state(state, interactive=False)
363 check_state(state, interactive=False)
364 # bisect
364 # bisect
365 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
366 # update to next check
366 # update to next check
367 cmdutil.bail_if_changed(repo)
367 cmdutil.bail_if_changed(repo)
368 hg.clean(repo, nodes[0], show_stats=False)
368 hg.clean(repo, nodes[0], show_stats=False)
369 finally:
369 finally:
370 hbisect.save_state(repo, state)
370 hbisect.save_state(repo, state)
371 return print_result(nodes, good)
371 return print_result(nodes, good)
372
372
373 # update state
373 # update state
374 node = repo.lookup(rev or '.')
374 node = repo.lookup(rev or '.')
375 if good or bad or skip:
375 if good or bad or skip:
376 if good:
376 if good:
377 state['good'].append(node)
377 state['good'].append(node)
378 elif bad:
378 elif bad:
379 state['bad'].append(node)
379 state['bad'].append(node)
380 elif skip:
380 elif skip:
381 state['skip'].append(node)
381 state['skip'].append(node)
382 hbisect.save_state(repo, state)
382 hbisect.save_state(repo, state)
383
383
384 if not check_state(state):
384 if not check_state(state):
385 return
385 return
386
386
387 # actually bisect
387 # actually bisect
388 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
389 if changesets == 0:
389 if changesets == 0:
390 print_result(nodes, good)
390 print_result(nodes, good)
391 else:
391 else:
392 assert len(nodes) == 1 # only a single node can be tested next
392 assert len(nodes) == 1 # only a single node can be tested next
393 node = nodes[0]
393 node = nodes[0]
394 # compute the approximate number of remaining tests
394 # compute the approximate number of remaining tests
395 tests, size = 0, 2
395 tests, size = 0, 2
396 while size <= changesets:
396 while size <= changesets:
397 tests, size = tests + 1, size * 2
397 tests, size = tests + 1, size * 2
398 rev = repo.changelog.rev(node)
398 rev = repo.changelog.rev(node)
399 ui.write(_("Testing changeset %d:%s "
399 ui.write(_("Testing changeset %d:%s "
400 "(%d changesets remaining, ~%d tests)\n")
400 "(%d changesets remaining, ~%d tests)\n")
401 % (rev, short(node), changesets, tests))
401 % (rev, short(node), changesets, tests))
402 if not noupdate:
402 if not noupdate:
403 cmdutil.bail_if_changed(repo)
403 cmdutil.bail_if_changed(repo)
404 return hg.clean(repo, node)
404 return hg.clean(repo, node)
405
405
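A hypothetical test script for hg bisect --command, following the exit-status protocol spelled out in the docstring above: 0 marks the revision good, 125 skips it, and any other non-zero status marks it bad (the build and test commands are placeholders):

    #!/usr/bin/env python
    # bisect-check.py
    import os, sys
    if os.system('make build') != 0:
        sys.exit(125)                     # revision does not build: ask bisect to skip it
    sys.exit(os.system('make test') and 1 or 0)

It would then be driven with hg bisect --command 'python bisect-check.py' after marking one good and one bad revision.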
406 def branch(ui, repo, label=None, **opts):
406 def branch(ui, repo, label=None, **opts):
407 """set or show the current branch name
407 """set or show the current branch name
408
408
409 With no argument, show the current branch name. With one argument,
409 With no argument, show the current branch name. With one argument,
410 set the working directory branch name (the branch will not exist
410 set the working directory branch name (the branch will not exist
411 in the repository until the next commit). Standard practice
411 in the repository until the next commit). Standard practice
412 recommends that primary development take place on the 'default'
412 recommends that primary development take place on the 'default'
413 branch.
413 branch.
414
414
415 Unless -f/--force is specified, branch will not let you set a
415 Unless -f/--force is specified, branch will not let you set a
416 branch name that already exists, even if it's inactive.
416 branch name that already exists, even if it's inactive.
417
417
418 Use -C/--clean to reset the working directory branch to that of
418 Use -C/--clean to reset the working directory branch to that of
419 the parent of the working directory, negating a previous branch
419 the parent of the working directory, negating a previous branch
420 change.
420 change.
421
421
422 Use the command 'hg update' to switch to an existing branch. Use
422 Use the command 'hg update' to switch to an existing branch. Use
423 'hg commit --close-branch' to mark this branch as closed.
423 'hg commit --close-branch' to mark this branch as closed.
424 """
424 """
425
425
426 if opts.get('clean'):
426 if opts.get('clean'):
427 label = repo[None].parents()[0].branch()
427 label = repo[None].parents()[0].branch()
428 repo.dirstate.setbranch(label)
428 repo.dirstate.setbranch(label)
429 ui.status(_('reset working directory to branch %s\n') % label)
429 ui.status(_('reset working directory to branch %s\n') % label)
430 elif label:
430 elif label:
431 utflabel = encoding.fromlocal(label)
431 utflabel = encoding.fromlocal(label)
432 if not opts.get('force') and utflabel in repo.branchtags():
432 if not opts.get('force') and utflabel in repo.branchtags():
433 if label not in [p.branch() for p in repo.parents()]:
433 if label not in [p.branch() for p in repo.parents()]:
434 raise util.Abort(_('a branch of the same name already exists'
434 raise util.Abort(_('a branch of the same name already exists'
435 ' (use --force to override)'))
435 ' (use --force to override)'))
436 repo.dirstate.setbranch(utflabel)
436 repo.dirstate.setbranch(utflabel)
437 ui.status(_('marked working directory as branch %s\n') % label)
437 ui.status(_('marked working directory as branch %s\n') % label)
438 else:
438 else:
439 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
439 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
440
440
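Direct-call sketch again (u and repo as before): with no label the current branch name is printed, with a label it is recorded in the dirstate for the next commit ('stable-1.0' is a made-up branch name):

    commands.branch(u, repo)                  # print the current branch name
    commands.branch(u, repo, 'stable-1.0')    # mark the working directory as branch 'stable-1.0'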
441 def branches(ui, repo, active=False, closed=False):
441 def branches(ui, repo, active=False, closed=False):
442 """list repository named branches
442 """list repository named branches
443
443
444 List the repository's named branches, indicating which ones are
444 List the repository's named branches, indicating which ones are
445 inactive. If -c/--closed is specified, also list branches which have
445 inactive. If -c/--closed is specified, also list branches which have
446 been marked closed (see hg commit --close-branch).
446 been marked closed (see hg commit --close-branch).
447
447
448 If -a/--active is specified, only show active branches. A branch
448 If -a/--active is specified, only show active branches. A branch
449 is considered active if it contains repository heads.
449 is considered active if it contains repository heads.
450
450
451 Use the command 'hg update' to switch to an existing branch.
451 Use the command 'hg update' to switch to an existing branch.
452 """
452 """
453
453
454 hexfunc = ui.debugflag and hex or short
454 hexfunc = ui.debugflag and hex or short
455 activebranches = [repo[n].branch() for n in repo.heads()]
455 activebranches = [repo[n].branch() for n in repo.heads()]
456 def testactive(tag, node):
456 def testactive(tag, node):
457 realhead = tag in activebranches
457 realhead = tag in activebranches
458 open = node in repo.branchheads(tag, closed=False)
458 open = node in repo.branchheads(tag, closed=False)
459 return realhead and open
459 return realhead and open
460 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
460 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
461 for tag, node in repo.branchtags().items()],
461 for tag, node in repo.branchtags().items()],
462 reverse=True)
462 reverse=True)
463
463
464 for isactive, node, tag in branches:
464 for isactive, node, tag in branches:
465 if (not active) or isactive:
465 if (not active) or isactive:
466 encodedtag = encoding.tolocal(tag)
466 encodedtag = encoding.tolocal(tag)
467 if ui.quiet:
467 if ui.quiet:
468 ui.write("%s\n" % encodedtag)
468 ui.write("%s\n" % encodedtag)
469 else:
469 else:
470 hn = repo.lookup(node)
470 hn = repo.lookup(node)
471 if isactive:
471 if isactive:
472 notice = ''
472 notice = ''
473 elif hn not in repo.branchheads(tag, closed=False):
473 elif hn not in repo.branchheads(tag, closed=False):
474 if not closed:
474 if not closed:
475 continue
475 continue
476 notice = _(' (closed)')
476 notice = _(' (closed)')
477 else:
477 else:
478 notice = _(' (inactive)')
478 notice = _(' (inactive)')
479 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
479 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
480 data = encodedtag, rev, hexfunc(hn), notice
480 data = encodedtag, rev, hexfunc(hn), notice
481 ui.write("%s %s:%s%s\n" % data)
481 ui.write("%s %s:%s%s\n" % data)
482
482
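# Illustrative sketch (not part of commands.py): the listing above sorts
# (isactive, rev, tag) tuples with reverse=True, so active branches come
# first and, within each group, newer (higher-revision) branches precede
# older ones.
def _branchsortsketch():
    entries = [(False, 3, 'old-feature'),   # hypothetical triples
               (True, 10, 'default'),
               (True, 7, 'stable')]
    ordered = sorted(entries, reverse=True)
    assert ordered == [(True, 10, 'default'),
                       (True, 7, 'stable'),
                       (False, 3, 'old-feature')]
    return ordered
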
483 def bundle(ui, repo, fname, dest=None, **opts):
483 def bundle(ui, repo, fname, dest=None, **opts):
484 """create a changegroup file
484 """create a changegroup file
485
485
486 Generate a compressed changegroup file collecting changesets not
486 Generate a compressed changegroup file collecting changesets not
487 known to be in another repository.
487 known to be in another repository.
488
488
489 If you omit the destination repository, then hg assumes the
489 If you omit the destination repository, then hg assumes the
490 destination will have all the nodes you specify with --base
490 destination will have all the nodes you specify with --base
491 parameters. To create a bundle containing all changesets, use
491 parameters. To create a bundle containing all changesets, use
492 -a/--all (or --base null).
492 -a/--all (or --base null).
493
493
494 You can change compression method with the -t/--type option.
494 You can change compression method with the -t/--type option.
495 The available compression methods are: none, bzip2, and
495 The available compression methods are: none, bzip2, and
496 gzip (by default, bundles are compressed using bzip2).
496 gzip (by default, bundles are compressed using bzip2).
497
497
498 The bundle file can then be transferred using conventional means
498 The bundle file can then be transferred using conventional means
499 and applied to another repository with the unbundle or pull
499 and applied to another repository with the unbundle or pull
500 command. This is useful when direct push and pull are not
500 command. This is useful when direct push and pull are not
501 available or when exporting an entire repository is undesirable.
501 available or when exporting an entire repository is undesirable.
502
502
503 Applying bundles preserves all changeset contents including
503 Applying bundles preserves all changeset contents including
504 permissions, copy/rename information, and revision history.
504 permissions, copy/rename information, and revision history.
505 """
505 """
506 revs = opts.get('rev') or None
506 revs = opts.get('rev') or None
507 if revs:
507 if revs:
508 revs = [repo.lookup(rev) for rev in revs]
508 revs = [repo.lookup(rev) for rev in revs]
509 if opts.get('all'):
509 if opts.get('all'):
510 base = ['null']
510 base = ['null']
511 else:
511 else:
512 base = opts.get('base')
512 base = opts.get('base')
513 if base:
513 if base:
514 if dest:
514 if dest:
515 raise util.Abort(_("--base is incompatible with specifying "
515 raise util.Abort(_("--base is incompatible with specifying "
516 "a destination"))
516 "a destination"))
517 base = [repo.lookup(rev) for rev in base]
517 base = [repo.lookup(rev) for rev in base]
518 # create the right base
518 # create the right base
519 # XXX: nodesbetween / changegroup* should be "fixed" instead
519 # XXX: nodesbetween / changegroup* should be "fixed" instead
520 o = []
520 o = []
521 has = set((nullid,))
521 has = set((nullid,))
522 for n in base:
522 for n in base:
523 has.update(repo.changelog.reachable(n))
523 has.update(repo.changelog.reachable(n))
524 if revs:
524 if revs:
525 visit = list(revs)
525 visit = list(revs)
526 else:
526 else:
527 visit = repo.changelog.heads()
527 visit = repo.changelog.heads()
528 seen = {}
528 seen = {}
529 while visit:
529 while visit:
530 n = visit.pop(0)
530 n = visit.pop(0)
531 parents = [p for p in repo.changelog.parents(n) if p not in has]
531 parents = [p for p in repo.changelog.parents(n) if p not in has]
532 if len(parents) == 0:
532 if len(parents) == 0:
533 o.insert(0, n)
533 o.insert(0, n)
534 else:
534 else:
535 for p in parents:
535 for p in parents:
536 if p not in seen:
536 if p not in seen:
537 seen[p] = 1
537 seen[p] = 1
538 visit.append(p)
538 visit.append(p)
539 else:
539 else:
540 dest = ui.expandpath(dest or 'default-push', dest or 'default')
540 dest = ui.expandpath(dest or 'default-push', dest or 'default')
541 dest, branches = hg.parseurl(dest, opts.get('branch'))
541 dest, branches = hg.parseurl(dest, opts.get('branch'))
542 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
542 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
543 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
543 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
544 o = repo.findoutgoing(other, force=opts.get('force'))
544 o = repo.findoutgoing(other, force=opts.get('force'))
545
545
546 if revs:
546 if revs:
547 cg = repo.changegroupsubset(o, revs, 'bundle')
547 cg = repo.changegroupsubset(o, revs, 'bundle')
548 else:
548 else:
549 cg = repo.changegroup(o, 'bundle')
549 cg = repo.changegroup(o, 'bundle')
550
550
551 bundletype = opts.get('type', 'bzip2').lower()
551 bundletype = opts.get('type', 'bzip2').lower()
552 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
552 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
553 bundletype = btypes.get(bundletype)
553 bundletype = btypes.get(bundletype)
554 if bundletype not in changegroup.bundletypes:
554 if bundletype not in changegroup.bundletypes:
555 raise util.Abort(_('unknown bundle type specified with --type'))
555 raise util.Abort(_('unknown bundle type specified with --type'))
556
556
557 changegroup.writebundle(cg, fname, bundletype)
557 changegroup.writebundle(cg, fname, bundletype)
558
558
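# Minimal sketch (not commands.py code) of the --base walk above: starting
# from the requested heads, stop at anything reachable from the base nodes
# ('has') and collect the boundary roots the bundle must start from.
def _outgoingrootssketch(parentmap, heads, has):
    """parentmap maps node -> parent list; a toy stand-in for a changelog."""
    roots, seen, visit = [], set(), list(heads)
    while visit:
        n = visit.pop(0)
        parents = [p for p in parentmap[n] if p not in has]
        if not parents:
            roots.insert(0, n)        # everything below n is already present
        else:
            for p in parents:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return roots

# toy linear history 0 <- 1 <- 2 <- 3 where the base already has 0 and 1:
# the walk bottoms out at changeset 2, the root of the missing range.
assert _outgoingrootssketch({0: [], 1: [0], 2: [1], 3: [2]},
                            heads=[3], has=set([0, 1])) == [2]
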
559 def cat(ui, repo, file1, *pats, **opts):
559 def cat(ui, repo, file1, *pats, **opts):
560 """output the current or given revision of files
560 """output the current or given revision of files
561
561
562 Print the specified files as they were at the given revision. If
562 Print the specified files as they were at the given revision. If
563 no revision is given, the parent of the working directory is used,
563 no revision is given, the parent of the working directory is used,
564 or tip if no revision is checked out.
564 or tip if no revision is checked out.
565
565
566 Output may be to a file, in which case the name of the file is
566 Output may be to a file, in which case the name of the file is
567 given using a format string. The formatting rules are the same as
567 given using a format string. The formatting rules are the same as
568 for the export command, with the following additions:
568 for the export command, with the following additions:
569
569
570 :``%s``: basename of file being printed
570 :``%s``: basename of file being printed
571 :``%d``: dirname of file being printed, or '.' if in repository root
571 :``%d``: dirname of file being printed, or '.' if in repository root
572 :``%p``: root-relative path name of file being printed
572 :``%p``: root-relative path name of file being printed
573 """
573 """
574 ctx = repo[opts.get('rev')]
574 ctx = repo[opts.get('rev')]
575 err = 1
575 err = 1
576 m = cmdutil.match(repo, (file1,) + pats, opts)
576 m = cmdutil.match(repo, (file1,) + pats, opts)
577 for abs in ctx.walk(m):
577 for abs in ctx.walk(m):
578 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
578 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
579 data = ctx[abs].data()
579 data = ctx[abs].data()
580 if opts.get('decode'):
580 if opts.get('decode'):
581 data = repo.wwritedata(abs, data)
581 data = repo.wwritedata(abs, data)
582 fp.write(data)
582 fp.write(data)
583 err = 0
583 err = 0
584 return err
584 return err
585
585
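# Hedged sketch of the -o/--output format keys documented above.  The real
# expansion is done by cmdutil.make_file(); this hypothetical helper only
# spells out what %s, %d and %p stand for, given a root-relative path.
# (os is already imported at the top of this module.)
def _catformatsketch(fmt, path):
    basename = os.path.basename(path)
    dirname = os.path.dirname(path) or '.'
    return (fmt.replace('%s', basename)
               .replace('%d', dirname)
               .replace('%p', path))

# e.g. 'hg cat -o %d/%s.orig lib/util.py' would write to lib/util.py.orig
assert _catformatsketch('%d/%s.orig', 'lib/util.py') == 'lib/util.py.orig'
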
586 def clone(ui, source, dest=None, **opts):
586 def clone(ui, source, dest=None, **opts):
587 """make a copy of an existing repository
587 """make a copy of an existing repository
588
588
589 Create a copy of an existing repository in a new directory.
589 Create a copy of an existing repository in a new directory.
590
590
591 If no destination directory name is specified, it defaults to the
591 If no destination directory name is specified, it defaults to the
592 basename of the source.
592 basename of the source.
593
593
594 The location of the source is added to the new repository's
594 The location of the source is added to the new repository's
595 .hg/hgrc file, as the default to be used for future pulls.
595 .hg/hgrc file, as the default to be used for future pulls.
596
596
597 See 'hg help urls' for valid source format details.
597 See 'hg help urls' for valid source format details.
598
598
599 It is possible to specify an ``ssh://`` URL as the destination, but no
599 It is possible to specify an ``ssh://`` URL as the destination, but no
600 .hg/hgrc or working directory will be created on the remote side.
600 .hg/hgrc or working directory will be created on the remote side.
601 Please see 'hg help urls' for important details about ``ssh://`` URLs.
601 Please see 'hg help urls' for important details about ``ssh://`` URLs.
602
602
603 If the -U/--noupdate option is specified, the new clone will contain
603 If the -U/--noupdate option is specified, the new clone will contain
604 only a repository (.hg) and no working copy (the working copy parent
604 only a repository (.hg) and no working copy (the working copy parent
605 will be the null changeset). Otherwise, clone will initially check
605 will be the null changeset). Otherwise, clone will initially check
606 out (in order of precedence):
606 out (in order of precedence):
607
607
608 a) the changeset, tag or branch specified with -u/--updaterev
608 a) the changeset, tag or branch specified with -u/--updaterev
609 b) the changeset, tag or branch given with the first -r/--rev
609 b) the changeset, tag or branch given with the first -r/--rev
610 c) the branch given with the first -b/--branch
610 c) the branch given with the first -b/--branch
611 d) the branch given with the url#branch source syntax
611 d) the branch given with the url#branch source syntax
612 e) the head of the default branch
612 e) the head of the default branch
613
613
614 Use 'hg clone -u . src dst' to check out the source repository's
614 Use 'hg clone -u . src dst' to check out the source repository's
615 parent changeset (applicable for local source repositories only).
615 parent changeset (applicable for local source repositories only).
616
616
617 A set of changesets (tags, or branch names) to pull may be specified
617 A set of changesets (tags, or branch names) to pull may be specified
618 by listing each changeset (tag, or branch name) with -r/--rev.
618 by listing each changeset (tag, or branch name) with -r/--rev.
619 If -r/--rev is used, the cloned repository will contain only a subset
619 If -r/--rev is used, the cloned repository will contain only a subset
620 of the changesets of the source repository. Only the set of changesets
620 of the changesets of the source repository. Only the set of changesets
621 defined by all -r/--rev options (including all their ancestors)
621 defined by all -r/--rev options (including all their ancestors)
622 will be pulled into the destination repository.
622 will be pulled into the destination repository.
623 No subsequent changesets (including subsequent tags) will be present
623 No subsequent changesets (including subsequent tags) will be present
624 in the destination.
624 in the destination.
625
625
626 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
626 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
627 local source repositories.
627 local source repositories.
628
628
629 For efficiency, hardlinks are used for cloning whenever the source
629 For efficiency, hardlinks are used for cloning whenever the source
630 and destination are on the same filesystem (note this applies only
630 and destination are on the same filesystem (note this applies only
631 to the repository data, not to the checked out files). Some
631 to the repository data, not to the checked out files). Some
632 filesystems, such as AFS, implement hardlinking incorrectly, but
632 filesystems, such as AFS, implement hardlinking incorrectly, but
633 do not report errors. In these cases, use the --pull option to
633 do not report errors. In these cases, use the --pull option to
634 avoid hardlinking.
634 avoid hardlinking.
635
635
636 In some cases, you can clone repositories and checked out files
636 In some cases, you can clone repositories and checked out files
637 using full hardlinks with ::
637 using full hardlinks with ::
638
638
639 $ cp -al REPO REPOCLONE
639 $ cp -al REPO REPOCLONE
640
640
641 This is the fastest way to clone, but it is not always safe. The
641 This is the fastest way to clone, but it is not always safe. The
642 operation is not atomic (making sure REPO is not modified during
642 operation is not atomic (making sure REPO is not modified during
643 the operation is up to you) and you have to make sure your editor
643 the operation is up to you) and you have to make sure your editor
644 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
644 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
645 this is not compatible with certain extensions that place their
645 this is not compatible with certain extensions that place their
646 metadata under the .hg directory, such as mq.
646 metadata under the .hg directory, such as mq.
647 """
647 """
648 if opts.get('noupdate') and opts.get('updaterev'):
648 if opts.get('noupdate') and opts.get('updaterev'):
649 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
649 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
650
650
651 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
651 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
652 pull=opts.get('pull'),
652 pull=opts.get('pull'),
653 stream=opts.get('uncompressed'),
653 stream=opts.get('uncompressed'),
654 rev=opts.get('rev'),
654 rev=opts.get('rev'),
655 update=opts.get('updaterev') or not opts.get('noupdate'),
655 update=opts.get('updaterev') or not opts.get('noupdate'),
656 branch=opts.get('branch'))
656 branch=opts.get('branch'))
657
657
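# Hypothetical sketch (not hg.clone itself) of the checkout precedence the
# docstring above describes: -u/--updaterev wins over the first -r/--rev,
# which wins over the first -b/--branch, which wins over a url#branch
# fragment; otherwise the default branch is checked out.
def _cloneupdatesketch(updaterev=None, revs=None, branches=None,
                       urlbranch=None):
    if updaterev:
        return updaterev
    if revs:
        return revs[0]
    if branches:
        return branches[0]
    if urlbranch:
        return urlbranch
    return 'default'

assert _cloneupdatesketch(revs=['1.2', 'tip'], urlbranch='stable') == '1.2'
assert _cloneupdatesketch() == 'default'
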
658 def commit(ui, repo, *pats, **opts):
658 def commit(ui, repo, *pats, **opts):
659 """commit the specified files or all outstanding changes
659 """commit the specified files or all outstanding changes
660
660
661 Commit changes to the given files into the repository. Unlike a
661 Commit changes to the given files into the repository. Unlike a
662 centralized RCS, this is a local operation. See hg push
662 centralized RCS, this is a local operation. See hg push
663 for a way to actively distribute your changes.
663 for a way to actively distribute your changes.
664
664
665 If a list of files is omitted, all changes reported by "hg status"
665 If a list of files is omitted, all changes reported by "hg status"
666 will be committed.
666 will be committed.
667
667
668 If you are committing the result of a merge, do not provide any
668 If you are committing the result of a merge, do not provide any
669 filenames or -I/-X filters.
669 filenames or -I/-X filters.
670
670
671 If no commit message is specified, the configured editor is
671 If no commit message is specified, the configured editor is
672 started to prompt you for a message.
672 started to prompt you for a message.
673
673
674 See 'hg help dates' for a list of formats valid for -d/--date.
674 See 'hg help dates' for a list of formats valid for -d/--date.
675 """
675 """
676 extra = {}
676 extra = {}
677 if opts.get('close_branch'):
677 if opts.get('close_branch'):
678 extra['close'] = 1
678 extra['close'] = 1
679 e = cmdutil.commiteditor
679 e = cmdutil.commiteditor
680 if opts.get('force_editor'):
680 if opts.get('force_editor'):
681 e = cmdutil.commitforceeditor
681 e = cmdutil.commitforceeditor
682
682
683 def commitfunc(ui, repo, message, match, opts):
683 def commitfunc(ui, repo, message, match, opts):
684 return repo.commit(message, opts.get('user'), opts.get('date'), match,
684 return repo.commit(message, opts.get('user'), opts.get('date'), match,
685 editor=e, extra=extra)
685 editor=e, extra=extra)
686
686
687 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
687 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
688 if not node:
688 if not node:
689 ui.status(_("nothing changed\n"))
689 ui.status(_("nothing changed\n"))
690 return
690 return
691 cl = repo.changelog
691 cl = repo.changelog
692 rev = cl.rev(node)
692 rev = cl.rev(node)
693 parents = cl.parentrevs(rev)
693 parents = cl.parentrevs(rev)
694 if rev - 1 in parents:
694 if rev - 1 in parents:
695 # one of the parents was the old tip
695 # one of the parents was the old tip
696 pass
696 pass
697 elif (parents == (nullrev, nullrev) or
697 elif (parents == (nullrev, nullrev) or
698 len(cl.heads(cl.node(parents[0]))) > 1 and
698 len(cl.heads(cl.node(parents[0]))) > 1 and
699 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
699 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
700 ui.status(_('created new head\n'))
700 ui.status(_('created new head\n'))
701
701
702 if ui.debugflag:
702 if ui.debugflag:
703 ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
703 ui.write(_('committed changeset %d:%s\n') % (rev, hex(node)))
704 elif ui.verbose:
704 elif ui.verbose:
705 ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))
705 ui.write(_('committed changeset %d:%s\n') % (rev, short(node)))
706
706
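# Sketch (illustrative only) of the "created new head" test above, with the
# changelog queries precomputed: the message is printed when neither parent
# was the previous tip and each non-null parent keeps more than one
# descendant head once the new commit is counted -- or when the commit has
# no parents at all.
def _newheadsketch(rev, parents, nheads, nullrev=-1):
    """parents: parent revision pair; nheads: parent rev -> head count."""
    if rev - 1 in parents:
        return False                  # simply extended the old tip
    if parents == (nullrev, nullrev):
        return True                   # a rootless commit is always a new head
    p1, p2 = parents
    return nheads[p1] > 1 and (p2 == nullrev or nheads[p2] > 1)

# committing on top of rev 3 while rev 5 was the tip splits the branch:
assert _newheadsketch(rev=6, parents=(3, -1), nheads={3: 2}) is True
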
707 def copy(ui, repo, *pats, **opts):
707 def copy(ui, repo, *pats, **opts):
708 """mark files as copied for the next commit
708 """mark files as copied for the next commit
709
709
710 Mark dest as having copies of source files. If dest is a
710 Mark dest as having copies of source files. If dest is a
711 directory, copies are put in that directory. If dest is a file,
711 directory, copies are put in that directory. If dest is a file,
712 the source must be a single file.
712 the source must be a single file.
713
713
714 By default, this command copies the contents of files as they
714 By default, this command copies the contents of files as they
715 exist in the working directory. If invoked with -A/--after, the
715 exist in the working directory. If invoked with -A/--after, the
716 operation is recorded, but no copying is performed.
716 operation is recorded, but no copying is performed.
717
717
718 This command takes effect with the next commit. To undo a copy
718 This command takes effect with the next commit. To undo a copy
719 before that, see hg revert.
719 before that, see hg revert.
720 """
720 """
721 wlock = repo.wlock(False)
721 wlock = repo.wlock(False)
722 try:
722 try:
723 return cmdutil.copy(ui, repo, pats, opts)
723 return cmdutil.copy(ui, repo, pats, opts)
724 finally:
724 finally:
725 wlock.release()
725 wlock.release()
726
726
727 def debugancestor(ui, repo, *args):
727 def debugancestor(ui, repo, *args):
728 """find the ancestor revision of two revisions in a given index"""
728 """find the ancestor revision of two revisions in a given index"""
729 if len(args) == 3:
729 if len(args) == 3:
730 index, rev1, rev2 = args
730 index, rev1, rev2 = args
731 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
731 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
732 lookup = r.lookup
732 lookup = r.lookup
733 elif len(args) == 2:
733 elif len(args) == 2:
734 if not repo:
734 if not repo:
735 raise util.Abort(_("There is no Mercurial repository here "
735 raise util.Abort(_("There is no Mercurial repository here "
736 "(.hg not found)"))
736 "(.hg not found)"))
737 rev1, rev2 = args
737 rev1, rev2 = args
738 r = repo.changelog
738 r = repo.changelog
739 lookup = repo.lookup
739 lookup = repo.lookup
740 else:
740 else:
741 raise util.Abort(_('either two or three arguments required'))
741 raise util.Abort(_('either two or three arguments required'))
742 a = r.ancestor(lookup(rev1), lookup(rev2))
742 a = r.ancestor(lookup(rev1), lookup(rev2))
743 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
743 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
744
744
745 def debugcommands(ui, cmd='', *args):
745 def debugcommands(ui, cmd='', *args):
746 for cmd, vals in sorted(table.iteritems()):
746 for cmd, vals in sorted(table.iteritems()):
747 cmd = cmd.split('|')[0].strip('^')
747 cmd = cmd.split('|')[0].strip('^')
748 opts = ', '.join([i[1] for i in vals[1]])
748 opts = ', '.join([i[1] for i in vals[1]])
749 ui.write('%s: %s\n' % (cmd, opts))
749 ui.write('%s: %s\n' % (cmd, opts))
750
750
751 def debugcomplete(ui, cmd='', **opts):
751 def debugcomplete(ui, cmd='', **opts):
752 """returns the completion list associated with the given command"""
752 """returns the completion list associated with the given command"""
753
753
754 if opts.get('options'):
754 if opts.get('options'):
755 options = []
755 options = []
756 otables = [globalopts]
756 otables = [globalopts]
757 if cmd:
757 if cmd:
758 aliases, entry = cmdutil.findcmd(cmd, table, False)
758 aliases, entry = cmdutil.findcmd(cmd, table, False)
759 otables.append(entry[1])
759 otables.append(entry[1])
760 for t in otables:
760 for t in otables:
761 for o in t:
761 for o in t:
762 if o[0]:
762 if o[0]:
763 options.append('-%s' % o[0])
763 options.append('-%s' % o[0])
764 options.append('--%s' % o[1])
764 options.append('--%s' % o[1])
765 ui.write("%s\n" % "\n".join(options))
765 ui.write("%s\n" % "\n".join(options))
766 return
766 return
767
767
768 cmdlist = cmdutil.findpossible(cmd, table)
768 cmdlist = cmdutil.findpossible(cmd, table)
769 if ui.verbose:
769 if ui.verbose:
770 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
770 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
771 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
771 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
772
772
773 def debugfsinfo(ui, path="."):
773 def debugfsinfo(ui, path="."):
774 open('.debugfsinfo', 'w').write('')
774 open('.debugfsinfo', 'w').write('')
775 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
775 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
776 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
776 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
777 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
777 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
778 and 'yes' or 'no'))
778 and 'yes' or 'no'))
779 os.unlink('.debugfsinfo')
779 os.unlink('.debugfsinfo')
780
780
781 def debugrebuildstate(ui, repo, rev="tip"):
781 def debugrebuildstate(ui, repo, rev="tip"):
782 """rebuild the dirstate as it would look like for the given revision"""
782 """rebuild the dirstate as it would look like for the given revision"""
783 ctx = repo[rev]
783 ctx = repo[rev]
784 wlock = repo.wlock()
784 wlock = repo.wlock()
785 try:
785 try:
786 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
786 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
787 finally:
787 finally:
788 wlock.release()
788 wlock.release()
789
789
790 def debugcheckstate(ui, repo):
790 def debugcheckstate(ui, repo):
791 """validate the correctness of the current dirstate"""
791 """validate the correctness of the current dirstate"""
792 parent1, parent2 = repo.dirstate.parents()
792 parent1, parent2 = repo.dirstate.parents()
793 m1 = repo[parent1].manifest()
793 m1 = repo[parent1].manifest()
794 m2 = repo[parent2].manifest()
794 m2 = repo[parent2].manifest()
795 errors = 0
795 errors = 0
796 for f in repo.dirstate:
796 for f in repo.dirstate:
797 state = repo.dirstate[f]
797 state = repo.dirstate[f]
798 if state in "nr" and f not in m1:
798 if state in "nr" and f not in m1:
799 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
799 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
800 errors += 1
800 errors += 1
801 if state in "a" and f in m1:
801 if state in "a" and f in m1:
802 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
802 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
803 errors += 1
803 errors += 1
804 if state in "m" and f not in m1 and f not in m2:
804 if state in "m" and f not in m1 and f not in m2:
805 ui.warn(_("%s in state %s, but not in either manifest\n") %
805 ui.warn(_("%s in state %s, but not in either manifest\n") %
806 (f, state))
806 (f, state))
807 errors += 1
807 errors += 1
808 for f in m1:
808 for f in m1:
809 state = repo.dirstate[f]
809 state = repo.dirstate[f]
810 if state not in "nrm":
810 if state not in "nrm":
811 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
811 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
812 errors += 1
812 errors += 1
813 if errors:
813 if errors:
814 error = _(".hg/dirstate inconsistent with current parent's manifest")
814 error = _(".hg/dirstate inconsistent with current parent's manifest")
815 raise util.Abort(error)
815 raise util.Abort(error)
816
816
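# Illustrative summary (not part of the command) of the consistency rules
# checked above, relating dirstate states to the parent manifests:
#   'n' (normal) or 'r' (removed)  -> must be in manifest1
#   'a' (added)                    -> must not be in manifest1
#   'm' (from a merge)             -> must be in manifest1 or manifest2
#   any file in manifest1          -> its state must be 'n', 'r' or 'm'
def _checkstatesketch(state, inm1, inm2):
    """Return True if one (state, manifest membership) combination is valid."""
    if state in 'nr':
        return inm1
    if state == 'a':
        return not inm1
    if state == 'm':
        return inm1 or inm2
    return True

assert _checkstatesketch('a', inm1=False, inm2=False)
assert not _checkstatesketch('n', inm1=False, inm2=False)
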
817 def showconfig(ui, repo, *values, **opts):
817 def showconfig(ui, repo, *values, **opts):
818 """show combined config settings from all hgrc files
818 """show combined config settings from all hgrc files
819
819
820 With no arguments, print names and values of all config items.
820 With no arguments, print names and values of all config items.
821
821
822 With one argument of the form section.name, print just the value
822 With one argument of the form section.name, print just the value
823 of that config item.
823 of that config item.
824
824
825 With multiple arguments, print names and values of all config
825 With multiple arguments, print names and values of all config
826 items with matching section names.
826 items with matching section names.
827
827
828 With --debug, the source (filename and line number) is printed
828 With --debug, the source (filename and line number) is printed
829 for each config item.
829 for each config item.
830 """
830 """
831
831
832 untrusted = bool(opts.get('untrusted'))
832 untrusted = bool(opts.get('untrusted'))
833 if values:
833 if values:
834 if len([v for v in values if '.' in v]) > 1:
834 if len([v for v in values if '.' in v]) > 1:
835 raise util.Abort(_('only one config item permitted'))
835 raise util.Abort(_('only one config item permitted'))
836 for section, name, value in ui.walkconfig(untrusted=untrusted):
836 for section, name, value in ui.walkconfig(untrusted=untrusted):
837 sectname = section + '.' + name
837 sectname = section + '.' + name
838 if values:
838 if values:
839 for v in values:
839 for v in values:
840 if v == section:
840 if v == section:
841 ui.debug('%s: ' %
841 ui.debug('%s: ' %
842 ui.configsource(section, name, untrusted))
842 ui.configsource(section, name, untrusted))
843 ui.write('%s=%s\n' % (sectname, value))
843 ui.write('%s=%s\n' % (sectname, value))
844 elif v == sectname:
844 elif v == sectname:
845 ui.debug('%s: ' %
845 ui.debug('%s: ' %
846 ui.configsource(section, name, untrusted))
846 ui.configsource(section, name, untrusted))
847 ui.write(value, '\n')
847 ui.write(value, '\n')
848 else:
848 else:
849 ui.debug('%s: ' %
849 ui.debug('%s: ' %
850 ui.configsource(section, name, untrusted))
850 ui.configsource(section, name, untrusted))
851 ui.write('%s=%s\n' % (sectname, value))
851 ui.write('%s=%s\n' % (sectname, value))
852
852
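# Small sketch of the filtering above, on hypothetical standalone data: a
# bare section name lists every item in it as "section.name=value", while a
# full "section.name" argument prints only that item's value.
def _showconfigsketch(items, arg):
    """items: iterable of (section, name, value) triples."""
    out = []
    for section, name, value in items:
        sectname = section + '.' + name
        if arg == section:
            out.append('%s=%s' % (sectname, value))
        elif arg == sectname:
            out.append(value)
    return out

_cfgsample = [('ui', 'username', 'Jane Doe'), ('ui', 'verbose', 'True')]
assert _showconfigsketch(_cfgsample, 'ui') == ['ui.username=Jane Doe',
                                               'ui.verbose=True']
assert _showconfigsketch(_cfgsample, 'ui.verbose') == ['True']
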
853 def debugsetparents(ui, repo, rev1, rev2=None):
853 def debugsetparents(ui, repo, rev1, rev2=None):
854 """manually set the parents of the current working directory
854 """manually set the parents of the current working directory
855
855
856 This is useful for writing repository conversion tools, but should
856 This is useful for writing repository conversion tools, but should
857 be used with care.
857 be used with care.
858 """
858 """
859
859
860 if not rev2:
860 if not rev2:
861 rev2 = hex(nullid)
861 rev2 = hex(nullid)
862
862
863 wlock = repo.wlock()
863 wlock = repo.wlock()
864 try:
864 try:
865 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
865 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
866 finally:
866 finally:
867 wlock.release()
867 wlock.release()
868
868
869 def debugstate(ui, repo, nodates=None):
869 def debugstate(ui, repo, nodates=None):
870 """show the contents of the current dirstate"""
870 """show the contents of the current dirstate"""
871 timestr = ""
871 timestr = ""
872 showdate = not nodates
872 showdate = not nodates
873 for file_, ent in sorted(repo.dirstate._map.iteritems()):
873 for file_, ent in sorted(repo.dirstate._map.iteritems()):
874 if showdate:
874 if showdate:
875 if ent[3] == -1:
875 if ent[3] == -1:
876 # Pad or slice to locale representation
876 # Pad or slice to locale representation
877 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
877 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
878 time.localtime(0)))
878 time.localtime(0)))
879 timestr = 'unset'
879 timestr = 'unset'
880 timestr = (timestr[:locale_len] +
880 timestr = (timestr[:locale_len] +
881 ' ' * (locale_len - len(timestr)))
881 ' ' * (locale_len - len(timestr)))
882 else:
882 else:
883 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
883 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
884 time.localtime(ent[3]))
884 time.localtime(ent[3]))
885 if ent[1] & 020000:
885 if ent[1] & 020000:
886 mode = 'lnk'
886 mode = 'lnk'
887 else:
887 else:
888 mode = '%3o' % (ent[1] & 0777)
888 mode = '%3o' % (ent[1] & 0777)
889 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
890 for f in repo.dirstate.copies():
890 for f in repo.dirstate.copies():
891 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
892
892
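# Sketch of the mode decoding above (illustrative only): dirstate entries
# store st_mode, so the symlink bit (checked inline via 020000) selects
# 'lnk' and otherwise the low nine permission bits are printed in octal.
def _modesketch(st_mode):
    import stat                   # local import keeps the sketch standalone
    if stat.S_ISLNK(st_mode):
        return 'lnk'
    return '%3o' % stat.S_IMODE(st_mode)

# a regular rw-r--r-- file shows its permission bits, a symlink shows 'lnk'
assert _modesketch(0100644) == '644'
assert _modesketch(0120777) == 'lnk'
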
893 def debugsub(ui, repo, rev=None):
893 def debugsub(ui, repo, rev=None):
894 if rev == '':
894 if rev == '':
895 rev = None
895 rev = None
896 for k, v in sorted(repo[rev].substate.items()):
896 for k, v in sorted(repo[rev].substate.items()):
897 ui.write('path %s\n' % k)
897 ui.write('path %s\n' % k)
898 ui.write(' source %s\n' % v[0])
898 ui.write(' source %s\n' % v[0])
899 ui.write(' revision %s\n' % v[1])
899 ui.write(' revision %s\n' % v[1])
900
900
901 def debugdata(ui, file_, rev):
901 def debugdata(ui, file_, rev):
902 """dump the contents of a data file revision"""
902 """dump the contents of a data file revision"""
903 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
903 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
904 try:
904 try:
905 ui.write(r.revision(r.lookup(rev)))
905 ui.write(r.revision(r.lookup(rev)))
906 except KeyError:
906 except KeyError:
907 raise util.Abort(_('invalid revision identifier %s') % rev)
907 raise util.Abort(_('invalid revision identifier %s') % rev)
908
908
909 def debugdate(ui, date, range=None, **opts):
909 def debugdate(ui, date, range=None, **opts):
910 """parse and display a date"""
910 """parse and display a date"""
911 if opts["extended"]:
911 if opts["extended"]:
912 d = util.parsedate(date, util.extendeddateformats)
912 d = util.parsedate(date, util.extendeddateformats)
913 else:
913 else:
914 d = util.parsedate(date)
914 d = util.parsedate(date)
915 ui.write("internal: %s %s\n" % d)
915 ui.write("internal: %s %s\n" % d)
916 ui.write("standard: %s\n" % util.datestr(d))
916 ui.write("standard: %s\n" % util.datestr(d))
917 if range:
917 if range:
918 m = util.matchdate(range)
918 m = util.matchdate(range)
919 ui.write("match: %s\n" % m(d[0]))
919 ui.write("match: %s\n" % m(d[0]))
920
920
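# Sketch (not util.parsedate itself) of the two-element date value printed
# as "internal: %s %s" above: a Unix timestamp plus a timezone offset in
# seconds.  The helper merely splits such a pair back into a readable form;
# the exact sign convention belongs to util.datestr().
def _datesketch(internal):
    import time                   # time is also imported module-wide
    timestamp, tzoffset = internal
    return (time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(timestamp)),
            tzoffset / 3600)

# 2010-01-01 00:00:00 UTC with a one-hour (3600 second) offset
assert _datesketch((1262304000, 3600)) == ('2010-01-01 00:00:00', 1)
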
921 def debugindex(ui, file_):
921 def debugindex(ui, file_):
922 """dump the contents of an index file"""
922 """dump the contents of an index file"""
923 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
923 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
924 ui.write(" rev offset length base linkrev"
924 ui.write(" rev offset length base linkrev"
925 " nodeid p1 p2\n")
925 " nodeid p1 p2\n")
926 for i in r:
926 for i in r:
927 node = r.node(i)
927 node = r.node(i)
928 try:
928 try:
929 pp = r.parents(node)
929 pp = r.parents(node)
930 except:
930 except:
931 pp = [nullid, nullid]
931 pp = [nullid, nullid]
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
933 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
933 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
934 short(node), short(pp[0]), short(pp[1])))
934 short(node), short(pp[0]), short(pp[1])))
935
935
936 def debugindexdot(ui, file_):
936 def debugindexdot(ui, file_):
937 """dump an index DAG as a graphviz dot file"""
937 """dump an index DAG as a graphviz dot file"""
938 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
938 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
939 ui.write("digraph G {\n")
939 ui.write("digraph G {\n")
940 for i in r:
940 for i in r:
941 node = r.node(i)
941 node = r.node(i)
942 pp = r.parents(node)
942 pp = r.parents(node)
943 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
943 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
944 if pp[1] != nullid:
944 if pp[1] != nullid:
945 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
945 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
946 ui.write("}\n")
946 ui.write("}\n")
947
947
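# Toy companion to debugindexdot (illustrative only): build the same kind
# of graphviz text from a plain {rev: (p1rev, p2rev)} map, with -1 meaning
# "no parent", so the expected shape of the output is easy to see.
def _dotsketch(parentrevs):
    lines = ["digraph G {"]
    for rev in sorted(parentrevs):
        p1, p2 = parentrevs[rev]
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# a two-changeset history plus a merge of both heads (rev 2):
assert _dotsketch({0: (-1, -1), 1: (0, -1), 2: (0, 1)}) == (
    "digraph G {\n\t-1 -> 0\n\t0 -> 1\n\t0 -> 2\n\t1 -> 2\n}")
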
948 def debuginstall(ui):
948 def debuginstall(ui):
949 '''test Mercurial installation'''
949 '''test Mercurial installation'''
950
950
951 def writetemp(contents):
951 def writetemp(contents):
952 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
952 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
953 f = os.fdopen(fd, "wb")
953 f = os.fdopen(fd, "wb")
954 f.write(contents)
954 f.write(contents)
955 f.close()
955 f.close()
956 return name
956 return name
957
957
958 problems = 0
958 problems = 0
959
959
960 # encoding
960 # encoding
961 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
961 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
962 try:
962 try:
963 encoding.fromlocal("test")
963 encoding.fromlocal("test")
964 except util.Abort, inst:
964 except util.Abort, inst:
965 ui.write(" %s\n" % inst)
965 ui.write(" %s\n" % inst)
966 ui.write(_(" (check that your locale is properly set)\n"))
966 ui.write(_(" (check that your locale is properly set)\n"))
967 problems += 1
967 problems += 1
968
968
969 # compiled modules
969 # compiled modules
970 ui.status(_("Checking extensions...\n"))
970 ui.status(_("Checking extensions...\n"))
971 try:
971 try:
972 import bdiff, mpatch, base85
972 import bdiff, mpatch, base85
973 except Exception, inst:
973 except Exception, inst:
974 ui.write(" %s\n" % inst)
974 ui.write(" %s\n" % inst)
975 ui.write(_(" One or more extensions could not be found"))
975 ui.write(_(" One or more extensions could not be found"))
976 ui.write(_(" (check that you compiled the extensions)\n"))
976 ui.write(_(" (check that you compiled the extensions)\n"))
977 problems += 1
977 problems += 1
978
978
979 # templates
979 # templates
980 ui.status(_("Checking templates...\n"))
980 ui.status(_("Checking templates...\n"))
981 try:
981 try:
982 import templater
982 import templater
983 templater.templater(templater.templatepath("map-cmdline.default"))
983 templater.templater(templater.templatepath("map-cmdline.default"))
984 except Exception, inst:
984 except Exception, inst:
985 ui.write(" %s\n" % inst)
985 ui.write(" %s\n" % inst)
986 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
986 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
987 problems += 1
987 problems += 1
988
988
989 # patch
989 # patch
990 ui.status(_("Checking patch...\n"))
990 ui.status(_("Checking patch...\n"))
991 patchproblems = 0
991 patchproblems = 0
992 a = "1\n2\n3\n4\n"
992 a = "1\n2\n3\n4\n"
993 b = "1\n2\n3\ninsert\n4\n"
993 b = "1\n2\n3\ninsert\n4\n"
994 fa = writetemp(a)
994 fa = writetemp(a)
995 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
995 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
996 os.path.basename(fa))
996 os.path.basename(fa))
997 fd = writetemp(d)
997 fd = writetemp(d)
998
998
999 files = {}
999 files = {}
1000 try:
1000 try:
1001 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1001 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1002 except util.Abort, e:
1002 except util.Abort, e:
1003 ui.write(_(" patch call failed:\n"))
1003 ui.write(_(" patch call failed:\n"))
1004 ui.write(" " + str(e) + "\n")
1004 ui.write(" " + str(e) + "\n")
1005 patchproblems += 1
1005 patchproblems += 1
1006 else:
1006 else:
1007 if list(files) != [os.path.basename(fa)]:
1007 if list(files) != [os.path.basename(fa)]:
1008 ui.write(_(" unexpected patch output!\n"))
1008 ui.write(_(" unexpected patch output!\n"))
1009 patchproblems += 1
1009 patchproblems += 1
1010 a = open(fa).read()
1010 a = open(fa).read()
1011 if a != b:
1011 if a != b:
1012 ui.write(_(" patch test failed!\n"))
1012 ui.write(_(" patch test failed!\n"))
1013 patchproblems += 1
1013 patchproblems += 1
1014
1014
1015 if patchproblems:
1015 if patchproblems:
1016 if ui.config('ui', 'patch'):
1016 if ui.config('ui', 'patch'):
1017 ui.write(_(" (Current patch tool may be incompatible with patch,"
1017 ui.write(_(" (Current patch tool may be incompatible with patch,"
1018 " or misconfigured. Please check your .hgrc file)\n"))
1018 " or misconfigured. Please check your .hgrc file)\n"))
1019 else:
1019 else:
1020 ui.write(_(" Internal patcher failure, please report this error"
1020 ui.write(_(" Internal patcher failure, please report this error"
1021 " to http://mercurial.selenic.com/bts/\n"))
1021 " to http://mercurial.selenic.com/bts/\n"))
1022 problems += patchproblems
1022 problems += patchproblems
1023
1023
1024 os.unlink(fa)
1024 os.unlink(fa)
1025 os.unlink(fd)
1025 os.unlink(fd)
1026
1026
1027 # editor
1027 # editor
1028 ui.status(_("Checking commit editor...\n"))
1028 ui.status(_("Checking commit editor...\n"))
1029 editor = ui.geteditor()
1029 editor = ui.geteditor()
1030 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1030 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1031 if not cmdpath:
1031 if not cmdpath:
1032 if editor == 'vi':
1032 if editor == 'vi':
1033 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1033 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1034 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1034 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1035 else:
1035 else:
1036 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1036 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1037 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1037 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1038 problems += 1
1038 problems += 1
1039
1039
1040 # check username
1040 # check username
1041 ui.status(_("Checking username...\n"))
1041 ui.status(_("Checking username...\n"))
1042 try:
1042 try:
1043 user = ui.username()
1043 user = ui.username()
1044 except util.Abort, e:
1044 except util.Abort, e:
1045 ui.write(" %s\n" % e)
1045 ui.write(" %s\n" % e)
1046 ui.write(_(" (specify a username in your .hgrc file)\n"))
1046 ui.write(_(" (specify a username in your .hgrc file)\n"))
1047 problems += 1
1047 problems += 1
1048
1048
1049 if not problems:
1049 if not problems:
1050 ui.status(_("No problems detected\n"))
1050 ui.status(_("No problems detected\n"))
1051 else:
1051 else:
1052 ui.write(_("%s problems detected,"
1052 ui.write(_("%s problems detected,"
1053 " please check your install!\n") % problems)
1053 " please check your install!\n") % problems)
1054
1054
1055 return problems
1055 return problems
1056
1056
1057 def debugrename(ui, repo, file1, *pats, **opts):
1057 def debugrename(ui, repo, file1, *pats, **opts):
1058 """dump rename information"""
1058 """dump rename information"""
1059
1059
1060 ctx = repo[opts.get('rev')]
1060 ctx = repo[opts.get('rev')]
1061 m = cmdutil.match(repo, (file1,) + pats, opts)
1061 m = cmdutil.match(repo, (file1,) + pats, opts)
1062 for abs in ctx.walk(m):
1062 for abs in ctx.walk(m):
1063 fctx = ctx[abs]
1063 fctx = ctx[abs]
1064 o = fctx.filelog().renamed(fctx.filenode())
1064 o = fctx.filelog().renamed(fctx.filenode())
1065 rel = m.rel(abs)
1065 rel = m.rel(abs)
1066 if o:
1066 if o:
1067 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1067 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1068 else:
1068 else:
1069 ui.write(_("%s not renamed\n") % rel)
1069 ui.write(_("%s not renamed\n") % rel)
1070
1070
1071 def debugwalk(ui, repo, *pats, **opts):
1071 def debugwalk(ui, repo, *pats, **opts):
1072 """show how files match on given patterns"""
1072 """show how files match on given patterns"""
1073 m = cmdutil.match(repo, pats, opts)
1073 m = cmdutil.match(repo, pats, opts)
1074 items = list(repo.walk(m))
1074 items = list(repo.walk(m))
1075 if not items:
1075 if not items:
1076 return
1076 return
1077 fmt = 'f %%-%ds %%-%ds %%s' % (
1077 fmt = 'f %%-%ds %%-%ds %%s' % (
1078 max([len(abs) for abs in items]),
1078 max([len(abs) for abs in items]),
1079 max([len(m.rel(abs)) for abs in items]))
1079 max([len(m.rel(abs)) for abs in items]))
1080 for abs in items:
1080 for abs in items:
1081 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1081 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1082 ui.write("%s\n" % line.rstrip())
1082 ui.write("%s\n" % line.rstrip())
1083
1083
1084 def diff(ui, repo, *pats, **opts):
1084 def diff(ui, repo, *pats, **opts):
1085 """diff repository (or selected files)
1085 """diff repository (or selected files)
1086
1086
1087 Show differences between revisions for the specified files.
1087 Show differences between revisions for the specified files.
1088
1088
1089 Differences between files are shown using the unified diff format.
1089 Differences between files are shown using the unified diff format.
1090
1090
1091 NOTE: diff may generate unexpected results for merges, as it will
1091 NOTE: diff may generate unexpected results for merges, as it will
1092 default to comparing against the working directory's first parent
1092 default to comparing against the working directory's first parent
1093 changeset if no revisions are specified.
1093 changeset if no revisions are specified.
1094
1094
1095 When two revision arguments are given, then changes are shown
1095 When two revision arguments are given, then changes are shown
1096 between those revisions. If only one revision is specified then
1096 between those revisions. If only one revision is specified then
1097 that revision is compared to the working directory, and, when no
1097 that revision is compared to the working directory, and, when no
1098 revisions are specified, the working directory files are compared
1098 revisions are specified, the working directory files are compared
1099 to its parent.
1099 to its parent.
1100
1100
1101 Without the -a/--text option, diff will avoid generating diffs of
1101 Without the -a/--text option, diff will avoid generating diffs of
1102 files it detects as binary. With -a, diff will generate a diff
1102 files it detects as binary. With -a, diff will generate a diff
1103 anyway, probably with undesirable results.
1103 anyway, probably with undesirable results.
1104
1104
1105 Use the -g/--git option to generate diffs in the git extended diff
1105 Use the -g/--git option to generate diffs in the git extended diff
1106 format. For more information, read 'hg help diffs'.
1106 format. For more information, read 'hg help diffs'.
1107 """
1107 """
1108
1108
1109 revs = opts.get('rev')
1109 revs = opts.get('rev')
1110 change = opts.get('change')
1110 change = opts.get('change')
1111 stat = opts.get('stat')
1111 stat = opts.get('stat')
1112 reverse = opts.get('reverse')
1112 reverse = opts.get('reverse')
1113
1113
1114 if revs and change:
1114 if revs and change:
1115 msg = _('cannot specify --rev and --change at the same time')
1115 msg = _('cannot specify --rev and --change at the same time')
1116 raise util.Abort(msg)
1116 raise util.Abort(msg)
1117 elif change:
1117 elif change:
1118 node2 = repo.lookup(change)
1118 node2 = repo.lookup(change)
1119 node1 = repo[node2].parents()[0].node()
1119 node1 = repo[node2].parents()[0].node()
1120 else:
1120 else:
1121 node1, node2 = cmdutil.revpair(repo, revs)
1121 node1, node2 = cmdutil.revpair(repo, revs)
1122
1122
1123 if reverse:
1123 if reverse:
1124 node1, node2 = node2, node1
1124 node1, node2 = node2, node1
1125
1125
1126 if stat:
1126 if stat:
1127 opts['unified'] = '0'
1127 opts['unified'] = '0'
1128 diffopts = patch.diffopts(ui, opts)
1128 diffopts = patch.diffopts(ui, opts)
1129
1129
1130 m = cmdutil.match(repo, pats, opts)
1130 m = cmdutil.match(repo, pats, opts)
1131 it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
1131 it = patch.diff(repo, node1, node2, match=m, opts=diffopts)
1132 if stat:
1132 if stat:
1133 width = ui.interactive() and util.termwidth() or 80
1133 width = ui.interactive() and util.termwidth() or 80
1134 ui.write(patch.diffstat(util.iterlines(it), width=width,
1134 ui.write(patch.diffstat(util.iterlines(it), width=width,
1135 git=diffopts.git))
1135 git=diffopts.git))
1136 else:
1136 else:
1137 for chunk in it:
1137 for chunk in it:
1138 ui.write(chunk)
1138 ui.write(chunk)
1139
1139
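# Hedged sketch of the endpoint selection above, with plain strings standing
# in for changeset nodes: --change compares a revision against its first
# parent, --rev pairs are used as given, and --reverse swaps the two sides.
# parentof/revpair below are hypothetical stand-ins for the repo lookups.
def _diffendpointsketch(revs=None, change=None, reverse=False,
                        parentof=None, revpair=None):
    if revs and change:
        raise ValueError('cannot specify --rev and --change at the same time')
    elif change:
        node1, node2 = parentof(change), change
    else:
        node1, node2 = revpair(revs)
    if reverse:
        node1, node2 = node2, node1
    return node1, node2

_endpoints = _diffendpointsketch(change='X', parentof=lambda n: 'p1-of-' + n)
assert _endpoints == ('p1-of-X', 'X')
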
1140 def export(ui, repo, *changesets, **opts):
1140 def export(ui, repo, *changesets, **opts):
1141 """dump the header and diffs for one or more changesets
1141 """dump the header and diffs for one or more changesets
1142
1142
1143 Print the changeset header and diffs for one or more revisions.
1143 Print the changeset header and diffs for one or more revisions.
1144
1144
1145 The information shown in the changeset header is: author, date,
1145 The information shown in the changeset header is: author, date,
1146 branch name (if non-default), changeset hash, parent(s) and commit
1146 branch name (if non-default), changeset hash, parent(s) and commit
1147 comment.
1147 comment.
1148
1148
1149 NOTE: export may generate unexpected diff output for merge
1149 NOTE: export may generate unexpected diff output for merge
1150 changesets, as it will compare the merge changeset against its
1150 changesets, as it will compare the merge changeset against its
1151 first parent only.
1151 first parent only.
1152
1152
1153 Output may be to a file, in which case the name of the file is
1153 Output may be to a file, in which case the name of the file is
1154 given using a format string. The formatting rules are as follows:
1154 given using a format string. The formatting rules are as follows:
1155
1155
1156 :``%%``: literal "%" character
1156 :``%%``: literal "%" character
1157 :``%H``: changeset hash (40 bytes of hexadecimal)
1157 :``%H``: changeset hash (40 bytes of hexadecimal)
1158 :``%N``: number of patches being generated
1158 :``%N``: number of patches being generated
1159 :``%R``: changeset revision number
1159 :``%R``: changeset revision number
1160 :``%b``: basename of the exporting repository
1160 :``%b``: basename of the exporting repository
1161 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1161 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1162 :``%n``: zero-padded sequence number, starting at 1
1162 :``%n``: zero-padded sequence number, starting at 1
1163 :``%r``: zero-padded changeset revision number
1163 :``%r``: zero-padded changeset revision number
1164
1164
1165 Without the -a/--text option, export will avoid generating diffs
1165 Without the -a/--text option, export will avoid generating diffs
1166 of files it detects as binary. With -a, export will generate a
1166 of files it detects as binary. With -a, export will generate a
1167 diff anyway, probably with undesirable results.
1167 diff anyway, probably with undesirable results.
1168
1168
1169 Use the -g/--git option to generate diffs in the git extended diff
1169 Use the -g/--git option to generate diffs in the git extended diff
1170 format. See 'hg help diffs' for more information.
1170 format. See 'hg help diffs' for more information.
1171
1171
1172 With the --switch-parent option, the diff will be against the
1172 With the --switch-parent option, the diff will be against the
1173 second parent. It can be useful to review a merge.
1173 second parent. It can be useful to review a merge.
1174 """
1174 """
1175 changesets += tuple(opts.get('rev', []))
1175 changesets += tuple(opts.get('rev', []))
1176 if not changesets:
1176 if not changesets:
1177 raise util.Abort(_("export requires at least one changeset"))
1177 raise util.Abort(_("export requires at least one changeset"))
1178 revs = cmdutil.revrange(repo, changesets)
1178 revs = cmdutil.revrange(repo, changesets)
1179 if len(revs) > 1:
1179 if len(revs) > 1:
1180 ui.note(_('exporting patches:\n'))
1180 ui.note(_('exporting patches:\n'))
1181 else:
1181 else:
1182 ui.note(_('exporting patch:\n'))
1182 ui.note(_('exporting patch:\n'))
1183 patch.export(repo, revs, template=opts.get('output'),
1183 patch.export(repo, revs, template=opts.get('output'),
1184 switch_parent=opts.get('switch_parent'),
1184 switch_parent=opts.get('switch_parent'),
1185 opts=patch.diffopts(ui, opts))
1185 opts=patch.diffopts(ui, opts))
1186
1186
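# Hypothetical illustration of the export -o format keys documented above;
# the real expansion happens inside patch.export().  The zero-padding width
# below is arbitrary, chosen only for the example.
def _exportnamesketch(fmt, rev, shorthash, seqno, total):
    return (fmt.replace('%R', str(rev))
               .replace('%h', shorthash)
               .replace('%n', '%02d' % seqno)
               .replace('%N', str(total)))

# exporting the 2nd of 5 patches for revision 42, short hash 1234567890ab:
_name = _exportnamesketch('%n-of-%N-rev%R-%h.patch', 42, '1234567890ab', 2, 5)
assert _name == '02-of-5-rev42-1234567890ab.patch'
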
1187 def forget(ui, repo, *pats, **opts):
1187 def forget(ui, repo, *pats, **opts):
1188 """forget the specified files on the next commit
1188 """forget the specified files on the next commit
1189
1189
1190 Mark the specified files so they will no longer be tracked
1190 Mark the specified files so they will no longer be tracked
1191 after the next commit.
1191 after the next commit.
1192
1192
1193 This only removes files from the current branch, not from the
1193 This only removes files from the current branch, not from the
1194 entire project history, and it does not delete them from the
1194 entire project history, and it does not delete them from the
1195 working directory.
1195 working directory.
1196
1196
1197 To undo a forget before the next commit, see hg add.
1197 To undo a forget before the next commit, see hg add.
1198 """
1198 """
1199
1199
1200 if not pats:
1200 if not pats:
1201 raise util.Abort(_('no files specified'))
1201 raise util.Abort(_('no files specified'))
1202
1202
1203 m = cmdutil.match(repo, pats, opts)
1203 m = cmdutil.match(repo, pats, opts)
1204 s = repo.status(match=m, clean=True)
1204 s = repo.status(match=m, clean=True)
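    # repo.status() order: modified, added, removed, deleted, unknown,
    # ignored, clean -- forget acts on modified, added, deleted and clean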
1205 forget = sorted(s[0] + s[1] + s[3] + s[6])
1205 forget = sorted(s[0] + s[1] + s[3] + s[6])
1206
1206
1207 for f in m.files():
1207 for f in m.files():
1208 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1208 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1209 ui.warn(_('not removing %s: file is already untracked\n')
1209 ui.warn(_('not removing %s: file is already untracked\n')
1210 % m.rel(f))
1210 % m.rel(f))
1211
1211
1212 for f in forget:
1212 for f in forget:
1213 if ui.verbose or not m.exact(f):
1213 if ui.verbose or not m.exact(f):
1214 ui.status(_('removing %s\n') % m.rel(f))
1214 ui.status(_('removing %s\n') % m.rel(f))
1215
1215
1216 repo.remove(forget, unlink=False)
1216 repo.remove(forget, unlink=False)
1217
1217
1218 def grep(ui, repo, pattern, *pats, **opts):
1218 def grep(ui, repo, pattern, *pats, **opts):
1219 """search for a pattern in specified files and revisions
1219 """search for a pattern in specified files and revisions
1220
1220
1221 Search revisions of files for a regular expression.
1221 Search revisions of files for a regular expression.
1222
1222
1223 This command behaves differently than Unix grep. It only accepts
1223 This command behaves differently than Unix grep. It only accepts
1224 Python/Perl regexps. It searches repository history, not the
1224 Python/Perl regexps. It searches repository history, not the
1225 working directory. It always prints the revision number in which a
1225 working directory. It always prints the revision number in which a
1226 match appears.
1226 match appears.
1227
1227
1228 By default, grep only prints output for the first revision of a
1228 By default, grep only prints output for the first revision of a
1229 file in which it finds a match. To get it to print every revision
1229 file in which it finds a match. To get it to print every revision
1230 that contains a change in match status ("-" for a match that
1230 that contains a change in match status ("-" for a match that
1231 becomes a non-match, or "+" for a non-match that becomes a match),
1231 becomes a non-match, or "+" for a non-match that becomes a match),
1232 use the --all flag.
1232 use the --all flag.
1233 """
1233 """
1234 reflags = 0
1234 reflags = 0
1235 if opts.get('ignore_case'):
1235 if opts.get('ignore_case'):
1236 reflags |= re.I
1236 reflags |= re.I
1237 try:
1237 try:
1238 regexp = re.compile(pattern, reflags)
1238 regexp = re.compile(pattern, reflags)
1239 except Exception, inst:
1239 except Exception, inst:
1240 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1240 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1241 return None
1241 return None
1242 sep, eol = ':', '\n'
1242 sep, eol = ':', '\n'
1243 if opts.get('print0'):
1243 if opts.get('print0'):
1244 sep = eol = '\0'
1244 sep = eol = '\0'
1245
1245
1246 getfile = util.lrucachefunc(repo.file)
1246 getfile = util.lrucachefunc(repo.file)
1247
1247
1248 def matchlines(body):
1248 def matchlines(body):
1249 begin = 0
1249 begin = 0
1250 linenum = 0
1250 linenum = 0
1251 while True:
1251 while True:
1252 match = regexp.search(body, begin)
1252 match = regexp.search(body, begin)
1253 if not match:
1253 if not match:
1254 break
1254 break
1255 mstart, mend = match.span()
1255 mstart, mend = match.span()
1256 linenum += body.count('\n', begin, mstart) + 1
1256 linenum += body.count('\n', begin, mstart) + 1
1257 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1257 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1258 begin = body.find('\n', mend) + 1 or len(body)
1258 begin = body.find('\n', mend) + 1 or len(body)
1259 lend = begin - 1
1259 lend = begin - 1
1260 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1260 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1261
1261
1262 class linestate(object):
1262 class linestate(object):
1263 def __init__(self, line, linenum, colstart, colend):
1263 def __init__(self, line, linenum, colstart, colend):
1264 self.line = line
1264 self.line = line
1265 self.linenum = linenum
1265 self.linenum = linenum
1266 self.colstart = colstart
1266 self.colstart = colstart
1267 self.colend = colend
1267 self.colend = colend
1268
1268
1269 def __hash__(self):
1269 def __hash__(self):
1270 return hash((self.linenum, self.line))
1270 return hash((self.linenum, self.line))
1271
1271
1272 def __eq__(self, other):
1272 def __eq__(self, other):
1273 return self.line == other.line
1273 return self.line == other.line
1274
1274
1275 matches = {}
1275 matches = {}
1276 copies = {}
1276 copies = {}
1277 def grepbody(fn, rev, body):
1277 def grepbody(fn, rev, body):
1278 matches[rev].setdefault(fn, [])
1278 matches[rev].setdefault(fn, [])
1279 m = matches[rev][fn]
1279 m = matches[rev][fn]
1280 for lnum, cstart, cend, line in matchlines(body):
1280 for lnum, cstart, cend, line in matchlines(body):
1281 s = linestate(line, lnum, cstart, cend)
1281 s = linestate(line, lnum, cstart, cend)
1282 m.append(s)
1282 m.append(s)
1283
1283
1284 def difflinestates(a, b):
1284 def difflinestates(a, b):
1285 sm = difflib.SequenceMatcher(None, a, b)
1285 sm = difflib.SequenceMatcher(None, a, b)
1286 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1286 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1287 if tag == 'insert':
1287 if tag == 'insert':
1288 for i in xrange(blo, bhi):
1288 for i in xrange(blo, bhi):
1289 yield ('+', b[i])
1289 yield ('+', b[i])
1290 elif tag == 'delete':
1290 elif tag == 'delete':
1291 for i in xrange(alo, ahi):
1291 for i in xrange(alo, ahi):
1292 yield ('-', a[i])
1292 yield ('-', a[i])
1293 elif tag == 'replace':
1293 elif tag == 'replace':
1294 for i in xrange(alo, ahi):
1294 for i in xrange(alo, ahi):
1295 yield ('-', a[i])
1295 yield ('-', a[i])
1296 for i in xrange(blo, bhi):
1296 for i in xrange(blo, bhi):
1297 yield ('+', b[i])
1297 yield ('+', b[i])
1298
1298
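# Added annotation (not part of the original source): difflinestates() is what
# feeds the "+"/"-" column of --all output.  A minimal standalone sketch of the
# same idea, using plain strings instead of linestate objects:
#
#     import difflib
#     sm = difflib.SequenceMatcher(None, ['foo', 'bar'], ['foo', 'baz'])
#     for tag, alo, ahi, blo, bhi in sm.get_opcodes():
#         print tag          # 'equal' for 'foo', 'replace' for 'bar' -> 'baz'
#
# A 'replace' opcode yields '-' for each old line and '+' for each new one.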
1299 def display(fn, ctx, pstates, states):
1299 def display(fn, ctx, pstates, states):
1300 rev = ctx.rev()
1300 rev = ctx.rev()
1301 datefunc = ui.quiet and util.shortdate or util.datestr
1301 datefunc = ui.quiet and util.shortdate or util.datestr
1302 found = False
1302 found = False
1303 filerevmatches = {}
1303 filerevmatches = {}
1304 if opts.get('all'):
1304 if opts.get('all'):
1305 iter = difflinestates(pstates, states)
1305 iter = difflinestates(pstates, states)
1306 else:
1306 else:
1307 iter = [('', l) for l in states]
1307 iter = [('', l) for l in states]
1308 for change, l in iter:
1308 for change, l in iter:
1309 cols = [fn, str(rev)]
1309 cols = [fn, str(rev)]
1310 if opts.get('line_number'):
1310 if opts.get('line_number'):
1311 cols.append(str(l.linenum))
1311 cols.append(str(l.linenum))
1312 if opts.get('all'):
1312 if opts.get('all'):
1313 cols.append(change)
1313 cols.append(change)
1314 if opts.get('user'):
1314 if opts.get('user'):
1315 cols.append(ui.shortuser(ctx.user()))
1315 cols.append(ui.shortuser(ctx.user()))
1316 if opts.get('date'):
1316 if opts.get('date'):
1317 cols.append(datefunc(ctx.date()))
1317 cols.append(datefunc(ctx.date()))
1318 if opts.get('files_with_matches'):
1318 if opts.get('files_with_matches'):
1319 c = (fn, rev)
1319 c = (fn, rev)
1320 if c in filerevmatches:
1320 if c in filerevmatches:
1321 continue
1321 continue
1322 filerevmatches[c] = 1
1322 filerevmatches[c] = 1
1323 else:
1323 else:
1324 cols.append(l.line)
1324 cols.append(l.line)
1325 ui.write(sep.join(cols), eol)
1325 ui.write(sep.join(cols), eol)
1326 found = True
1326 found = True
1327 return found
1327 return found
1328
1328
1329 skip = {}
1329 skip = {}
1330 revfiles = {}
1330 revfiles = {}
1331 matchfn = cmdutil.match(repo, pats, opts)
1331 matchfn = cmdutil.match(repo, pats, opts)
1332 found = False
1332 found = False
1333 follow = opts.get('follow')
1333 follow = opts.get('follow')
1334
1334
1335 def prep(ctx, fns):
1335 def prep(ctx, fns):
1336 rev = ctx.rev()
1336 rev = ctx.rev()
1337 pctx = ctx.parents()[0]
1337 pctx = ctx.parents()[0]
1338 parent = pctx.rev()
1338 parent = pctx.rev()
1339 matches.setdefault(rev, {})
1339 matches.setdefault(rev, {})
1340 matches.setdefault(parent, {})
1340 matches.setdefault(parent, {})
1341 files = revfiles.setdefault(rev, [])
1341 files = revfiles.setdefault(rev, [])
1342 for fn in fns:
1342 for fn in fns:
1343 flog = getfile(fn)
1343 flog = getfile(fn)
1344 try:
1344 try:
1345 fnode = ctx.filenode(fn)
1345 fnode = ctx.filenode(fn)
1346 except error.LookupError:
1346 except error.LookupError:
1347 continue
1347 continue
1348
1348
1349 copied = flog.renamed(fnode)
1349 copied = flog.renamed(fnode)
1350 copy = follow and copied and copied[0]
1350 copy = follow and copied and copied[0]
1351 if copy:
1351 if copy:
1352 copies.setdefault(rev, {})[fn] = copy
1352 copies.setdefault(rev, {})[fn] = copy
1353 if fn in skip:
1353 if fn in skip:
1354 if copy:
1354 if copy:
1355 skip[copy] = True
1355 skip[copy] = True
1356 continue
1356 continue
1357 files.append(fn)
1357 files.append(fn)
1358
1358
1359 if fn not in matches[rev]:
1359 if fn not in matches[rev]:
1360 grepbody(fn, rev, flog.read(fnode))
1360 grepbody(fn, rev, flog.read(fnode))
1361
1361
1362 pfn = copy or fn
1362 pfn = copy or fn
1363 if pfn not in matches[parent]:
1363 if pfn not in matches[parent]:
1364 try:
1364 try:
1365 fnode = pctx.filenode(pfn)
1365 fnode = pctx.filenode(pfn)
1366 grepbody(pfn, parent, flog.read(fnode))
1366 grepbody(pfn, parent, flog.read(fnode))
1367 except error.LookupError:
1367 except error.LookupError:
1368 pass
1368 pass
1369
1369
1370 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1370 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1371 rev = ctx.rev()
1371 rev = ctx.rev()
1372 parent = ctx.parents()[0].rev()
1372 parent = ctx.parents()[0].rev()
1373 for fn in sorted(revfiles.get(rev, [])):
1373 for fn in sorted(revfiles.get(rev, [])):
1374 states = matches[rev][fn]
1374 states = matches[rev][fn]
1375 copy = copies.get(rev, {}).get(fn)
1375 copy = copies.get(rev, {}).get(fn)
1376 if fn in skip:
1376 if fn in skip:
1377 if copy:
1377 if copy:
1378 skip[copy] = True
1378 skip[copy] = True
1379 continue
1379 continue
1380 pstates = matches.get(parent, {}).get(copy or fn, [])
1380 pstates = matches.get(parent, {}).get(copy or fn, [])
1381 if pstates or states:
1381 if pstates or states:
1382 r = display(fn, ctx, pstates, states)
1382 r = display(fn, ctx, pstates, states)
1383 found = found or r
1383 found = found or r
1384 if r and not opts.get('all'):
1384 if r and not opts.get('all'):
1385 skip[fn] = True
1385 skip[fn] = True
1386 if copy:
1386 if copy:
1387 skip[copy] = True
1387 skip[copy] = True
1388 del matches[rev]
1388 del matches[rev]
1389 del revfiles[rev]
1389 del revfiles[rev]
1390
1390
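# Added usage sketch (assumption, not from the original file): with --all, grep
# walks the file history and prints a record for every revision in which the
# match status of a line flips, e.g.
#
#     hg grep --all --line-number pattern
#
# emits "file:rev:linenum:+/-:text" style records, the '+'/'-' column coming
# from difflinestates() above.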
1391 def heads(ui, repo, *branchrevs, **opts):
1391 def heads(ui, repo, *branchrevs, **opts):
1392 """show current repository heads or show branch heads
1392 """show current repository heads or show branch heads
1393
1393
1394 With no arguments, show all repository branch heads.
1394 With no arguments, show all repository branch heads.
1395
1395
1396 Repository "heads" are changesets with no child changesets. They are
1396 Repository "heads" are changesets with no child changesets. They are
1397 where development generally takes place and are the usual targets
1397 where development generally takes place and are the usual targets
1398 for update and merge operations. Branch heads are changesets that have
1398 for update and merge operations. Branch heads are changesets that have
1399 no child changeset on the same branch.
1399 no child changeset on the same branch.
1400
1400
1401 If one or more REVs are given, only branch heads on the branches
1401 If one or more REVs are given, only branch heads on the branches
1402 associated with the specified changesets are shown.
1402 associated with the specified changesets are shown.
1403
1403
1404 If -c/--closed is specified, also show branch heads marked closed
1404 If -c/--closed is specified, also show branch heads marked closed
1405 (see hg commit --close-branch).
1405 (see hg commit --close-branch).
1406
1406
1407 If STARTREV is specified, only those heads that are descendants of
1407 If STARTREV is specified, only those heads that are descendants of
1408 STARTREV will be displayed.
1408 STARTREV will be displayed.
1409
1409
1410 If -t/--topo is specified, named branch mechanics will be ignored and only
1410 If -t/--topo is specified, named branch mechanics will be ignored and only
1411 changesets without children will be shown.
1411 changesets without children will be shown.
1412 """
1412 """
1413
1413
1414 if opts.get('rev'):
1414 if opts.get('rev'):
1415 start = repo.lookup(opts['rev'])
1415 start = repo.lookup(opts['rev'])
1416 else:
1416 else:
1417 start = None
1417 start = None
1418
1418
1419 if opts.get('topo'):
1419 if opts.get('topo'):
1420 heads = [repo[h] for h in repo.heads(start)]
1420 heads = [repo[h] for h in repo.heads(start)]
1421 else:
1421 else:
1422 heads = []
1422 heads = []
1423 for b, ls in repo.branchmap().iteritems():
1423 for b, ls in repo.branchmap().iteritems():
1424 if start is None:
1424 if start is None:
1425 heads += [repo[h] for h in ls]
1425 heads += [repo[h] for h in ls]
1426 continue
1426 continue
1427 startrev = repo.changelog.rev(start)
1427 startrev = repo.changelog.rev(start)
1428 descendants = set(repo.changelog.descendants(startrev))
1428 descendants = set(repo.changelog.descendants(startrev))
1429 descendants.add(startrev)
1429 descendants.add(startrev)
1430 rev = repo.changelog.rev
1430 rev = repo.changelog.rev
1431 heads += [repo[h] for h in ls if rev(h) in descendants]
1431 heads += [repo[h] for h in ls if rev(h) in descendants]
1432
1432
1433 if branchrevs:
1433 if branchrevs:
1434 decode, encode = encoding.fromlocal, encoding.tolocal
1434 decode, encode = encoding.fromlocal, encoding.tolocal
1435 branches = set(repo[decode(br)].branch() for br in branchrevs)
1435 branches = set(repo[decode(br)].branch() for br in branchrevs)
1436 heads = [h for h in heads if h.branch() in branches]
1436 heads = [h for h in heads if h.branch() in branches]
1437
1437
1438 if not opts.get('closed'):
1438 if not opts.get('closed'):
1439 heads = [h for h in heads if not h.extra().get('close')]
1439 heads = [h for h in heads if not h.extra().get('close')]
1440
1440
1441 if opts.get('active') and branchrevs:
1441 if opts.get('active') and branchrevs:
1442 dagheads = repo.heads(start)
1442 dagheads = repo.heads(start)
1443 heads = [h for h in heads if h.node() in dagheads]
1443 heads = [h for h in heads if h.node() in dagheads]
1444
1444
1445 if branchrevs:
1445 if branchrevs:
1446 haveheads = set(h.branch() for h in heads)
1446 haveheads = set(h.branch() for h in heads)
1447 if branches - haveheads:
1447 if branches - haveheads:
1448 headless = ', '.join(encode(b) for b in branches - haveheads)
1448 headless = ', '.join(encode(b) for b in branches - haveheads)
1449 msg = _('no open branch heads found on branches %s')
1449 msg = _('no open branch heads found on branches %s')
1450 if opts.get('rev'):
1450 if opts.get('rev'):
1451 msg += _(' (started at %s)') % opts['rev']
1451 msg += _(' (started at %s)') % opts['rev']
1452 ui.warn((msg + '\n') % headless)
1452 ui.warn((msg + '\n') % headless)
1453
1453
1454 if not heads:
1454 if not heads:
1455 return 1
1455 return 1
1456
1456
1457 heads = sorted(heads, key=lambda x: -x.rev())
1457 heads = sorted(heads, key=lambda x: -x.rev())
1458 displayer = cmdutil.show_changeset(ui, repo, opts)
1458 displayer = cmdutil.show_changeset(ui, repo, opts)
1459 for ctx in heads:
1459 for ctx in heads:
1460 displayer.show(ctx)
1460 displayer.show(ctx)
1461 displayer.close()
1461 displayer.close()
1462
1462
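# Added usage sketch (assumption, not from the original file), matching the
# options documented in the docstring above:
#
#     hg heads             # open branch heads of every named branch
#     hg heads --topo      # ignore named branches, list childless changesets
#     hg heads --closed default   # include closed heads of the default branch
#
# The command returns 1 when no matching heads are found.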
1463 def help_(ui, name=None, with_version=False, unknowncmd=False):
1463 def help_(ui, name=None, with_version=False, unknowncmd=False):
1464 """show help for a given topic or a help overview
1464 """show help for a given topic or a help overview
1465
1465
1466 With no arguments, print a list of commands with short help messages.
1466 With no arguments, print a list of commands with short help messages.
1467
1467
1468 Given a topic, extension, or command name, print help for that
1468 Given a topic, extension, or command name, print help for that
1469 topic."""
1469 topic."""
1470 option_lists = []
1470 option_lists = []
1471 textwidth = util.termwidth() - 2
1471 textwidth = util.termwidth() - 2
1472
1472
1473 def addglobalopts(aliases):
1473 def addglobalopts(aliases):
1474 if ui.verbose:
1474 if ui.verbose:
1475 option_lists.append((_("global options:"), globalopts))
1475 option_lists.append((_("global options:"), globalopts))
1476 if name == 'shortlist':
1476 if name == 'shortlist':
1477 option_lists.append((_('use "hg help" for the full list '
1477 option_lists.append((_('use "hg help" for the full list '
1478 'of commands'), ()))
1478 'of commands'), ()))
1479 else:
1479 else:
1480 if name == 'shortlist':
1480 if name == 'shortlist':
1481 msg = _('use "hg help" for the full list of commands '
1481 msg = _('use "hg help" for the full list of commands '
1482 'or "hg -v" for details')
1482 'or "hg -v" for details')
1483 elif aliases:
1483 elif aliases:
1484 msg = _('use "hg -v help%s" to show aliases and '
1484 msg = _('use "hg -v help%s" to show aliases and '
1485 'global options') % (name and " " + name or "")
1485 'global options') % (name and " " + name or "")
1486 else:
1486 else:
1487 msg = _('use "hg -v help %s" to show global options') % name
1487 msg = _('use "hg -v help %s" to show global options') % name
1488 option_lists.append((msg, ()))
1488 option_lists.append((msg, ()))
1489
1489
1490 def helpcmd(name):
1490 def helpcmd(name):
1491 if with_version:
1491 if with_version:
1492 version_(ui)
1492 version_(ui)
1493 ui.write('\n')
1493 ui.write('\n')
1494
1494
1495 try:
1495 try:
1496 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1496 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1497 except error.AmbiguousCommand, inst:
1497 except error.AmbiguousCommand, inst:
1498 # py3k fix: except vars can't be used outside the scope of the
1498 # py3k fix: except vars can't be used outside the scope of the
1499 # except block, nor can be used inside a lambda. python issue4617
1499 # except block, nor can be used inside a lambda. python issue4617
1500 prefix = inst.args[0]
1500 prefix = inst.args[0]
1501 select = lambda c: c.lstrip('^').startswith(prefix)
1501 select = lambda c: c.lstrip('^').startswith(prefix)
1502 helplist(_('list of commands:\n\n'), select)
1502 helplist(_('list of commands:\n\n'), select)
1503 return
1503 return
1504
1504
1505 # check if it's an invalid alias and display its error if it is
1505 # check if it's an invalid alias and display its error if it is
1506 if getattr(entry[0], 'badalias', False):
1506 if getattr(entry[0], 'badalias', False):
1507 if not unknowncmd:
1507 if not unknowncmd:
1508 entry[0](ui)
1508 entry[0](ui)
1509 return
1509 return
1510
1510
1511 # synopsis
1511 # synopsis
1512 if len(entry) > 2:
1512 if len(entry) > 2:
1513 if entry[2].startswith('hg'):
1513 if entry[2].startswith('hg'):
1514 ui.write("%s\n" % entry[2])
1514 ui.write("%s\n" % entry[2])
1515 else:
1515 else:
1516 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1516 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1517 else:
1517 else:
1518 ui.write('hg %s\n' % aliases[0])
1518 ui.write('hg %s\n' % aliases[0])
1519
1519
1520 # aliases
1520 # aliases
1521 if not ui.quiet and len(aliases) > 1:
1521 if not ui.quiet and len(aliases) > 1:
1522 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1522 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1523
1523
1524 # description
1524 # description
1525 doc = gettext(entry[0].__doc__)
1525 doc = gettext(entry[0].__doc__)
1526 if not doc:
1526 if not doc:
1527 doc = _("(no help text available)")
1527 doc = _("(no help text available)")
1528 if ui.quiet:
1528 if ui.quiet:
1529 doc = doc.splitlines()[0]
1529 doc = doc.splitlines()[0]
1530 ui.write("\n%s\n" % minirst.format(doc, textwidth))
1530 ui.write("\n%s\n" % minirst.format(doc, textwidth))
1531
1531
1532 if not ui.quiet:
1532 if not ui.quiet:
1533 # options
1533 # options
1534 if entry[1]:
1534 if entry[1]:
1535 option_lists.append((_("options:\n"), entry[1]))
1535 option_lists.append((_("options:\n"), entry[1]))
1536
1536
1537 addglobalopts(False)
1537 addglobalopts(False)
1538
1538
1539 def helplist(header, select=None):
1539 def helplist(header, select=None):
1540 h = {}
1540 h = {}
1541 cmds = {}
1541 cmds = {}
1542 for c, e in table.iteritems():
1542 for c, e in table.iteritems():
1543 f = c.split("|", 1)[0]
1543 f = c.split("|", 1)[0]
1544 if select and not select(f):
1544 if select and not select(f):
1545 continue
1545 continue
1546 if (not select and name != 'shortlist' and
1546 if (not select and name != 'shortlist' and
1547 e[0].__module__ != __name__):
1547 e[0].__module__ != __name__):
1548 continue
1548 continue
1549 if name == "shortlist" and not f.startswith("^"):
1549 if name == "shortlist" and not f.startswith("^"):
1550 continue
1550 continue
1551 f = f.lstrip("^")
1551 f = f.lstrip("^")
1552 if not ui.debugflag and f.startswith("debug"):
1552 if not ui.debugflag and f.startswith("debug"):
1553 continue
1553 continue
1554 doc = e[0].__doc__
1554 doc = e[0].__doc__
1555 if doc and 'DEPRECATED' in doc and not ui.verbose:
1555 if doc and 'DEPRECATED' in doc and not ui.verbose:
1556 continue
1556 continue
1557 doc = gettext(doc)
1557 doc = gettext(doc)
1558 if not doc:
1558 if not doc:
1559 doc = _("(no help text available)")
1559 doc = _("(no help text available)")
1560 h[f] = doc.splitlines()[0].rstrip()
1560 h[f] = doc.splitlines()[0].rstrip()
1561 cmds[f] = c.lstrip("^")
1561 cmds[f] = c.lstrip("^")
1562
1562
1563 if not h:
1563 if not h:
1564 ui.status(_('no commands defined\n'))
1564 ui.status(_('no commands defined\n'))
1565 return
1565 return
1566
1566
1567 ui.status(header)
1567 ui.status(header)
1568 fns = sorted(h)
1568 fns = sorted(h)
1569 m = max(map(len, fns))
1569 m = max(map(len, fns))
1570 for f in fns:
1570 for f in fns:
1571 if ui.verbose:
1571 if ui.verbose:
1572 commands = cmds[f].replace("|", ", ")
1572 commands = cmds[f].replace("|", ", ")
1573 ui.write(" %s:\n %s\n" % (commands, h[f]))
1573 ui.write(" %s:\n %s\n" % (commands, h[f]))
1574 else:
1574 else:
1575 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1575 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1576
1576
1577 if not ui.quiet:
1577 if not ui.quiet:
1578 addglobalopts(True)
1578 addglobalopts(True)
1579
1579
1580 def helptopic(name):
1580 def helptopic(name):
1581 for names, header, doc in help.helptable:
1581 for names, header, doc in help.helptable:
1582 if name in names:
1582 if name in names:
1583 break
1583 break
1584 else:
1584 else:
1585 raise error.UnknownCommand(name)
1585 raise error.UnknownCommand(name)
1586
1586
1587 # description
1587 # description
1588 if not doc:
1588 if not doc:
1589 doc = _("(no help text available)")
1589 doc = _("(no help text available)")
1590 if hasattr(doc, '__call__'):
1590 if hasattr(doc, '__call__'):
1591 doc = doc()
1591 doc = doc()
1592
1592
1593 ui.write("%s\n\n" % header)
1593 ui.write("%s\n\n" % header)
1594 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1594 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1595
1595
1596 def helpext(name):
1596 def helpext(name):
1597 try:
1597 try:
1598 mod = extensions.find(name)
1598 mod = extensions.find(name)
1599 doc = gettext(mod.__doc__) or _('no help text available')
1599 doc = gettext(mod.__doc__) or _('no help text available')
1600 except KeyError:
1600 except KeyError:
1601 mod = None
1601 mod = None
1602 doc = extensions.disabledext(name)
1602 doc = extensions.disabledext(name)
1603 if not doc:
1603 if not doc:
1604 raise error.UnknownCommand(name)
1604 raise error.UnknownCommand(name)
1605
1605
1606 if '\n' not in doc:
1606 if '\n' not in doc:
1607 head, tail = doc, ""
1607 head, tail = doc, ""
1608 else:
1608 else:
1609 head, tail = doc.split('\n', 1)
1609 head, tail = doc.split('\n', 1)
1610 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1610 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1611 if tail:
1611 if tail:
1612 ui.write(minirst.format(tail, textwidth))
1612 ui.write(minirst.format(tail, textwidth))
1613 ui.status('\n\n')
1613 ui.status('\n\n')
1614
1614
1615 if mod:
1615 if mod:
1616 try:
1616 try:
1617 ct = mod.cmdtable
1617 ct = mod.cmdtable
1618 except AttributeError:
1618 except AttributeError:
1619 ct = {}
1619 ct = {}
1620 modcmds = set([c.split('|', 1)[0] for c in ct])
1620 modcmds = set([c.split('|', 1)[0] for c in ct])
1621 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1621 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1622 else:
1622 else:
1623 ui.write(_('use "hg help extensions" for information on enabling '
1623 ui.write(_('use "hg help extensions" for information on enabling '
1624 'extensions\n'))
1624 'extensions\n'))
1625
1625
1626 def helpextcmd(name):
1626 def helpextcmd(name):
1627 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1627 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1628 doc = gettext(mod.__doc__).splitlines()[0]
1628 doc = gettext(mod.__doc__).splitlines()[0]
1629
1629
1630 msg = help.listexts(_("'%s' is provided by the following "
1630 msg = help.listexts(_("'%s' is provided by the following "
1631 "extension:") % cmd, {ext: doc}, len(ext),
1631 "extension:") % cmd, {ext: doc}, len(ext),
1632 indent=4)
1632 indent=4)
1633 ui.write(minirst.format(msg, textwidth))
1633 ui.write(minirst.format(msg, textwidth))
1634 ui.write('\n\n')
1634 ui.write('\n\n')
1635 ui.write(_('use "hg help extensions" for information on enabling '
1635 ui.write(_('use "hg help extensions" for information on enabling '
1636 'extensions\n'))
1636 'extensions\n'))
1637
1637
1638 if name and name != 'shortlist':
1638 if name and name != 'shortlist':
1639 i = None
1639 i = None
1640 if unknowncmd:
1640 if unknowncmd:
1641 queries = (helpextcmd,)
1641 queries = (helpextcmd,)
1642 else:
1642 else:
1643 queries = (helptopic, helpcmd, helpext, helpextcmd)
1643 queries = (helptopic, helpcmd, helpext, helpextcmd)
1644 for f in queries:
1644 for f in queries:
1645 try:
1645 try:
1646 f(name)
1646 f(name)
1647 i = None
1647 i = None
1648 break
1648 break
1649 except error.UnknownCommand, inst:
1649 except error.UnknownCommand, inst:
1650 i = inst
1650 i = inst
1651 if i:
1651 if i:
1652 raise i
1652 raise i
1653
1653
1654 else:
1654 else:
1655 # program name
1655 # program name
1656 if ui.verbose or with_version:
1656 if ui.verbose or with_version:
1657 version_(ui)
1657 version_(ui)
1658 else:
1658 else:
1659 ui.status(_("Mercurial Distributed SCM\n"))
1659 ui.status(_("Mercurial Distributed SCM\n"))
1660 ui.status('\n')
1660 ui.status('\n')
1661
1661
1662 # list of commands
1662 # list of commands
1663 if name == "shortlist":
1663 if name == "shortlist":
1664 header = _('basic commands:\n\n')
1664 header = _('basic commands:\n\n')
1665 else:
1665 else:
1666 header = _('list of commands:\n\n')
1666 header = _('list of commands:\n\n')
1667
1667
1668 helplist(header)
1668 helplist(header)
1669 if name != 'shortlist':
1669 if name != 'shortlist':
1670 exts, maxlength = extensions.enabled()
1670 exts, maxlength = extensions.enabled()
1671 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1671 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1672 if text:
1672 if text:
1673 ui.write("\n%s\n" % minirst.format(text, textwidth))
1673 ui.write("\n%s\n" % minirst.format(text, textwidth))
1674
1674
1675 # list all option lists
1675 # list all option lists
1676 opt_output = []
1676 opt_output = []
1677 for title, options in option_lists:
1677 for title, options in option_lists:
1678 opt_output.append(("\n%s" % title, None))
1678 opt_output.append(("\n%s" % title, None))
1679 for shortopt, longopt, default, desc in options:
1679 for shortopt, longopt, default, desc in options:
1680 if _("DEPRECATED") in desc and not ui.verbose:
1680 if _("DEPRECATED") in desc and not ui.verbose:
1681 continue
1681 continue
1682 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1682 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1683 longopt and " --%s" % longopt),
1683 longopt and " --%s" % longopt),
1684 "%s%s" % (desc,
1684 "%s%s" % (desc,
1685 default
1685 default
1686 and _(" (default: %s)") % default
1686 and _(" (default: %s)") % default
1687 or "")))
1687 or "")))
1688
1688
1689 if not name:
1689 if not name:
1690 ui.write(_("\nadditional help topics:\n\n"))
1690 ui.write(_("\nadditional help topics:\n\n"))
1691 topics = []
1691 topics = []
1692 for names, header, doc in help.helptable:
1692 for names, header, doc in help.helptable:
1693 topics.append((sorted(names, key=len, reverse=True)[0], header))
1693 topics.append((sorted(names, key=len, reverse=True)[0], header))
1694 topics_len = max([len(s[0]) for s in topics])
1694 topics_len = max([len(s[0]) for s in topics])
1695 for t, desc in topics:
1695 for t, desc in topics:
1696 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1696 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1697
1697
1698 if opt_output:
1698 if opt_output:
1699 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1699 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1700 for first, second in opt_output:
1700 for first, second in opt_output:
1701 if second:
1701 if second:
1702 second = util.wrap(second, opts_len + 3)
1702 second = util.wrap(second, opts_len + 3)
1703 ui.write(" %-*s %s\n" % (opts_len, first, second))
1703 ui.write(" %-*s %s\n" % (opts_len, first, second))
1704 else:
1704 else:
1705 ui.write("%s\n" % first)
1705 ui.write("%s\n" % first)
1706
1706
1707 def identify(ui, repo, source=None,
1707 def identify(ui, repo, source=None,
1708 rev=None, num=None, id=None, branch=None, tags=None):
1708 rev=None, num=None, id=None, branch=None, tags=None):
1709 """identify the working copy or specified revision
1709 """identify the working copy or specified revision
1710
1710
1711 With no revision, print a summary of the current state of the
1711 With no revision, print a summary of the current state of the
1712 repository.
1712 repository.
1713
1713
1714 Specifying a path to a repository root or Mercurial bundle will
1714 Specifying a path to a repository root or Mercurial bundle will
1715 cause lookup to operate on that repository/bundle.
1715 cause lookup to operate on that repository/bundle.
1716
1716
1717 This summary identifies the repository state using one or two
1717 This summary identifies the repository state using one or two
1718 parent hash identifiers, followed by a "+" if there are
1718 parent hash identifiers, followed by a "+" if there are
1719 uncommitted changes in the working directory, a list of tags for
1719 uncommitted changes in the working directory, a list of tags for
1720 this revision and a branch name for non-default branches.
1720 this revision and a branch name for non-default branches.
1721 """
1721 """
1722
1722
1723 if not repo and not source:
1723 if not repo and not source:
1724 raise util.Abort(_("There is no Mercurial repository here "
1724 raise util.Abort(_("There is no Mercurial repository here "
1725 "(.hg not found)"))
1725 "(.hg not found)"))
1726
1726
1727 hexfunc = ui.debugflag and hex or short
1727 hexfunc = ui.debugflag and hex or short
1728 default = not (num or id or branch or tags)
1728 default = not (num or id or branch or tags)
1729 output = []
1729 output = []
1730
1730
1731 revs = []
1731 revs = []
1732 if source:
1732 if source:
1733 source, branches = hg.parseurl(ui.expandpath(source))
1733 source, branches = hg.parseurl(ui.expandpath(source))
1734 repo = hg.repository(ui, source)
1734 repo = hg.repository(ui, source)
1735 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1735 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1736
1736
1737 if not repo.local():
1737 if not repo.local():
1738 if not rev and revs:
1738 if not rev and revs:
1739 rev = revs[0]
1739 rev = revs[0]
1740 if not rev:
1740 if not rev:
1741 rev = "tip"
1741 rev = "tip"
1742 if num or branch or tags:
1742 if num or branch or tags:
1743 raise util.Abort(
1743 raise util.Abort(
1744 "can't query remote revision number, branch, or tags")
1744 "can't query remote revision number, branch, or tags")
1745 output = [hexfunc(repo.lookup(rev))]
1745 output = [hexfunc(repo.lookup(rev))]
1746 elif not rev:
1746 elif not rev:
1747 ctx = repo[None]
1747 ctx = repo[None]
1748 parents = ctx.parents()
1748 parents = ctx.parents()
1749 changed = False
1749 changed = False
1750 if default or id or num:
1750 if default or id or num:
1751 changed = ctx.files() + ctx.deleted()
1751 changed = ctx.files() + ctx.deleted()
1752 if default or id:
1752 if default or id:
1753 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1753 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1754 (changed) and "+" or "")]
1754 (changed) and "+" or "")]
1755 if num:
1755 if num:
1756 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1756 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1757 (changed) and "+" or ""))
1757 (changed) and "+" or ""))
1758 else:
1758 else:
1759 ctx = repo[rev]
1759 ctx = repo[rev]
1760 if default or id:
1760 if default or id:
1761 output = [hexfunc(ctx.node())]
1761 output = [hexfunc(ctx.node())]
1762 if num:
1762 if num:
1763 output.append(str(ctx.rev()))
1763 output.append(str(ctx.rev()))
1764
1764
1765 if repo.local() and default and not ui.quiet:
1765 if repo.local() and default and not ui.quiet:
1766 b = encoding.tolocal(ctx.branch())
1766 b = encoding.tolocal(ctx.branch())
1767 if b != 'default':
1767 if b != 'default':
1768 output.append("(%s)" % b)
1768 output.append("(%s)" % b)
1769
1769
1770 # multiple tags for a single parent separated by '/'
1770 # multiple tags for a single parent separated by '/'
1771 t = "/".join(ctx.tags())
1771 t = "/".join(ctx.tags())
1772 if t:
1772 if t:
1773 output.append(t)
1773 output.append(t)
1774
1774
1775 if branch:
1775 if branch:
1776 output.append(encoding.tolocal(ctx.branch()))
1776 output.append(encoding.tolocal(ctx.branch()))
1777
1777
1778 if tags:
1778 if tags:
1779 output.extend(ctx.tags())
1779 output.extend(ctx.tags())
1780
1780
1781 ui.write("%s\n" % ' '.join(output))
1781 ui.write("%s\n" % ' '.join(output))
1782
1782
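# Added output sketch (illustrative, hash made up): on a dirty working copy of
# a non-default branch with a tag, plain "hg identify" printed by the code
# above would look roughly like
#
#     d4f5e6a7b8c9+ (stable) 1.4/tip
#
# i.e. short parent hash plus "+", the branch in parentheses when it is not
# "default", and the tags joined by "/".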
1783 def import_(ui, repo, patch1, *patches, **opts):
1783 def import_(ui, repo, patch1, *patches, **opts):
1784 """import an ordered set of patches
1784 """import an ordered set of patches
1785
1785
1786 Import a list of patches and commit them individually (unless
1786 Import a list of patches and commit them individually (unless
1787 --no-commit is specified).
1787 --no-commit is specified).
1788
1788
1789 If there are outstanding changes in the working directory, import
1789 If there are outstanding changes in the working directory, import
1790 will abort unless given the -f/--force flag.
1790 will abort unless given the -f/--force flag.
1791
1791
1792 You can import a patch straight from a mail message. Even patches
1792 You can import a patch straight from a mail message. Even patches
1793 as attachments work (to use the body part, it must have type
1793 as attachments work (to use the body part, it must have type
1794 text/plain or text/x-patch). The From and Subject headers of the email
1794 text/plain or text/x-patch). The From and Subject headers of the email
1795 message are used as the default committer and commit message. All
1795 message are used as the default committer and commit message. All
1796 text/plain body parts before the first diff are added to the commit
1796 text/plain body parts before the first diff are added to the commit
1797 message.
1797 message.
1798
1798
1799 If the imported patch was generated by hg export, user and
1799 If the imported patch was generated by hg export, user and
1800 description from patch override values from message headers and
1800 description from patch override values from message headers and
1801 body. Values given on command line with -m/--message and -u/--user
1801 body. Values given on command line with -m/--message and -u/--user
1802 override these.
1802 override these.
1803
1803
1804 If --exact is specified, import will set the working directory to
1804 If --exact is specified, import will set the working directory to
1805 the parent of each patch before applying it, and will abort if the
1805 the parent of each patch before applying it, and will abort if the
1806 resulting changeset has a different ID than the one recorded in
1806 resulting changeset has a different ID than the one recorded in
1807 the patch. This may happen due to character set problems or other
1807 the patch. This may happen due to character set problems or other
1808 deficiencies in the text patch format.
1808 deficiencies in the text patch format.
1809
1809
1810 With -s/--similarity, hg will attempt to discover renames and
1810 With -s/--similarity, hg will attempt to discover renames and
1811 copies in the patch in the same way as 'addremove'.
1811 copies in the patch in the same way as 'addremove'.
1812
1812
1813 To read a patch from standard input, use "-" as the patch name. If
1813 To read a patch from standard input, use "-" as the patch name. If
1814 a URL is specified, the patch will be downloaded from it.
1814 a URL is specified, the patch will be downloaded from it.
1815 See 'hg help dates' for a list of formats valid for -d/--date.
1815 See 'hg help dates' for a list of formats valid for -d/--date.
1816 """
1816 """
1817 patches = (patch1,) + patches
1817 patches = (patch1,) + patches
1818
1818
1819 date = opts.get('date')
1819 date = opts.get('date')
1820 if date:
1820 if date:
1821 opts['date'] = util.parsedate(date)
1821 opts['date'] = util.parsedate(date)
1822
1822
1823 try:
1823 try:
1824 sim = float(opts.get('similarity') or 0)
1824 sim = float(opts.get('similarity') or 0)
1825 except ValueError:
1825 except ValueError:
1826 raise util.Abort(_('similarity must be a number'))
1826 raise util.Abort(_('similarity must be a number'))
1827 if sim < 0 or sim > 100:
1827 if sim < 0 or sim > 100:
1828 raise util.Abort(_('similarity must be between 0 and 100'))
1828 raise util.Abort(_('similarity must be between 0 and 100'))
1829
1829
1830 if opts.get('exact') or not opts.get('force'):
1830 if opts.get('exact') or not opts.get('force'):
1831 cmdutil.bail_if_changed(repo)
1831 cmdutil.bail_if_changed(repo)
1832
1832
1833 d = opts["base"]
1833 d = opts["base"]
1834 strip = opts["strip"]
1834 strip = opts["strip"]
1835 wlock = lock = None
1835 wlock = lock = None
1836
1836
1837 def tryone(ui, hunk):
1837 def tryone(ui, hunk):
1838 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, hunk)
1838 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, hunk)
1839
1839
1840 if not tmpname:
1840 if not tmpname:
1841 return None
1841 return None
1842 commitid = _('to working directory')
1842 commitid = _('to working directory')
1843
1843
1844 try:
1844 try:
1845 cmdline_message = cmdutil.logmessage(opts)
1845 cmdline_message = cmdutil.logmessage(opts)
1846 if cmdline_message:
1846 if cmdline_message:
1847 # pickup the cmdline msg
1847 # pickup the cmdline msg
1848 message = cmdline_message
1848 message = cmdline_message
1849 elif message:
1849 elif message:
1850 # pickup the patch msg
1850 # pickup the patch msg
1851 message = message.strip()
1851 message = message.strip()
1852 else:
1852 else:
1853 # launch the editor
1853 # launch the editor
1854 message = None
1854 message = None
1855 ui.debug('message:\n%s\n' % message)
1855 ui.debug('message:\n%s\n' % message)
1856
1856
1857 wp = repo.parents()
1857 wp = repo.parents()
1858 if opts.get('exact'):
1858 if opts.get('exact'):
1859 if not nodeid or not p1:
1859 if not nodeid or not p1:
1860 raise util.Abort(_('not a Mercurial patch'))
1860 raise util.Abort(_('not a Mercurial patch'))
1861 p1 = repo.lookup(p1)
1861 p1 = repo.lookup(p1)
1862 p2 = repo.lookup(p2 or hex(nullid))
1862 p2 = repo.lookup(p2 or hex(nullid))
1863
1863
1864 if p1 != wp[0].node():
1864 if p1 != wp[0].node():
1865 hg.clean(repo, p1)
1865 hg.clean(repo, p1)
1866 repo.dirstate.setparents(p1, p2)
1866 repo.dirstate.setparents(p1, p2)
1867 elif p2:
1867 elif p2:
1868 try:
1868 try:
1869 p1 = repo.lookup(p1)
1869 p1 = repo.lookup(p1)
1870 p2 = repo.lookup(p2)
1870 p2 = repo.lookup(p2)
1871 if p1 == wp[0].node():
1871 if p1 == wp[0].node():
1872 repo.dirstate.setparents(p1, p2)
1872 repo.dirstate.setparents(p1, p2)
1873 except error.RepoError:
1873 except error.RepoError:
1874 pass
1874 pass
1875 if opts.get('exact') or opts.get('import_branch'):
1875 if opts.get('exact') or opts.get('import_branch'):
1876 repo.dirstate.setbranch(branch or 'default')
1876 repo.dirstate.setbranch(branch or 'default')
1877
1877
1878 files = {}
1878 files = {}
1879 try:
1879 try:
1880 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1880 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1881 files=files, eolmode=None)
1881 files=files, eolmode=None)
1882 finally:
1882 finally:
1883 files = patch.updatedir(ui, repo, files,
1883 files = patch.updatedir(ui, repo, files,
1884 similarity=sim / 100.0)
1884 similarity=sim / 100.0)
1885 if not opts.get('no_commit'):
1885 if not opts.get('no_commit'):
1886 if opts.get('exact'):
1886 if opts.get('exact'):
1887 m = None
1887 m = None
1888 else:
1888 else:
1889 m = cmdutil.matchfiles(repo, files or [])
1889 m = cmdutil.matchfiles(repo, files or [])
1890 n = repo.commit(message, opts.get('user') or user,
1890 n = repo.commit(message, opts.get('user') or user,
1891 opts.get('date') or date, match=m,
1891 opts.get('date') or date, match=m,
1892 editor=cmdutil.commiteditor)
1892 editor=cmdutil.commiteditor)
1893 if opts.get('exact'):
1893 if opts.get('exact'):
1894 if hex(n) != nodeid:
1894 if hex(n) != nodeid:
1895 repo.rollback()
1895 repo.rollback()
1896 raise util.Abort(_('patch is damaged'
1896 raise util.Abort(_('patch is damaged'
1897 ' or loses information'))
1897 ' or loses information'))
1898 # Force a dirstate write so that the next transaction
1898 # Force a dirstate write so that the next transaction
1899 # backs up an up-to-date file.
1899 # backs up an up-to-date file.
1900 repo.dirstate.write()
1900 repo.dirstate.write()
1901 if n:
1901 if n:
1902 commitid = short(n)
1902 commitid = short(n)
1903
1903
1904 return commitid
1904 return commitid
1905 finally:
1905 finally:
1906 os.unlink(tmpname)
1906 os.unlink(tmpname)
1907
1907
1908 try:
1908 try:
1909 wlock = repo.wlock()
1909 wlock = repo.wlock()
1910 lock = repo.lock()
1910 lock = repo.lock()
1911 lastcommit = None
1911 lastcommit = None
1912 for p in patches:
1912 for p in patches:
1913 pf = os.path.join(d, p)
1913 pf = os.path.join(d, p)
1914
1914
1915 if pf == '-':
1915 if pf == '-':
1916 ui.status(_("applying patch from stdin\n"))
1916 ui.status(_("applying patch from stdin\n"))
1917 pf = sys.stdin
1917 pf = sys.stdin
1918 else:
1918 else:
1919 ui.status(_("applying %s\n") % p)
1919 ui.status(_("applying %s\n") % p)
1920 pf = url.open(ui, pf)
1920 pf = url.open(ui, pf)
1921
1921
1922 haspatch = False
1922 haspatch = False
1923 for hunk in patch.split(pf):
1923 for hunk in patch.split(pf):
1924 commitid = tryone(ui, hunk)
1924 commitid = tryone(ui, hunk)
1925 if commitid:
1925 if commitid:
1926 haspatch = True
1926 haspatch = True
1927 if lastcommit:
1927 if lastcommit:
1928 ui.status(_('applied %s\n') % lastcommit)
1928 ui.status(_('applied %s\n') % lastcommit)
1929 lastcommit = commitid
1929 lastcommit = commitid
1930
1930
1931 if not haspatch:
1931 if not haspatch:
1932 raise util.Abort(_('no diffs found'))
1932 raise util.Abort(_('no diffs found'))
1933
1933
1934 finally:
1934 finally:
1935 release(lock, wlock)
1935 release(lock, wlock)
1936
1936
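# Added usage sketch (assumption, not from the original file):
#
#     hg import --exact bugfix.patch
#     hg export tip | ssh other-host hg -R /srv/repo import -
#
# Every hunk returned by patch.split() is passed to tryone() above, so a file
# or mbox containing several patches results in several commits.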
1937 def incoming(ui, repo, source="default", **opts):
1937 def incoming(ui, repo, source="default", **opts):
1938 """show new changesets found in source
1938 """show new changesets found in source
1939
1939
1940 Show new changesets found in the specified path/URL or the default
1940 Show new changesets found in the specified path/URL or the default
1941 pull location. These are the changesets that would have been pulled
1941 pull location. These are the changesets that would have been pulled
1942 if a pull was requested at the time you issued this command.
1942 if a pull was requested at the time you issued this command.
1943
1943
1944 For a remote repository, using --bundle avoids downloading the
1944 For a remote repository, using --bundle avoids downloading the
1945 changesets twice if the incoming is followed by a pull.
1945 changesets twice if the incoming is followed by a pull.
1946
1946
1947 See pull for valid source format details.
1947 See pull for valid source format details.
1948 """
1948 """
1949 limit = cmdutil.loglimit(opts)
1949 limit = cmdutil.loglimit(opts)
1950 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
1950 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
1951 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1951 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1952 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1952 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1953 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
1953 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
1954 if revs:
1954 if revs:
1955 revs = [other.lookup(rev) for rev in revs]
1955 revs = [other.lookup(rev) for rev in revs]
1956 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1956 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1957 force=opts["force"])
1957 force=opts["force"])
1958 if not incoming:
1958 if not incoming:
1959 try:
1959 try:
1960 os.unlink(opts["bundle"])
1960 os.unlink(opts["bundle"])
1961 except OSError:
1961 except OSError:
1962 pass
1962 pass
1963 ui.status(_("no changes found\n"))
1963 ui.status(_("no changes found\n"))
1964 return 1
1964 return 1
1965
1965
1966 cleanup = None
1966 cleanup = None
1967 try:
1967 try:
1968 fname = opts["bundle"]
1968 fname = opts["bundle"]
1969 if fname or not other.local():
1969 if fname or not other.local():
1970 # create a bundle (uncompressed if other repo is not local)
1970 # create a bundle (uncompressed if other repo is not local)
1971
1971
1972 if revs is None and other.capable('changegroupsubset'):
1972 if revs is None and other.capable('changegroupsubset'):
1973 revs = rheads
1973 revs = rheads
1974
1974
1975 if revs is None:
1975 if revs is None:
1976 cg = other.changegroup(incoming, "incoming")
1976 cg = other.changegroup(incoming, "incoming")
1977 else:
1977 else:
1978 cg = other.changegroupsubset(incoming, revs, 'incoming')
1978 cg = other.changegroupsubset(incoming, revs, 'incoming')
1979 bundletype = other.local() and "HG10BZ" or "HG10UN"
1979 bundletype = other.local() and "HG10BZ" or "HG10UN"
1980 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1980 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1981 # keep written bundle?
1981 # keep written bundle?
1982 if opts["bundle"]:
1982 if opts["bundle"]:
1983 cleanup = None
1983 cleanup = None
1984 if not other.local():
1984 if not other.local():
1985 # use the created uncompressed bundlerepo
1985 # use the created uncompressed bundlerepo
1986 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1986 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1987
1987
1988 o = other.changelog.nodesbetween(incoming, revs)[0]
1988 o = other.changelog.nodesbetween(incoming, revs)[0]
1989 if opts.get('newest_first'):
1989 if opts.get('newest_first'):
1990 o.reverse()
1990 o.reverse()
1991 displayer = cmdutil.show_changeset(ui, other, opts)
1991 displayer = cmdutil.show_changeset(ui, other, opts)
1992 count = 0
1992 count = 0
1993 for n in o:
1993 for n in o:
1994 if limit is not None and count >= limit:
1994 if limit is not None and count >= limit:
1995 break
1995 break
1996 parents = [p for p in other.changelog.parents(n) if p != nullid]
1996 parents = [p for p in other.changelog.parents(n) if p != nullid]
1997 if opts.get('no_merges') and len(parents) == 2:
1997 if opts.get('no_merges') and len(parents) == 2:
1998 continue
1998 continue
1999 count += 1
1999 count += 1
2000 displayer.show(other[n])
2000 displayer.show(other[n])
2001 displayer.close()
2001 displayer.close()
2002 finally:
2002 finally:
2003 if hasattr(other, 'close'):
2003 if hasattr(other, 'close'):
2004 other.close()
2004 other.close()
2005 if cleanup:
2005 if cleanup:
2006 os.unlink(cleanup)
2006 os.unlink(cleanup)
2007
2007
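# Added usage sketch (assumption, not from the original file): keep the fetched
# changesets around so a later pull does not download them again:
#
#     hg incoming --bundle incoming.hg http://example.com/repo
#     hg pull incoming.hg
#
# For non-local sources a temporary bundle is written anyway; the finally:
# block above removes it unless --bundle was given.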
2008 def init(ui, dest=".", **opts):
2008 def init(ui, dest=".", **opts):
2009 """create a new repository in the given directory
2009 """create a new repository in the given directory
2010
2010
2011 Initialize a new repository in the given directory. If the given
2011 Initialize a new repository in the given directory. If the given
2012 directory does not exist, it will be created.
2012 directory does not exist, it will be created.
2013
2013
2014 If no directory is given, the current directory is used.
2014 If no directory is given, the current directory is used.
2015
2015
2016 It is possible to specify an ``ssh://`` URL as the destination.
2016 It is possible to specify an ``ssh://`` URL as the destination.
2017 See 'hg help urls' for more information.
2017 See 'hg help urls' for more information.
2018 """
2018 """
2019 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2019 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2020
2020
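# Added example sketch (assumption): both forms go through the single
# hg.repository(..., create=1) call above, the second creating the repository
# on the remote side over ssh:
#
#     hg init my-project
#     hg init ssh://user@example.com//srv/hg/my-project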
2021 def locate(ui, repo, *pats, **opts):
2021 def locate(ui, repo, *pats, **opts):
2022 """locate files matching specific patterns
2022 """locate files matching specific patterns
2023
2023
2024 Print files under Mercurial control in the working directory whose
2024 Print files under Mercurial control in the working directory whose
2025 names match the given patterns.
2025 names match the given patterns.
2026
2026
2027 By default, this command searches all directories in the working
2027 By default, this command searches all directories in the working
2028 directory. To search just the current directory and its
2028 directory. To search just the current directory and its
2029 subdirectories, use "--include .".
2029 subdirectories, use "--include .".
2030
2030
2031 If no patterns are given to match, this command prints the names
2031 If no patterns are given to match, this command prints the names
2032 of all files under Mercurial control in the working directory.
2032 of all files under Mercurial control in the working directory.
2033
2033
2034 If you want to feed the output of this command into the "xargs"
2034 If you want to feed the output of this command into the "xargs"
2035 command, use the -0 option to both this command and "xargs". This
2035 command, use the -0 option to both this command and "xargs". This
2036 will avoid the problem of "xargs" treating single filenames that
2036 will avoid the problem of "xargs" treating single filenames that
2037 contain whitespace as multiple filenames.
2037 contain whitespace as multiple filenames.
2038 """
2038 """
2039 end = opts.get('print0') and '\0' or '\n'
2039 end = opts.get('print0') and '\0' or '\n'
2040 rev = opts.get('rev') or None
2040 rev = opts.get('rev') or None
2041
2041
2042 ret = 1
2042 ret = 1
2043 m = cmdutil.match(repo, pats, opts, default='relglob')
2043 m = cmdutil.match(repo, pats, opts, default='relglob')
2044 m.bad = lambda x, y: False
2044 m.bad = lambda x, y: False
2045 for abs in repo[rev].walk(m):
2045 for abs in repo[rev].walk(m):
2046 if not rev and abs not in repo.dirstate:
2046 if not rev and abs not in repo.dirstate:
2047 continue
2047 continue
2048 if opts.get('fullpath'):
2048 if opts.get('fullpath'):
2049 ui.write(repo.wjoin(abs), end)
2049 ui.write(repo.wjoin(abs), end)
2050 else:
2050 else:
2051 ui.write(((pats and m.rel(abs)) or abs), end)
2051 ui.write(((pats and m.rel(abs)) or abs), end)
2052 ret = 0
2052 ret = 0
2053
2053
2054 return ret
2054 return ret
2055
2055
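# Added usage sketch (assumption, not from the original file): the -0 output
# documented above pairs with "xargs -0" so filenames containing whitespace
# survive the pipe:
#
#     hg locate -0 '**.py' | xargs -0 grep -l TODO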
2056 def log(ui, repo, *pats, **opts):
2056 def log(ui, repo, *pats, **opts):
2057 """show revision history of entire repository or files
2057 """show revision history of entire repository or files
2058
2058
2059 Print the revision history of the specified files or the entire
2059 Print the revision history of the specified files or the entire
2060 project.
2060 project.
2061
2061
2062 File history is shown without following rename or copy history of
2062 File history is shown without following rename or copy history of
2063 files. Use -f/--follow with a filename to follow history across
2063 files. Use -f/--follow with a filename to follow history across
2064 renames and copies. --follow without a filename will only show
2064 renames and copies. --follow without a filename will only show
2065 ancestors or descendants of the starting revision. --follow-first
2065 ancestors or descendants of the starting revision. --follow-first
2066 only follows the first parent of merge revisions.
2066 only follows the first parent of merge revisions.
2067
2067
2068 If no revision range is specified, the default is tip:0 unless
2068 If no revision range is specified, the default is tip:0 unless
2069 --follow is set, in which case the working directory parent is
2069 --follow is set, in which case the working directory parent is
2070 used as the starting revision.
2070 used as the starting revision.
2071
2071
2072 See 'hg help dates' for a list of formats valid for -d/--date.
2072 See 'hg help dates' for a list of formats valid for -d/--date.
2073
2073
2074 By default this command prints revision number and changeset id,
2074 By default this command prints revision number and changeset id,
2075 tags, non-trivial parents, user, date and time, and a summary for
2075 tags, non-trivial parents, user, date and time, and a summary for
2076 each commit. When the -v/--verbose switch is used, the list of
2076 each commit. When the -v/--verbose switch is used, the list of
2077 changed files and full commit message are shown.
2077 changed files and full commit message are shown.
2078
2078
2079 NOTE: log -p/--patch may generate unexpected diff output for merge
2079 NOTE: log -p/--patch may generate unexpected diff output for merge
2080 changesets, as it will only compare the merge changeset against
2080 changesets, as it will only compare the merge changeset against
2081 its first parent. Also, only files different from BOTH parents
2081 its first parent. Also, only files different from BOTH parents
2082 will appear in files:.
2082 will appear in files:.
2083 """
2083 """
2084
2084
2085 matchfn = cmdutil.match(repo, pats, opts)
2085 matchfn = cmdutil.match(repo, pats, opts)
2086 limit = cmdutil.loglimit(opts)
2086 limit = cmdutil.loglimit(opts)
2087 count = 0
2087 count = 0
2088
2088
2089 endrev = None
2089 endrev = None
2090 if opts.get('copies') and opts.get('rev'):
2090 if opts.get('copies') and opts.get('rev'):
2091 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2091 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2092
2092
2093 df = False
2093 df = False
2094 if opts["date"]:
2094 if opts["date"]:
2095 df = util.matchdate(opts["date"])
2095 df = util.matchdate(opts["date"])
2096
2096
2097 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2097 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2098 def prep(ctx, fns):
2098 def prep(ctx, fns):
2099 rev = ctx.rev()
2099 rev = ctx.rev()
2100 parents = [p for p in repo.changelog.parentrevs(rev)
2100 parents = [p for p in repo.changelog.parentrevs(rev)
2101 if p != nullrev]
2101 if p != nullrev]
2102 if opts.get('no_merges') and len(parents) == 2:
2102 if opts.get('no_merges') and len(parents) == 2:
2103 return
2103 return
2104 if opts.get('only_merges') and len(parents) != 2:
2104 if opts.get('only_merges') and len(parents) != 2:
2105 return
2105 return
2106 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2106 if opts.get('only_branch') and ctx.branch() not in opts['only_branch']:
2107 return
2107 return
2108 if df and not df(ctx.date()[0]):
2108 if df and not df(ctx.date()[0]):
2109 return
2109 return
2110 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2110 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2111 return
2111 return
2112 if opts.get('keyword'):
2112 if opts.get('keyword'):
2113 for k in [kw.lower() for kw in opts['keyword']]:
2113 for k in [kw.lower() for kw in opts['keyword']]:
2114 if (k in ctx.user().lower() or
2114 if (k in ctx.user().lower() or
2115 k in ctx.description().lower() or
2115 k in ctx.description().lower() or
2116 k in " ".join(ctx.files()).lower()):
2116 k in " ".join(ctx.files()).lower()):
2117 break
2117 break
2118 else:
2118 else:
2119 return
2119 return
2120
2120
2121 copies = None
2121 copies = None
2122 if opts.get('copies') and rev:
2122 if opts.get('copies') and rev:
2123 copies = []
2123 copies = []
2124 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2124 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2125 for fn in ctx.files():
2125 for fn in ctx.files():
2126 rename = getrenamed(fn, rev)
2126 rename = getrenamed(fn, rev)
2127 if rename:
2127 if rename:
2128 copies.append((fn, rename[0]))
2128 copies.append((fn, rename[0]))
2129
2129
2130 displayer.show(ctx, copies=copies)
2130 displayer.show(ctx, copies=copies)
2131
2131
2132 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2132 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2133 if count == limit:
2133 if count == limit:
2134 break
2134 break
2135 if displayer.flush(ctx.rev()):
2135 if displayer.flush(ctx.rev()):
2136 count += 1
2136 count += 1
2137 displayer.close()
2137 displayer.close()
2138
2138
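# Added annotation (not part of the original source): prep() above only
# filters; --limit is enforced in the loop via displayer.flush(), so it counts
# displayed changesets rather than visited ones.  Typical invocations covered
# by the docstring:
#
#     hg log -v -r 100:0 somefile
#     hg log -k bugfix -d "2010-01-01"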
2139 def manifest(ui, repo, node=None, rev=None):
2139 def manifest(ui, repo, node=None, rev=None):
2140 """output the current or given revision of the project manifest
2140 """output the current or given revision of the project manifest
2141
2141
2142 Print a list of version controlled files for the given revision.
2142 Print a list of version controlled files for the given revision.
2143 If no revision is given, the first parent of the working directory
2143 If no revision is given, the first parent of the working directory
2144 is used, or the null revision if no revision is checked out.
2144 is used, or the null revision if no revision is checked out.
2145
2145
2146 With -v, print file permissions, symlink and executable bits.
2146 With -v, print file permissions, symlink and executable bits.
2147 With --debug, print file revision hashes.
2147 With --debug, print file revision hashes.
2148 """
2148 """
2149
2149
2150 if rev and node:
2150 if rev and node:
2151 raise util.Abort(_("please specify just one revision"))
2151 raise util.Abort(_("please specify just one revision"))
2152
2152
2153 if not node:
2153 if not node:
2154 node = rev
2154 node = rev
2155
2155
2156 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2156 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2157 ctx = repo[node]
2157 ctx = repo[node]
2158 for f in ctx:
2158 for f in ctx:
2159 if ui.debugflag:
2159 if ui.debugflag:
2160 ui.write("%40s " % hex(ctx.manifest()[f]))
2160 ui.write("%40s " % hex(ctx.manifest()[f]))
2161 if ui.verbose:
2161 if ui.verbose:
2162 ui.write(decor[ctx.flags(f)])
2162 ui.write(decor[ctx.flags(f)])
2163 ui.write("%s\n" % f)
2163 ui.write("%s\n" % f)
2164
2164
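# Added output sketch (illustrative): with -v the decor table above prefixes
# each filename with its mode and a symlink/executable marker, e.g.
#
#     644 README
#     755 * setup.py
#     644 @ docs-link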
2165 def merge(ui, repo, node=None, **opts):
2165 def merge(ui, repo, node=None, **opts):
2166 """merge working directory with another revision
2166 """merge working directory with another revision
2167
2167
2168 The current working directory is updated with all changes made in
2168 The current working directory is updated with all changes made in
2169 the requested revision since the last common predecessor revision.
2169 the requested revision since the last common predecessor revision.
2170
2170
2171 Files that changed relative to either parent are marked as changed for
2171 Files that changed relative to either parent are marked as changed for
2172 the next commit and a commit must be performed before any further
2172 the next commit and a commit must be performed before any further
2173 updates to the repository are allowed. The next commit will have
2173 updates to the repository are allowed. The next commit will have
2174 two parents.
2174 two parents.
2175
2175
2176 If no revision is specified, the working directory's parent is a
2176 If no revision is specified, the working directory's parent is a
2177 head revision, and the current branch contains exactly one other
2177 head revision, and the current branch contains exactly one other
2178 head, the other head is merged with by default. Otherwise, an
2178 head, the other head is merged with by default. Otherwise, an
2179 explicit revision with which to merge must be provided.
2179 explicit revision with which to merge must be provided.
2180 """
2180 """
2181
2181
2182 if opts.get('rev') and node:
2182 if opts.get('rev') and node:
2183 raise util.Abort(_("please specify just one revision"))
2183 raise util.Abort(_("please specify just one revision"))
2184 if not node:
2184 if not node:
2185 node = opts.get('rev')
2185 node = opts.get('rev')
2186
2186
2187 if not node:
2187 if not node:
2188 branch = repo.changectx(None).branch()
2188 branch = repo.changectx(None).branch()
2189 bheads = repo.branchheads(branch)
2189 bheads = repo.branchheads(branch)
2190 if len(bheads) > 2:
2190 if len(bheads) > 2:
2191 ui.warn(_("abort: branch '%s' has %d heads - "
2191 ui.warn(_("abort: branch '%s' has %d heads - "
2192 "please merge with an explicit rev\n")
2192 "please merge with an explicit rev\n")
2193 % (branch, len(bheads)))
2193 % (branch, len(bheads)))
2194 ui.status(_("(run 'hg heads .' to see heads)\n"))
2194 ui.status(_("(run 'hg heads .' to see heads)\n"))
2195 return False
2195 return False
2196
2196
2197 parent = repo.dirstate.parents()[0]
2197 parent = repo.dirstate.parents()[0]
2198 if len(bheads) == 1:
2198 if len(bheads) == 1:
2199 if len(repo.heads()) > 1:
2199 if len(repo.heads()) > 1:
2200 ui.warn(_("abort: branch '%s' has one head - "
2200 ui.warn(_("abort: branch '%s' has one head - "
2201 "please merge with an explicit rev\n") % branch)
2201 "please merge with an explicit rev\n") % branch)
2202 ui.status(_("(run 'hg heads' to see all heads)\n"))
2202 ui.status(_("(run 'hg heads' to see all heads)\n"))
2203 return False
2203 return False
2204 msg = _('there is nothing to merge')
2204 msg = _('there is nothing to merge')
2205 if parent != repo.lookup(repo[None].branch()):
2205 if parent != repo.lookup(repo[None].branch()):
2206 msg = _('%s - use "hg update" instead') % msg
2206 msg = _('%s - use "hg update" instead') % msg
2207 raise util.Abort(msg)
2207 raise util.Abort(msg)
2208
2208
2209 if parent not in bheads:
2209 if parent not in bheads:
2210 raise util.Abort(_('working dir not at a head rev - '
2210 raise util.Abort(_('working dir not at a head rev - '
2211 'use "hg update" or merge with an explicit rev'))
2211 'use "hg update" or merge with an explicit rev'))
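# merge with the other head of the branch: the last head if we are sitting on bheads[0], otherwise the first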
2212 node = parent == bheads[0] and bheads[-1] or bheads[0]
2212 node = parent == bheads[0] and bheads[-1] or bheads[0]
2213
2213
2214 if opts.get('preview'):
2214 if opts.get('preview'):
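# --preview: list the changesets that would be brought in by the merge, then return without touching the working directory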
2215 p1 = repo['.']
2215 p1 = repo['.']
2216 p2 = repo[node]
2216 p2 = repo[node]
2217 common = p1.ancestor(p2)
2217 common = p1.ancestor(p2)
2218 roots, heads = [common.node()], [p2.node()]
2218 roots, heads = [common.node()], [p2.node()]
2219 displayer = cmdutil.show_changeset(ui, repo, opts)
2219 displayer = cmdutil.show_changeset(ui, repo, opts)
2220 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2220 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2221 if node not in roots:
2221 if node not in roots:
2222 displayer.show(repo[node])
2222 displayer.show(repo[node])
2223 displayer.close()
2223 displayer.close()
2224 return 0
2224 return 0
2225
2225
2226 return hg.merge(repo, node, force=opts.get('force'))
2226 return hg.merge(repo, node, force=opts.get('force'))
2227
2227
2228 def outgoing(ui, repo, dest=None, **opts):
2228 def outgoing(ui, repo, dest=None, **opts):
2229 """show changesets not found in the destination
2229 """show changesets not found in the destination
2230
2230
2231 Show changesets not found in the specified destination repository
2231 Show changesets not found in the specified destination repository
2232 or the default push location. These are the changesets that would
2232 or the default push location. These are the changesets that would
2233 be pushed if a push were requested.
2233 be pushed if a push were requested.
2234
2234
2235 See pull for details of valid destination formats.
2235 See pull for details of valid destination formats.
2236 """
2236 """
2237 limit = cmdutil.loglimit(opts)
2237 limit = cmdutil.loglimit(opts)
2238 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2238 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2239 dest, branches = hg.parseurl(dest, opts.get('branch'))
2239 dest, branches = hg.parseurl(dest, opts.get('branch'))
2240 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2240 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2241 if revs:
2241 if revs:
2242 revs = [repo.lookup(rev) for rev in revs]
2242 revs = [repo.lookup(rev) for rev in revs]
2243
2243
2244 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2244 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2245 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2245 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2246 o = repo.findoutgoing(other, force=opts.get('force'))
2246 o = repo.findoutgoing(other, force=opts.get('force'))
2247 if not o:
2247 if not o:
2248 ui.status(_("no changes found\n"))
2248 ui.status(_("no changes found\n"))
2249 return 1
2249 return 1
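# expand the outgoing roots into the full list of outgoing changesets, limited to the requested revisions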
2250 o = repo.changelog.nodesbetween(o, revs)[0]
2250 o = repo.changelog.nodesbetween(o, revs)[0]
2251 if opts.get('newest_first'):
2251 if opts.get('newest_first'):
2252 o.reverse()
2252 o.reverse()
2253 displayer = cmdutil.show_changeset(ui, repo, opts)
2253 displayer = cmdutil.show_changeset(ui, repo, opts)
2254 count = 0
2254 count = 0
2255 for n in o:
2255 for n in o:
2256 if limit is not None and count >= limit:
2256 if limit is not None and count >= limit:
2257 break
2257 break
2258 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2258 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2259 if opts.get('no_merges') and len(parents) == 2:
2259 if opts.get('no_merges') and len(parents) == 2:
2260 continue
2260 continue
2261 count += 1
2261 count += 1
2262 displayer.show(repo[n])
2262 displayer.show(repo[n])
2263 displayer.close()
2263 displayer.close()
2264
2264
2265 def parents(ui, repo, file_=None, **opts):
2265 def parents(ui, repo, file_=None, **opts):
2266 """show the parents of the working directory or revision
2266 """show the parents of the working directory or revision
2267
2267
2268 Print the working directory's parent revisions. If a revision is
2268 Print the working directory's parent revisions. If a revision is
2269 given via -r/--rev, the parent of that revision will be printed.
2269 given via -r/--rev, the parent of that revision will be printed.
2270 If a file argument is given, the revision in which the file was
2270 If a file argument is given, the revision in which the file was
2271 last changed (before the working directory revision or the
2271 last changed (before the working directory revision or the
2272 argument to --rev if given) is printed.
2272 argument to --rev if given) is printed.
2273 """
2273 """
2274 rev = opts.get('rev')
2274 rev = opts.get('rev')
2275 if rev:
2275 if rev:
2276 ctx = repo[rev]
2276 ctx = repo[rev]
2277 else:
2277 else:
2278 ctx = repo[None]
2278 ctx = repo[None]
2279
2279
2280 if file_:
2280 if file_:
2281 m = cmdutil.match(repo, (file_,), opts)
2281 m = cmdutil.match(repo, (file_,), opts)
2282 if m.anypats() or len(m.files()) != 1:
2282 if m.anypats() or len(m.files()) != 1:
2283 raise util.Abort(_('can only specify an explicit filename'))
2283 raise util.Abort(_('can only specify an explicit filename'))
2284 file_ = m.files()[0]
2284 file_ = m.files()[0]
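# look up the file's node in each parent, then map it back (via linkrev below) to the changeset that last touched it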
2285 filenodes = []
2285 filenodes = []
2286 for cp in ctx.parents():
2286 for cp in ctx.parents():
2287 if not cp:
2287 if not cp:
2288 continue
2288 continue
2289 try:
2289 try:
2290 filenodes.append(cp.filenode(file_))
2290 filenodes.append(cp.filenode(file_))
2291 except error.LookupError:
2291 except error.LookupError:
2292 pass
2292 pass
2293 if not filenodes:
2293 if not filenodes:
2294 raise util.Abort(_("'%s' not found in manifest!") % file_)
2294 raise util.Abort(_("'%s' not found in manifest!") % file_)
2295 fl = repo.file(file_)
2295 fl = repo.file(file_)
2296 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2296 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2297 else:
2297 else:
2298 p = [cp.node() for cp in ctx.parents()]
2298 p = [cp.node() for cp in ctx.parents()]
2299
2299
2300 displayer = cmdutil.show_changeset(ui, repo, opts)
2300 displayer = cmdutil.show_changeset(ui, repo, opts)
2301 for n in p:
2301 for n in p:
2302 if n != nullid:
2302 if n != nullid:
2303 displayer.show(repo[n])
2303 displayer.show(repo[n])
2304 displayer.close()
2304 displayer.close()
2305
2305
2306 def paths(ui, repo, search=None):
2306 def paths(ui, repo, search=None):
2307 """show aliases for remote repositories
2307 """show aliases for remote repositories
2308
2308
2309 Show definition of symbolic path name NAME. If no name is given,
2309 Show definition of symbolic path name NAME. If no name is given,
2310 show definition of all available names.
2310 show definition of all available names.
2311
2311
2312 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2312 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2313 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2313 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2314
2314
2315 See 'hg help urls' for more information.
2315 See 'hg help urls' for more information.
2316 """
2316 """
2317 if search:
2317 if search:
2318 for name, path in ui.configitems("paths"):
2318 for name, path in ui.configitems("paths"):
2319 if name == search:
2319 if name == search:
2320 ui.write("%s\n" % url.hidepassword(path))
2320 ui.write("%s\n" % url.hidepassword(path))
2321 return
2321 return
2322 ui.warn(_("not found!\n"))
2322 ui.warn(_("not found!\n"))
2323 return 1
2323 return 1
2324 else:
2324 else:
2325 for name, path in ui.configitems("paths"):
2325 for name, path in ui.configitems("paths"):
2326 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2326 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2327
2327
2328 def postincoming(ui, repo, modheads, optupdate, checkout):
2328 def postincoming(ui, repo, modheads, optupdate, checkout):
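# after a pull or unbundle: update the working copy only when requested and unambiguous, otherwise print a hint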
2329 if modheads == 0:
2329 if modheads == 0:
2330 return
2330 return
2331 if optupdate:
2331 if optupdate:
2332 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2332 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2333 return hg.update(repo, checkout)
2333 return hg.update(repo, checkout)
2334 else:
2334 else:
2335 ui.status(_("not updating, since new heads added\n"))
2335 ui.status(_("not updating, since new heads added\n"))
2336 if modheads > 1:
2336 if modheads > 1:
2337 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2337 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2338 else:
2338 else:
2339 ui.status(_("(run 'hg update' to get a working copy)\n"))
2339 ui.status(_("(run 'hg update' to get a working copy)\n"))
2340
2340
2341 def pull(ui, repo, source="default", **opts):
2341 def pull(ui, repo, source="default", **opts):
2342 """pull changes from the specified source
2342 """pull changes from the specified source
2343
2343
2344 Pull changes from a remote repository to a local one.
2344 Pull changes from a remote repository to a local one.
2345
2345
2346 This finds all changes from the repository at the specified path
2346 This finds all changes from the repository at the specified path
2347 or URL and adds them to a local repository (the current one unless
2347 or URL and adds them to a local repository (the current one unless
2348 -R is specified). By default, this does not update the copy of the
2348 -R is specified). By default, this does not update the copy of the
2349 project in the working directory.
2349 project in the working directory.
2350
2350
2351 Use hg incoming if you want to see what would have been added by a
2351 Use hg incoming if you want to see what would have been added by a
2352 pull at the time you issued this command. If you then decide to
2352 pull at the time you issued this command. If you then decide to
2353 add those changes to the repository, you should use pull -r X
2353 add those changes to the repository, you should use pull -r X
2354 where X is the last changeset listed by hg incoming.
2354 where X is the last changeset listed by hg incoming.
2355
2355
2356 If SOURCE is omitted, the 'default' path will be used.
2356 If SOURCE is omitted, the 'default' path will be used.
2357 See 'hg help urls' for more information.
2357 See 'hg help urls' for more information.
2358 """
2358 """
2359 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2359 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2360 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2360 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2361 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2361 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2362 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2362 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2363 if revs:
2363 if revs:
2364 try:
2364 try:
2365 revs = [other.lookup(rev) for rev in revs]
2365 revs = [other.lookup(rev) for rev in revs]
2366 except error.CapabilityError:
2366 except error.CapabilityError:
2367 err = _("Other repository doesn't support revision lookup, "
2367 err = _("Other repository doesn't support revision lookup, "
2368 "so a rev cannot be specified.")
2368 "so a rev cannot be specified.")
2369 raise util.Abort(err)
2369 raise util.Abort(err)
2370
2370
2371 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2371 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
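# convert the branch head chosen for checkout into a local revision number for the post-pull update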
2372 if checkout:
2372 if checkout:
2373 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2373 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2374 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2374 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2375
2375
2376 def push(ui, repo, dest=None, **opts):
2376 def push(ui, repo, dest=None, **opts):
2377 """push changes to the specified destination
2377 """push changes to the specified destination
2378
2378
2379 Push changes from the local repository to the specified destination.
2379 Push changes from the local repository to the specified destination.
2380
2380
2381 This is the symmetrical operation for pull. It moves changes from
2381 This is the symmetrical operation for pull. It moves changes from
2382 the current repository to a different one. If the destination is
2382 the current repository to a different one. If the destination is
2383 local this is identical to a pull in that directory from the
2383 local this is identical to a pull in that directory from the
2384 current one.
2384 current one.
2385
2385
2386 By default, push will refuse to run if it detects the result would
2386 By default, push will refuse to run if it detects the result would
2387 increase the number of remote heads. This generally indicates the
2387 increase the number of remote heads. This generally indicates the
2388 user forgot to pull and merge before pushing.
2388 user forgot to pull and merge before pushing.
2389
2389
2390 If -r/--rev is used, the named revision and all its ancestors will
2390 If -r/--rev is used, the named revision and all its ancestors will
2391 be pushed to the remote repository.
2391 be pushed to the remote repository.
2392
2392
2393 Please see 'hg help urls' for important details about ``ssh://``
2393 Please see 'hg help urls' for important details about ``ssh://``
2394 URLs. If DESTINATION is omitted, a default path will be used.
2394 URLs. If DESTINATION is omitted, a default path will be used.
2395 """
2395 """
2396 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2396 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2397 dest, branches = hg.parseurl(dest, opts.get('branch'))
2397 dest, branches = hg.parseurl(dest, opts.get('branch'))
2398 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2398 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2399 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2399 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2400 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2400 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2401 if revs:
2401 if revs:
2402 revs = [repo.lookup(rev) for rev in revs]
2402 revs = [repo.lookup(rev) for rev in revs]
2403
2403
2404 # push subrepos depth-first for coherent ordering
2404 # push subrepos depth-first for coherent ordering
2405 c = repo['']
2405 c = repo['']
2406 subs = c.substate # only repos that are committed
2406 subs = c.substate # only repos that are committed
2407 for s in sorted(subs):
2407 for s in sorted(subs):
2408 c.sub(s).push(opts.get('force'))
2408 c.sub(s).push(opts.get('force'))
2409
2409
2410 r = repo.push(other, opts.get('force'), revs=revs)
2410 r = repo.push(other, opts.get('force'), revs=revs)
2411 return r == 0
2411 return r == 0
2412
2412
2413 def recover(ui, repo):
2413 def recover(ui, repo):
2414 """roll back an interrupted transaction
2414 """roll back an interrupted transaction
2415
2415
2416 Recover from an interrupted commit or pull.
2416 Recover from an interrupted commit or pull.
2417
2417
2418 This command tries to fix the repository status after an
2418 This command tries to fix the repository status after an
2419 interrupted operation. It should only be necessary when Mercurial
2419 interrupted operation. It should only be necessary when Mercurial
2420 suggests it.
2420 suggests it.
2421 """
2421 """
2422 if repo.recover():
2422 if repo.recover():
2423 return hg.verify(repo)
2423 return hg.verify(repo)
2424 return 1
2424 return 1
2425
2425
2426 def remove(ui, repo, *pats, **opts):
2426 def remove(ui, repo, *pats, **opts):
2427 """remove the specified files on the next commit
2427 """remove the specified files on the next commit
2428
2428
2429 Schedule the indicated files for removal from the repository.
2429 Schedule the indicated files for removal from the repository.
2430
2430
2431 This only removes files from the current branch, not from the
2431 This only removes files from the current branch, not from the
2432 entire project history. -A/--after can be used to remove only
2432 entire project history. -A/--after can be used to remove only
2433 files that have already been deleted, -f/--force can be used to
2433 files that have already been deleted, -f/--force can be used to
2434 force deletion, and -Af can be used to remove files from the next
2434 force deletion, and -Af can be used to remove files from the next
2435 revision without deleting them from the working directory.
2435 revision without deleting them from the working directory.
2436
2436
2437 The following table details the behavior of remove for different
2437 The following table details the behavior of remove for different
2438 file states (columns) and option combinations (rows). The file
2438 file states (columns) and option combinations (rows). The file
2439 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2439 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2440 reported by hg status). The actions are Warn, Remove (from branch)
2440 reported by hg status). The actions are Warn, Remove (from branch)
2441 and Delete (from disk)::
2441 and Delete (from disk)::
2442
2442
2443 A C M !
2443 A C M !
2444 none W RD W R
2444 none W RD W R
2445 -f R RD RD R
2445 -f R RD RD R
2446 -A W W W R
2446 -A W W W R
2447 -Af R R R R
2447 -Af R R R R
2448
2448
2449 This command schedules the files to be removed at the next commit.
2449 This command schedules the files to be removed at the next commit.
2450 To undo a remove before that, see hg revert.
2450 To undo a remove before that, see hg revert.
2451 """
2451 """
2452
2452
2453 after, force = opts.get('after'), opts.get('force')
2453 after, force = opts.get('after'), opts.get('force')
2454 if not pats and not after:
2454 if not pats and not after:
2455 raise util.Abort(_('no files specified'))
2455 raise util.Abort(_('no files specified'))
2456
2456
2457 m = cmdutil.match(repo, pats, opts)
2457 m = cmdutil.match(repo, pats, opts)
2458 s = repo.status(match=m, clean=True)
2458 s = repo.status(match=m, clean=True)
2459 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2459 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2460
2460
2461 for f in m.files():
2461 for f in m.files():
2462 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2462 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2463 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2463 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2464
2464
2465 def warn(files, reason):
2465 def warn(files, reason):
2466 for f in files:
2466 for f in files:
2467 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2467 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2468 % (m.rel(f), reason))
2468 % (m.rel(f), reason))
2469
2469
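# apply the behaviour table from the docstring: decide which files are removed from the branch and which are merely forgotten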
2470 if force:
2470 if force:
2471 remove, forget = modified + deleted + clean, added
2471 remove, forget = modified + deleted + clean, added
2472 elif after:
2472 elif after:
2473 remove, forget = deleted, []
2473 remove, forget = deleted, []
2474 warn(modified + added + clean, _('still exists'))
2474 warn(modified + added + clean, _('still exists'))
2475 else:
2475 else:
2476 remove, forget = deleted + clean, []
2476 remove, forget = deleted + clean, []
2477 warn(modified, _('is modified'))
2477 warn(modified, _('is modified'))
2478 warn(added, _('has been marked for add'))
2478 warn(added, _('has been marked for add'))
2479
2479
2480 for f in sorted(remove + forget):
2480 for f in sorted(remove + forget):
2481 if ui.verbose or not m.exact(f):
2481 if ui.verbose or not m.exact(f):
2482 ui.status(_('removing %s\n') % m.rel(f))
2482 ui.status(_('removing %s\n') % m.rel(f))
2483
2483
2484 repo.forget(forget)
2484 repo.forget(forget)
2485 repo.remove(remove, unlink=not after)
2485 repo.remove(remove, unlink=not after)
2486
2486
2487 def rename(ui, repo, *pats, **opts):
2487 def rename(ui, repo, *pats, **opts):
2488 """rename files; equivalent of copy + remove
2488 """rename files; equivalent of copy + remove
2489
2489
2490 Mark dest as copies of sources; mark sources for deletion. If dest
2490 Mark dest as copies of sources; mark sources for deletion. If dest
2491 is a directory, copies are put in that directory. If dest is a
2491 is a directory, copies are put in that directory. If dest is a
2492 file, there can only be one source.
2492 file, there can only be one source.
2493
2493
2494 By default, this command copies the contents of files as they
2494 By default, this command copies the contents of files as they
2495 exist in the working directory. If invoked with -A/--after, the
2495 exist in the working directory. If invoked with -A/--after, the
2496 operation is recorded, but no copying is performed.
2496 operation is recorded, but no copying is performed.
2497
2497
2498 This command takes effect at the next commit. To undo a rename
2498 This command takes effect at the next commit. To undo a rename
2499 before that, see hg revert.
2499 before that, see hg revert.
2500 """
2500 """
2501 wlock = repo.wlock(False)
2501 wlock = repo.wlock(False)
2502 try:
2502 try:
2503 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2503 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2504 finally:
2504 finally:
2505 wlock.release()
2505 wlock.release()
2506
2506
2507 def resolve(ui, repo, *pats, **opts):
2507 def resolve(ui, repo, *pats, **opts):
2508 """retry file merges from a merge or update
2508 """retry file merges from a merge or update
2509
2509
2510 This command can cleanly retry unresolved file merges using file
2510 This command can cleanly retry unresolved file merges using file
2511 revisions preserved from the last update or merge.
2511 revisions preserved from the last update or merge.
2512
2512
2513 If a conflict is resolved manually, please note that the changes
2513 If a conflict is resolved manually, please note that the changes
2514 will be overwritten if the merge is retried with resolve. The
2514 will be overwritten if the merge is retried with resolve. The
2515 -m/--mark switch should be used to mark the file as resolved.
2515 -m/--mark switch should be used to mark the file as resolved.
2516
2516
2517 You can specify a set of files to operate on, or use the -a/--all
2517 You can specify a set of files to operate on, or use the -a/--all
2518 switch to select all unresolved files.
2518 switch to select all unresolved files.
2519
2519
2520 This command also allows listing resolved files and manually
2520 This command also allows listing resolved files and manually
2521 indicating whether or not files are resolved. All files must be
2521 indicating whether or not files are resolved. All files must be
2522 marked as resolved before a commit is permitted.
2522 marked as resolved before a commit is permitted.
2523
2523
2524 The codes used to show the status of files are::
2524 The codes used to show the status of files are::
2525
2525
2526 U = unresolved
2526 U = unresolved
2527 R = resolved
2527 R = resolved
2528 """
2528 """
2529
2529
2530 all, mark, unmark, show, nostatus = \
2530 all, mark, unmark, show, nostatus = \
2531 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2531 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2532
2532
2533 if (show and (mark or unmark)) or (mark and unmark):
2533 if (show and (mark or unmark)) or (mark and unmark):
2534 raise util.Abort(_("too many options specified"))
2534 raise util.Abort(_("too many options specified"))
2535 if pats and all:
2535 if pats and all:
2536 raise util.Abort(_("can't specify --all and patterns"))
2536 raise util.Abort(_("can't specify --all and patterns"))
2537 if not (all or pats or show or mark or unmark):
2537 if not (all or pats or show or mark or unmark):
2538 raise util.Abort(_('no files or directories specified; '
2538 raise util.Abort(_('no files or directories specified; '
2539 'use --all to remerge all files'))
2539 'use --all to remerge all files'))
2540
2540
2541 ms = merge_.mergestate(repo)
2541 ms = merge_.mergestate(repo)
2542 m = cmdutil.match(repo, pats, opts)
2542 m = cmdutil.match(repo, pats, opts)
2543
2543
2544 for f in ms:
2544 for f in ms:
2545 if m(f):
2545 if m(f):
2546 if show:
2546 if show:
2547 if nostatus:
2547 if nostatus:
2548 ui.write("%s\n" % f)
2548 ui.write("%s\n" % f)
2549 else:
2549 else:
2550 ui.write("%s %s\n" % (ms[f].upper(), f))
2550 ui.write("%s %s\n" % (ms[f].upper(), f))
2551 elif mark:
2551 elif mark:
2552 ms.mark(f, "r")
2552 ms.mark(f, "r")
2553 elif unmark:
2553 elif unmark:
2554 ms.mark(f, "u")
2554 ms.mark(f, "u")
2555 else:
2555 else:
2556 wctx = repo[None]
2556 wctx = repo[None]
2557 mctx = wctx.parents()[-1]
2557 mctx = wctx.parents()[-1]
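# the 'other' side of the re-merge: the second parent during a merge, else the sole parent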
2558
2558
2559 # backup pre-resolve (merge uses .orig for its own purposes)
2559 # backup pre-resolve (merge uses .orig for its own purposes)
2560 a = repo.wjoin(f)
2560 a = repo.wjoin(f)
2561 util.copyfile(a, a + ".resolve")
2561 util.copyfile(a, a + ".resolve")
2562
2562
2563 # resolve file
2563 # resolve file
2564 ms.resolve(f, wctx, mctx)
2564 ms.resolve(f, wctx, mctx)
2565
2565
2566 # replace filemerge's .orig file with our resolve file
2566 # replace filemerge's .orig file with our resolve file
2567 util.rename(a + ".resolve", a + ".orig")
2567 util.rename(a + ".resolve", a + ".orig")
2568
2568
2569 def revert(ui, repo, *pats, **opts):
2569 def revert(ui, repo, *pats, **opts):
2570 """restore individual files or directories to an earlier state
2570 """restore individual files or directories to an earlier state
2571
2571
2572 (Use update -r to check out earlier revisions; revert does not
2572 (Use update -r to check out earlier revisions; revert does not
2573 change the working directory parents.)
2573 change the working directory parents.)
2574
2574
2575 With no revision specified, revert the named files or directories
2575 With no revision specified, revert the named files or directories
2576 to the contents they had in the parent of the working directory.
2576 to the contents they had in the parent of the working directory.
2577 This restores the contents of the affected files to an unmodified
2577 This restores the contents of the affected files to an unmodified
2578 state and unschedules adds, removes, copies, and renames. If the
2578 state and unschedules adds, removes, copies, and renames. If the
2579 working directory has two parents, you must explicitly specify a
2579 working directory has two parents, you must explicitly specify a
2580 revision.
2580 revision.
2581
2581
2582 Using the -r/--rev option, revert the given files or directories
2582 Using the -r/--rev option, revert the given files or directories
2583 to their contents as of a specific revision. This can be helpful
2583 to their contents as of a specific revision. This can be helpful
2584 to "roll back" some or all of an earlier change. See 'hg help
2584 to "roll back" some or all of an earlier change. See 'hg help
2585 dates' for a list of formats valid for -d/--date.
2585 dates' for a list of formats valid for -d/--date.
2586
2586
2587 Revert modifies the working directory. It does not commit any
2587 Revert modifies the working directory. It does not commit any
2588 changes, or change the parent of the working directory. If you
2588 changes, or change the parent of the working directory. If you
2589 revert to a revision other than the parent of the working
2589 revert to a revision other than the parent of the working
2590 directory, the reverted files will thus appear modified
2590 directory, the reverted files will thus appear modified
2591 afterwards.
2591 afterwards.
2592
2592
2593 If a file has been deleted, it is restored. If the executable mode
2593 If a file has been deleted, it is restored. If the executable mode
2594 of a file was changed, it is reset.
2594 of a file was changed, it is reset.
2595
2595
2596 If names are given, all files matching the names are reverted.
2596 If names are given, all files matching the names are reverted.
2597 If no arguments are given, no files are reverted.
2597 If no arguments are given, no files are reverted.
2598
2598
2599 Modified files are saved with a .orig suffix before reverting.
2599 Modified files are saved with a .orig suffix before reverting.
2600 To disable these backups, use --no-backup.
2600 To disable these backups, use --no-backup.
2601 """
2601 """
2602
2602
2603 if opts["date"]:
2603 if opts["date"]:
2604 if opts["rev"]:
2604 if opts["rev"]:
2605 raise util.Abort(_("you can't specify a revision and a date"))
2605 raise util.Abort(_("you can't specify a revision and a date"))
2606 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2606 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2607
2607
2608 if not pats and not opts.get('all'):
2608 if not pats and not opts.get('all'):
2609 raise util.Abort(_('no files or directories specified; '
2609 raise util.Abort(_('no files or directories specified; '
2610 'use --all to revert the whole repo'))
2610 'use --all to revert the whole repo'))
2611
2611
2612 parent, p2 = repo.dirstate.parents()
2612 parent, p2 = repo.dirstate.parents()
2613 if not opts.get('rev') and p2 != nullid:
2613 if not opts.get('rev') and p2 != nullid:
2614 raise util.Abort(_('uncommitted merge - please provide a '
2614 raise util.Abort(_('uncommitted merge - please provide a '
2615 'specific revision'))
2615 'specific revision'))
2616 ctx = repo[opts.get('rev')]
2616 ctx = repo[opts.get('rev')]
2617 node = ctx.node()
2617 node = ctx.node()
2618 mf = ctx.manifest()
2618 mf = ctx.manifest()
2619 if node == parent:
2619 if node == parent:
2620 pmf = mf
2620 pmf = mf
2621 else:
2621 else:
2622 pmf = None
2622 pmf = None
2623
2623
2624 # need all matching names in dirstate and manifest of target rev,
2624 # need all matching names in dirstate and manifest of target rev,
2625 # so have to walk both. do not print errors if files exist in one
2625 # so have to walk both. do not print errors if files exist in one
2626 # but not other.
2626 # but not other.
2627
2627
2628 names = {}
2628 names = {}
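# names maps abs path -> (relative path, exact match?), collected from both the dirstate and the target manifest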
2629
2629
2630 wlock = repo.wlock()
2630 wlock = repo.wlock()
2631 try:
2631 try:
2632 # walk dirstate.
2632 # walk dirstate.
2633
2633
2634 m = cmdutil.match(repo, pats, opts)
2634 m = cmdutil.match(repo, pats, opts)
2635 m.bad = lambda x, y: False
2635 m.bad = lambda x, y: False
2636 for abs in repo.walk(m):
2636 for abs in repo.walk(m):
2637 names[abs] = m.rel(abs), m.exact(abs)
2637 names[abs] = m.rel(abs), m.exact(abs)
2638
2638
2639 # walk target manifest.
2639 # walk target manifest.
2640
2640
2641 def badfn(path, msg):
2641 def badfn(path, msg):
2642 if path in names:
2642 if path in names:
2643 return
2643 return
2644 path_ = path + '/'
2644 path_ = path + '/'
2645 for f in names:
2645 for f in names:
2646 if f.startswith(path_):
2646 if f.startswith(path_):
2647 return
2647 return
2648 ui.warn("%s: %s\n" % (m.rel(path), msg))
2648 ui.warn("%s: %s\n" % (m.rel(path), msg))
2649
2649
2650 m = cmdutil.match(repo, pats, opts)
2650 m = cmdutil.match(repo, pats, opts)
2651 m.bad = badfn
2651 m.bad = badfn
2652 for abs in repo[node].walk(m):
2652 for abs in repo[node].walk(m):
2653 if abs not in names:
2653 if abs not in names:
2654 names[abs] = m.rel(abs), m.exact(abs)
2654 names[abs] = m.rel(abs), m.exact(abs)
2655
2655
2656 m = cmdutil.matchfiles(repo, names)
2656 m = cmdutil.matchfiles(repo, names)
2657 changes = repo.status(match=m)[:4]
2657 changes = repo.status(match=m)[:4]
2658 modified, added, removed, deleted = map(set, changes)
2658 modified, added, removed, deleted = map(set, changes)
2659
2659
2660 # if f is a rename, also revert the source
2660 # if f is a rename, also revert the source
2661 cwd = repo.getcwd()
2661 cwd = repo.getcwd()
2662 for f in added:
2662 for f in added:
2663 src = repo.dirstate.copied(f)
2663 src = repo.dirstate.copied(f)
2664 if src and src not in names and repo.dirstate[src] == 'r':
2664 if src and src not in names and repo.dirstate[src] == 'r':
2665 removed.add(src)
2665 removed.add(src)
2666 names[src] = (repo.pathto(src, cwd), True)
2666 names[src] = (repo.pathto(src, cwd), True)
2667
2667
2668 def removeforget(abs):
2668 def removeforget(abs):
2669 if repo.dirstate[abs] == 'a':
2669 if repo.dirstate[abs] == 'a':
2670 return _('forgetting %s\n')
2670 return _('forgetting %s\n')
2671 return _('removing %s\n')
2671 return _('removing %s\n')
2672
2672
2673 revert = ([], _('reverting %s\n'))
2673 revert = ([], _('reverting %s\n'))
2674 add = ([], _('adding %s\n'))
2674 add = ([], _('adding %s\n'))
2675 remove = ([], removeforget)
2675 remove = ([], removeforget)
2676 undelete = ([], _('undeleting %s\n'))
2676 undelete = ([], _('undeleting %s\n'))
2677
2677
2678 disptable = (
2678 disptable = (
2679 # dispatch table:
2679 # dispatch table:
2680 # file state
2680 # file state
2681 # action if in target manifest
2681 # action if in target manifest
2682 # action if not in target manifest
2682 # action if not in target manifest
2683 # make backup if in target manifest
2683 # make backup if in target manifest
2684 # make backup if not in target manifest
2684 # make backup if not in target manifest
2685 (modified, revert, remove, True, True),
2685 (modified, revert, remove, True, True),
2686 (added, revert, remove, True, False),
2686 (added, revert, remove, True, False),
2687 (removed, undelete, None, False, False),
2687 (removed, undelete, None, False, False),
2688 (deleted, revert, remove, False, False),
2688 (deleted, revert, remove, False, False),
2689 )
2689 )
2690
2690
2691 for abs, (rel, exact) in sorted(names.items()):
2691 for abs, (rel, exact) in sorted(names.items()):
2692 mfentry = mf.get(abs)
2692 mfentry = mf.get(abs)
2693 target = repo.wjoin(abs)
2693 target = repo.wjoin(abs)
2694 def handle(xlist, dobackup):
2694 def handle(xlist, dobackup):
2695 xlist[0].append(abs)
2695 xlist[0].append(abs)
2696 if dobackup and not opts.get('no_backup') and util.lexists(target):
2696 if dobackup and not opts.get('no_backup') and util.lexists(target):
2697 bakname = "%s.orig" % rel
2697 bakname = "%s.orig" % rel
2698 ui.note(_('saving current version of %s as %s\n') %
2698 ui.note(_('saving current version of %s as %s\n') %
2699 (rel, bakname))
2699 (rel, bakname))
2700 if not opts.get('dry_run'):
2700 if not opts.get('dry_run'):
2701 util.copyfile(target, bakname)
2701 util.copyfile(target, bakname)
2702 if ui.verbose or not exact:
2702 if ui.verbose or not exact:
2703 msg = xlist[1]
2703 msg = xlist[1]
2704 if not isinstance(msg, basestring):
2704 if not isinstance(msg, basestring):
2705 msg = msg(abs)
2705 msg = msg(abs)
2706 ui.status(msg % rel)
2706 ui.status(msg % rel)
2707 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2707 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2708 if abs not in table:
2708 if abs not in table:
2709 continue
2709 continue
2710 # file has changed in dirstate
2710 # file has changed in dirstate
2711 if mfentry:
2711 if mfentry:
2712 handle(hitlist, backuphit)
2712 handle(hitlist, backuphit)
2713 elif misslist is not None:
2713 elif misslist is not None:
2714 handle(misslist, backupmiss)
2714 handle(misslist, backupmiss)
2715 break
2715 break
2716 else:
2716 else:
2717 if abs not in repo.dirstate:
2717 if abs not in repo.dirstate:
2718 if mfentry:
2718 if mfentry:
2719 handle(add, True)
2719 handle(add, True)
2720 elif exact:
2720 elif exact:
2721 ui.warn(_('file not managed: %s\n') % rel)
2721 ui.warn(_('file not managed: %s\n') % rel)
2722 continue
2722 continue
2723 # file has not changed in dirstate
2723 # file has not changed in dirstate
2724 if node == parent:
2724 if node == parent:
2725 if exact:
2725 if exact:
2726 ui.warn(_('no changes needed to %s\n') % rel)
2726 ui.warn(_('no changes needed to %s\n') % rel)
2727 continue
2727 continue
2728 if pmf is None:
2728 if pmf is None:
2729 # only need parent manifest in this unlikely case,
2729 # only need parent manifest in this unlikely case,
2730 # so do not read by default
2730 # so do not read by default
2731 pmf = repo[parent].manifest()
2731 pmf = repo[parent].manifest()
2732 if abs in pmf:
2732 if abs in pmf:
2733 if mfentry:
2733 if mfentry:
2734 # if version of file is same in parent and target
2734 # if version of file is same in parent and target
2735 # manifests, do nothing
2735 # manifests, do nothing
2736 if (pmf[abs] != mfentry or
2736 if (pmf[abs] != mfentry or
2737 pmf.flags(abs) != mf.flags(abs)):
2737 pmf.flags(abs) != mf.flags(abs)):
2738 handle(revert, False)
2738 handle(revert, False)
2739 else:
2739 else:
2740 handle(remove, False)
2740 handle(remove, False)
2741
2741
2742 if not opts.get('dry_run'):
2742 if not opts.get('dry_run'):
2743 def checkout(f):
2743 def checkout(f):
2744 fc = ctx[f]
2744 fc = ctx[f]
2745 repo.wwrite(f, fc.data(), fc.flags())
2745 repo.wwrite(f, fc.data(), fc.flags())
2746
2746
2747 audit_path = util.path_auditor(repo.root)
2747 audit_path = util.path_auditor(repo.root)
2748 for f in remove[0]:
2748 for f in remove[0]:
2749 if repo.dirstate[f] == 'a':
2749 if repo.dirstate[f] == 'a':
2750 repo.dirstate.forget(f)
2750 repo.dirstate.forget(f)
2751 continue
2751 continue
2752 audit_path(f)
2752 audit_path(f)
2753 try:
2753 try:
2754 util.unlink(repo.wjoin(f))
2754 util.unlink(repo.wjoin(f))
2755 except OSError:
2755 except OSError:
2756 pass
2756 pass
2757 repo.dirstate.remove(f)
2757 repo.dirstate.remove(f)
2758
2758
2759 normal = None
2759 normal = None
2760 if node == parent:
2760 if node == parent:
2761 # We're reverting to our parent. If possible, we'd like status
2761 # We're reverting to our parent. If possible, we'd like status
2762 # to report the file as clean. We have to use normallookup for
2762 # to report the file as clean. We have to use normallookup for
2763 # merges to avoid losing information about merged/dirty files.
2763 # merges to avoid losing information about merged/dirty files.
2764 if p2 != nullid:
2764 if p2 != nullid:
2765 normal = repo.dirstate.normallookup
2765 normal = repo.dirstate.normallookup
2766 else:
2766 else:
2767 normal = repo.dirstate.normal
2767 normal = repo.dirstate.normal
2768 for f in revert[0]:
2768 for f in revert[0]:
2769 checkout(f)
2769 checkout(f)
2770 if normal:
2770 if normal:
2771 normal(f)
2771 normal(f)
2772
2772
2773 for f in add[0]:
2773 for f in add[0]:
2774 checkout(f)
2774 checkout(f)
2775 repo.dirstate.add(f)
2775 repo.dirstate.add(f)
2776
2776
2777 normal = repo.dirstate.normallookup
2777 normal = repo.dirstate.normallookup
2778 if node == parent and p2 == nullid:
2778 if node == parent and p2 == nullid:
2779 normal = repo.dirstate.normal
2779 normal = repo.dirstate.normal
2780 for f in undelete[0]:
2780 for f in undelete[0]:
2781 checkout(f)
2781 checkout(f)
2782 normal(f)
2782 normal(f)
2783
2783
2784 finally:
2784 finally:
2785 wlock.release()
2785 wlock.release()
2786
2786
2787 def rollback(ui, repo):
2787 def rollback(ui, repo):
2788 """roll back the last transaction
2788 """roll back the last transaction
2789
2789
2790 This command should be used with care. There is only one level of
2790 This command should be used with care. There is only one level of
2791 rollback, and there is no way to undo a rollback. It will also
2791 rollback, and there is no way to undo a rollback. It will also
2792 restore the dirstate at the time of the last transaction, losing
2792 restore the dirstate at the time of the last transaction, losing
2793 any dirstate changes since that time. This command does not alter
2793 any dirstate changes since that time. This command does not alter
2794 the working directory.
2794 the working directory.
2795
2795
2796 Transactions are used to encapsulate the effects of all commands
2796 Transactions are used to encapsulate the effects of all commands
2797 that create new changesets or propagate existing changesets into a
2797 that create new changesets or propagate existing changesets into a
2798 repository. For example, the following commands are transactional,
2798 repository. For example, the following commands are transactional,
2799 and their effects can be rolled back:
2799 and their effects can be rolled back:
2800
2800
2801 - commit
2801 - commit
2802 - import
2802 - import
2803 - pull
2803 - pull
2804 - push (with this repository as the destination)
2804 - push (with this repository as the destination)
2805 - unbundle
2805 - unbundle
2806
2806
2807 This command is not intended for use on public repositories. Once
2807 This command is not intended for use on public repositories. Once
2808 changes are visible for pull by other users, rolling a transaction
2808 changes are visible for pull by other users, rolling a transaction
2809 back locally is ineffective (someone else may already have pulled
2809 back locally is ineffective (someone else may already have pulled
2810 the changes). Furthermore, a race is possible with readers of the
2810 the changes). Furthermore, a race is possible with readers of the
2811 repository; for example an in-progress pull from the repository
2811 repository; for example an in-progress pull from the repository
2812 may fail if a rollback is performed.
2812 may fail if a rollback is performed.
2813 """
2813 """
2814 repo.rollback()
2814 repo.rollback()
2815
2815
2816 def root(ui, repo):
2816 def root(ui, repo):
2817 """print the root (top) of the current working directory
2817 """print the root (top) of the current working directory
2818
2818
2819 Print the root directory of the current repository.
2819 Print the root directory of the current repository.
2820 """
2820 """
2821 ui.write(repo.root + "\n")
2821 ui.write(repo.root + "\n")
2822
2822
2823 def serve(ui, repo, **opts):
2823 def serve(ui, repo, **opts):
2824 """export the repository via HTTP
2824 """export the repository via HTTP
2825
2825
2826 Start a local HTTP repository browser and pull server.
2826 Start a local HTTP repository browser and pull server.
2827
2827
2828 By default, the server logs accesses to stdout and errors to
2828 By default, the server logs accesses to stdout and errors to
2829 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2829 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2830 files.
2830 files.
2831 """
2831 """
2832
2832
2833 if opts["stdio"]:
2833 if opts["stdio"]:
2834 if repo is None:
2834 if repo is None:
2835 raise error.RepoError(_("There is no Mercurial repository here"
2835 raise error.RepoError(_("There is no Mercurial repository here"
2836 " (.hg not found)"))
2836 " (.hg not found)"))
2837 s = sshserver.sshserver(ui, repo)
2837 s = sshserver.sshserver(ui, repo)
2838 s.serve_forever()
2838 s.serve_forever()
2839
2839
2840 baseui = repo and repo.baseui or ui
2840 baseui = repo and repo.baseui or ui
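# mirror the relevant command-line options into the [web] configuration so the server code picks them up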
2841 optlist = ("name templates style address port prefix ipv6"
2841 optlist = ("name templates style address port prefix ipv6"
2842 " accesslog errorlog webdir_conf certificate encoding")
2842 " accesslog errorlog webdir_conf certificate encoding")
2843 for o in optlist.split():
2843 for o in optlist.split():
2844 if opts.get(o, None):
2844 if opts.get(o, None):
2845 baseui.setconfig("web", o, str(opts[o]))
2845 baseui.setconfig("web", o, str(opts[o]))
2846 if (repo is not None) and (repo.ui != baseui):
2846 if (repo is not None) and (repo.ui != baseui):
2847 repo.ui.setconfig("web", o, str(opts[o]))
2847 repo.ui.setconfig("web", o, str(opts[o]))
2848
2848
2849 if repo is None and not ui.config("web", "webdir_conf"):
2849 if repo is None and not ui.config("web", "webdir_conf"):
2850 raise error.RepoError(_("There is no Mercurial repository here"
2850 raise error.RepoError(_("There is no Mercurial repository here"
2851 " (.hg not found)"))
2851 " (.hg not found)"))
2852
2852
2853 class service(object):
2853 class service(object):
2854 def init(self):
2854 def init(self):
2855 util.set_signal_handler()
2855 util.set_signal_handler()
2856 self.httpd = server.create_server(baseui, repo)
2856 self.httpd = server.create_server(baseui, repo)
2857
2857
2858 if not ui.verbose:
2858 if not ui.verbose:
2859 return
2859 return
2860
2860
2861 if self.httpd.prefix:
2861 if self.httpd.prefix:
2862 prefix = self.httpd.prefix.strip('/') + '/'
2862 prefix = self.httpd.prefix.strip('/') + '/'
2863 else:
2863 else:
2864 prefix = ''
2864 prefix = ''
2865
2865
2866 port = ':%d' % self.httpd.port
2866 port = ':%d' % self.httpd.port
2867 if port == ':80':
2867 if port == ':80':
2868 port = ''
2868 port = ''
2869
2869
2870 bindaddr = self.httpd.addr
2870 bindaddr = self.httpd.addr
2871 if bindaddr == '0.0.0.0':
2871 if bindaddr == '0.0.0.0':
2872 bindaddr = '*'
2872 bindaddr = '*'
2873 elif ':' in bindaddr: # IPv6
2873 elif ':' in bindaddr: # IPv6
2874 bindaddr = '[%s]' % bindaddr
2874 bindaddr = '[%s]' % bindaddr
2875
2875
2876 fqaddr = self.httpd.fqaddr
2876 fqaddr = self.httpd.fqaddr
2877 if ':' in fqaddr:
2877 if ':' in fqaddr:
2878 fqaddr = '[%s]' % fqaddr
2878 fqaddr = '[%s]' % fqaddr
2879 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2879 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2880 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2880 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2881
2881
2882 def run(self):
2882 def run(self):
2883 self.httpd.serve_forever()
2883 self.httpd.serve_forever()
2884
2884
2885 service = service()
2885 service = service()
2886
2886
2887 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2887 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2888
2888
2889 def status(ui, repo, *pats, **opts):
2889 def status(ui, repo, *pats, **opts):
2890 """show changed files in the working directory
2890 """show changed files in the working directory
2891
2891
2892 Show status of files in the repository. If names are given, only
2892 Show status of files in the repository. If names are given, only
2893 files that match are shown. Files that are clean or ignored or
2893 files that match are shown. Files that are clean or ignored or
2894 the source of a copy/move operation are not listed unless
2894 the source of a copy/move operation are not listed unless
2895 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2895 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
2896 Unless options described with "show only ..." are given, the
2896 Unless options described with "show only ..." are given, the
2897 options -mardu are used.
2897 options -mardu are used.
2898
2898
2899 Option -q/--quiet hides untracked (unknown and ignored) files
2899 Option -q/--quiet hides untracked (unknown and ignored) files
2900 unless explicitly requested with -u/--unknown or -i/--ignored.
2900 unless explicitly requested with -u/--unknown or -i/--ignored.
2901
2901
2902 NOTE: status may appear to disagree with diff if permissions have
2902 NOTE: status may appear to disagree with diff if permissions have
2903 changed or a merge has occurred. The standard diff format does not
2903 changed or a merge has occurred. The standard diff format does not
2904 report permission changes and diff only reports changes relative
2904 report permission changes and diff only reports changes relative
2905 to one merge parent.
2905 to one merge parent.
2906
2906
2907 If one revision is given, it is used as the base revision.
2907 If one revision is given, it is used as the base revision.
2908 If two revisions are given, the differences between them are
2908 If two revisions are given, the differences between them are
2909 shown. The --change option can also be used as a shortcut to list
2909 shown. The --change option can also be used as a shortcut to list
2910 the changed files of a revision from its first parent.
2910 the changed files of a revision from its first parent.
2911
2911
2912 The codes used to show the status of files are::
2912 The codes used to show the status of files are::
2913
2913
2914 M = modified
2914 M = modified
2915 A = added
2915 A = added
2916 R = removed
2916 R = removed
2917 C = clean
2917 C = clean
2918 ! = missing (deleted by non-hg command, but still tracked)
2918 ! = missing (deleted by non-hg command, but still tracked)
2919 ? = not tracked
2919 ? = not tracked
2920 I = ignored
2920 I = ignored
2921 = origin of the previous file listed as A (added)
2921 = origin of the previous file listed as A (added)
2922 """
2922 """
2923
2923
2924 revs = opts.get('rev')
2924 revs = opts.get('rev')
2925 change = opts.get('change')
2925 change = opts.get('change')
2926
2926
2927 if revs and change:
2927 if revs and change:
2928 msg = _('cannot specify --rev and --change at the same time')
2928 msg = _('cannot specify --rev and --change at the same time')
2929 raise util.Abort(msg)
2929 raise util.Abort(msg)
2930 elif change:
2930 elif change:
2931 node2 = repo.lookup(change)
2931 node2 = repo.lookup(change)
2932 node1 = repo[node2].parents()[0].node()
2932 node1 = repo[node2].parents()[0].node()
2933 else:
2933 else:
2934 node1, node2 = cmdutil.revpair(repo, revs)
2934 node1, node2 = cmdutil.revpair(repo, revs)
2935
2935
2936 cwd = (pats and repo.getcwd()) or ''
2936 cwd = (pats and repo.getcwd()) or ''
2937 end = opts.get('print0') and '\0' or '\n'
2937 end = opts.get('print0') and '\0' or '\n'
2938 copy = {}
2938 copy = {}
2939 states = 'modified added removed deleted unknown ignored clean'.split()
2939 states = 'modified added removed deleted unknown ignored clean'.split()
2940 show = [k for k in states if opts.get(k)]
2940 show = [k for k in states if opts.get(k)]
2941 if opts.get('all'):
2941 if opts.get('all'):
2942 show += ui.quiet and (states[:4] + ['clean']) or states
2942 show += ui.quiet and (states[:4] + ['clean']) or states
2943 if not show:
2943 if not show:
2944 show = ui.quiet and states[:4] or states[:5]
2944 show = ui.quiet and states[:4] or states[:5]
2945
2945
2946 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2946 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2947 'ignored' in show, 'clean' in show, 'unknown' in show)
2947 'ignored' in show, 'clean' in show, 'unknown' in show)
2948 changestates = zip(states, 'MAR!?IC', stat)
2948 changestates = zip(states, 'MAR!?IC', stat)
2949
2949
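# with --copies/--all, record copy sources so they can be listed under their 'A' (added) entries below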
2950 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2950 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2951 ctxn = repo[nullid]
2951 ctxn = repo[nullid]
2952 ctx1 = repo[node1]
2952 ctx1 = repo[node1]
2953 ctx2 = repo[node2]
2953 ctx2 = repo[node2]
2954 added = stat[1]
2954 added = stat[1]
2955 if node2 is None:
2955 if node2 is None:
2956 added = stat[0] + stat[1] # merged?
2956 added = stat[0] + stat[1] # merged?
2957
2957
2958 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2958 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2959 if k in added:
2959 if k in added:
2960 copy[k] = v
2960 copy[k] = v
2961 elif v in added:
2961 elif v in added:
2962 copy[v] = k
2962 copy[v] = k
2963
2963
2964 for state, char, files in changestates:
2964 for state, char, files in changestates:
2965 if state in show:
2965 if state in show:
2966 format = "%s %%s%s" % (char, end)
2966 format = "%s %%s%s" % (char, end)
2967 if opts.get('no_status'):
2967 if opts.get('no_status'):
2968 format = "%%s%s" % end
2968 format = "%%s%s" % end
2969
2969
2970 for f in files:
2970 for f in files:
2971 ui.write(format % repo.pathto(f, cwd))
2971 ui.write(format % repo.pathto(f, cwd))
2972 if f in copy:
2972 if f in copy:
2973 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2973 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2974
2974
2975 def summary(ui, repo, **opts):
2975 def summary(ui, repo, **opts):
2976 """summarize working directory state
2976 """summarize working directory state
2977
2977
2978 This generates a brief summary of the working directory state,
2978 This generates a brief summary of the working directory state,
2979 including parents, branch, commit status, and available updates.
2979 including parents, branch, commit status, and available updates.
2980
2980
2981 With the --remote option, this will check the default paths for
2981 With the --remote option, this will check the default paths for
2982 incoming and outgoing changes. This can be time-consuming.
2982 incoming and outgoing changes. This can be time-consuming.
2983 """
2983 """
2984
2984
2985 ctx = repo[None]
2985 ctx = repo[None]
2986 parents = ctx.parents()
2986 parents = ctx.parents()
2987 pnode = parents[0].node()
2987 pnode = parents[0].node()
2988 tags = repo.tags()
2988 tags = repo.tags()
2989
2989
2990 for p in parents:
2990 for p in parents:
2991 t = ' '.join([t for t in tags if tags[t] == p.node()])
2991 t = ' '.join([t for t in tags if tags[t] == p.node()])
2992 if p.rev() == -1:
2992 if p.rev() == -1:
2993 if not len(repo):
2993 if not len(repo):
2994 t += _(' (empty repository)')
2994 t += _(' (empty repository)')
2995 else:
2995 else:
2996 t += _(' (no revision checked out)')
2996 t += _(' (no revision checked out)')
2997 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
2997 ui.write(_('parent: %d:%s %s\n') % (p.rev(), str(p), t))
2998 if p.description():
2998 if p.description():
2999 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
2999 ui.status(' ' + p.description().splitlines()[0].strip() + '\n')
3000
3000
3001 branch = ctx.branch()
3001 branch = ctx.branch()
3002 bheads = repo.branchheads(branch)
3002 bheads = repo.branchheads(branch)
3003 m = _('branch: %s\n') % branch
3003 m = _('branch: %s\n') % branch
3004 if branch != 'default':
3004 if branch != 'default':
3005 ui.write(m)
3005 ui.write(m)
3006 else:
3006 else:
3007 ui.status(m)
3007 ui.status(m)
3008
3008
3009 st = list(repo.status(unknown=True))[:6]
3009 st = list(repo.status(unknown=True))[:6]
3010 ms = merge_.mergestate(repo)
3010 ms = merge_.mergestate(repo)
3011 st.append([f for f in ms if ms[f] == 'u'])
3011 st.append([f for f in ms if ms[f] == 'u'])
3012 labels = [_('%d modified'), _('%d added'), _('%d removed'),
3012 labels = [_('%d modified'), _('%d added'), _('%d removed'),
3013 _('%d deleted'), _('%d unknown'), _('%d ignored'),
3013 _('%d deleted'), _('%d unknown'), _('%d ignored'),
3014 _('%d unresolved')]
3014 _('%d unresolved')]
3015 t = []
3015 t = []
3016 for s, l in zip(st, labels):
3016 for s, l in zip(st, labels):
3017 if s:
3017 if s:
3018 t.append(l % len(s))
3018 t.append(l % len(s))
3019
3019
3020 t = ', '.join(t)
3020 t = ', '.join(t)
3021 cleanworkdir = False
3021 cleanworkdir = False
3022
3022
3023 if len(parents) > 1:
3023 if len(parents) > 1:
3024 t += _(' (merge)')
3024 t += _(' (merge)')
3025 elif branch != parents[0].branch():
3025 elif branch != parents[0].branch():
3026 t += _(' (new branch)')
3026 t += _(' (new branch)')
3027 elif (not st[0] and not st[1] and not st[2]):
3027 elif (not st[0] and not st[1] and not st[2]):
3028 t += _(' (clean)')
3028 t += _(' (clean)')
3029 cleanworkdir = True
3029 cleanworkdir = True
3030 elif pnode not in bheads:
3030 elif pnode not in bheads:
3031 t += _(' (new branch head)')
3031 t += _(' (new branch head)')
3032
3032
3033 if cleanworkdir:
3033 if cleanworkdir:
3034 ui.status(_('commit: %s\n') % t.strip())
3034 ui.status(_('commit: %s\n') % t.strip())
3035 else:
3035 else:
3036 ui.write(_('commit: %s\n') % t.strip())
3036 ui.write(_('commit: %s\n') % t.strip())
3037
3037
3038 # all ancestors of branch heads - all ancestors of parent = new csets
3038 # all ancestors of branch heads - all ancestors of parent = new csets
3039 new = [0] * len(repo)
3039 new = [0] * len(repo)
3040 cl = repo.changelog
3040 cl = repo.changelog
3041 for a in [cl.rev(n) for n in bheads]:
3041 for a in [cl.rev(n) for n in bheads]:
3042 new[a] = 1
3042 new[a] = 1
3043 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3043 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3044 new[a] = 1
3044 new[a] = 1
3045 for a in [p.rev() for p in parents]:
3045 for a in [p.rev() for p in parents]:
3046 if a >= 0:
3046 if a >= 0:
3047 new[a] = 0
3047 new[a] = 0
3048 for a in cl.ancestors(*[p.rev() for p in parents]):
3048 for a in cl.ancestors(*[p.rev() for p in parents]):
3049 new[a] = 0
3049 new[a] = 0
3050 new = sum(new)
3050 new = sum(new)
3051
3051
3052 if new == 0:
3052 if new == 0:
3053 ui.status(_('update: (current)\n'))
3053 ui.status(_('update: (current)\n'))
3054 elif pnode not in bheads:
3054 elif pnode not in bheads:
3055 ui.write(_('update: %d new changesets (update)\n') % new)
3055 ui.write(_('update: %d new changesets (update)\n') % new)
3056 else:
3056 else:
3057 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3057 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3058 (new, len(bheads)))
3058 (new, len(bheads)))
3059
3059
3060 if opts.get('remote'):
3060 if opts.get('remote'):
3061 t = []
3061 t = []
3062 source, branches = hg.parseurl(ui.expandpath('default'))
3062 source, branches = hg.parseurl(ui.expandpath('default'))
3063 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3063 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3064 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3064 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3065 ui.debug('comparing with %s\n' % url.hidepassword(source))
3065 ui.debug('comparing with %s\n' % url.hidepassword(source))
3066 repo.ui.pushbuffer()
3066 repo.ui.pushbuffer()
3067 common, incoming, rheads = repo.findcommonincoming(other)
3067 common, incoming, rheads = repo.findcommonincoming(other)
3068 repo.ui.popbuffer()
3068 repo.ui.popbuffer()
3069 if incoming:
3069 if incoming:
3070 t.append(_('1 or more incoming'))
3070 t.append(_('1 or more incoming'))
3071
3071
3072 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3072 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3073 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3073 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3074 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3074 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3075 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3075 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3076 repo.ui.pushbuffer()
3076 repo.ui.pushbuffer()
3077 o = repo.findoutgoing(other)
3077 o = repo.findoutgoing(other)
3078 repo.ui.popbuffer()
3078 repo.ui.popbuffer()
3079 o = repo.changelog.nodesbetween(o, None)[0]
3079 o = repo.changelog.nodesbetween(o, None)[0]
3080 if o:
3080 if o:
3081 t.append(_('%d outgoing') % len(o))
3081 t.append(_('%d outgoing') % len(o))
3082
3082
3083 if t:
3083 if t:
3084 ui.write(_('remote: %s\n') % (', '.join(t)))
3084 ui.write(_('remote: %s\n') % (', '.join(t)))
3085 else:
3085 else:
3086 ui.status(_('remote: (synced)\n'))
3086 ui.status(_('remote: (synced)\n'))
3087
3087
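# Editor's sketch (not part of the changeset): the "new csets" bookkeeping in
# summary() above marks every ancestor of the branch heads and then clears
# every ancestor of the working-directory parents, so the count is in effect
# a set difference.  The same idea with plain sets, reusing the
# cl.ancestors(*revs) calling convention from the function above:
#
#     def newcsets(cl, headnodes, parentrevs):
#         """revisions reachable from headnodes but not from parentrevs"""
#         headrevs = set(cl.rev(n) for n in headnodes)
#         reachable = headrevs | set(cl.ancestors(*headrevs))
#         parentrevs = set(r for r in parentrevs if r >= 0)
#         seen = parentrevs | set(cl.ancestors(*parentrevs))
#         return len(reachable - seen)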
3088 def tag(ui, repo, name1, *names, **opts):
3088 def tag(ui, repo, name1, *names, **opts):
3089 """add one or more tags for the current or given revision
3089 """add one or more tags for the current or given revision
3090
3090
3091 Name a particular revision using <name>.
3091 Name a particular revision using <name>.
3092
3092
3093 Tags are used to name particular revisions of the repository and are
3093 Tags are used to name particular revisions of the repository and are
3094 very useful to compare different revisions, to go back to significant
3094 very useful to compare different revisions, to go back to significant
3095 earlier versions or to mark branch points as releases, etc.
3095 earlier versions or to mark branch points as releases, etc.
3096
3096
3097 If no revision is given, the parent of the working directory is
3097 If no revision is given, the parent of the working directory is
3098 used, or tip if no revision is checked out.
3098 used, or tip if no revision is checked out.
3099
3099
3100 To facilitate version control, distribution, and merging of tags,
3100 To facilitate version control, distribution, and merging of tags,
3101 they are stored as a file named ".hgtags" which is managed
3101 they are stored as a file named ".hgtags" which is managed
3102 similarly to other project files and can be hand-edited if
3102 similarly to other project files and can be hand-edited if
3103 necessary. The file '.hg/localtags' is used for local tags (not
3103 necessary. The file '.hg/localtags' is used for local tags (not
3104 shared among repositories).
3104 shared among repositories).
3105
3105
3106 See 'hg help dates' for a list of formats valid for -d/--date.
3106 See 'hg help dates' for a list of formats valid for -d/--date.
3107 """
3107 """
3108
3108
3109 rev_ = "."
3109 rev_ = "."
3110 names = (name1,) + names
3110 names = (name1,) + names
3111 if len(names) != len(set(names)):
3111 if len(names) != len(set(names)):
3112 raise util.Abort(_('tag names must be unique'))
3112 raise util.Abort(_('tag names must be unique'))
3113 for n in names:
3113 for n in names:
3114 if n in ['tip', '.', 'null']:
3114 if n in ['tip', '.', 'null']:
3115 raise util.Abort(_('the name \'%s\' is reserved') % n)
3115 raise util.Abort(_('the name \'%s\' is reserved') % n)
3116 if opts.get('rev') and opts.get('remove'):
3116 if opts.get('rev') and opts.get('remove'):
3117 raise util.Abort(_("--rev and --remove are incompatible"))
3117 raise util.Abort(_("--rev and --remove are incompatible"))
3118 if opts.get('rev'):
3118 if opts.get('rev'):
3119 rev_ = opts['rev']
3119 rev_ = opts['rev']
3120 message = opts.get('message')
3120 message = opts.get('message')
3121 if opts.get('remove'):
3121 if opts.get('remove'):
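        # Editorial note: the and/or pair below is the pre-2.5 ternary idiom,
        # i.e. 'local' if opts.get('local') else 'global'.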
3122 expectedtype = opts.get('local') and 'local' or 'global'
3122 expectedtype = opts.get('local') and 'local' or 'global'
3123 for n in names:
3123 for n in names:
3124 if not repo.tagtype(n):
3124 if not repo.tagtype(n):
3125 raise util.Abort(_('tag \'%s\' does not exist') % n)
3125 raise util.Abort(_('tag \'%s\' does not exist') % n)
3126 if repo.tagtype(n) != expectedtype:
3126 if repo.tagtype(n) != expectedtype:
3127 if expectedtype == 'global':
3127 if expectedtype == 'global':
3128 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3128 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3129 else:
3129 else:
3130 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3130 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3131 rev_ = nullid
3131 rev_ = nullid
3132 if not message:
3132 if not message:
3133 # we don't translate commit messages
3133 # we don't translate commit messages
3134 message = 'Removed tag %s' % ', '.join(names)
3134 message = 'Removed tag %s' % ', '.join(names)
3135 elif not opts.get('force'):
3135 elif not opts.get('force'):
3136 for n in names:
3136 for n in names:
3137 if n in repo.tags():
3137 if n in repo.tags():
3138 raise util.Abort(_('tag \'%s\' already exists '
3138 raise util.Abort(_('tag \'%s\' already exists '
3139 '(use -f to force)') % n)
3139 '(use -f to force)') % n)
3140 if not rev_ and repo.dirstate.parents()[1] != nullid:
3140 if not rev_ and repo.dirstate.parents()[1] != nullid:
3141 raise util.Abort(_('uncommitted merge - please provide a '
3141 raise util.Abort(_('uncommitted merge - please provide a '
3142 'specific revision'))
3142 'specific revision'))
3143 r = repo[rev_].node()
3143 r = repo[rev_].node()
3144
3144
3145 if not message:
3145 if not message:
3146 # we don't translate commit messages
3146 # we don't translate commit messages
3147 message = ('Added tag %s for changeset %s' %
3147 message = ('Added tag %s for changeset %s' %
3148 (', '.join(names), short(r)))
3148 (', '.join(names), short(r)))
3149
3149
3150 date = opts.get('date')
3150 date = opts.get('date')
3151 if date:
3151 if date:
3152 date = util.parsedate(date)
3152 date = util.parsedate(date)
3153
3153
3154 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3154 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3155
3155
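# Editor's sketch (illustrative, not part of the changeset): how the guards
# in tag() above behave for a few hypothetical invocations, assuming 'ui' and
# 'repo' objects wired up as usual:
#
#     tag(ui, repo, 'v1.0')              # tags the working directory parent
#     tag(ui, repo, 'v1.0', 'v1.0')      # Abort: tag names must be unique
#     tag(ui, repo, 'tip')               # Abort: the name 'tip' is reserved
#     tag(ui, repo, 'v1.0', rev='5', remove=True)
#                                        # Abort: --rev and --remove are
#                                        #        incompatible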
3156 def tags(ui, repo):
3156 def tags(ui, repo):
3157 """list repository tags
3157 """list repository tags
3158
3158
3159 This lists both regular and local tags. When the -v/--verbose
3159 This lists both regular and local tags. When the -v/--verbose
3160 switch is used, a third column "local" is printed for local tags.
3160 switch is used, a third column "local" is printed for local tags.
3161 """
3161 """
3162
3162
3163 hexfunc = ui.debugflag and hex or short
3163 hexfunc = ui.debugflag and hex or short
3164 tagtype = ""
3164 tagtype = ""
3165
3165
3166 for t, n in reversed(repo.tagslist()):
3166 for t, n in reversed(repo.tagslist()):
3167 if ui.quiet:
3167 if ui.quiet:
3168 ui.write("%s\n" % t)
3168 ui.write("%s\n" % t)
3169 continue
3169 continue
3170
3170
3171 try:
3171 try:
3172 hn = hexfunc(n)
3172 hn = hexfunc(n)
3173 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3173 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3174 except error.LookupError:
3174 except error.LookupError:
3175 r = " ?:%s" % hn
3175 r = " ?:%s" % hn
3176 else:
3176 else:
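            # Editorial note: colwidth() below measures display width (wide
            # glyphs count as two columns), so tag names line up in a
            # 30-column field even when they contain non-ASCII characters.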
3177 spaces = " " * (30 - encoding.colwidth(t))
3177 spaces = " " * (30 - encoding.colwidth(t))
3178 if ui.verbose:
3178 if ui.verbose:
3179 if repo.tagtype(t) == 'local':
3179 if repo.tagtype(t) == 'local':
3180 tagtype = " local"
3180 tagtype = " local"
3181 else:
3181 else:
3182 tagtype = ""
3182 tagtype = ""
3183 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3183 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3184
3184
3185 def tip(ui, repo, **opts):
3185 def tip(ui, repo, **opts):
3186 """show the tip revision
3186 """show the tip revision
3187
3187
3188 The tip revision (usually just called the tip) is the changeset
3188 The tip revision (usually just called the tip) is the changeset
3189 most recently added to the repository (and therefore the most
3189 most recently added to the repository (and therefore the most
3190 recently changed head).
3190 recently changed head).
3191
3191
3192 If you have just made a commit, that commit will be the tip. If
3192 If you have just made a commit, that commit will be the tip. If
3193 you have just pulled changes from another repository, the tip of
3193 you have just pulled changes from another repository, the tip of
3194 that repository becomes the current tip. The "tip" tag is special
3194 that repository becomes the current tip. The "tip" tag is special
3195 and cannot be renamed or assigned to a different changeset.
3195 and cannot be renamed or assigned to a different changeset.
3196 """
3196 """
3197 displayer = cmdutil.show_changeset(ui, repo, opts)
3197 displayer = cmdutil.show_changeset(ui, repo, opts)
3198 displayer.show(repo[len(repo) - 1])
3198 displayer.show(repo[len(repo) - 1])
3199 displayer.close()
3199 displayer.close()
3200
3200
3201 def unbundle(ui, repo, fname1, *fnames, **opts):
3201 def unbundle(ui, repo, fname1, *fnames, **opts):
3202 """apply one or more changegroup files
3202 """apply one or more changegroup files
3203
3203
3204 Apply one or more compressed changegroup files generated by the
3204 Apply one or more compressed changegroup files generated by the
3205 bundle command.
3205 bundle command.
3206 """
3206 """
3207 fnames = (fname1,) + fnames
3207 fnames = (fname1,) + fnames
3208
3208
3209 lock = repo.lock()
3209 lock = repo.lock()
3210 try:
3210 try:
3211 for fname in fnames:
3211 for fname in fnames:
3212 f = url.open(ui, fname)
3212 f = url.open(ui, fname)
3213 gen = changegroup.readbundle(f, fname)
3213 gen = changegroup.readbundle(f, fname)
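            # Editorial note: addchangegroup() reports how the heads changed;
            # postincoming() below uses that value to suggest 'hg update' or
            # 'hg heads'/'hg merge' to the user.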
3214 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3214 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3215 finally:
3215 finally:
3216 lock.release()
3216 lock.release()
3217
3217
3218 return postincoming(ui, repo, modheads, opts.get('update'), None)
3218 return postincoming(ui, repo, modheads, opts.get('update'), None)
3219
3219
3220 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3220 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3221 """update working directory
3221 """update working directory
3222
3222
3223 Update the repository's working directory to the specified
3223 Update the repository's working directory to the specified
3224 changeset.
3224 changeset.
3225
3225
3226 If no changeset is specified, attempt to update to the head of the
3226 If no changeset is specified, attempt to update to the head of the
3227 current branch. If this head is a descendant of the working
3227 current branch. If this head is a descendant of the working
3228 directory's parent, update to it, otherwise abort.
3228 directory's parent, update to it, otherwise abort.
3229
3229
3230 The following rules apply when the working directory contains
3230 The following rules apply when the working directory contains
3231 uncommitted changes:
3231 uncommitted changes:
3232
3232
3233 1. If neither -c/--check nor -C/--clean is specified, and if
3233 1. If neither -c/--check nor -C/--clean is specified, and if
3234 the requested changeset is an ancestor or descendant of
3234 the requested changeset is an ancestor or descendant of
3235 the working directory's parent, the uncommitted changes
3235 the working directory's parent, the uncommitted changes
3236 are merged into the requested changeset and the merged
3236 are merged into the requested changeset and the merged
3237 result is left uncommitted. If the requested changeset is
3237 result is left uncommitted. If the requested changeset is
3238 not an ancestor or descendant (that is, it is on another
3238 not an ancestor or descendant (that is, it is on another
3239 branch), the update is aborted and the uncommitted changes
3239 branch), the update is aborted and the uncommitted changes
3240 are preserved.
3240 are preserved.
3241
3241
3242 2. With the -c/--check option, the update is aborted and the
3242 2. With the -c/--check option, the update is aborted and the
3243 uncommitted changes are preserved.
3243 uncommitted changes are preserved.
3244
3244
3245 3. With the -C/--clean option, uncommitted changes are discarded and
3245 3. With the -C/--clean option, uncommitted changes are discarded and
3246 the working directory is updated to the requested changeset.
3246 the working directory is updated to the requested changeset.
3247
3247
3248 Use null as the changeset to remove the working directory (like 'hg
3248 Use null as the changeset to remove the working directory (like 'hg
3249 clone -U').
3249 clone -U').
3250
3250
3251 If you want to update just one file to an older changeset, use 'hg revert'.
3251 If you want to update just one file to an older changeset, use 'hg revert'.
3252
3252
3253 See 'hg help dates' for a list of formats valid for -d/--date.
3253 See 'hg help dates' for a list of formats valid for -d/--date.
3254 """
3254 """
3255 if rev and node:
3255 if rev and node:
3256 raise util.Abort(_("please specify just one revision"))
3256 raise util.Abort(_("please specify just one revision"))
3257
3257
3258 if not rev:
3258 if not rev:
3259 rev = node
3259 rev = node
3260
3260
3261 if check and clean:
3261 if check and clean:
3262 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3262 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3263
3263
3264 if check:
3264 if check:
3265 # we could use dirty() but we can ignore merge and branch trivia
3265 # we could use dirty() but we can ignore merge and branch trivia
3266 c = repo[None]
3266 c = repo[None]
3267 if c.modified() or c.added() or c.removed():
3267 if c.modified() or c.added() or c.removed():
3268 raise util.Abort(_("uncommitted local changes"))
3268 raise util.Abort(_("uncommitted local changes"))
3269
3269
3270 if date:
3270 if date:
3271 if rev:
3271 if rev:
3272 raise util.Abort(_("you can't specify a revision and a date"))
3272 raise util.Abort(_("you can't specify a revision and a date"))
3273 rev = cmdutil.finddate(ui, repo, date)
3273 rev = cmdutil.finddate(ui, repo, date)
3274
3274
3275 if clean or check:
3275 if clean or check:
3276 return hg.clean(repo, rev)
3276 return hg.clean(repo, rev)
3277 else:
3277 else:
3278 return hg.update(repo, rev)
3278 return hg.update(repo, rev)
3279
3279
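# Editor's sketch (illustrative): how the flag combinations in update()
# resolve when the working directory has uncommitted changes:
#
#     hg update -C REV    -> hg.clean(): discard local changes, move to REV
#     hg update -c REV    -> abort: "uncommitted local changes"
#     hg update REV       -> hg.update(): merge the local changes into REV,
#                            or abort if REV is on another branch
#     hg update -c -C REV -> abort: the two options are incompatible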
3280 def verify(ui, repo):
3280 def verify(ui, repo):
3281 """verify the integrity of the repository
3281 """verify the integrity of the repository
3282
3282
3283 Verify the integrity of the current repository.
3283 Verify the integrity of the current repository.
3284
3284
3285 This will perform an extensive check of the repository's
3285 This will perform an extensive check of the repository's
3286 integrity, validating the hashes and checksums of each entry in
3286 integrity, validating the hashes and checksums of each entry in
3287 the changelog, manifest, and tracked files, as well as the
3287 the changelog, manifest, and tracked files, as well as the
3288 integrity of their crosslinks and indices.
3288 integrity of their crosslinks and indices.
3289 """
3289 """
3290 return hg.verify(repo)
3290 return hg.verify(repo)
3291
3291
3292 def version_(ui):
3292 def version_(ui):
3293 """output version and copyright information"""
3293 """output version and copyright information"""
3294 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3294 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3295 % util.version())
3295 % util.version())
3296 ui.status(_(
3296 ui.status(_(
3297 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3297 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3298 "This is free software; see the source for copying conditions. "
3298 "This is free software; see the source for copying conditions. "
3299 "There is NO\nwarranty; "
3299 "There is NO\nwarranty; "
3300 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3300 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3301 ))
3301 ))
3302
3302
3303 # Command options and aliases are listed here, alphabetically
3303 # Command options and aliases are listed here, alphabetically
3304
3304
3305 globalopts = [
3305 globalopts = [
3306 ('R', 'repository', '',
3306 ('R', 'repository', '',
3307 _('repository root directory or name of overlay bundle file')),
3307 _('repository root directory or name of overlay bundle file')),
3308 ('', 'cwd', '', _('change working directory')),
3308 ('', 'cwd', '', _('change working directory')),
3309 ('y', 'noninteractive', None,
3309 ('y', 'noninteractive', None,
3310 _('do not prompt, assume \'yes\' for any required answers')),
3310 _('do not prompt, assume \'yes\' for any required answers')),
3311 ('q', 'quiet', None, _('suppress output')),
3311 ('q', 'quiet', None, _('suppress output')),
3312 ('v', 'verbose', None, _('enable additional output')),
3312 ('v', 'verbose', None, _('enable additional output')),
3313 ('', 'config', [], _('set/override config option')),
3313 ('', 'config', [], _('set/override config option')),
3314 ('', 'debug', None, _('enable debugging output')),
3314 ('', 'debug', None, _('enable debugging output')),
3315 ('', 'debugger', None, _('start debugger')),
3315 ('', 'debugger', None, _('start debugger')),
3316 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3316 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3317 ('', 'encodingmode', encoding.encodingmode,
3317 ('', 'encodingmode', encoding.encodingmode,
3318 _('set the charset encoding mode')),
3318 _('set the charset encoding mode')),
3319 ('', 'traceback', None, _('always print a traceback on exception')),
3319 ('', 'traceback', None, _('always print a traceback on exception')),
3320 ('', 'time', None, _('time how long the command takes')),
3320 ('', 'time', None, _('time how long the command takes')),
3321 ('', 'profile', None, _('print command execution profile')),
3321 ('', 'profile', None, _('print command execution profile')),
3322 ('', 'version', None, _('output version information and exit')),
3322 ('', 'version', None, _('output version information and exit')),
3323 ('h', 'help', None, _('display help and exit')),
3323 ('h', 'help', None, _('display help and exit')),
3324 ]
3324 ]
3325
3325
3326 dryrunopts = [('n', 'dry-run', None,
3326 dryrunopts = [('n', 'dry-run', None,
3327 _('do not perform actions, just print output'))]
3327 _('do not perform actions, just print output'))]
3328
3328
3329 remoteopts = [
3329 remoteopts = [
3330 ('e', 'ssh', '', _('specify ssh command to use')),
3330 ('e', 'ssh', '', _('specify ssh command to use')),
3331 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3331 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3332 ]
3332 ]
3333
3333
3334 walkopts = [
3334 walkopts = [
3335 ('I', 'include', [], _('include names matching the given patterns')),
3335 ('I', 'include', [], _('include names matching the given patterns')),
3336 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3336 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3337 ]
3337 ]
3338
3338
3339 commitopts = [
3339 commitopts = [
3340 ('m', 'message', '', _('use <text> as commit message')),
3340 ('m', 'message', '', _('use <text> as commit message')),
3341 ('l', 'logfile', '', _('read commit message from <file>')),
3341 ('l', 'logfile', '', _('read commit message from <file>')),
3342 ]
3342 ]
3343
3343
3344 commitopts2 = [
3344 commitopts2 = [
3345 ('d', 'date', '', _('record datecode as commit date')),
3345 ('d', 'date', '', _('record datecode as commit date')),
3346 ('u', 'user', '', _('record the specified user as committer')),
3346 ('u', 'user', '', _('record the specified user as committer')),
3347 ]
3347 ]
3348
3348
3349 templateopts = [
3349 templateopts = [
3350 ('', 'style', '', _('display using template map file')),
3350 ('', 'style', '', _('display using template map file')),
3351 ('', 'template', '', _('display with template')),
3351 ('', 'template', '', _('display with template')),
3352 ]
3352 ]
3353
3353
3354 logopts = [
3354 logopts = [
3355 ('p', 'patch', None, _('show patch')),
3355 ('p', 'patch', None, _('show patch')),
3356 ('g', 'git', None, _('use git extended diff format')),
3356 ('g', 'git', None, _('use git extended diff format')),
3357 ('l', 'limit', '', _('limit number of changes displayed')),
3357 ('l', 'limit', '', _('limit number of changes displayed')),
3358 ('M', 'no-merges', None, _('do not show merges')),
3358 ('M', 'no-merges', None, _('do not show merges')),
3359 ] + templateopts
3359 ] + templateopts
3360
3360
3361 diffopts = [
3361 diffopts = [
3362 ('a', 'text', None, _('treat all files as text')),
3362 ('a', 'text', None, _('treat all files as text')),
3363 ('g', 'git', None, _('use git extended diff format')),
3363 ('g', 'git', None, _('use git extended diff format')),
3364 ('', 'nodates', None, _('omit dates from diff headers'))
3364 ('', 'nodates', None, _('omit dates from diff headers'))
3365 ]
3365 ]
3366
3366
3367 diffopts2 = [
3367 diffopts2 = [
3368 ('p', 'show-function', None, _('show which function each change is in')),
3368 ('p', 'show-function', None, _('show which function each change is in')),
3369 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3369 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3370 ('w', 'ignore-all-space', None,
3370 ('w', 'ignore-all-space', None,
3371 _('ignore white space when comparing lines')),
3371 _('ignore white space when comparing lines')),
3372 ('b', 'ignore-space-change', None,
3372 ('b', 'ignore-space-change', None,
3373 _('ignore changes in the amount of white space')),
3373 _('ignore changes in the amount of white space')),
3374 ('B', 'ignore-blank-lines', None,
3374 ('B', 'ignore-blank-lines', None,
3375 _('ignore changes whose lines are all blank')),
3375 _('ignore changes whose lines are all blank')),
3376 ('U', 'unified', '', _('number of lines of context to show')),
3376 ('U', 'unified', '', _('number of lines of context to show')),
3377 ('', 'stat', None, _('output diffstat-style summary of changes')),
3377 ('', 'stat', None, _('output diffstat-style summary of changes')),
3378 ]
3378 ]
3379
3379
3380 similarityopts = [
3380 similarityopts = [
3381 ('s', 'similarity', '',
3381 ('s', 'similarity', '',
3382 _('guess renamed files by similarity (0<=s<=100)'))
3382 _('guess renamed files by similarity (0<=s<=100)'))
3383 ]
3383 ]
3384
3384
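# Editor's note: each option above is a (short flag, long flag, default,
# help text) tuple; the type of the default tells the option parser how to
# treat the flag -- None for a simple switch, '' for a string value, [] for
# a repeatable option, and an integer for a value parsed as an int.  For
# example (taken from dryrunopts above):
#
#     ('n', 'dry-run', None, _('do not perform actions, just print output'))
#     # accepts both -n and --dry-run as a boolean switch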
3385 table = {
3385 table = {
3386 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3386 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3387 "addremove":
3387 "addremove":
3388 (addremove, similarityopts + walkopts + dryrunopts,
3388 (addremove, similarityopts + walkopts + dryrunopts,
3389 _('[OPTION]... [FILE]...')),
3389 _('[OPTION]... [FILE]...')),
3390 "^annotate|blame":
3390 "^annotate|blame":
3391 (annotate,
3391 (annotate,
3392 [('r', 'rev', '', _('annotate the specified revision')),
3392 [('r', 'rev', '', _('annotate the specified revision')),
3393 ('', 'follow', None, _('follow copies and renames (DEPRECATED)')),
3393 ('', 'follow', None, _('follow copies and renames (DEPRECATED)')),
3394 ('', 'no-follow', None, _("don't follow copies and renames")),
3394 ('', 'no-follow', None, _("don't follow copies and renames")),
3395 ('a', 'text', None, _('treat all files as text')),
3395 ('a', 'text', None, _('treat all files as text')),
3396 ('u', 'user', None, _('list the author (long with -v)')),
3396 ('u', 'user', None, _('list the author (long with -v)')),
3397 ('f', 'file', None, _('list the filename')),
3397 ('f', 'file', None, _('list the filename')),
3398 ('d', 'date', None, _('list the date (short with -q)')),
3398 ('d', 'date', None, _('list the date (short with -q)')),
3399 ('n', 'number', None, _('list the revision number (default)')),
3399 ('n', 'number', None, _('list the revision number (default)')),
3400 ('c', 'changeset', None, _('list the changeset')),
3400 ('c', 'changeset', None, _('list the changeset')),
3401 ('l', 'line-number', None,
3401 ('l', 'line-number', None,
3402 _('show line number at the first appearance'))
3402 _('show line number at the first appearance'))
3403 ] + walkopts,
3403 ] + walkopts,
3404 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3404 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3405 "archive":
3405 "archive":
3406 (archive,
3406 (archive,
3407 [('', 'no-decode', None, _('do not pass files through decoders')),
3407 [('', 'no-decode', None, _('do not pass files through decoders')),
3408 ('p', 'prefix', '', _('directory prefix for files in archive')),
3408 ('p', 'prefix', '', _('directory prefix for files in archive')),
3409 ('r', 'rev', '', _('revision to distribute')),
3409 ('r', 'rev', '', _('revision to distribute')),
3410 ('t', 'type', '', _('type of distribution to create')),
3410 ('t', 'type', '', _('type of distribution to create')),
3411 ] + walkopts,
3411 ] + walkopts,
3412 _('[OPTION]... DEST')),
3412 _('[OPTION]... DEST')),
3413 "backout":
3413 "backout":
3414 (backout,
3414 (backout,
3415 [('', 'merge', None,
3415 [('', 'merge', None,
3416 _('merge with old dirstate parent after backout')),
3416 _('merge with old dirstate parent after backout')),
3417 ('', 'parent', '', _('parent to choose when backing out merge')),
3417 ('', 'parent', '', _('parent to choose when backing out merge')),
3418 ('r', 'rev', '', _('revision to backout')),
3418 ('r', 'rev', '', _('revision to backout')),
3419 ] + walkopts + commitopts + commitopts2,
3419 ] + walkopts + commitopts + commitopts2,
3420 _('[OPTION]... [-r] REV')),
3420 _('[OPTION]... [-r] REV')),
3421 "bisect":
3421 "bisect":
3422 (bisect,
3422 (bisect,
3423 [('r', 'reset', False, _('reset bisect state')),
3423 [('r', 'reset', False, _('reset bisect state')),
3424 ('g', 'good', False, _('mark changeset good')),
3424 ('g', 'good', False, _('mark changeset good')),
3425 ('b', 'bad', False, _('mark changeset bad')),
3425 ('b', 'bad', False, _('mark changeset bad')),
3426 ('s', 'skip', False, _('skip testing changeset')),
3426 ('s', 'skip', False, _('skip testing changeset')),
3427 ('c', 'command', '', _('use command to check changeset state')),
3427 ('c', 'command', '', _('use command to check changeset state')),
3428 ('U', 'noupdate', False, _('do not update to target'))],
3428 ('U', 'noupdate', False, _('do not update to target'))],
3429 _("[-gbsr] [-U] [-c CMD] [REV]")),
3429 _("[-gbsr] [-U] [-c CMD] [REV]")),
3430 "branch":
3430 "branch":
3431 (branch,
3431 (branch,
3432 [('f', 'force', None,
3432 [('f', 'force', None,
3433 _('set branch name even if it shadows an existing branch')),
3433 _('set branch name even if it shadows an existing branch')),
3434 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3434 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3435 _('[-fC] [NAME]')),
3435 _('[-fC] [NAME]')),
3436 "branches":
3436 "branches":
3437 (branches,
3437 (branches,
3438 [('a', 'active', False,
3438 [('a', 'active', False,
3439 _('show only branches that have unmerged heads')),
3439 _('show only branches that have unmerged heads')),
3440 ('c', 'closed', False,
3440 ('c', 'closed', False,
3441 _('show normal and closed branches'))],
3441 _('show normal and closed branches'))],
3442 _('[-ac]')),
3442 _('[-ac]')),
3443 "bundle":
3443 "bundle":
3444 (bundle,
3444 (bundle,
3445 [('f', 'force', None,
3445 [('f', 'force', None,
3446 _('run even when the destination is unrelated')),
3446 _('run even when the destination is unrelated')),
3447 ('r', 'rev', [],
3447 ('r', 'rev', [],
3448 _('a changeset intended to be added to the destination')),
3448 _('a changeset intended to be added to the destination')),
3449 ('b', 'branch', [],
3449 ('b', 'branch', [],
3450 _('a specific branch you would like to bundle')),
3450 _('a specific branch you would like to bundle')),
3451 ('', 'base', [],
3451 ('', 'base', [],
3452 _('a base changeset assumed to be available at the destination')),
3452 _('a base changeset assumed to be available at the destination')),
3453 ('a', 'all', None, _('bundle all changesets in the repository')),
3453 ('a', 'all', None, _('bundle all changesets in the repository')),
3454 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3454 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3455 ] + remoteopts,
3455 ] + remoteopts,
3456 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3456 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3457 "cat":
3457 "cat":
3458 (cat,
3458 (cat,
3459 [('o', 'output', '', _('print output to file with formatted name')),
3459 [('o', 'output', '', _('print output to file with formatted name')),
3460 ('r', 'rev', '', _('print the given revision')),
3460 ('r', 'rev', '', _('print the given revision')),
3461 ('', 'decode', None, _('apply any matching decode filter')),
3461 ('', 'decode', None, _('apply any matching decode filter')),
3462 ] + walkopts,
3462 ] + walkopts,
3463 _('[OPTION]... FILE...')),
3463 _('[OPTION]... FILE...')),
3464 "^clone":
3464 "^clone":
3465 (clone,
3465 (clone,
3466 [('U', 'noupdate', None,
3466 [('U', 'noupdate', None,
3467 _('the clone will include an empty working copy (only a repository)')),
3467 _('the clone will include an empty working copy (only a repository)')),
3468 ('u', 'updaterev', '',
3468 ('u', 'updaterev', '',
3469 _('revision, tag or branch to check out')),
3469 _('revision, tag or branch to check out')),
3470 ('r', 'rev', [],
3470 ('r', 'rev', [],
3471 _('include the specified changeset')),
3471 _('include the specified changeset')),
3472 ('b', 'branch', [],
3472 ('b', 'branch', [],
3473 _('clone only the specified branch')),
3473 _('clone only the specified branch')),
3474 ('', 'pull', None, _('use pull protocol to copy metadata')),
3474 ('', 'pull', None, _('use pull protocol to copy metadata')),
3475 ('', 'uncompressed', None,
3475 ('', 'uncompressed', None,
3476 _('use uncompressed transfer (fast over LAN)')),
3476 _('use uncompressed transfer (fast over LAN)')),
3477 ] + remoteopts,
3477 ] + remoteopts,
3478 _('[OPTION]... SOURCE [DEST]')),
3478 _('[OPTION]... SOURCE [DEST]')),
3479 "^commit|ci":
3479 "^commit|ci":
3480 (commit,
3480 (commit,
3481 [('A', 'addremove', None,
3481 [('A', 'addremove', None,
3482 _('mark new/missing files as added/removed before committing')),
3482 _('mark new/missing files as added/removed before committing')),
3483 ('', 'close-branch', None,
3483 ('', 'close-branch', None,
3484 _('mark a branch as closed, hiding it from the branch list')),
3484 _('mark a branch as closed, hiding it from the branch list')),
3485 ] + walkopts + commitopts + commitopts2,
3485 ] + walkopts + commitopts + commitopts2,
3486 _('[OPTION]... [FILE]...')),
3486 _('[OPTION]... [FILE]...')),
3487 "copy|cp":
3487 "copy|cp":
3488 (copy,
3488 (copy,
3489 [('A', 'after', None, _('record a copy that has already occurred')),
3489 [('A', 'after', None, _('record a copy that has already occurred')),
3490 ('f', 'force', None,
3490 ('f', 'force', None,
3491 _('forcibly copy over an existing managed file')),
3491 _('forcibly copy over an existing managed file')),
3492 ] + walkopts + dryrunopts,
3492 ] + walkopts + dryrunopts,
3493 _('[OPTION]... [SOURCE]... DEST')),
3493 _('[OPTION]... [SOURCE]... DEST')),
3494 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3494 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3495 "debugcheckstate": (debugcheckstate, [], ''),
3495 "debugcheckstate": (debugcheckstate, [], ''),
3496 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3496 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3497 "debugcomplete":
3497 "debugcomplete":
3498 (debugcomplete,
3498 (debugcomplete,
3499 [('o', 'options', None, _('show the command options'))],
3499 [('o', 'options', None, _('show the command options'))],
3500 _('[-o] CMD')),
3500 _('[-o] CMD')),
3501 "debugdate":
3501 "debugdate":
3502 (debugdate,
3502 (debugdate,
3503 [('e', 'extended', None, _('try extended date formats'))],
3503 [('e', 'extended', None, _('try extended date formats'))],
3504 _('[-e] DATE [RANGE]')),
3504 _('[-e] DATE [RANGE]')),
3505 "debugdata": (debugdata, [], _('FILE REV')),
3505 "debugdata": (debugdata, [], _('FILE REV')),
3506 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3506 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3507 "debugindex": (debugindex, [], _('FILE')),
3507 "debugindex": (debugindex, [], _('FILE')),
3508 "debugindexdot": (debugindexdot, [], _('FILE')),
3508 "debugindexdot": (debugindexdot, [], _('FILE')),
3509 "debuginstall": (debuginstall, [], ''),
3509 "debuginstall": (debuginstall, [], ''),
3510 "debugrebuildstate":
3510 "debugrebuildstate":
3511 (debugrebuildstate,
3511 (debugrebuildstate,
3512 [('r', 'rev', '', _('revision to rebuild to'))],
3512 [('r', 'rev', '', _('revision to rebuild to'))],
3513 _('[-r REV] [REV]')),
3513 _('[-r REV] [REV]')),
3514 "debugrename":
3514 "debugrename":
3515 (debugrename,
3515 (debugrename,
3516 [('r', 'rev', '', _('revision to debug'))],
3516 [('r', 'rev', '', _('revision to debug'))],
3517 _('[-r REV] FILE')),
3517 _('[-r REV] FILE')),
3518 "debugsetparents":
3518 "debugsetparents":
3519 (debugsetparents, [], _('REV1 [REV2]')),
3519 (debugsetparents, [], _('REV1 [REV2]')),
3520 "debugstate":
3520 "debugstate":
3521 (debugstate,
3521 (debugstate,
3522 [('', 'nodates', None, _('do not display the saved mtime'))],
3522 [('', 'nodates', None, _('do not display the saved mtime'))],
3523 _('[OPTION]...')),
3523 _('[OPTION]...')),
3524 "debugsub":
3524 "debugsub":
3525 (debugsub,
3525 (debugsub,
3526 [('r', 'rev', '', _('revision to check'))],
3526 [('r', 'rev', '', _('revision to check'))],
3527 _('[-r REV] [REV]')),
3527 _('[-r REV] [REV]')),
3528 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3528 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3529 "^diff":
3529 "^diff":
3530 (diff,
3530 (diff,
3531 [('r', 'rev', [], _('revision')),
3531 [('r', 'rev', [], _('revision')),
3532 ('c', 'change', '', _('change made by revision'))
3532 ('c', 'change', '', _('change made by revision'))
3533 ] + diffopts + diffopts2 + walkopts,
3533 ] + diffopts + diffopts2 + walkopts,
3534 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3534 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3535 "^export":
3535 "^export":
3536 (export,
3536 (export,
3537 [('o', 'output', '', _('print output to file with formatted name')),
3537 [('o', 'output', '', _('print output to file with formatted name')),
3538 ('', 'switch-parent', None, _('diff against the second parent')),
3538 ('', 'switch-parent', None, _('diff against the second parent')),
3539 ('r', 'rev', [], _('revisions to export')),
3539 ('r', 'rev', [], _('revisions to export')),
3540 ] + diffopts,
3540 ] + diffopts,
3541 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3541 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3542 "^forget":
3542 "^forget":
3543 (forget,
3543 (forget,
3544 [] + walkopts,
3544 [] + walkopts,
3545 _('[OPTION]... FILE...')),
3545 _('[OPTION]... FILE...')),
3546 "grep":
3546 "grep":
3547 (grep,
3547 (grep,
3548 [('0', 'print0', None, _('end fields with NUL')),
3548 [('0', 'print0', None, _('end fields with NUL')),
3549 ('', 'all', None, _('print all revisions that match')),
3549 ('', 'all', None, _('print all revisions that match')),
3550 ('f', 'follow', None,
3550 ('f', 'follow', None,
3551 _('follow changeset history,'
3551 _('follow changeset history,'
3552 ' or file history across copies and renames')),
3552 ' or file history across copies and renames')),
3553 ('i', 'ignore-case', None, _('ignore case when matching')),
3553 ('i', 'ignore-case', None, _('ignore case when matching')),
3554 ('l', 'files-with-matches', None,
3554 ('l', 'files-with-matches', None,
3555 _('print only filenames and revisions that match')),
3555 _('print only filenames and revisions that match')),
3556 ('n', 'line-number', None, _('print matching line numbers')),
3556 ('n', 'line-number', None, _('print matching line numbers')),
3557 ('r', 'rev', [], _('search in given revision range')),
3557 ('r', 'rev', [], _('search in given revision range')),
3558 ('u', 'user', None, _('list the author (long with -v)')),
3558 ('u', 'user', None, _('list the author (long with -v)')),
3559 ('d', 'date', None, _('list the date (short with -q)')),
3559 ('d', 'date', None, _('list the date (short with -q)')),
3560 ] + walkopts,
3560 ] + walkopts,
3561 _('[OPTION]... PATTERN [FILE]...')),
3561 _('[OPTION]... PATTERN [FILE]...')),
3562 "heads":
3562 "heads":
3563 (heads,
3563 (heads,
3564 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3564 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3565 ('t', 'topo', False, _('show topological heads only')),
3565 ('t', 'topo', False, _('show topological heads only')),
3566 ('a', 'active', False,
3566 ('a', 'active', False,
3567 _('show active branchheads only [DEPRECATED]')),
3567 _('show active branchheads only [DEPRECATED]')),
3568 ('c', 'closed', False,
3568 ('c', 'closed', False,
3569 _('show normal and closed branch heads')),
3569 _('show normal and closed branch heads')),
3570 ] + templateopts,
3570 ] + templateopts,
3571 _('[-ac] [-r STARTREV] [REV]...')),
3571 _('[-ac] [-r STARTREV] [REV]...')),
3572 "help": (help_, [], _('[TOPIC]')),
3572 "help": (help_, [], _('[TOPIC]')),
3573 "identify|id":
3573 "identify|id":
3574 (identify,
3574 (identify,
3575 [('r', 'rev', '', _('identify the specified revision')),
3575 [('r', 'rev', '', _('identify the specified revision')),
3576 ('n', 'num', None, _('show local revision number')),
3576 ('n', 'num', None, _('show local revision number')),
3577 ('i', 'id', None, _('show global revision id')),
3577 ('i', 'id', None, _('show global revision id')),
3578 ('b', 'branch', None, _('show branch')),
3578 ('b', 'branch', None, _('show branch')),
3579 ('t', 'tags', None, _('show tags'))],
3579 ('t', 'tags', None, _('show tags'))],
3580 _('[-nibt] [-r REV] [SOURCE]')),
3580 _('[-nibt] [-r REV] [SOURCE]')),
3581 "import|patch":
3581 "import|patch":
3582 (import_,
3582 (import_,
3583 [('p', 'strip', 1,
3583 [('p', 'strip', 1,
3584 _('directory strip option for patch. This has the same '
3584 _('directory strip option for patch. This has the same '
3585 'meaning as the corresponding patch option')),
3585 'meaning as the corresponding patch option')),
3586 ('b', 'base', '', _('base path')),
3586 ('b', 'base', '', _('base path')),
3587 ('f', 'force', None,
3587 ('f', 'force', None,
3588 _('skip check for outstanding uncommitted changes')),
3588 _('skip check for outstanding uncommitted changes')),
3589 ('', 'no-commit', None,
3589 ('', 'no-commit', None,
3590 _("don't commit, just update the working directory")),
3590 _("don't commit, just update the working directory")),
3591 ('', 'exact', None,
3591 ('', 'exact', None,
3592 _('apply patch to the nodes from which it was generated')),
3592 _('apply patch to the nodes from which it was generated')),
3593 ('', 'import-branch', None,
3593 ('', 'import-branch', None,
3594 _('use any branch information in patch (implied by --exact)'))] +
3594 _('use any branch information in patch (implied by --exact)'))] +
3595 commitopts + commitopts2 + similarityopts,
3595 commitopts + commitopts2 + similarityopts,
3596 _('[OPTION]... PATCH...')),
3596 _('[OPTION]... PATCH...')),
3597 "incoming|in":
3597 "incoming|in":
3598 (incoming,
3598 (incoming,
3599 [('f', 'force', None,
3599 [('f', 'force', None,
3600 _('run even if remote repository is unrelated')),
3600 _('run even if remote repository is unrelated')),
3601 ('n', 'newest-first', None, _('show newest record first')),
3601 ('n', 'newest-first', None, _('show newest record first')),
3602 ('', 'bundle', '', _('file to store the bundles into')),
3602 ('', 'bundle', '', _('file to store the bundles into')),
3603 ('r', 'rev', [],
3603 ('r', 'rev', [],
3604 _('a remote changeset intended to be added')),
3604 _('a remote changeset intended to be added')),
3605 ('b', 'branch', [],
3605 ('b', 'branch', [],
3606 _('a specific branch you would like to pull')),
3606 _('a specific branch you would like to pull')),
3607 ] + logopts + remoteopts,
3607 ] + logopts + remoteopts,
3608 _('[-p] [-n] [-M] [-f] [-r REV]...'
3608 _('[-p] [-n] [-M] [-f] [-r REV]...'
3609 ' [--bundle FILENAME] [SOURCE]')),
3609 ' [--bundle FILENAME] [SOURCE]')),
3610 "^init":
3610 "^init":
3611 (init,
3611 (init,
3612 remoteopts,
3612 remoteopts,
3613 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3613 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3614 "locate":
3614 "locate":
3615 (locate,
3615 (locate,
3616 [('r', 'rev', '', _('search the repository as it is in REV')),
3616 [('r', 'rev', '', _('search the repository as it is in REV')),
3617 ('0', 'print0', None,
3617 ('0', 'print0', None,
3618 _('end filenames with NUL, for use with xargs')),
3618 _('end filenames with NUL, for use with xargs')),
3619 ('f', 'fullpath', None,
3619 ('f', 'fullpath', None,
3620 _('print complete paths from the filesystem root')),
3620 _('print complete paths from the filesystem root')),
3621 ] + walkopts,
3621 ] + walkopts,
3622 _('[OPTION]... [PATTERN]...')),
3622 _('[OPTION]... [PATTERN]...')),
3623 "^log|history":
3623 "^log|history":
3624 (log,
3624 (log,
3625 [('f', 'follow', None,
3625 [('f', 'follow', None,
3626 _('follow changeset history,'
3626 _('follow changeset history,'
3627 ' or file history across copies and renames')),
3627 ' or file history across copies and renames')),
3628 ('', 'follow-first', None,
3628 ('', 'follow-first', None,
3629 _('only follow the first parent of merge changesets')),
3629 _('only follow the first parent of merge changesets')),
3630 ('d', 'date', '', _('show revisions matching date spec')),
3630 ('d', 'date', '', _('show revisions matching date spec')),
3631 ('C', 'copies', None, _('show copied files')),
3631 ('C', 'copies', None, _('show copied files')),
3632 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3632 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3633 ('r', 'rev', [], _('show the specified revision or range')),
3633 ('r', 'rev', [], _('show the specified revision or range')),
3634 ('', 'removed', None, _('include revisions where files were removed')),
3634 ('', 'removed', None, _('include revisions where files were removed')),
3635 ('m', 'only-merges', None, _('show only merges')),
3635 ('m', 'only-merges', None, _('show only merges')),
3636 ('u', 'user', [], _('revisions committed by user')),
3636 ('u', 'user', [], _('revisions committed by user')),
3637 ('b', 'only-branch', [],
3637 ('b', 'only-branch', [],
3638 _('show only changesets within the given named branch')),
3638 _('show only changesets within the given named branch')),
3639 ('P', 'prune', [],
3639 ('P', 'prune', [],
3640 _('do not display revision or any of its ancestors')),
3640 _('do not display revision or any of its ancestors')),
3641 ] + logopts + walkopts,
3641 ] + logopts + walkopts,
3642 _('[OPTION]... [FILE]')),
3642 _('[OPTION]... [FILE]')),
3643 "manifest":
3643 "manifest":
3644 (manifest,
3644 (manifest,
3645 [('r', 'rev', '', _('revision to display'))],
3645 [('r', 'rev', '', _('revision to display'))],
3646 _('[-r REV]')),
3646 _('[-r REV]')),
3647 "^merge":
3647 "^merge":
3648 (merge,
3648 (merge,
3649 [('f', 'force', None, _('force a merge with outstanding changes')),
3649 [('f', 'force', None, _('force a merge with outstanding changes')),
3650 ('r', 'rev', '', _('revision to merge')),
3650 ('r', 'rev', '', _('revision to merge')),
3651 ('P', 'preview', None,
3651 ('P', 'preview', None,
3652 _('review revisions to merge (no merge is performed)'))],
3652 _('review revisions to merge (no merge is performed)'))],
3653 _('[-P] [-f] [[-r] REV]')),
3653 _('[-P] [-f] [[-r] REV]')),
3654 "outgoing|out":
3654 "outgoing|out":
3655 (outgoing,
3655 (outgoing,
3656 [('f', 'force', None,
3656 [('f', 'force', None,
3657 _('run even when the destination is unrelated')),
3657 _('run even when the destination is unrelated')),
3658 ('r', 'rev', [],
3658 ('r', 'rev', [],
3659 _('a changeset intended to be included in the destination')),
3659 _('a changeset intended to be included in the destination')),
3660 ('n', 'newest-first', None, _('show newest record first')),
3660 ('n', 'newest-first', None, _('show newest record first')),
3661 ('b', 'branch', [],
3661 ('b', 'branch', [],
3662 _('a specific branch you would like to push')),
3662 _('a specific branch you would like to push')),
3663 ] + logopts + remoteopts,
3663 ] + logopts + remoteopts,
3664 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3664 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3665 "parents":
3665 "parents":
3666 (parents,
3666 (parents,
3667 [('r', 'rev', '', _('show parents of the specified revision')),
3667 [('r', 'rev', '', _('show parents of the specified revision')),
3668 ] + templateopts,
3668 ] + templateopts,
3669 _('[-r REV] [FILE]')),
3669 _('[-r REV] [FILE]')),
3670 "paths": (paths, [], _('[NAME]')),
3670 "paths": (paths, [], _('[NAME]')),
3671 "^pull":
3671 "^pull":
3672 (pull,
3672 (pull,
3673 [('u', 'update', None,
3673 [('u', 'update', None,
3674 _('update to new branch head if changesets were pulled')),
3674 _('update to new branch head if changesets were pulled')),
3675 ('f', 'force', None,
3675 ('f', 'force', None,
3676 _('run even when remote repository is unrelated')),
3676 _('run even when remote repository is unrelated')),
3677 ('r', 'rev', [],
3677 ('r', 'rev', [],
3678 _('a remote changeset intended to be added')),
3678 _('a remote changeset intended to be added')),
3679 ('b', 'branch', [],
3679 ('b', 'branch', [],
3680 _('a specific branch you would like to pull')),
3680 _('a specific branch you would like to pull')),
3681 ] + remoteopts,
3681 ] + remoteopts,
3682 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3682 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3683 "^push":
3683 "^push":
3684 (push,
3684 (push,
3685 [('f', 'force', None, _('force push')),
3685 [('f', 'force', None, _('force push')),
3686 ('r', 'rev', [],
3686 ('r', 'rev', [],
3687 _('a changeset intended to be included in the destination')),
3687 _('a changeset intended to be included in the destination')),
3688 ('b', 'branch', [],
3688 ('b', 'branch', [],
3689 _('a specific branch you would like to push')),
3689 _('a specific branch you would like to push')),
3690 ] + remoteopts,
3690 ] + remoteopts,
3691 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3691 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3692 "recover": (recover, []),
3692 "recover": (recover, []),
3693 "^remove|rm":
3693 "^remove|rm":
3694 (remove,
3694 (remove,
3695 [('A', 'after', None, _('record delete for missing files')),
3695 [('A', 'after', None, _('record delete for missing files')),
3696 ('f', 'force', None,
3696 ('f', 'force', None,
3697 _('remove (and delete) file even if added or modified')),
3697 _('remove (and delete) file even if added or modified')),
3698 ] + walkopts,
3698 ] + walkopts,
3699 _('[OPTION]... FILE...')),
3699 _('[OPTION]... FILE...')),
3700 "rename|mv":
3700 "rename|mv":
3701 (rename,
3701 (rename,
3702 [('A', 'after', None, _('record a rename that has already occurred')),
3702 [('A', 'after', None, _('record a rename that has already occurred')),
3703 ('f', 'force', None,
3703 ('f', 'force', None,
3704 _('forcibly copy over an existing managed file')),
3704 _('forcibly copy over an existing managed file')),
3705 ] + walkopts + dryrunopts,
3705 ] + walkopts + dryrunopts,
3706 _('[OPTION]... SOURCE... DEST')),
3706 _('[OPTION]... SOURCE... DEST')),
3707 "resolve":
3707 "resolve":
3708 (resolve,
3708 (resolve,
3709 [('a', 'all', None, _('select all unresolved files')),
3709 [('a', 'all', None, _('select all unresolved files')),
3710 ('l', 'list', None, _('list state of files needing merge')),
3710 ('l', 'list', None, _('list state of files needing merge')),
3711 ('m', 'mark', None, _('mark files as resolved')),
3711 ('m', 'mark', None, _('mark files as resolved')),
3712 ('u', 'unmark', None, _('unmark files as resolved')),
3712 ('u', 'unmark', None, _('unmark files as resolved')),
3713 ('n', 'no-status', None, _('hide status prefix'))]
3713 ('n', 'no-status', None, _('hide status prefix'))]
3714 + walkopts,
3714 + walkopts,
3715 _('[OPTION]... [FILE]...')),
3715 _('[OPTION]... [FILE]...')),
3716 "revert":
3716 "revert":
3717 (revert,
3717 (revert,
3718 [('a', 'all', None, _('revert all changes when no arguments given')),
3718 [('a', 'all', None, _('revert all changes when no arguments given')),
3719 ('d', 'date', '', _('tipmost revision matching date')),
3719 ('d', 'date', '', _('tipmost revision matching date')),
3720 ('r', 'rev', '', _('revert to the specified revision')),
3720 ('r', 'rev', '', _('revert to the specified revision')),
3721 ('', 'no-backup', None, _('do not save backup copies of files')),
3721 ('', 'no-backup', None, _('do not save backup copies of files')),
3722 ] + walkopts + dryrunopts,
3722 ] + walkopts + dryrunopts,
3723 _('[OPTION]... [-r REV] [NAME]...')),
3723 _('[OPTION]... [-r REV] [NAME]...')),
3724 "rollback": (rollback, []),
3724 "rollback": (rollback, []),
3725 "root": (root, []),
3725 "root": (root, []),
3726 "^serve":
3726 "^serve":
3727 (serve,
3727 (serve,
3728 [('A', 'accesslog', '', _('name of access log file to write to')),
3728 [('A', 'accesslog', '', _('name of access log file to write to')),
3729 ('d', 'daemon', None, _('run server in background')),
3729 ('d', 'daemon', None, _('run server in background')),
3730 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3730 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3731 ('E', 'errorlog', '', _('name of error log file to write to')),
3731 ('E', 'errorlog', '', _('name of error log file to write to')),
3732 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3732 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3733 ('a', 'address', '',
3733 ('a', 'address', '',
3734 _('address to listen on (default: all interfaces)')),
3734 _('address to listen on (default: all interfaces)')),
3735 ('', 'prefix', '',
3735 ('', 'prefix', '',
3736 _('prefix path to serve from (default: server root)')),
3736 _('prefix path to serve from (default: server root)')),
3737 ('n', 'name', '',
3737 ('n', 'name', '',
3738 _('name to show in web pages (default: working directory)')),
3738 _('name to show in web pages (default: working directory)')),
3739 ('', 'webdir-conf', '', _('name of the webdir config file'
3739 ('', 'webdir-conf', '', _('name of the webdir config file'
3740 ' (serve more than one repository)')),
3740 ' (serve more than one repository)')),
3741 ('', 'pid-file', '', _('name of file to write process ID to')),
3741 ('', 'pid-file', '', _('name of file to write process ID to')),
3742 ('', 'stdio', None, _('for remote clients')),
3742 ('', 'stdio', None, _('for remote clients')),
3743 ('t', 'templates', '', _('web templates to use')),
3743 ('t', 'templates', '', _('web templates to use')),
3744 ('', 'style', '', _('template style to use')),
3744 ('', 'style', '', _('template style to use')),
3745 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3745 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3746 ('', 'certificate', '', _('SSL certificate file'))],
3746 ('', 'certificate', '', _('SSL certificate file'))],
3747 _('[OPTION]...')),
3747 _('[OPTION]...')),
3748 "showconfig|debugconfig":
3748 "showconfig|debugconfig":
3749 (showconfig,
3749 (showconfig,
3750 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3750 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3751 _('[-u] [NAME]...')),
3751 _('[-u] [NAME]...')),
3752 "^summary|sum":
3752 "^summary|sum":
3753 (summary,
3753 (summary,
3754 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3754 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
3755 "^status|st":
3755 "^status|st":
3756 (status,
3756 (status,
3757 [('A', 'all', None, _('show status of all files')),
3757 [('A', 'all', None, _('show status of all files')),
3758 ('m', 'modified', None, _('show only modified files')),
3758 ('m', 'modified', None, _('show only modified files')),
3759 ('a', 'added', None, _('show only added files')),
3759 ('a', 'added', None, _('show only added files')),
3760 ('r', 'removed', None, _('show only removed files')),
3760 ('r', 'removed', None, _('show only removed files')),
3761 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3761 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3762 ('c', 'clean', None, _('show only files without changes')),
3762 ('c', 'clean', None, _('show only files without changes')),
3763 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3763 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3764 ('i', 'ignored', None, _('show only ignored files')),
3764 ('i', 'ignored', None, _('show only ignored files')),
3765 ('n', 'no-status', None, _('hide status prefix')),
3765 ('n', 'no-status', None, _('hide status prefix')),
3766 ('C', 'copies', None, _('show source of copied files')),
3766 ('C', 'copies', None, _('show source of copied files')),
3767 ('0', 'print0', None,
3767 ('0', 'print0', None,
3768 _('end filenames with NUL, for use with xargs')),
3768 _('end filenames with NUL, for use with xargs')),
3769 ('', 'rev', [], _('show difference from revision')),
3769 ('', 'rev', [], _('show difference from revision')),
3770 ('', 'change', '', _('list the changed files of a revision')),
3770 ('', 'change', '', _('list the changed files of a revision')),
3771 ] + walkopts,
3771 ] + walkopts,
3772 _('[OPTION]... [FILE]...')),
3772 _('[OPTION]... [FILE]...')),
3773 "tag":
3773 "tag":
3774 (tag,
3774 (tag,
3775 [('f', 'force', None, _('replace existing tag')),
3775 [('f', 'force', None, _('replace existing tag')),
3776 ('l', 'local', None, _('make the tag local')),
3776 ('l', 'local', None, _('make the tag local')),
3777 ('r', 'rev', '', _('revision to tag')),
3777 ('r', 'rev', '', _('revision to tag')),
3778 ('', 'remove', None, _('remove a tag')),
3778 ('', 'remove', None, _('remove a tag')),
3779 # -l/--local is already there, commitopts cannot be used
3779 # -l/--local is already there, commitopts cannot be used
3780 ('m', 'message', '', _('use <text> as commit message')),
3780 ('m', 'message', '', _('use <text> as commit message')),
3781 ] + commitopts2,
3781 ] + commitopts2,
3782 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3782 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3783 "tags": (tags, [], ''),
3783 "tags": (tags, [], ''),
3784 "tip":
3784 "tip":
3785 (tip,
3785 (tip,
3786 [('p', 'patch', None, _('show patch')),
3786 [('p', 'patch', None, _('show patch')),
3787 ('g', 'git', None, _('use git extended diff format')),
3787 ('g', 'git', None, _('use git extended diff format')),
3788 ] + templateopts,
3788 ] + templateopts,
3789 _('[-p] [-g]')),
3789 _('[-p] [-g]')),
3790 "unbundle":
3790 "unbundle":
3791 (unbundle,
3791 (unbundle,
3792 [('u', 'update', None,
3792 [('u', 'update', None,
3793 _('update to new branch head if changesets were unbundled'))],
3793 _('update to new branch head if changesets were unbundled'))],
3794 _('[-u] FILE...')),
3794 _('[-u] FILE...')),
3795 "^update|up|checkout|co":
3795 "^update|up|checkout|co":
3796 (update,
3796 (update,
3797 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
3797 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
3798 ('c', 'check', None, _('check for uncommitted changes')),
3798 ('c', 'check', None, _('check for uncommitted changes')),
3799 ('d', 'date', '', _('tipmost revision matching date')),
3799 ('d', 'date', '', _('tipmost revision matching date')),
3800 ('r', 'rev', '', _('revision'))],
3800 ('r', 'rev', '', _('revision'))],
3801 _('[-c] [-C] [-d DATE] [[-r] REV]')),
3801 _('[-c] [-C] [-d DATE] [[-r] REV]')),
3802 "verify": (verify, []),
3802 "verify": (verify, []),
3803 "version": (version_, []),
3803 "version": (version_, []),
3804 }
3804 }
3805
3805
3806 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3806 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3807 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3807 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3808 optionalrepo = ("identify paths serve showconfig debugancestor")
3808 optionalrepo = ("identify paths serve showconfig debugancestor")
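
Each key in the command table above encodes the command's aliases: a leading "^" marks the command for the short help listing and "|" separates alternative names, while the value is a (function, options, synopsis) tuple. A minimal sketch of how such a key can be decomposed; this is only an illustration, not Mercurial's own dispatch code:

def parsealias(spec):
    # "^update|up|checkout|co" -> ('update', ['up', 'checkout', 'co'], True)
    common = spec.startswith('^')
    names = spec.lstrip('^').split('|')
    return names[0], names[1:], common

print(parsealias('^status|st'))   # ('status', ['st'], True)
print(parsealias('tag'))          # ('tag', [], False)
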
@@ -1,655 +1,655 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import util, ignore, osutil, parsers
10 import util, ignore, osutil, parsers
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _unknown = ('?', 0, 0, 0)
14 _unknown = ('?', 0, 0, 0)
15 _format = ">cllll"
15 _format = ">cllll"
16 propertycache = util.propertycache
16 propertycache = util.propertycache
17
17
18 def _finddirs(path):
18 def _finddirs(path):
19 pos = path.rfind('/')
19 pos = path.rfind('/')
20 while pos != -1:
20 while pos != -1:
21 yield path[:pos]
21 yield path[:pos]
22 pos = path.rfind('/', 0, pos)
22 pos = path.rfind('/', 0, pos)
23
23
24 def _incdirs(dirs, path):
24 def _incdirs(dirs, path):
25 for base in _finddirs(path):
25 for base in _finddirs(path):
26 if base in dirs:
26 if base in dirs:
27 dirs[base] += 1
27 dirs[base] += 1
28 return
28 return
29 dirs[base] = 1
29 dirs[base] = 1
30
30
31 def _decdirs(dirs, path):
31 def _decdirs(dirs, path):
32 for base in _finddirs(path):
32 for base in _finddirs(path):
33 if dirs[base] > 1:
33 if dirs[base] > 1:
34 dirs[base] -= 1
34 dirs[base] -= 1
35 return
35 return
36 del dirs[base]
36 del dirs[base]
37
37
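
_finddirs yields every ancestor directory of a slash-separated path, and _incdirs/_decdirs maintain per-directory counters that the dirstate class below uses (in _addpath/_droppath) to tell whether a directory still shadows any tracked file. A small illustrative run on invented paths, assuming the helpers above are in scope:

print(list(_finddirs('a/b/c.txt')))   # ['a/b', 'a']

dirs = {}
_incdirs(dirs, 'a/b/c.txt')   # first file: every new ancestor gets a count of 1
_incdirs(dirs, 'a/b/d.txt')   # nearest already-known ancestor absorbs the increment
print(dirs)                   # counts: 'a/b' -> 2, 'a' -> 1
_decdirs(dirs, 'a/b/d.txt')
print(dirs)                   # counts: 'a/b' -> 1, 'a' -> 1
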
38 class dirstate(object):
38 class dirstate(object):
39
39
40 def __init__(self, opener, ui, root):
40 def __init__(self, opener, ui, root):
41 '''Create a new dirstate object.
41 '''Create a new dirstate object.
42
42
43 opener is an open()-like callable that can be used to open the
43 opener is an open()-like callable that can be used to open the
44 dirstate file; root is the root of the directory tracked by
44 dirstate file; root is the root of the directory tracked by
45 the dirstate.
45 the dirstate.
46 '''
46 '''
47 self._opener = opener
47 self._opener = opener
48 self._root = root
48 self._root = root
49 self._rootdir = os.path.join(root, '')
49 self._rootdir = os.path.join(root, '')
50 self._dirty = False
50 self._dirty = False
51 self._dirtypl = False
51 self._dirtypl = False
52 self._ui = ui
52 self._ui = ui
53
53
54 @propertycache
54 @propertycache
55 def _map(self):
55 def _map(self):
56 '''Return the dirstate contents as a map from filename to
56 '''Return the dirstate contents as a map from filename to
57 (state, mode, size, time).'''
57 (state, mode, size, time).'''
58 self._read()
58 self._read()
59 return self._map
59 return self._map
60
60
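
Because _map is a propertycache, the dirstate file is parsed lazily on first access; each entry is the 4-tuple (state, mode, size, mtime) that the ">cllll" format above later serializes. Invented sample contents, for orientation only:

example_map = {
    'README':       ('n', 0644, 1024, 1262304000),  # tracked and clean
    'hgext/new.py': ('a', 0,      -1,         -1),  # freshly added
    'old.txt':      ('r', 0,       0,          0),  # marked for removal
}
print(example_map['hgext/new.py'][0])   # 'a'
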
61 @propertycache
61 @propertycache
62 def _copymap(self):
62 def _copymap(self):
63 self._read()
63 self._read()
64 return self._copymap
64 return self._copymap
65
65
66 @propertycache
66 @propertycache
67 def _foldmap(self):
67 def _foldmap(self):
68 f = {}
68 f = {}
69 for name in self._map:
69 for name in self._map:
70 f[os.path.normcase(name)] = name
70 f[os.path.normcase(name)] = name
71 return f
71 return f
72
72
73 @propertycache
73 @propertycache
74 def _branch(self):
74 def _branch(self):
75 try:
75 try:
76 return self._opener("branch").read().strip() or "default"
76 return self._opener("branch").read().strip() or "default"
77 except IOError:
77 except IOError:
78 return "default"
78 return "default"
79
79
80 @propertycache
80 @propertycache
81 def _pl(self):
81 def _pl(self):
82 try:
82 try:
83 st = self._opener("dirstate").read(40)
83 st = self._opener("dirstate").read(40)
84 l = len(st)
84 l = len(st)
85 if l == 40:
85 if l == 40:
86 return st[:20], st[20:40]
86 return st[:20], st[20:40]
87 elif l > 0 and l < 40:
87 elif l > 0 and l < 40:
88 raise util.Abort(_('working directory state appears damaged!'))
88 raise util.Abort(_('working directory state appears damaged!'))
89 except IOError, err:
89 except IOError, err:
90 if err.errno != errno.ENOENT:
90 if err.errno != errno.ENOENT:
91 raise
91 raise
92 return [nullid, nullid]
92 return [nullid, nullid]
93
93
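
_pl lazily reads the working directory's two parents from the first 40 bytes of the dirstate file. A standalone sketch of the same parsing, with nullid spelled out rather than imported:

nullid = '\0' * 20   # mirrors mercurial.node.nullid

def readparents(data):
    # the first 40 bytes of a dirstate are the two 20-byte parent node ids
    if len(data) >= 40:
        return data[:20], data[20:40]
    if data:
        raise ValueError('working directory state appears damaged!')
    return nullid, nullid

p1, p2 = readparents('')                 # missing or empty dirstate -> null parents
print(p1 == nullid and p2 == nullid)     # True
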
94 @propertycache
94 @propertycache
95 def _dirs(self):
95 def _dirs(self):
96 dirs = {}
96 dirs = {}
97 for f, s in self._map.iteritems():
97 for f, s in self._map.iteritems():
98 if s[0] != 'r':
98 if s[0] != 'r':
99 _incdirs(dirs, f)
99 _incdirs(dirs, f)
100 return dirs
100 return dirs
101
101
102 @propertycache
102 @propertycache
103 def _ignore(self):
103 def _ignore(self):
104 files = [self._join('.hgignore')]
104 files = [self._join('.hgignore')]
105 for name, path in self._ui.configitems("ui"):
105 for name, path in self._ui.configitems("ui"):
106 if name == 'ignore' or name.startswith('ignore.'):
106 if name == 'ignore' or name.startswith('ignore.'):
107 files.append(util.expandpath(path))
107 files.append(util.expandpath(path))
108 return ignore.ignore(self._root, files, self._ui.warn)
108 return ignore.ignore(self._root, files, self._ui.warn)
109
109
110 @propertycache
110 @propertycache
111 def _slash(self):
111 def _slash(self):
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
113
113
114 @propertycache
114 @propertycache
115 def _checklink(self):
115 def _checklink(self):
116 return util.checklink(self._root)
116 return util.checklink(self._root)
117
117
118 @propertycache
118 @propertycache
119 def _checkexec(self):
119 def _checkexec(self):
120 return util.checkexec(self._root)
120 return util.checkexec(self._root)
121
121
122 @propertycache
122 @propertycache
123 def _checkcase(self):
123 def _checkcase(self):
124 return not util.checkcase(self._join('.hg'))
124 return not util.checkcase(self._join('.hg'))
125
125
126 def _join(self, f):
126 def _join(self, f):
127 # much faster than os.path.join()
127 # much faster than os.path.join()
128 # it's safe because f is always a relative path
128 # it's safe because f is always a relative path
129 return self._rootdir + f
129 return self._rootdir + f
130
130
131 def flagfunc(self, fallback):
131 def flagfunc(self, fallback):
132 if self._checklink:
132 if self._checklink:
133 if self._checkexec:
133 if self._checkexec:
134 def f(x):
134 def f(x):
135 p = self._join(x)
135 p = self._join(x)
136 if os.path.islink(p):
136 if os.path.islink(p):
137 return 'l'
137 return 'l'
138 if util.is_exec(p):
138 if util.is_exec(p):
139 return 'x'
139 return 'x'
140 return ''
140 return ''
141 return f
141 return f
142 def f(x):
142 def f(x):
143 if os.path.islink(self._join(x)):
143 if os.path.islink(self._join(x)):
144 return 'l'
144 return 'l'
145 if 'x' in fallback(x):
145 if 'x' in fallback(x):
146 return 'x'
146 return 'x'
147 return ''
147 return ''
148 return f
148 return f
149 if self._checkexec:
149 if self._checkexec:
150 def f(x):
150 def f(x):
151 if 'l' in fallback(x):
151 if 'l' in fallback(x):
152 return 'l'
152 return 'l'
153 if util.is_exec(self._join(x)):
153 if util.is_exec(self._join(x)):
154 return 'x'
154 return 'x'
155 return ''
155 return ''
156 return f
156 return f
157 return fallback
157 return fallback
158
158
159 def getcwd(self):
159 def getcwd(self):
160 cwd = os.getcwd()
160 cwd = os.getcwd()
161 if cwd == self._root:
161 if cwd == self._root:
162 return ''
162 return ''
163 # self._root ends with a path separator if self._root is '/' or 'C:\'
163 # self._root ends with a path separator if self._root is '/' or 'C:\'
164 rootsep = self._root
164 rootsep = self._root
165 if not util.endswithsep(rootsep):
165 if not util.endswithsep(rootsep):
166 rootsep += os.sep
166 rootsep += os.sep
167 if cwd.startswith(rootsep):
167 if cwd.startswith(rootsep):
168 return cwd[len(rootsep):]
168 return cwd[len(rootsep):]
169 else:
169 else:
170 # we're outside the repo. return an absolute path.
170 # we're outside the repo. return an absolute path.
171 return cwd
171 return cwd
172
172
173 def pathto(self, f, cwd=None):
173 def pathto(self, f, cwd=None):
174 if cwd is None:
174 if cwd is None:
175 cwd = self.getcwd()
175 cwd = self.getcwd()
176 path = util.pathto(self._root, cwd, f)
176 path = util.pathto(self._root, cwd, f)
177 if self._slash:
177 if self._slash:
178 return util.normpath(path)
178 return util.normpath(path)
179 return path
179 return path
180
180
181 def __getitem__(self, key):
181 def __getitem__(self, key):
182 '''Return the current state of key (a filename) in the dirstate.
182 '''Return the current state of key (a filename) in the dirstate.
183
183
184 States are:
184 States are:
185 n normal
185 n normal
186 m needs merging
186 m needs merging
187 r marked for removal
187 r marked for removal
188 a marked for addition
188 a marked for addition
189 ? not tracked
189 ? not tracked
190 '''
190 '''
191 return self._map.get(key, ("?",))[0]
191 return self._map.get(key, ("?",))[0]
192
192
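
Untracked files never raise here; a missing key simply reports '?'. The same idiom on a plain dict (values invented):

dmap = {'setup.py': ('n', 0644, 2345, 1262304000)}
print(dmap.get('setup.py', ('?',))[0])       # 'n' - normal
print(dmap.get('no-such-file', ('?',))[0])   # '?' - not tracked
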
193 def __contains__(self, key):
193 def __contains__(self, key):
194 return key in self._map
194 return key in self._map
195
195
196 def __iter__(self):
196 def __iter__(self):
197 for x in sorted(self._map):
197 for x in sorted(self._map):
198 yield x
198 yield x
199
199
200 def parents(self):
200 def parents(self):
201 return self._pl
201 return self._pl
202
202
203 def branch(self):
203 def branch(self):
204 return self._branch
204 return self._branch
205
205
206 def setparents(self, p1, p2=nullid):
206 def setparents(self, p1, p2=nullid):
207 self._dirty = self._dirtypl = True
207 self._dirty = self._dirtypl = True
208 self._pl = p1, p2
208 self._pl = p1, p2
209
209
210 def setbranch(self, branch):
210 def setbranch(self, branch):
211 self._branch = branch
211 self._branch = branch
212 self._opener("branch", "w").write(branch + '\n')
212 self._opener("branch", "w").write(branch + '\n')
213
213
214 def _read(self):
214 def _read(self):
215 self._map = {}
215 self._map = {}
216 self._copymap = {}
216 self._copymap = {}
217 try:
217 try:
218 st = self._opener("dirstate").read()
218 st = self._opener("dirstate").read()
219 except IOError, err:
219 except IOError, err:
220 if err.errno != errno.ENOENT:
220 if err.errno != errno.ENOENT:
221 raise
221 raise
222 return
222 return
223 if not st:
223 if not st:
224 return
224 return
225
225
226 p = parsers.parse_dirstate(self._map, self._copymap, st)
226 p = parsers.parse_dirstate(self._map, self._copymap, st)
227 if not self._dirtypl:
227 if not self._dirtypl:
228 self._pl = p
228 self._pl = p
229
229
230 def invalidate(self):
230 def invalidate(self):
231 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
231 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
232 if a in self.__dict__:
232 if a in self.__dict__:
233 delattr(self, a)
233 delattr(self, a)
234 self._dirty = False
234 self._dirty = False
235
235
236 def copy(self, source, dest):
236 def copy(self, source, dest):
237 """Mark dest as a copy of source. Unmark dest if source is None."""
237 """Mark dest as a copy of source. Unmark dest if source is None."""
238 if source == dest:
238 if source == dest:
239 return
239 return
240 self._dirty = True
240 self._dirty = True
241 if source is not None:
241 if source is not None:
242 self._copymap[dest] = source
242 self._copymap[dest] = source
243 elif dest in self._copymap:
243 elif dest in self._copymap:
244 del self._copymap[dest]
244 del self._copymap[dest]
245
245
246 def copied(self, file):
246 def copied(self, file):
247 return self._copymap.get(file, None)
247 return self._copymap.get(file, None)
248
248
249 def copies(self):
249 def copies(self):
250 return self._copymap
250 return self._copymap
251
251
252 def _droppath(self, f):
252 def _droppath(self, f):
253 if self[f] not in "?r" and "_dirs" in self.__dict__:
253 if self[f] not in "?r" and "_dirs" in self.__dict__:
254 _decdirs(self._dirs, f)
254 _decdirs(self._dirs, f)
255
255
256 def _addpath(self, f, check=False):
256 def _addpath(self, f, check=False):
257 oldstate = self[f]
257 oldstate = self[f]
258 if check or oldstate == "r":
258 if check or oldstate == "r":
259 if '\r' in f or '\n' in f:
259 if '\r' in f or '\n' in f:
260 raise util.Abort(
260 raise util.Abort(
261 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
261 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
262 if f in self._dirs:
262 if f in self._dirs:
263 raise util.Abort(_('directory %r already in dirstate') % f)
263 raise util.Abort(_('directory %r already in dirstate') % f)
264 # shadows
264 # shadows
265 for d in _finddirs(f):
265 for d in _finddirs(f):
266 if d in self._dirs:
266 if d in self._dirs:
267 break
267 break
268 if d in self._map and self[d] != 'r':
268 if d in self._map and self[d] != 'r':
269 raise util.Abort(
269 raise util.Abort(
270 _('file %r in dirstate clashes with %r') % (d, f))
270 _('file %r in dirstate clashes with %r') % (d, f))
271 if oldstate in "?r" and "_dirs" in self.__dict__:
271 if oldstate in "?r" and "_dirs" in self.__dict__:
272 _incdirs(self._dirs, f)
272 _incdirs(self._dirs, f)
273
273
274 def normal(self, f):
274 def normal(self, f):
275 '''Mark a file normal and clean.'''
275 '''Mark a file normal and clean.'''
276 self._dirty = True
276 self._dirty = True
277 self._addpath(f)
277 self._addpath(f)
278 s = os.lstat(self._join(f))
278 s = os.lstat(self._join(f))
279 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
279 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
280 if f in self._copymap:
280 if f in self._copymap:
281 del self._copymap[f]
281 del self._copymap[f]
282
282
283 def normallookup(self, f):
283 def normallookup(self, f):
284 '''Mark a file normal, but possibly dirty.'''
284 '''Mark a file normal, but possibly dirty.'''
285 if self._pl[1] != nullid and f in self._map:
285 if self._pl[1] != nullid and f in self._map:
286 # if there is a merge going on and the file was either
286 # if there is a merge going on and the file was either
287 # in state 'm' or dirty before being removed, restore that state.
287 # in state 'm' or dirty before being removed, restore that state.
288 entry = self._map[f]
288 entry = self._map[f]
289 if entry[0] == 'r' and entry[2] in (-1, -2):
289 if entry[0] == 'r' and entry[2] in (-1, -2):
290 source = self._copymap.get(f)
290 source = self._copymap.get(f)
291 if entry[2] == -1:
291 if entry[2] == -1:
292 self.merge(f)
292 self.merge(f)
293 elif entry[2] == -2:
293 elif entry[2] == -2:
294 self.normaldirty(f)
294 self.normaldirty(f)
295 if source:
295 if source:
296 self.copy(source, f)
296 self.copy(source, f)
297 return
297 return
298 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
298 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
299 return
299 return
300 self._dirty = True
300 self._dirty = True
301 self._addpath(f)
301 self._addpath(f)
302 self._map[f] = ('n', 0, -1, -1)
302 self._map[f] = ('n', 0, -1, -1)
303 if f in self._copymap:
303 if f in self._copymap:
304 del self._copymap[f]
304 del self._copymap[f]
305
305
306 def normaldirty(self, f):
306 def normaldirty(self, f):
307 '''Mark a file normal, but dirty.'''
307 '''Mark a file normal, but dirty.'''
308 self._dirty = True
308 self._dirty = True
309 self._addpath(f)
309 self._addpath(f)
310 self._map[f] = ('n', 0, -2, -1)
310 self._map[f] = ('n', 0, -2, -1)
311 if f in self._copymap:
311 if f in self._copymap:
312 del self._copymap[f]
312 del self._copymap[f]
313
313
314 def add(self, f):
314 def add(self, f):
315 '''Mark a file added.'''
315 '''Mark a file added.'''
316 self._dirty = True
316 self._dirty = True
317 self._addpath(f, True)
317 self._addpath(f, True)
318 self._map[f] = ('a', 0, -1, -1)
318 self._map[f] = ('a', 0, -1, -1)
319 if f in self._copymap:
319 if f in self._copymap:
320 del self._copymap[f]
320 del self._copymap[f]
321
321
322 def remove(self, f):
322 def remove(self, f):
323 '''Mark a file removed.'''
323 '''Mark a file removed.'''
324 self._dirty = True
324 self._dirty = True
325 self._droppath(f)
325 self._droppath(f)
326 size = 0
326 size = 0
327 if self._pl[1] != nullid and f in self._map:
327 if self._pl[1] != nullid and f in self._map:
328 entry = self._map[f]
328 entry = self._map[f]
329 if entry[0] == 'm':
329 if entry[0] == 'm':
330 size = -1
330 size = -1
331 elif entry[0] == 'n' and entry[2] == -2:
331 elif entry[0] == 'n' and entry[2] == -2:
332 size = -2
332 size = -2
333 self._map[f] = ('r', 0, size, 0)
333 self._map[f] = ('r', 0, size, 0)
334 if size == 0 and f in self._copymap:
334 if size == 0 and f in self._copymap:
335 del self._copymap[f]
335 del self._copymap[f]
336
336
337 def merge(self, f):
337 def merge(self, f):
338 '''Mark a file merged.'''
338 '''Mark a file merged.'''
339 self._dirty = True
339 self._dirty = True
340 s = os.lstat(self._join(f))
340 s = os.lstat(self._join(f))
341 self._addpath(f)
341 self._addpath(f)
342 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
342 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
343 if f in self._copymap:
343 if f in self._copymap:
344 del self._copymap[f]
344 del self._copymap[f]
345
345
346 def forget(self, f):
346 def forget(self, f):
347 '''Forget a file.'''
347 '''Forget a file.'''
348 self._dirty = True
348 self._dirty = True
349 try:
349 try:
350 self._droppath(f)
350 self._droppath(f)
351 del self._map[f]
351 del self._map[f]
352 except KeyError:
352 except KeyError:
353 self._ui.warn(_("not in dirstate: %s\n") % f)
353 self._ui.warn(_("not in dirstate: %s\n") % f)
354
354
355 def _normalize(self, path, knownpath):
355 def _normalize(self, path, knownpath):
356 norm_path = os.path.normcase(path)
356 norm_path = os.path.normcase(path)
357 fold_path = self._foldmap.get(norm_path, None)
357 fold_path = self._foldmap.get(norm_path, None)
358 if fold_path is None:
358 if fold_path is None:
359 if knownpath or not os.path.exists(os.path.join(self._root, path)):
359 if knownpath or not os.path.exists(os.path.join(self._root, path)):
360 fold_path = path
360 fold_path = path
361 else:
361 else:
362 fold_path = self._foldmap.setdefault(norm_path,
362 fold_path = self._foldmap.setdefault(norm_path,
363 util.fspath(path, self._root))
363 util.fspath(path, self._root))
364 return fold_path
364 return fold_path
365
365
366 def clear(self):
366 def clear(self):
367 self._map = {}
367 self._map = {}
368 if "_dirs" in self.__dict__:
368 if "_dirs" in self.__dict__:
369 delattr(self, "_dirs");
369 delattr(self, "_dirs")
370 self._copymap = {}
370 self._copymap = {}
371 self._pl = [nullid, nullid]
371 self._pl = [nullid, nullid]
372 self._dirty = True
372 self._dirty = True
373
373
374 def rebuild(self, parent, files):
374 def rebuild(self, parent, files):
375 self.clear()
375 self.clear()
376 for f in files:
376 for f in files:
377 if 'x' in files.flags(f):
377 if 'x' in files.flags(f):
378 self._map[f] = ('n', 0777, -1, 0)
378 self._map[f] = ('n', 0777, -1, 0)
379 else:
379 else:
380 self._map[f] = ('n', 0666, -1, 0)
380 self._map[f] = ('n', 0666, -1, 0)
381 self._pl = (parent, nullid)
381 self._pl = (parent, nullid)
382 self._dirty = True
382 self._dirty = True
383
383
384 def write(self):
384 def write(self):
385 if not self._dirty:
385 if not self._dirty:
386 return
386 return
387 st = self._opener("dirstate", "w", atomictemp=True)
387 st = self._opener("dirstate", "w", atomictemp=True)
388
388
389 # use the modification time of the newly created temporary file as the
389 # use the modification time of the newly created temporary file as the
390 # filesystem's notion of 'now'
390 # filesystem's notion of 'now'
391 now = int(util.fstat(st).st_mtime)
391 now = int(util.fstat(st).st_mtime)
392
392
393 cs = cStringIO.StringIO()
393 cs = cStringIO.StringIO()
394 copymap = self._copymap
394 copymap = self._copymap
395 pack = struct.pack
395 pack = struct.pack
396 write = cs.write
396 write = cs.write
397 write("".join(self._pl))
397 write("".join(self._pl))
398 for f, e in self._map.iteritems():
398 for f, e in self._map.iteritems():
399 if f in copymap:
399 if f in copymap:
400 f = "%s\0%s" % (f, copymap[f])
400 f = "%s\0%s" % (f, copymap[f])
401
401
402 if e[0] == 'n' and e[3] == now:
402 if e[0] == 'n' and e[3] == now:
403 # The file was last modified "simultaneously" with the current
403 # The file was last modified "simultaneously" with the current
404 # write to dirstate (i.e. within the same second for file-
404 # write to dirstate (i.e. within the same second for file-
405 # systems with a granularity of 1 sec). This commonly happens
405 # systems with a granularity of 1 sec). This commonly happens
406 # for at least a couple of files on 'update'.
406 # for at least a couple of files on 'update'.
407 # The user could change the file without changing its size
407 # The user could change the file without changing its size
408 # within the same second. Invalidate the file's stat data in
408 # within the same second. Invalidate the file's stat data in
409 # dirstate, forcing future 'status' calls to compare the
409 # dirstate, forcing future 'status' calls to compare the
410 # contents of the file. This prevents mistakenly treating such
410 # contents of the file. This prevents mistakenly treating such
411 # files as clean.
411 # files as clean.
412 e = (e[0], 0, -1, -1) # mark entry as 'unset'
412 e = (e[0], 0, -1, -1) # mark entry as 'unset'
413
413
414 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
414 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
415 write(e)
415 write(e)
416 write(f)
416 write(f)
417 st.write(cs.getvalue())
417 st.write(cs.getvalue())
418 st.rename()
418 st.rename()
419 self._dirty = self._dirtypl = False
419 self._dirty = self._dirtypl = False
420
420
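
write() serializes each entry with the ">cllll" format (state byte, mode, size, mtime, filename length) followed by the filename, with any copy source appended after a NUL. A standalone sketch of that packing, using invented sample values:

import struct

_format = ">cllll"   # state, mode, size, mtime, len(filename)

def packentry(f, e, copymap):
    # mirrors the serialization loop above (illustrative only)
    if f in copymap:
        f = "%s\0%s" % (f, copymap[f])
    return struct.pack(_format, e[0], e[1], e[2], e[3], len(f)) + f

entry = packentry('README', ('n', 0644, 1024, 1262304000), {})
print(len(entry))   # 23: a 17-byte fixed header plus len('README')
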
421 def _dirignore(self, f):
421 def _dirignore(self, f):
422 if f == '.':
422 if f == '.':
423 return False
423 return False
424 if self._ignore(f):
424 if self._ignore(f):
425 return True
425 return True
426 for p in _finddirs(f):
426 for p in _finddirs(f):
427 if self._ignore(p):
427 if self._ignore(p):
428 return True
428 return True
429 return False
429 return False
430
430
431 def walk(self, match, subrepos, unknown, ignored):
431 def walk(self, match, subrepos, unknown, ignored):
432 '''
432 '''
433 Walk recursively through the directory tree, finding all files
433 Walk recursively through the directory tree, finding all files
434 matched by match.
434 matched by match.
435
435
436 Return a dict mapping filename to stat-like object (either
436 Return a dict mapping filename to stat-like object (either
437 mercurial.osutil.stat instance or return value of os.stat()).
437 mercurial.osutil.stat instance or return value of os.stat()).
438 '''
438 '''
439
439
440 def fwarn(f, msg):
440 def fwarn(f, msg):
441 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
441 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
442 return False
442 return False
443
443
444 def badtype(mode):
444 def badtype(mode):
445 kind = _('unknown')
445 kind = _('unknown')
446 if stat.S_ISCHR(mode):
446 if stat.S_ISCHR(mode):
447 kind = _('character device')
447 kind = _('character device')
448 elif stat.S_ISBLK(mode):
448 elif stat.S_ISBLK(mode):
449 kind = _('block device')
449 kind = _('block device')
450 elif stat.S_ISFIFO(mode):
450 elif stat.S_ISFIFO(mode):
451 kind = _('fifo')
451 kind = _('fifo')
452 elif stat.S_ISSOCK(mode):
452 elif stat.S_ISSOCK(mode):
453 kind = _('socket')
453 kind = _('socket')
454 elif stat.S_ISDIR(mode):
454 elif stat.S_ISDIR(mode):
455 kind = _('directory')
455 kind = _('directory')
456 return _('unsupported file type (type is %s)') % kind
456 return _('unsupported file type (type is %s)') % kind
457
457
458 ignore = self._ignore
458 ignore = self._ignore
459 dirignore = self._dirignore
459 dirignore = self._dirignore
460 if ignored:
460 if ignored:
461 ignore = util.never
461 ignore = util.never
462 dirignore = util.never
462 dirignore = util.never
463 elif not unknown:
463 elif not unknown:
464 # if unknown and ignored are False, skip step 2
464 # if unknown and ignored are False, skip step 2
465 ignore = util.always
465 ignore = util.always
466 dirignore = util.always
466 dirignore = util.always
467
467
468 matchfn = match.matchfn
468 matchfn = match.matchfn
469 badfn = match.bad
469 badfn = match.bad
470 dmap = self._map
470 dmap = self._map
471 normpath = util.normpath
471 normpath = util.normpath
472 listdir = osutil.listdir
472 listdir = osutil.listdir
473 lstat = os.lstat
473 lstat = os.lstat
474 getkind = stat.S_IFMT
474 getkind = stat.S_IFMT
475 dirkind = stat.S_IFDIR
475 dirkind = stat.S_IFDIR
476 regkind = stat.S_IFREG
476 regkind = stat.S_IFREG
477 lnkkind = stat.S_IFLNK
477 lnkkind = stat.S_IFLNK
478 join = self._join
478 join = self._join
479 work = []
479 work = []
480 wadd = work.append
480 wadd = work.append
481
481
482 if self._checkcase:
482 if self._checkcase:
483 normalize = self._normalize
483 normalize = self._normalize
484 else:
484 else:
485 normalize = lambda x, y: x
485 normalize = lambda x, y: x
486
486
487 exact = skipstep3 = False
487 exact = skipstep3 = False
488 if matchfn == match.exact: # match.exact
488 if matchfn == match.exact: # match.exact
489 exact = True
489 exact = True
490 dirignore = util.always # skip step 2
490 dirignore = util.always # skip step 2
491 elif match.files() and not match.anypats(): # match.match, no patterns
491 elif match.files() and not match.anypats(): # match.match, no patterns
492 skipstep3 = True
492 skipstep3 = True
493
493
494 files = set(match.files())
494 files = set(match.files())
495 if not files or '.' in files:
495 if not files or '.' in files:
496 files = ['']
496 files = ['']
497 results = dict.fromkeys(subrepos)
497 results = dict.fromkeys(subrepos)
498 results['.hg'] = None
498 results['.hg'] = None
499
499
500 # step 1: find all explicit files
500 # step 1: find all explicit files
501 for ff in sorted(files):
501 for ff in sorted(files):
502 nf = normalize(normpath(ff), False)
502 nf = normalize(normpath(ff), False)
503 if nf in results:
503 if nf in results:
504 continue
504 continue
505
505
506 try:
506 try:
507 st = lstat(join(nf))
507 st = lstat(join(nf))
508 kind = getkind(st.st_mode)
508 kind = getkind(st.st_mode)
509 if kind == dirkind:
509 if kind == dirkind:
510 skipstep3 = False
510 skipstep3 = False
511 if nf in dmap:
511 if nf in dmap:
512 #file deleted on disk but still in dirstate
512 #file deleted on disk but still in dirstate
513 results[nf] = None
513 results[nf] = None
514 match.dir(nf)
514 match.dir(nf)
515 if not dirignore(nf):
515 if not dirignore(nf):
516 wadd(nf)
516 wadd(nf)
517 elif kind == regkind or kind == lnkkind:
517 elif kind == regkind or kind == lnkkind:
518 results[nf] = st
518 results[nf] = st
519 else:
519 else:
520 badfn(ff, badtype(kind))
520 badfn(ff, badtype(kind))
521 if nf in dmap:
521 if nf in dmap:
522 results[nf] = None
522 results[nf] = None
523 except OSError, inst:
523 except OSError, inst:
524 if nf in dmap: # does it exactly match a file?
524 if nf in dmap: # does it exactly match a file?
525 results[nf] = None
525 results[nf] = None
526 else: # does it match a directory?
526 else: # does it match a directory?
527 prefix = nf + "/"
527 prefix = nf + "/"
528 for fn in dmap:
528 for fn in dmap:
529 if fn.startswith(prefix):
529 if fn.startswith(prefix):
530 match.dir(nf)
530 match.dir(nf)
531 skipstep3 = False
531 skipstep3 = False
532 break
532 break
533 else:
533 else:
534 badfn(ff, inst.strerror)
534 badfn(ff, inst.strerror)
535
535
536 # step 2: visit subdirectories
536 # step 2: visit subdirectories
537 while work:
537 while work:
538 nd = work.pop()
538 nd = work.pop()
539 skip = None
539 skip = None
540 if nd == '.':
540 if nd == '.':
541 nd = ''
541 nd = ''
542 else:
542 else:
543 skip = '.hg'
543 skip = '.hg'
544 try:
544 try:
545 entries = listdir(join(nd), stat=True, skip=skip)
545 entries = listdir(join(nd), stat=True, skip=skip)
546 except OSError, inst:
546 except OSError, inst:
547 if inst.errno == errno.EACCES:
547 if inst.errno == errno.EACCES:
548 fwarn(nd, inst.strerror)
548 fwarn(nd, inst.strerror)
549 continue
549 continue
550 raise
550 raise
551 for f, kind, st in entries:
551 for f, kind, st in entries:
552 nf = normalize(nd and (nd + "/" + f) or f, True)
552 nf = normalize(nd and (nd + "/" + f) or f, True)
553 if nf not in results:
553 if nf not in results:
554 if kind == dirkind:
554 if kind == dirkind:
555 if not ignore(nf):
555 if not ignore(nf):
556 match.dir(nf)
556 match.dir(nf)
557 wadd(nf)
557 wadd(nf)
558 if nf in dmap and matchfn(nf):
558 if nf in dmap and matchfn(nf):
559 results[nf] = None
559 results[nf] = None
560 elif kind == regkind or kind == lnkkind:
560 elif kind == regkind or kind == lnkkind:
561 if nf in dmap:
561 if nf in dmap:
562 if matchfn(nf):
562 if matchfn(nf):
563 results[nf] = st
563 results[nf] = st
564 elif matchfn(nf) and not ignore(nf):
564 elif matchfn(nf) and not ignore(nf):
565 results[nf] = st
565 results[nf] = st
566 elif nf in dmap and matchfn(nf):
566 elif nf in dmap and matchfn(nf):
567 results[nf] = None
567 results[nf] = None
568
568
569 # step 3: report unseen items in the dmap hash
569 # step 3: report unseen items in the dmap hash
570 if not skipstep3 and not exact:
570 if not skipstep3 and not exact:
571 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
571 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
572 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
572 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
573 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
573 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
574 st = None
574 st = None
575 results[nf] = st
575 results[nf] = st
576 for s in subrepos:
576 for s in subrepos:
577 del results[s]
577 del results[s]
578 del results['.hg']
578 del results['.hg']
579 return results
579 return results
580
580
581 def status(self, match, subrepos, ignored, clean, unknown):
581 def status(self, match, subrepos, ignored, clean, unknown):
582 '''Determine the status of the working copy relative to the
582 '''Determine the status of the working copy relative to the
583 dirstate and return a tuple of lists (unsure, modified, added,
583 dirstate and return a tuple of lists (unsure, modified, added,
584 removed, deleted, unknown, ignored, clean), where:
584 removed, deleted, unknown, ignored, clean), where:
585
585
586 unsure:
586 unsure:
587 files that might have been modified since the dirstate was
587 files that might have been modified since the dirstate was
588 written, but need to be read to be sure (size is the same
588 written, but need to be read to be sure (size is the same
589 but mtime differs)
589 but mtime differs)
590 modified:
590 modified:
591 files that have definitely been modified since the dirstate
591 files that have definitely been modified since the dirstate
592 was written (different size or mode)
592 was written (different size or mode)
593 added:
593 added:
594 files that have been explicitly added with hg add
594 files that have been explicitly added with hg add
595 removed:
595 removed:
596 files that have been explicitly removed with hg remove
596 files that have been explicitly removed with hg remove
597 deleted:
597 deleted:
598 files that have been deleted through other means ("missing")
598 files that have been deleted through other means ("missing")
599 unknown:
599 unknown:
600 files not in the dirstate that are not ignored
600 files not in the dirstate that are not ignored
601 ignored:
601 ignored:
602 files not in the dirstate that are ignored
602 files not in the dirstate that are ignored
603 (by _dirignore())
603 (by _dirignore())
604 clean:
604 clean:
605 files that have definitely not been modified since the
605 files that have definitely not been modified since the
606 dirstate was written
606 dirstate was written
607 '''
607 '''
608 listignored, listclean, listunknown = ignored, clean, unknown
608 listignored, listclean, listunknown = ignored, clean, unknown
609 lookup, modified, added, unknown, ignored = [], [], [], [], []
609 lookup, modified, added, unknown, ignored = [], [], [], [], []
610 removed, deleted, clean = [], [], []
610 removed, deleted, clean = [], [], []
611
611
612 dmap = self._map
612 dmap = self._map
613 ladd = lookup.append # aka "unsure"
613 ladd = lookup.append # aka "unsure"
614 madd = modified.append
614 madd = modified.append
615 aadd = added.append
615 aadd = added.append
616 uadd = unknown.append
616 uadd = unknown.append
617 iadd = ignored.append
617 iadd = ignored.append
618 radd = removed.append
618 radd = removed.append
619 dadd = deleted.append
619 dadd = deleted.append
620 cadd = clean.append
620 cadd = clean.append
621
621
622 for fn, st in self.walk(match, subrepos, listunknown,
622 for fn, st in self.walk(match, subrepos, listunknown,
623 listignored).iteritems():
623 listignored).iteritems():
624 if fn not in dmap:
624 if fn not in dmap:
625 if (listignored or match.exact(fn)) and self._dirignore(fn):
625 if (listignored or match.exact(fn)) and self._dirignore(fn):
626 if listignored:
626 if listignored:
627 iadd(fn)
627 iadd(fn)
628 elif listunknown:
628 elif listunknown:
629 uadd(fn)
629 uadd(fn)
630 continue
630 continue
631
631
632 state, mode, size, time = dmap[fn]
632 state, mode, size, time = dmap[fn]
633
633
634 if not st and state in "nma":
634 if not st and state in "nma":
635 dadd(fn)
635 dadd(fn)
636 elif state == 'n':
636 elif state == 'n':
637 if (size >= 0 and
637 if (size >= 0 and
638 (size != st.st_size
638 (size != st.st_size
639 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
639 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
640 or size == -2
640 or size == -2
641 or fn in self._copymap):
641 or fn in self._copymap):
642 madd(fn)
642 madd(fn)
643 elif time != int(st.st_mtime):
643 elif time != int(st.st_mtime):
644 ladd(fn)
644 ladd(fn)
645 elif listclean:
645 elif listclean:
646 cadd(fn)
646 cadd(fn)
647 elif state == 'm':
647 elif state == 'm':
648 madd(fn)
648 madd(fn)
649 elif state == 'a':
649 elif state == 'a':
650 aadd(fn)
650 aadd(fn)
651 elif state == 'r':
651 elif state == 'r':
652 radd(fn)
652 radd(fn)
653
653
654 return (lookup, modified, added, removed, deleted, unknown, ignored,
654 return (lookup, modified, added, removed, deleted, unknown, ignored,
655 clean)
655 clean)
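
A hedged usage sketch of status() from the outside, assuming a repository exists at the invented path /tmp/repo; match.always() selects every file, and the keyword names simply mirror the parameters above:

from mercurial import ui as uimod, hg
from mercurial import match as matchmod

repo = hg.repository(uimod.ui(), '/tmp/repo')
m = matchmod.always(repo.root, repo.getcwd())
(unsure, modified, added, removed,
 deleted, unknown, ignored, clean) = repo.dirstate.status(
    m, [], ignored=False, clean=False, unknown=True)
print('modified: %r' % modified)
print('unknown:  %r' % unknown)
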
@@ -1,341 +1,341 b''
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os, re, time
9 import os, re, time
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import ui, hg, util, templater
11 from mercurial import ui, hg, util, templater
12 from mercurial import error, encoding
12 from mercurial import error, encoding
13 from common import ErrorResponse, get_mtime, staticfile, paritygen,\
13 from common import ErrorResponse, get_mtime, staticfile, paritygen, \
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
14 get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR
15 from hgweb_mod import hgweb
15 from hgweb_mod import hgweb
16 from request import wsgirequest
16 from request import wsgirequest
17 import webutil
17 import webutil
18
18
19 def cleannames(items):
19 def cleannames(items):
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
20 return [(util.pconvert(name).strip('/'), path) for name, path in items]
21
21
22 def findrepos(paths):
22 def findrepos(paths):
23 repos = []
23 repos = []
24 for prefix, root in cleannames(paths):
24 for prefix, root in cleannames(paths):
25 roothead, roottail = os.path.split(root)
25 roothead, roottail = os.path.split(root)
26 # "foo = /bar/*" makes every repository under /bar/ be
26 # "foo = /bar/*" makes every repository under /bar/ be
27 # mounted as foo/subrepo
27 # mounted as foo/subrepo
28 # and "foo = /bar/**" also recurses into the subdirectories,
28 # and "foo = /bar/**" also recurses into the subdirectories,
29 # remember to use it without a working dir.
29 # remember to use it without a working dir.
30 try:
30 try:
31 recurse = {'*': False, '**': True}[roottail]
31 recurse = {'*': False, '**': True}[roottail]
32 except KeyError:
32 except KeyError:
33 repos.append((prefix, root))
33 repos.append((prefix, root))
34 continue
34 continue
35 roothead = os.path.normpath(roothead)
35 roothead = os.path.normpath(roothead)
36 for path in util.walkrepos(roothead, followsym=True, recurse=recurse):
36 for path in util.walkrepos(roothead, followsym=True, recurse=recurse):
37 path = os.path.normpath(path)
37 path = os.path.normpath(path)
38 name = util.pconvert(path[len(roothead):]).strip('/')
38 name = util.pconvert(path[len(roothead):]).strip('/')
39 if prefix:
39 if prefix:
40 name = prefix + '/' + name
40 name = prefix + '/' + name
41 repos.append((name, path))
41 repos.append((name, path))
42 return repos
42 return repos
43
43
44 class hgwebdir(object):
44 class hgwebdir(object):
45 refreshinterval = 20
45 refreshinterval = 20
46
46
47 def __init__(self, conf, baseui=None):
47 def __init__(self, conf, baseui=None):
48 self.conf = conf
48 self.conf = conf
49 self.baseui = baseui
49 self.baseui = baseui
50 self.lastrefresh = 0
50 self.lastrefresh = 0
51 self.motd = None
51 self.motd = None
52 self.refresh()
52 self.refresh()
53
53
54 def refresh(self):
54 def refresh(self):
55 if self.lastrefresh + self.refreshinterval > time.time():
55 if self.lastrefresh + self.refreshinterval > time.time():
56 return
56 return
57
57
58 if self.baseui:
58 if self.baseui:
59 self.ui = self.baseui.copy()
59 self.ui = self.baseui.copy()
60 else:
60 else:
61 self.ui = ui.ui()
61 self.ui = ui.ui()
62 self.ui.setconfig('ui', 'report_untrusted', 'off')
62 self.ui.setconfig('ui', 'report_untrusted', 'off')
63 self.ui.setconfig('ui', 'interactive', 'off')
63 self.ui.setconfig('ui', 'interactive', 'off')
64
64
65 if not isinstance(self.conf, (dict, list, tuple)):
65 if not isinstance(self.conf, (dict, list, tuple)):
66 map = {'paths': 'hgweb-paths'}
66 map = {'paths': 'hgweb-paths'}
67 self.ui.readconfig(self.conf, remap=map, trust=True)
67 self.ui.readconfig(self.conf, remap=map, trust=True)
68 paths = self.ui.configitems('hgweb-paths')
68 paths = self.ui.configitems('hgweb-paths')
69 elif isinstance(self.conf, (list, tuple)):
69 elif isinstance(self.conf, (list, tuple)):
70 paths = self.conf
70 paths = self.conf
71 elif isinstance(self.conf, dict):
71 elif isinstance(self.conf, dict):
72 paths = self.conf.items()
72 paths = self.conf.items()
73
73
74 encoding.encoding = self.ui.config('web', 'encoding',
74 encoding.encoding = self.ui.config('web', 'encoding',
75 encoding.encoding)
75 encoding.encoding)
76 self.style = self.ui.config('web', 'style', 'paper')
76 self.style = self.ui.config('web', 'style', 'paper')
77 self.stripecount = self.ui.config('web', 'stripes', 1)
77 self.stripecount = self.ui.config('web', 'stripes', 1)
78 if self.stripecount:
78 if self.stripecount:
79 self.stripecount = int(self.stripecount)
79 self.stripecount = int(self.stripecount)
80 self._baseurl = self.ui.config('web', 'baseurl')
80 self._baseurl = self.ui.config('web', 'baseurl')
81
81
82 self.repos = findrepos(paths)
82 self.repos = findrepos(paths)
83 for prefix, root in self.ui.configitems('collections'):
83 for prefix, root in self.ui.configitems('collections'):
84 prefix = util.pconvert(prefix)
84 prefix = util.pconvert(prefix)
85 for path in util.walkrepos(root, followsym=True):
85 for path in util.walkrepos(root, followsym=True):
86 repo = os.path.normpath(path)
86 repo = os.path.normpath(path)
87 name = util.pconvert(repo)
87 name = util.pconvert(repo)
88 if name.startswith(prefix):
88 if name.startswith(prefix):
89 name = name[len(prefix):]
89 name = name[len(prefix):]
90 self.repos.append((name.lstrip('/'), repo))
90 self.repos.append((name.lstrip('/'), repo))
91
91
92 self.lastrefresh = time.time()
92 self.lastrefresh = time.time()
93
93
94 def run(self):
94 def run(self):
95 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
95 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
96 raise RuntimeError("This function is only intended to be "
96 raise RuntimeError("This function is only intended to be "
97 "called while running as a CGI script.")
97 "called while running as a CGI script.")
98 import mercurial.hgweb.wsgicgi as wsgicgi
98 import mercurial.hgweb.wsgicgi as wsgicgi
99 wsgicgi.launch(self)
99 wsgicgi.launch(self)
100
100
101 def __call__(self, env, respond):
101 def __call__(self, env, respond):
102 req = wsgirequest(env, respond)
102 req = wsgirequest(env, respond)
103 return self.run_wsgi(req)
103 return self.run_wsgi(req)
104
104
105 def read_allowed(self, ui, req):
105 def read_allowed(self, ui, req):
106 """Check allow_read and deny_read config options of a repo's ui object
106 """Check allow_read and deny_read config options of a repo's ui object
107 to determine user permissions. By default, with neither option set (or
107 to determine user permissions. By default, with neither option set (or
108 both empty), allow all users to read the repo. There are two ways a
108 both empty), allow all users to read the repo. There are two ways a
109 user can be denied read access: (1) deny_read is not empty, and the
109 user can be denied read access: (1) deny_read is not empty, and the
110 user is unauthenticated or deny_read contains user (or *), and (2)
110 user is unauthenticated or deny_read contains user (or *), and (2)
111 allow_read is not empty and the user is not in allow_read. Return True
111 allow_read is not empty and the user is not in allow_read. Return True
112 if user is allowed to read the repo, else return False."""
112 if user is allowed to read the repo, else return False."""
113
113
114 user = req.env.get('REMOTE_USER')
114 user = req.env.get('REMOTE_USER')
115
115
116 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
116 deny_read = ui.configlist('web', 'deny_read', untrusted=True)
117 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
117 if deny_read and (not user or deny_read == ['*'] or user in deny_read):
118 return False
118 return False
119
119
120 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
120 allow_read = ui.configlist('web', 'allow_read', untrusted=True)
121 # by default, allow reading if no allow_read option has been set
121 # by default, allow reading if no allow_read option has been set
122 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
122 if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
123 return True
123 return True
124
124
125 return False
125 return False
126
126
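
The same decision, extracted into a standalone function for clarity; in practice the two lists come from ui.configlist('web', 'deny_read') and ('web', 'allow_read') as above:

def readallowed(user, deny_read, allow_read):
    # deny_read wins over allow_read; an empty allow_read means "everyone"
    if deny_read and (not user or deny_read == ['*'] or user in deny_read):
        return False
    return (not allow_read) or (allow_read == ['*']) or (user in allow_read)

print(readallowed('alice', [], []))        # True  - open by default
print(readallowed(None, ['*'], []))        # False - everyone denied
print(readallowed('bob', [], ['alice']))   # False - not in allow_read
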
127 def run_wsgi(self, req):
127 def run_wsgi(self, req):
128 try:
128 try:
129 try:
129 try:
130 self.refresh()
130 self.refresh()
131
131
132 virtual = req.env.get("PATH_INFO", "").strip('/')
132 virtual = req.env.get("PATH_INFO", "").strip('/')
133 tmpl = self.templater(req)
133 tmpl = self.templater(req)
134 ctype = tmpl('mimetype', encoding=encoding.encoding)
134 ctype = tmpl('mimetype', encoding=encoding.encoding)
135 ctype = templater.stringify(ctype)
135 ctype = templater.stringify(ctype)
136
136
137 # a static file
137 # a static file
138 if virtual.startswith('static/') or 'static' in req.form:
138 if virtual.startswith('static/') or 'static' in req.form:
139 if virtual.startswith('static/'):
139 if virtual.startswith('static/'):
140 fname = virtual[7:]
140 fname = virtual[7:]
141 else:
141 else:
142 fname = req.form['static'][0]
142 fname = req.form['static'][0]
143 static = templater.templatepath('static')
143 static = templater.templatepath('static')
144 return (staticfile(static, fname, req),)
144 return (staticfile(static, fname, req),)
145
145
146 # top-level index
146 # top-level index
147 elif not virtual:
147 elif not virtual:
148 req.respond(HTTP_OK, ctype)
148 req.respond(HTTP_OK, ctype)
149 return self.makeindex(req, tmpl)
149 return self.makeindex(req, tmpl)
150
150
151 # nested indexes and hgwebs
151 # nested indexes and hgwebs
152
152
153 repos = dict(self.repos)
153 repos = dict(self.repos)
154 while virtual:
154 while virtual:
155 real = repos.get(virtual)
155 real = repos.get(virtual)
156 if real:
156 if real:
157 req.env['REPO_NAME'] = virtual
157 req.env['REPO_NAME'] = virtual
158 try:
158 try:
159 repo = hg.repository(self.ui, real)
159 repo = hg.repository(self.ui, real)
160 return hgweb(repo).run_wsgi(req)
160 return hgweb(repo).run_wsgi(req)
161 except IOError, inst:
161 except IOError, inst:
162 msg = inst.strerror
162 msg = inst.strerror
163 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
163 raise ErrorResponse(HTTP_SERVER_ERROR, msg)
164 except error.RepoError, inst:
164 except error.RepoError, inst:
165 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
165 raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))
166
166
167 # browse subdirectories
167 # browse subdirectories
168 subdir = virtual + '/'
168 subdir = virtual + '/'
169 if [r for r in repos if r.startswith(subdir)]:
169 if [r for r in repos if r.startswith(subdir)]:
170 req.respond(HTTP_OK, ctype)
170 req.respond(HTTP_OK, ctype)
171 return self.makeindex(req, tmpl, subdir)
171 return self.makeindex(req, tmpl, subdir)
172
172
173 up = virtual.rfind('/')
173 up = virtual.rfind('/')
174 if up < 0:
174 if up < 0:
175 break
175 break
176 virtual = virtual[:up]
176 virtual = virtual[:up]
177
177
178 # prefixes not found
178 # prefixes not found
179 req.respond(HTTP_NOT_FOUND, ctype)
179 req.respond(HTTP_NOT_FOUND, ctype)
180 return tmpl("notfound", repo=virtual)
180 return tmpl("notfound", repo=virtual)
181
181
182 except ErrorResponse, err:
182 except ErrorResponse, err:
183 req.respond(err, ctype)
183 req.respond(err, ctype)
184 return tmpl('error', error=err.message or '')
184 return tmpl('error', error=err.message or '')
185 finally:
185 finally:
186 tmpl = None
186 tmpl = None
187
187
188 def makeindex(self, req, tmpl, subdir=""):
188 def makeindex(self, req, tmpl, subdir=""):
189
189
190 def archivelist(ui, nodeid, url):
190 def archivelist(ui, nodeid, url):
191 allowed = ui.configlist("web", "allow_archive", untrusted=True)
191 allowed = ui.configlist("web", "allow_archive", untrusted=True)
192 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
192 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
193 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
193 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
194 untrusted=True):
194 untrusted=True):
195 yield {"type" : i[0], "extension": i[1],
195 yield {"type" : i[0], "extension": i[1],
196 "node": nodeid, "url": url}
196 "node": nodeid, "url": url}
197
197
198 sortdefault = None, False
198 sortdefault = None, False
199 def entries(sortcolumn="", descending=False, subdir="", **map):
199 def entries(sortcolumn="", descending=False, subdir="", **map):
200
200
201 rows = []
201 rows = []
202 parity = paritygen(self.stripecount)
202 parity = paritygen(self.stripecount)
203 descend = self.ui.configbool('web', 'descend', True)
203 descend = self.ui.configbool('web', 'descend', True)
204 for name, path in self.repos:
204 for name, path in self.repos:
205
205
206 if not name.startswith(subdir):
206 if not name.startswith(subdir):
207 continue
207 continue
208 name = name[len(subdir):]
208 name = name[len(subdir):]
209 if not descend and '/' in name:
209 if not descend and '/' in name:
210 continue
210 continue
211
211
212 u = self.ui.copy()
212 u = self.ui.copy()
213 try:
213 try:
214 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
214 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
215 except Exception, e:
215 except Exception, e:
216 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
216 u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
217 continue
217 continue
218 def get(section, name, default=None):
218 def get(section, name, default=None):
219 return u.config(section, name, default, untrusted=True)
219 return u.config(section, name, default, untrusted=True)
220
220
221 if u.configbool("web", "hidden", untrusted=True):
221 if u.configbool("web", "hidden", untrusted=True):
222 continue
222 continue
223
223
224 if not self.read_allowed(u, req):
224 if not self.read_allowed(u, req):
225 continue
225 continue
226
226
227 parts = [name]
227 parts = [name]
228 if 'PATH_INFO' in req.env:
228 if 'PATH_INFO' in req.env:
229 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
229 parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
230 if req.env['SCRIPT_NAME']:
230 if req.env['SCRIPT_NAME']:
231 parts.insert(0, req.env['SCRIPT_NAME'])
231 parts.insert(0, req.env['SCRIPT_NAME'])
232 m = re.match('((?:https?://)?)(.*)', '/'.join(parts))
232 m = re.match('((?:https?://)?)(.*)', '/'.join(parts))
233 # squish repeated slashes out of the path component
233 # squish repeated slashes out of the path component
234 url = m.group(1) + re.sub('/+', '/', m.group(2)) + '/'
234 url = m.group(1) + re.sub('/+', '/', m.group(2)) + '/'
235
235
236 # update time with local timezone
236 # update time with local timezone
237 try:
237 try:
238 r = hg.repository(self.ui, path)
238 r = hg.repository(self.ui, path)
239 d = (get_mtime(r.spath), util.makedate()[1])
239 d = (get_mtime(r.spath), util.makedate()[1])
240 except OSError:
240 except OSError:
241 continue
241 continue
242
242
243 contact = get_contact(get)
243 contact = get_contact(get)
244 description = get("web", "description", "")
244 description = get("web", "description", "")
245 name = get("web", "name", name)
245 name = get("web", "name", name)
246 row = dict(contact=contact or "unknown",
246 row = dict(contact=contact or "unknown",
247 contact_sort=contact.upper() or "unknown",
247 contact_sort=contact.upper() or "unknown",
248 name=name,
248 name=name,
249 name_sort=name,
249 name_sort=name,
250 url=url,
250 url=url,
251 description=description or "unknown",
251 description=description or "unknown",
252 description_sort=description.upper() or "unknown",
252 description_sort=description.upper() or "unknown",
253 lastchange=d,
253 lastchange=d,
254 lastchange_sort=d[1]-d[0],
254 lastchange_sort=d[1]-d[0],
255 archives=archivelist(u, "tip", url))
255 archives=archivelist(u, "tip", url))
256 if (not sortcolumn or (sortcolumn, descending) == sortdefault):
256 if (not sortcolumn or (sortcolumn, descending) == sortdefault):
257 # fast path for unsorted output
257 # fast path for unsorted output
258 row['parity'] = parity.next()
258 row['parity'] = parity.next()
259 yield row
259 yield row
260 else:
260 else:
261 rows.append((row["%s_sort" % sortcolumn], row))
261 rows.append((row["%s_sort" % sortcolumn], row))
262 if rows:
262 if rows:
263 rows.sort()
263 rows.sort()
264 if descending:
264 if descending:
265 rows.reverse()
265 rows.reverse()
266 for key, row in rows:
266 for key, row in rows:
267 row['parity'] = parity.next()
267 row['parity'] = parity.next()
268 yield row
268 yield row
269
269
270 self.refresh()
270 self.refresh()
271 sortable = ["name", "description", "contact", "lastchange"]
271 sortable = ["name", "description", "contact", "lastchange"]
272 sortcolumn, descending = sortdefault
272 sortcolumn, descending = sortdefault
273 if 'sort' in req.form:
273 if 'sort' in req.form:
274 sortcolumn = req.form['sort'][0]
274 sortcolumn = req.form['sort'][0]
275 descending = sortcolumn.startswith('-')
275 descending = sortcolumn.startswith('-')
276 if descending:
276 if descending:
277 sortcolumn = sortcolumn[1:]
277 sortcolumn = sortcolumn[1:]
278 if sortcolumn not in sortable:
278 if sortcolumn not in sortable:
279 sortcolumn = ""
279 sortcolumn = ""
280
280
281 sort = [("sort_%s" % column,
281 sort = [("sort_%s" % column,
282 "%s%s" % ((not descending and column == sortcolumn)
282 "%s%s" % ((not descending and column == sortcolumn)
283 and "-" or "", column))
283 and "-" or "", column))
284 for column in sortable]
284 for column in sortable]
285
285
286 self.refresh()
286 self.refresh()
287 if self._baseurl is not None:
287 if self._baseurl is not None:
288 req.env['SCRIPT_NAME'] = self._baseurl
288 req.env['SCRIPT_NAME'] = self._baseurl
289
289
290 return tmpl("index", entries=entries, subdir=subdir,
290 return tmpl("index", entries=entries, subdir=subdir,
291 sortcolumn=sortcolumn, descending=descending,
291 sortcolumn=sortcolumn, descending=descending,
292 **dict(sort))
292 **dict(sort))
293
293
294 def templater(self, req):
294 def templater(self, req):
295
295
296 def header(**map):
296 def header(**map):
297 yield tmpl('header', encoding=encoding.encoding, **map)
297 yield tmpl('header', encoding=encoding.encoding, **map)
298
298
299 def footer(**map):
299 def footer(**map):
300 yield tmpl("footer", **map)
300 yield tmpl("footer", **map)
301
301
302 def motd(**map):
302 def motd(**map):
303 if self.motd is not None:
303 if self.motd is not None:
304 yield self.motd
304 yield self.motd
305 else:
305 else:
306 yield config('web', 'motd', '')
306 yield config('web', 'motd', '')
307
307
308 def config(section, name, default=None, untrusted=True):
308 def config(section, name, default=None, untrusted=True):
309 return self.ui.config(section, name, default, untrusted)
309 return self.ui.config(section, name, default, untrusted)
310
310
311 if self._baseurl is not None:
311 if self._baseurl is not None:
312 req.env['SCRIPT_NAME'] = self._baseurl
312 req.env['SCRIPT_NAME'] = self._baseurl
313
313
314 url = req.env.get('SCRIPT_NAME', '')
314 url = req.env.get('SCRIPT_NAME', '')
315 if not url.endswith('/'):
315 if not url.endswith('/'):
316 url += '/'
316 url += '/'
317
317
318 vars = {}
318 vars = {}
319 styles = (
319 styles = (
320 req.form.get('style', [None])[0],
320 req.form.get('style', [None])[0],
321 config('web', 'style'),
321 config('web', 'style'),
322 'paper'
322 'paper'
323 )
323 )
324 style, mapfile = templater.stylemap(styles)
324 style, mapfile = templater.stylemap(styles)
325 if style == styles[0]:
325 if style == styles[0]:
326 vars['style'] = style
326 vars['style'] = style
327
327
328 start = url[-1] == '?' and '&' or '?'
328 start = url[-1] == '?' and '&' or '?'
329 sessionvars = webutil.sessionvars(vars, start)
329 sessionvars = webutil.sessionvars(vars, start)
330 staticurl = config('web', 'staticurl') or url + 'static/'
330 staticurl = config('web', 'staticurl') or url + 'static/'
331 if not staticurl.endswith('/'):
331 if not staticurl.endswith('/'):
332 staticurl += '/'
332 staticurl += '/'
333
333
334 tmpl = templater.templater(mapfile,
334 tmpl = templater.templater(mapfile,
335 defaults={"header": header,
335 defaults={"header": header,
336 "footer": footer,
336 "footer": footer,
337 "motd": motd,
337 "motd": motd,
338 "url": url,
338 "url": url,
339 "staticurl": staticurl,
339 "staticurl": staticurl,
340 "sessionvars": sessionvars})
340 "sessionvars": sessionvars})
341 return tmpl
341 return tmpl
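Note on the templater() hunk above: the effective style is the first usable entry of (the request's 'style' form value, the web.style setting, 'paper'), and both url and staticurl are normalised to end in '/'. A minimal standalone sketch of that fallback-and-normalise idiom, using made-up inputs rather than the real stylemap() API:

def pick_style(candidates, available):
    # first candidate that names a known style wins; 'paper' is the fallback
    for style in candidates:
        if style and style in available:
            return style
    return 'paper'

def with_trailing_slash(url):
    # mirrors the url/staticurl normalisation in templater() above
    if not url.endswith('/'):
        url += '/'
    return url

candidates = (None, 'gitweb', 'paper')   # form value, config value, default
print(pick_style(candidates, set(['paper', 'gitweb', 'coal'])))  # gitweb
print(with_trailing_slash('/hg/static'))                         # /hg/static/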
@@ -1,727 +1,727 b''
1 #
1 #
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os, mimetypes, re, cgi, copy
8 import os, mimetypes, re, cgi, copy
9 import webutil
9 import webutil
10 from mercurial import error, archival, templater, templatefilters
10 from mercurial import error, archival, templater, templatefilters
11 from mercurial.node import short, hex
11 from mercurial.node import short, hex
12 from mercurial.util import binary
12 from mercurial.util import binary
13 from common import paritygen, staticfile, get_contact, ErrorResponse
13 from common import paritygen, staticfile, get_contact, ErrorResponse
14 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
14 from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
15 from mercurial import graphmod
15 from mercurial import graphmod
16
16
17 # __all__ is populated with the allowed commands. Be sure to add to it if
17 # __all__ is populated with the allowed commands. Be sure to add to it if
18 # you're adding a new command, or the new command won't work.
18 # you're adding a new command, or the new command won't work.
19
19
20 __all__ = [
20 __all__ = [
21 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
21 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
22 'manifest', 'tags', 'branches', 'summary', 'filediff', 'diff', 'annotate',
22 'manifest', 'tags', 'branches', 'summary', 'filediff', 'diff', 'annotate',
23 'filelog', 'archive', 'static', 'graph',
23 'filelog', 'archive', 'static', 'graph',
24 ]
24 ]
25
25
26 def log(web, req, tmpl):
26 def log(web, req, tmpl):
27 if 'file' in req.form and req.form['file'][0]:
27 if 'file' in req.form and req.form['file'][0]:
28 return filelog(web, req, tmpl)
28 return filelog(web, req, tmpl)
29 else:
29 else:
30 return changelog(web, req, tmpl)
30 return changelog(web, req, tmpl)
31
31
32 def rawfile(web, req, tmpl):
32 def rawfile(web, req, tmpl):
33 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
33 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
34 if not path:
34 if not path:
35 content = manifest(web, req, tmpl)
35 content = manifest(web, req, tmpl)
36 req.respond(HTTP_OK, web.ctype)
36 req.respond(HTTP_OK, web.ctype)
37 return content
37 return content
38
38
39 try:
39 try:
40 fctx = webutil.filectx(web.repo, req)
40 fctx = webutil.filectx(web.repo, req)
41 except error.LookupError, inst:
41 except error.LookupError, inst:
42 try:
42 try:
43 content = manifest(web, req, tmpl)
43 content = manifest(web, req, tmpl)
44 req.respond(HTTP_OK, web.ctype)
44 req.respond(HTTP_OK, web.ctype)
45 return content
45 return content
46 except ErrorResponse:
46 except ErrorResponse:
47 raise inst
47 raise inst
48
48
49 path = fctx.path()
49 path = fctx.path()
50 text = fctx.data()
50 text = fctx.data()
51 mt = mimetypes.guess_type(path)[0]
51 mt = mimetypes.guess_type(path)[0]
52 if mt is None:
52 if mt is None:
53 mt = binary(text) and 'application/octet-stream' or 'text/plain'
53 mt = binary(text) and 'application/octet-stream' or 'text/plain'
54
54
55 req.respond(HTTP_OK, mt, path, len(text))
55 req.respond(HTTP_OK, mt, path, len(text))
56 return [text]
56 return [text]
57
57
58 def _filerevision(web, tmpl, fctx):
58 def _filerevision(web, tmpl, fctx):
59 f = fctx.path()
59 f = fctx.path()
60 text = fctx.data()
60 text = fctx.data()
61 parity = paritygen(web.stripecount)
61 parity = paritygen(web.stripecount)
62
62
63 if binary(text):
63 if binary(text):
64 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
64 mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
65 text = '(binary:%s)' % mt
65 text = '(binary:%s)' % mt
66
66
67 def lines():
67 def lines():
68 for lineno, t in enumerate(text.splitlines(True)):
68 for lineno, t in enumerate(text.splitlines(True)):
69 yield {"line": t,
69 yield {"line": t,
70 "lineid": "l%d" % (lineno + 1),
70 "lineid": "l%d" % (lineno + 1),
71 "linenumber": "% 6d" % (lineno + 1),
71 "linenumber": "% 6d" % (lineno + 1),
72 "parity": parity.next()}
72 "parity": parity.next()}
73
73
74 return tmpl("filerevision",
74 return tmpl("filerevision",
75 file=f,
75 file=f,
76 path=webutil.up(f),
76 path=webutil.up(f),
77 text=lines(),
77 text=lines(),
78 rev=fctx.rev(),
78 rev=fctx.rev(),
79 node=hex(fctx.node()),
79 node=hex(fctx.node()),
80 author=fctx.user(),
80 author=fctx.user(),
81 date=fctx.date(),
81 date=fctx.date(),
82 desc=fctx.description(),
82 desc=fctx.description(),
83 branch=webutil.nodebranchnodefault(fctx),
83 branch=webutil.nodebranchnodefault(fctx),
84 parent=webutil.parents(fctx),
84 parent=webutil.parents(fctx),
85 child=webutil.children(fctx),
85 child=webutil.children(fctx),
86 rename=webutil.renamelink(fctx),
86 rename=webutil.renamelink(fctx),
87 permissions=fctx.manifest().flags(f))
87 permissions=fctx.manifest().flags(f))
88
88
89 def file(web, req, tmpl):
89 def file(web, req, tmpl):
90 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
90 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
91 if not path:
91 if not path:
92 return manifest(web, req, tmpl)
92 return manifest(web, req, tmpl)
93 try:
93 try:
94 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
94 return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
95 except error.LookupError, inst:
95 except error.LookupError, inst:
96 try:
96 try:
97 return manifest(web, req, tmpl)
97 return manifest(web, req, tmpl)
98 except ErrorResponse:
98 except ErrorResponse:
99 raise inst
99 raise inst
100
100
101 def _search(web, req, tmpl):
101 def _search(web, req, tmpl):
102
102
103 query = req.form['rev'][0]
103 query = req.form['rev'][0]
104 revcount = web.maxchanges
104 revcount = web.maxchanges
105 if 'revcount' in req.form:
105 if 'revcount' in req.form:
106 revcount = int(req.form.get('revcount', [revcount])[0])
106 revcount = int(req.form.get('revcount', [revcount])[0])
107 tmpl.defaults['sessionvars']['revcount'] = revcount
107 tmpl.defaults['sessionvars']['revcount'] = revcount
108
108
109 lessvars = copy.copy(tmpl.defaults['sessionvars'])
109 lessvars = copy.copy(tmpl.defaults['sessionvars'])
110 lessvars['revcount'] = revcount / 2
110 lessvars['revcount'] = revcount / 2
111 lessvars['rev'] = query
111 lessvars['rev'] = query
112 morevars = copy.copy(tmpl.defaults['sessionvars'])
112 morevars = copy.copy(tmpl.defaults['sessionvars'])
113 morevars['revcount'] = revcount * 2
113 morevars['revcount'] = revcount * 2
114 morevars['rev'] = query
114 morevars['rev'] = query
115
115
116 def changelist(**map):
116 def changelist(**map):
117 cl = web.repo.changelog
117 cl = web.repo.changelog
118 count = 0
118 count = 0
119 qw = query.lower().split()
119 qw = query.lower().split()
120
120
121 def revgen():
121 def revgen():
122 for i in xrange(len(cl) - 1, 0, -100):
122 for i in xrange(len(cl) - 1, 0, -100):
123 l = []
123 l = []
124 for j in xrange(max(0, i - 100), i + 1):
124 for j in xrange(max(0, i - 100), i + 1):
125 ctx = web.repo[j]
125 ctx = web.repo[j]
126 l.append(ctx)
126 l.append(ctx)
127 l.reverse()
127 l.reverse()
128 for e in l:
128 for e in l:
129 yield e
129 yield e
130
130
131 for ctx in revgen():
131 for ctx in revgen():
132 miss = 0
132 miss = 0
133 for q in qw:
133 for q in qw:
134 if not (q in ctx.user().lower() or
134 if not (q in ctx.user().lower() or
135 q in ctx.description().lower() or
135 q in ctx.description().lower() or
136 q in " ".join(ctx.files()).lower()):
136 q in " ".join(ctx.files()).lower()):
137 miss = 1
137 miss = 1
138 break
138 break
139 if miss:
139 if miss:
140 continue
140 continue
141
141
142 count += 1
142 count += 1
143 n = ctx.node()
143 n = ctx.node()
144 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
144 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
145 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
145 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
146
146
147 yield tmpl('searchentry',
147 yield tmpl('searchentry',
148 parity=parity.next(),
148 parity=parity.next(),
149 author=ctx.user(),
149 author=ctx.user(),
150 parent=webutil.parents(ctx),
150 parent=webutil.parents(ctx),
151 child=webutil.children(ctx),
151 child=webutil.children(ctx),
152 changelogtag=showtags,
152 changelogtag=showtags,
153 desc=ctx.description(),
153 desc=ctx.description(),
154 date=ctx.date(),
154 date=ctx.date(),
155 files=files,
155 files=files,
156 rev=ctx.rev(),
156 rev=ctx.rev(),
157 node=hex(n),
157 node=hex(n),
158 tags=webutil.nodetagsdict(web.repo, n),
158 tags=webutil.nodetagsdict(web.repo, n),
159 inbranch=webutil.nodeinbranch(web.repo, ctx),
159 inbranch=webutil.nodeinbranch(web.repo, ctx),
160 branches=webutil.nodebranchdict(web.repo, ctx))
160 branches=webutil.nodebranchdict(web.repo, ctx))
161
161
162 if count >= revcount:
162 if count >= revcount:
163 break
163 break
164
164
165 cl = web.repo.changelog
165 cl = web.repo.changelog
166 parity = paritygen(web.stripecount)
166 parity = paritygen(web.stripecount)
167
167
168 return tmpl('search', query=query, node=hex(cl.tip()),
168 return tmpl('search', query=query, node=hex(cl.tip()),
169 entries=changelist, archives=web.archivelist("tip"),
169 entries=changelist, archives=web.archivelist("tip"),
170 morevars=morevars, lessvars=lessvars)
170 morevars=morevars, lessvars=lessvars)
171
171
172 def changelog(web, req, tmpl, shortlog=False):
172 def changelog(web, req, tmpl, shortlog=False):
173
173
174 if 'node' in req.form:
174 if 'node' in req.form:
175 ctx = webutil.changectx(web.repo, req)
175 ctx = webutil.changectx(web.repo, req)
176 else:
176 else:
177 if 'rev' in req.form:
177 if 'rev' in req.form:
178 hi = req.form['rev'][0]
178 hi = req.form['rev'][0]
179 else:
179 else:
180 hi = len(web.repo) - 1
180 hi = len(web.repo) - 1
181 try:
181 try:
182 ctx = web.repo[hi]
182 ctx = web.repo[hi]
183 except error.RepoError:
183 except error.RepoError:
184 return _search(web, req, tmpl) # XXX redirect to 404 page?
184 return _search(web, req, tmpl) # XXX redirect to 404 page?
185
185
186 def changelist(limit=0, **map):
186 def changelist(limit=0, **map):
187 l = [] # build a list in forward order for efficiency
187 l = [] # build a list in forward order for efficiency
188 for i in xrange(start, end):
188 for i in xrange(start, end):
189 ctx = web.repo[i]
189 ctx = web.repo[i]
190 n = ctx.node()
190 n = ctx.node()
191 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
191 showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
192 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
192 files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
193
193
194 l.insert(0, {"parity": parity.next(),
194 l.insert(0, {"parity": parity.next(),
195 "author": ctx.user(),
195 "author": ctx.user(),
196 "parent": webutil.parents(ctx, i - 1),
196 "parent": webutil.parents(ctx, i - 1),
197 "child": webutil.children(ctx, i + 1),
197 "child": webutil.children(ctx, i + 1),
198 "changelogtag": showtags,
198 "changelogtag": showtags,
199 "desc": ctx.description(),
199 "desc": ctx.description(),
200 "date": ctx.date(),
200 "date": ctx.date(),
201 "files": files,
201 "files": files,
202 "rev": i,
202 "rev": i,
203 "node": hex(n),
203 "node": hex(n),
204 "tags": webutil.nodetagsdict(web.repo, n),
204 "tags": webutil.nodetagsdict(web.repo, n),
205 "inbranch": webutil.nodeinbranch(web.repo, ctx),
205 "inbranch": webutil.nodeinbranch(web.repo, ctx),
206 "branches": webutil.nodebranchdict(web.repo, ctx)
206 "branches": webutil.nodebranchdict(web.repo, ctx)
207 })
207 })
208
208
209 if limit > 0:
209 if limit > 0:
210 l = l[:limit]
210 l = l[:limit]
211
211
212 for e in l:
212 for e in l:
213 yield e
213 yield e
214
214
215 revcount = shortlog and web.maxshortchanges or web.maxchanges
215 revcount = shortlog and web.maxshortchanges or web.maxchanges
216 if 'revcount' in req.form:
216 if 'revcount' in req.form:
217 revcount = int(req.form.get('revcount', [revcount])[0])
217 revcount = int(req.form.get('revcount', [revcount])[0])
218 tmpl.defaults['sessionvars']['revcount'] = revcount
218 tmpl.defaults['sessionvars']['revcount'] = revcount
219
219
220 lessvars = copy.copy(tmpl.defaults['sessionvars'])
220 lessvars = copy.copy(tmpl.defaults['sessionvars'])
221 lessvars['revcount'] = revcount / 2
221 lessvars['revcount'] = revcount / 2
222 morevars = copy.copy(tmpl.defaults['sessionvars'])
222 morevars = copy.copy(tmpl.defaults['sessionvars'])
223 morevars['revcount'] = revcount * 2
223 morevars['revcount'] = revcount * 2
224
224
225 cl = web.repo.changelog
225 cl = web.repo.changelog
226 count = len(cl)
226 count = len(cl)
227 pos = ctx.rev()
227 pos = ctx.rev()
228 start = max(0, pos - revcount + 1)
228 start = max(0, pos - revcount + 1)
229 end = min(count, start + revcount)
229 end = min(count, start + revcount)
230 pos = end - 1
230 pos = end - 1
231 parity = paritygen(web.stripecount, offset=start - end)
231 parity = paritygen(web.stripecount, offset=start - end)
232
232
233 changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx)
233 changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx)
234
234
235 return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
235 return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
236 node=hex(ctx.node()), rev=pos, changesets=count,
236 node=hex(ctx.node()), rev=pos, changesets=count,
237 entries=lambda **x: changelist(limit=0,**x),
237 entries=lambda **x: changelist(limit=0,**x),
238 latestentry=lambda **x: changelist(limit=1,**x),
238 latestentry=lambda **x: changelist(limit=1,**x),
239 archives=web.archivelist("tip"), revcount=revcount,
239 archives=web.archivelist("tip"), revcount=revcount,
240 morevars=morevars, lessvars=lessvars)
240 morevars=morevars, lessvars=lessvars)
241
241
242 def shortlog(web, req, tmpl):
242 def shortlog(web, req, tmpl):
243 return changelog(web, req, tmpl, shortlog = True)
243 return changelog(web, req, tmpl, shortlog = True)
244
244
245 def changeset(web, req, tmpl):
245 def changeset(web, req, tmpl):
246 ctx = webutil.changectx(web.repo, req)
246 ctx = webutil.changectx(web.repo, req)
247 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
247 showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
248 showbranch = webutil.nodebranchnodefault(ctx)
248 showbranch = webutil.nodebranchnodefault(ctx)
249
249
250 files = []
250 files = []
251 parity = paritygen(web.stripecount)
251 parity = paritygen(web.stripecount)
252 for f in ctx.files():
252 for f in ctx.files():
253 template = f in ctx and 'filenodelink' or 'filenolink'
253 template = f in ctx and 'filenodelink' or 'filenolink'
254 files.append(tmpl(template,
254 files.append(tmpl(template,
255 node=ctx.hex(), file=f,
255 node=ctx.hex(), file=f,
256 parity=parity.next()))
256 parity=parity.next()))
257
257
258 parity = paritygen(web.stripecount)
258 parity = paritygen(web.stripecount)
259 style = web.config('web', 'style', 'paper')
259 style = web.config('web', 'style', 'paper')
260 if 'style' in req.form:
260 if 'style' in req.form:
261 style = req.form['style'][0]
261 style = req.form['style'][0]
262
262
263 diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity, style)
263 diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity, style)
264 return tmpl('changeset',
264 return tmpl('changeset',
265 diff=diffs,
265 diff=diffs,
266 rev=ctx.rev(),
266 rev=ctx.rev(),
267 node=ctx.hex(),
267 node=ctx.hex(),
268 parent=webutil.parents(ctx),
268 parent=webutil.parents(ctx),
269 child=webutil.children(ctx),
269 child=webutil.children(ctx),
270 changesettag=showtags,
270 changesettag=showtags,
271 changesetbranch=showbranch,
271 changesetbranch=showbranch,
272 author=ctx.user(),
272 author=ctx.user(),
273 desc=ctx.description(),
273 desc=ctx.description(),
274 date=ctx.date(),
274 date=ctx.date(),
275 files=files,
275 files=files,
276 archives=web.archivelist(ctx.hex()),
276 archives=web.archivelist(ctx.hex()),
277 tags=webutil.nodetagsdict(web.repo, ctx.node()),
277 tags=webutil.nodetagsdict(web.repo, ctx.node()),
278 branch=webutil.nodebranchnodefault(ctx),
278 branch=webutil.nodebranchnodefault(ctx),
279 inbranch=webutil.nodeinbranch(web.repo, ctx),
279 inbranch=webutil.nodeinbranch(web.repo, ctx),
280 branches=webutil.nodebranchdict(web.repo, ctx))
280 branches=webutil.nodebranchdict(web.repo, ctx))
281
281
282 rev = changeset
282 rev = changeset
283
283
284 def manifest(web, req, tmpl):
284 def manifest(web, req, tmpl):
285 ctx = webutil.changectx(web.repo, req)
285 ctx = webutil.changectx(web.repo, req)
286 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
286 path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
287 mf = ctx.manifest()
287 mf = ctx.manifest()
288 node = ctx.node()
288 node = ctx.node()
289
289
290 files = {}
290 files = {}
291 dirs = {}
291 dirs = {}
292 parity = paritygen(web.stripecount)
292 parity = paritygen(web.stripecount)
293
293
294 if path and path[-1] != "/":
294 if path and path[-1] != "/":
295 path += "/"
295 path += "/"
296 l = len(path)
296 l = len(path)
297 abspath = "/" + path
297 abspath = "/" + path
298
298
299 for f, n in mf.iteritems():
299 for f, n in mf.iteritems():
300 if f[:l] != path:
300 if f[:l] != path:
301 continue
301 continue
302 remain = f[l:]
302 remain = f[l:]
303 elements = remain.split('/')
303 elements = remain.split('/')
304 if len(elements) == 1:
304 if len(elements) == 1:
305 files[remain] = f
305 files[remain] = f
306 else:
306 else:
307 h = dirs # need to retain ref to dirs (root)
307 h = dirs # need to retain ref to dirs (root)
308 for elem in elements[0:-1]:
308 for elem in elements[0:-1]:
309 if elem not in h:
309 if elem not in h:
310 h[elem] = {}
310 h[elem] = {}
311 h = h[elem]
311 h = h[elem]
312 if len(h) > 1:
312 if len(h) > 1:
313 break
313 break
314 h[None] = None # denotes files present
314 h[None] = None # denotes files present
315
315
316 if mf and not files and not dirs:
316 if mf and not files and not dirs:
317 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
317 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
318
318
319 def filelist(**map):
319 def filelist(**map):
320 for f in sorted(files):
320 for f in sorted(files):
321 full = files[f]
321 full = files[f]
322
322
323 fctx = ctx.filectx(full)
323 fctx = ctx.filectx(full)
324 yield {"file": full,
324 yield {"file": full,
325 "parity": parity.next(),
325 "parity": parity.next(),
326 "basename": f,
326 "basename": f,
327 "date": fctx.date(),
327 "date": fctx.date(),
328 "size": fctx.size(),
328 "size": fctx.size(),
329 "permissions": mf.flags(full)}
329 "permissions": mf.flags(full)}
330
330
331 def dirlist(**map):
331 def dirlist(**map):
332 for d in sorted(dirs):
332 for d in sorted(dirs):
333
333
334 emptydirs = []
334 emptydirs = []
335 h = dirs[d]
335 h = dirs[d]
336 while isinstance(h, dict) and len(h) == 1:
336 while isinstance(h, dict) and len(h) == 1:
337 k, v = h.items()[0]
337 k, v = h.items()[0]
338 if v:
338 if v:
339 emptydirs.append(k)
339 emptydirs.append(k)
340 h = v
340 h = v
341
341
342 path = "%s%s" % (abspath, d)
342 path = "%s%s" % (abspath, d)
343 yield {"parity": parity.next(),
343 yield {"parity": parity.next(),
344 "path": path,
344 "path": path,
345 "emptydirs": "/".join(emptydirs),
345 "emptydirs": "/".join(emptydirs),
346 "basename": d}
346 "basename": d}
347
347
348 return tmpl("manifest",
348 return tmpl("manifest",
349 rev=ctx.rev(),
349 rev=ctx.rev(),
350 node=hex(node),
350 node=hex(node),
351 path=abspath,
351 path=abspath,
352 up=webutil.up(abspath),
352 up=webutil.up(abspath),
353 upparity=parity.next(),
353 upparity=parity.next(),
354 fentries=filelist,
354 fentries=filelist,
355 dentries=dirlist,
355 dentries=dirlist,
356 archives=web.archivelist(hex(node)),
356 archives=web.archivelist(hex(node)),
357 tags=webutil.nodetagsdict(web.repo, node),
357 tags=webutil.nodetagsdict(web.repo, node),
358 inbranch=webutil.nodeinbranch(web.repo, ctx),
358 inbranch=webutil.nodeinbranch(web.repo, ctx),
359 branches=webutil.nodebranchdict(web.repo, ctx))
359 branches=webutil.nodebranchdict(web.repo, ctx))
360
360
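manifest() above sorts every path under the requested prefix into either a direct file or a nested directory map, using a None key to mark that files exist further down; dirlist() later collapses single-child chains for display. A simplified standalone sketch of that grouping (the paths are made up, and the collapsing step is left out):

def group(paths, prefix=''):
    # split manifest paths under `prefix` into direct files and subdirectories
    files, dirs = {}, {}
    plen = len(prefix)
    for p in paths:
        if not p.startswith(prefix):
            continue
        rest = p[plen:]
        parts = rest.split('/')
        if len(parts) == 1:
            files[rest] = p                              # file in this directory
        else:
            dirs.setdefault(parts[0], {})[None] = None   # subdir with files below
    return files, dirs

files, dirs = group(['setup.py', 'mercurial/util.py', 'hgext/mq.py'])
print(sorted(files))   # ['setup.py']
print(sorted(dirs))    # ['hgext', 'mercurial']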
361 def tags(web, req, tmpl):
361 def tags(web, req, tmpl):
362 i = web.repo.tagslist()
362 i = web.repo.tagslist()
363 i.reverse()
363 i.reverse()
364 parity = paritygen(web.stripecount)
364 parity = paritygen(web.stripecount)
365
365
366 def entries(notip=False, limit=0, **map):
366 def entries(notip=False, limit=0, **map):
367 count = 0
367 count = 0
368 for k, n in i:
368 for k, n in i:
369 if notip and k == "tip":
369 if notip and k == "tip":
370 continue
370 continue
371 if limit > 0 and count >= limit:
371 if limit > 0 and count >= limit:
372 continue
372 continue
373 count = count + 1
373 count = count + 1
374 yield {"parity": parity.next(),
374 yield {"parity": parity.next(),
375 "tag": k,
375 "tag": k,
376 "date": web.repo[n].date(),
376 "date": web.repo[n].date(),
377 "node": hex(n)}
377 "node": hex(n)}
378
378
379 return tmpl("tags",
379 return tmpl("tags",
380 node=hex(web.repo.changelog.tip()),
380 node=hex(web.repo.changelog.tip()),
381 entries=lambda **x: entries(False, 0, **x),
381 entries=lambda **x: entries(False, 0, **x),
382 entriesnotip=lambda **x: entries(True, 0, **x),
382 entriesnotip=lambda **x: entries(True, 0, **x),
383 latestentry=lambda **x: entries(True, 1, **x))
383 latestentry=lambda **x: entries(True, 1, **x))
384
384
385 def branches(web, req, tmpl):
385 def branches(web, req, tmpl):
386 b = web.repo.branchtags()
386 b = web.repo.branchtags()
387 tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
387 tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
388 heads = web.repo.heads()
388 heads = web.repo.heads()
389 parity = paritygen(web.stripecount)
389 parity = paritygen(web.stripecount)
390 sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())
390 sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())
391
391
392 def entries(limit, **map):
392 def entries(limit, **map):
393 count = 0
393 count = 0
394 for ctx in sorted(tips, key=sortkey, reverse=True):
394 for ctx in sorted(tips, key=sortkey, reverse=True):
395 if limit > 0 and count >= limit:
395 if limit > 0 and count >= limit:
396 return
396 return
397 count += 1
397 count += 1
398 if ctx.node() not in heads:
398 if ctx.node() not in heads:
399 status = 'inactive'
399 status = 'inactive'
400 elif not web.repo.branchheads(ctx.branch()):
400 elif not web.repo.branchheads(ctx.branch()):
401 status = 'closed'
401 status = 'closed'
402 else:
402 else:
403 status = 'open'
403 status = 'open'
404 yield {'parity': parity.next(),
404 yield {'parity': parity.next(),
405 'branch': ctx.branch(),
405 'branch': ctx.branch(),
406 'status': status,
406 'status': status,
407 'node': ctx.hex(),
407 'node': ctx.hex(),
408 'date': ctx.date()}
408 'date': ctx.date()}
409
409
410 return tmpl('branches', node=hex(web.repo.changelog.tip()),
410 return tmpl('branches', node=hex(web.repo.changelog.tip()),
411 entries=lambda **x: entries(0, **x),
411 entries=lambda **x: entries(0, **x),
412 latestentry=lambda **x: entries(1, **x))
412 latestentry=lambda **x: entries(1, **x))
413
413
414 def summary(web, req, tmpl):
414 def summary(web, req, tmpl):
415 i = web.repo.tagslist()
415 i = web.repo.tagslist()
416 i.reverse()
416 i.reverse()
417
417
418 def tagentries(**map):
418 def tagentries(**map):
419 parity = paritygen(web.stripecount)
419 parity = paritygen(web.stripecount)
420 count = 0
420 count = 0
421 for k, n in i:
421 for k, n in i:
422 if k == "tip": # skip tip
422 if k == "tip": # skip tip
423 continue
423 continue
424
424
425 count += 1
425 count += 1
426 if count > 10: # limit to 10 tags
426 if count > 10: # limit to 10 tags
427 break
427 break
428
428
429 yield tmpl("tagentry",
429 yield tmpl("tagentry",
430 parity=parity.next(),
430 parity=parity.next(),
431 tag=k,
431 tag=k,
432 node=hex(n),
432 node=hex(n),
433 date=web.repo[n].date())
433 date=web.repo[n].date())
434
434
435 def branches(**map):
435 def branches(**map):
436 parity = paritygen(web.stripecount)
436 parity = paritygen(web.stripecount)
437
437
438 b = web.repo.branchtags()
438 b = web.repo.branchtags()
439 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()]
439 l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()]
440 for r, n, t in sorted(l):
440 for r, n, t in sorted(l):
441 yield {'parity': parity.next(),
441 yield {'parity': parity.next(),
442 'branch': t,
442 'branch': t,
443 'node': hex(n),
443 'node': hex(n),
444 'date': web.repo[n].date()}
444 'date': web.repo[n].date()}
445
445
446 def changelist(**map):
446 def changelist(**map):
447 parity = paritygen(web.stripecount, offset=start - end)
447 parity = paritygen(web.stripecount, offset=start - end)
448 l = [] # build a list in forward order for efficiency
448 l = [] # build a list in forward order for efficiency
449 for i in xrange(start, end):
449 for i in xrange(start, end):
450 ctx = web.repo[i]
450 ctx = web.repo[i]
451 n = ctx.node()
451 n = ctx.node()
452 hn = hex(n)
452 hn = hex(n)
453
453
454 l.insert(0, tmpl(
454 l.insert(0, tmpl(
455 'shortlogentry',
455 'shortlogentry',
456 parity=parity.next(),
456 parity=parity.next(),
457 author=ctx.user(),
457 author=ctx.user(),
458 desc=ctx.description(),
458 desc=ctx.description(),
459 date=ctx.date(),
459 date=ctx.date(),
460 rev=i,
460 rev=i,
461 node=hn,
461 node=hn,
462 tags=webutil.nodetagsdict(web.repo, n),
462 tags=webutil.nodetagsdict(web.repo, n),
463 inbranch=webutil.nodeinbranch(web.repo, ctx),
463 inbranch=webutil.nodeinbranch(web.repo, ctx),
464 branches=webutil.nodebranchdict(web.repo, ctx)))
464 branches=webutil.nodebranchdict(web.repo, ctx)))
465
465
466 yield l
466 yield l
467
467
468 cl = web.repo.changelog
468 cl = web.repo.changelog
469 count = len(cl)
469 count = len(cl)
470 start = max(0, count - web.maxchanges)
470 start = max(0, count - web.maxchanges)
471 end = min(count, start + web.maxchanges)
471 end = min(count, start + web.maxchanges)
472
472
473 return tmpl("summary",
473 return tmpl("summary",
474 desc=web.config("web", "description", "unknown"),
474 desc=web.config("web", "description", "unknown"),
475 owner=get_contact(web.config) or "unknown",
475 owner=get_contact(web.config) or "unknown",
476 lastchange=cl.read(cl.tip())[2],
476 lastchange=cl.read(cl.tip())[2],
477 tags=tagentries,
477 tags=tagentries,
478 branches=branches,
478 branches=branches,
479 shortlog=changelist,
479 shortlog=changelist,
480 node=hex(cl.tip()),
480 node=hex(cl.tip()),
481 archives=web.archivelist("tip"))
481 archives=web.archivelist("tip"))
482
482
483 def filediff(web, req, tmpl):
483 def filediff(web, req, tmpl):
484 fctx, ctx = None, None
484 fctx, ctx = None, None
485 try:
485 try:
486 fctx = webutil.filectx(web.repo, req)
486 fctx = webutil.filectx(web.repo, req)
487 except LookupError:
487 except LookupError:
488 ctx = webutil.changectx(web.repo, req)
488 ctx = webutil.changectx(web.repo, req)
489 path = webutil.cleanpath(web.repo, req.form['file'][0])
489 path = webutil.cleanpath(web.repo, req.form['file'][0])
490 if path not in ctx.files():
490 if path not in ctx.files():
491 raise
491 raise
492
492
493 if fctx is not None:
493 if fctx is not None:
494 n = fctx.node()
494 n = fctx.node()
495 path = fctx.path()
495 path = fctx.path()
496 else:
496 else:
497 n = ctx.node()
497 n = ctx.node()
498 # path already defined in except clause
498 # path already defined in except clause
499
499
500 parity = paritygen(web.stripecount)
500 parity = paritygen(web.stripecount)
501 style = web.config('web', 'style', 'paper')
501 style = web.config('web', 'style', 'paper')
502 if 'style' in req.form:
502 if 'style' in req.form:
503 style = req.form['style'][0]
503 style = req.form['style'][0]
504
504
505 diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity, style)
505 diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity, style)
506 rename = fctx and webutil.renamelink(fctx) or []
506 rename = fctx and webutil.renamelink(fctx) or []
507 ctx = fctx and fctx or ctx
507 ctx = fctx and fctx or ctx
508 return tmpl("filediff",
508 return tmpl("filediff",
509 file=path,
509 file=path,
510 node=hex(n),
510 node=hex(n),
511 rev=ctx.rev(),
511 rev=ctx.rev(),
512 date=ctx.date(),
512 date=ctx.date(),
513 desc=ctx.description(),
513 desc=ctx.description(),
514 author=ctx.user(),
514 author=ctx.user(),
515 rename=rename,
515 rename=rename,
516 branch=webutil.nodebranchnodefault(ctx),
516 branch=webutil.nodebranchnodefault(ctx),
517 parent=webutil.parents(ctx),
517 parent=webutil.parents(ctx),
518 child=webutil.children(ctx),
518 child=webutil.children(ctx),
519 diff=diffs)
519 diff=diffs)
520
520
521 diff = filediff
521 diff = filediff
522
522
523 def annotate(web, req, tmpl):
523 def annotate(web, req, tmpl):
524 fctx = webutil.filectx(web.repo, req)
524 fctx = webutil.filectx(web.repo, req)
525 f = fctx.path()
525 f = fctx.path()
526 parity = paritygen(web.stripecount)
526 parity = paritygen(web.stripecount)
527
527
528 def annotate(**map):
528 def annotate(**map):
529 last = None
529 last = None
530 if binary(fctx.data()):
530 if binary(fctx.data()):
531 mt = (mimetypes.guess_type(fctx.path())[0]
531 mt = (mimetypes.guess_type(fctx.path())[0]
532 or 'application/octet-stream')
532 or 'application/octet-stream')
533 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
533 lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
534 '(binary:%s)' % mt)])
534 '(binary:%s)' % mt)])
535 else:
535 else:
536 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
536 lines = enumerate(fctx.annotate(follow=True, linenumber=True))
537 for lineno, ((f, targetline), l) in lines:
537 for lineno, ((f, targetline), l) in lines:
538 fnode = f.filenode()
538 fnode = f.filenode()
539
539
540 if last != fnode:
540 if last != fnode:
541 last = fnode
541 last = fnode
542
542
543 yield {"parity": parity.next(),
543 yield {"parity": parity.next(),
544 "node": hex(f.node()),
544 "node": hex(f.node()),
545 "rev": f.rev(),
545 "rev": f.rev(),
546 "author": f.user(),
546 "author": f.user(),
547 "desc": f.description(),
547 "desc": f.description(),
548 "file": f.path(),
548 "file": f.path(),
549 "targetline": targetline,
549 "targetline": targetline,
550 "line": l,
550 "line": l,
551 "lineid": "l%d" % (lineno + 1),
551 "lineid": "l%d" % (lineno + 1),
552 "linenumber": "% 6d" % (lineno + 1)}
552 "linenumber": "% 6d" % (lineno + 1)}
553
553
554 return tmpl("fileannotate",
554 return tmpl("fileannotate",
555 file=f,
555 file=f,
556 annotate=annotate,
556 annotate=annotate,
557 path=webutil.up(f),
557 path=webutil.up(f),
558 rev=fctx.rev(),
558 rev=fctx.rev(),
559 node=hex(fctx.node()),
559 node=hex(fctx.node()),
560 author=fctx.user(),
560 author=fctx.user(),
561 date=fctx.date(),
561 date=fctx.date(),
562 desc=fctx.description(),
562 desc=fctx.description(),
563 rename=webutil.renamelink(fctx),
563 rename=webutil.renamelink(fctx),
564 branch=webutil.nodebranchnodefault(fctx),
564 branch=webutil.nodebranchnodefault(fctx),
565 parent=webutil.parents(fctx),
565 parent=webutil.parents(fctx),
566 child=webutil.children(fctx),
566 child=webutil.children(fctx),
567 permissions=fctx.manifest().flags(f))
567 permissions=fctx.manifest().flags(f))
568
568
569 def filelog(web, req, tmpl):
569 def filelog(web, req, tmpl):
570
570
571 try:
571 try:
572 fctx = webutil.filectx(web.repo, req)
572 fctx = webutil.filectx(web.repo, req)
573 f = fctx.path()
573 f = fctx.path()
574 fl = fctx.filelog()
574 fl = fctx.filelog()
575 except error.LookupError:
575 except error.LookupError:
576 f = webutil.cleanpath(web.repo, req.form['file'][0])
576 f = webutil.cleanpath(web.repo, req.form['file'][0])
577 fl = web.repo.file(f)
577 fl = web.repo.file(f)
578 numrevs = len(fl)
578 numrevs = len(fl)
579 if not numrevs: # file doesn't exist at all
579 if not numrevs: # file doesn't exist at all
580 raise
580 raise
581 rev = webutil.changectx(web.repo, req).rev()
581 rev = webutil.changectx(web.repo, req).rev()
582 first = fl.linkrev(0)
582 first = fl.linkrev(0)
583 if rev < first: # current rev is from before file existed
583 if rev < first: # current rev is from before file existed
584 raise
584 raise
585 frev = numrevs - 1
585 frev = numrevs - 1
586 while fl.linkrev(frev) > rev:
586 while fl.linkrev(frev) > rev:
587 frev -= 1
587 frev -= 1
588 fctx = web.repo.filectx(f, fl.linkrev(frev))
588 fctx = web.repo.filectx(f, fl.linkrev(frev))
589
589
590 revcount = web.maxshortchanges
590 revcount = web.maxshortchanges
591 if 'revcount' in req.form:
591 if 'revcount' in req.form:
592 revcount = int(req.form.get('revcount', [revcount])[0])
592 revcount = int(req.form.get('revcount', [revcount])[0])
593 tmpl.defaults['sessionvars']['revcount'] = revcount
593 tmpl.defaults['sessionvars']['revcount'] = revcount
594
594
595 lessvars = copy.copy(tmpl.defaults['sessionvars'])
595 lessvars = copy.copy(tmpl.defaults['sessionvars'])
596 lessvars['revcount'] = revcount / 2
596 lessvars['revcount'] = revcount / 2
597 morevars = copy.copy(tmpl.defaults['sessionvars'])
597 morevars = copy.copy(tmpl.defaults['sessionvars'])
598 morevars['revcount'] = revcount * 2
598 morevars['revcount'] = revcount * 2
599
599
600 count = fctx.filerev() + 1
600 count = fctx.filerev() + 1
601 start = max(0, fctx.filerev() - revcount + 1) # first rev on this page
601 start = max(0, fctx.filerev() - revcount + 1) # first rev on this page
602 end = min(count, start + revcount) # last rev on this page
602 end = min(count, start + revcount) # last rev on this page
603 parity = paritygen(web.stripecount, offset=start - end)
603 parity = paritygen(web.stripecount, offset=start - end)
604
604
605 def entries(limit=0, **map):
605 def entries(limit=0, **map):
606 l = []
606 l = []
607
607
608 repo = web.repo
608 repo = web.repo
609 for i in xrange(start, end):
609 for i in xrange(start, end):
610 iterfctx = fctx.filectx(i)
610 iterfctx = fctx.filectx(i)
611
611
612 l.insert(0, {"parity": parity.next(),
612 l.insert(0, {"parity": parity.next(),
613 "filerev": i,
613 "filerev": i,
614 "file": f,
614 "file": f,
615 "node": hex(iterfctx.node()),
615 "node": hex(iterfctx.node()),
616 "author": iterfctx.user(),
616 "author": iterfctx.user(),
617 "date": iterfctx.date(),
617 "date": iterfctx.date(),
618 "rename": webutil.renamelink(iterfctx),
618 "rename": webutil.renamelink(iterfctx),
619 "parent": webutil.parents(iterfctx),
619 "parent": webutil.parents(iterfctx),
620 "child": webutil.children(iterfctx),
620 "child": webutil.children(iterfctx),
621 "desc": iterfctx.description(),
621 "desc": iterfctx.description(),
622 "tags": webutil.nodetagsdict(repo, iterfctx.node()),
622 "tags": webutil.nodetagsdict(repo, iterfctx.node()),
623 "branch": webutil.nodebranchnodefault(iterfctx),
623 "branch": webutil.nodebranchnodefault(iterfctx),
624 "inbranch": webutil.nodeinbranch(repo, iterfctx),
624 "inbranch": webutil.nodeinbranch(repo, iterfctx),
625 "branches": webutil.nodebranchdict(repo, iterfctx)})
625 "branches": webutil.nodebranchdict(repo, iterfctx)})
626
626
627 if limit > 0:
627 if limit > 0:
628 l = l[:limit]
628 l = l[:limit]
629
629
630 for e in l:
630 for e in l:
631 yield e
631 yield e
632
632
633 nodefunc = lambda x: fctx.filectx(fileid=x)
633 nodefunc = lambda x: fctx.filectx(fileid=x)
634 nav = webutil.revnavgen(end - 1, revcount, count, nodefunc)
634 nav = webutil.revnavgen(end - 1, revcount, count, nodefunc)
635 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
635 return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav,
636 entries=lambda **x: entries(limit=0, **x),
636 entries=lambda **x: entries(limit=0, **x),
637 latestentry=lambda **x: entries(limit=1, **x),
637 latestentry=lambda **x: entries(limit=1, **x),
638 revcount=revcount, morevars=morevars, lessvars=lessvars)
638 revcount=revcount, morevars=morevars, lessvars=lessvars)
639
639
640 def archive(web, req, tmpl):
640 def archive(web, req, tmpl):
641 type_ = req.form.get('type', [None])[0]
641 type_ = req.form.get('type', [None])[0]
642 allowed = web.configlist("web", "allow_archive")
642 allowed = web.configlist("web", "allow_archive")
643 key = req.form['node'][0]
643 key = req.form['node'][0]
644
644
645 if type_ not in web.archives:
645 if type_ not in web.archives:
646 msg = 'Unsupported archive type: %s' % type_
646 msg = 'Unsupported archive type: %s' % type_
647 raise ErrorResponse(HTTP_NOT_FOUND, msg)
647 raise ErrorResponse(HTTP_NOT_FOUND, msg)
648
648
649 if not ((type_ in allowed or
649 if not ((type_ in allowed or
650 web.configbool("web", "allow" + type_, False))):
650 web.configbool("web", "allow" + type_, False))):
651 msg = 'Archive type not allowed: %s' % type_
651 msg = 'Archive type not allowed: %s' % type_
652 raise ErrorResponse(HTTP_FORBIDDEN, msg)
652 raise ErrorResponse(HTTP_FORBIDDEN, msg)
653
653
654 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
654 reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame))
655 cnode = web.repo.lookup(key)
655 cnode = web.repo.lookup(key)
656 arch_version = key
656 arch_version = key
657 if cnode == key or key == 'tip':
657 if cnode == key or key == 'tip':
658 arch_version = short(cnode)
658 arch_version = short(cnode)
659 name = "%s-%s" % (reponame, arch_version)
659 name = "%s-%s" % (reponame, arch_version)
660 mimetype, artype, extension, encoding = web.archive_specs[type_]
660 mimetype, artype, extension, encoding = web.archive_specs[type_]
661 headers = [
661 headers = [
662 ('Content-Type', mimetype),
662 ('Content-Type', mimetype),
663 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
663 ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension))
664 ]
664 ]
665 if encoding:
665 if encoding:
666 headers.append(('Content-Encoding', encoding))
666 headers.append(('Content-Encoding', encoding))
667 req.header(headers)
667 req.header(headers)
668 req.respond(HTTP_OK)
668 req.respond(HTTP_OK)
669 archival.archive(web.repo, req, cnode, artype, prefix=name)
669 archival.archive(web.repo, req, cnode, artype, prefix=name)
670 return []
670 return []
671
671
672
672
673 def static(web, req, tmpl):
673 def static(web, req, tmpl):
674 fname = req.form['file'][0]
674 fname = req.form['file'][0]
675 # a repo owner may set web.static in .hg/hgrc to get any file
675 # a repo owner may set web.static in .hg/hgrc to get any file
676 # readable by the user running the CGI script
676 # readable by the user running the CGI script
677 static = web.config("web", "static", None, untrusted=False)
677 static = web.config("web", "static", None, untrusted=False)
678 if not static:
678 if not static:
679 tp = web.templatepath or templater.templatepath()
679 tp = web.templatepath or templater.templatepath()
680 if isinstance(tp, str):
680 if isinstance(tp, str):
681 tp = [tp]
681 tp = [tp]
682 static = [os.path.join(p, 'static') for p in tp]
682 static = [os.path.join(p, 'static') for p in tp]
683 return [staticfile(static, fname, req)]
683 return [staticfile(static, fname, req)]
684
684
685 def graph(web, req, tmpl):
685 def graph(web, req, tmpl):
686
686
687 rev = webutil.changectx(web.repo, req).rev()
687 rev = webutil.changectx(web.repo, req).rev()
688 bg_height = 39
688 bg_height = 39
689 revcount = web.maxshortchanges
689 revcount = web.maxshortchanges
690 if 'revcount' in req.form:
690 if 'revcount' in req.form:
691 revcount = int(req.form.get('revcount', [revcount])[0])
691 revcount = int(req.form.get('revcount', [revcount])[0])
692 tmpl.defaults['sessionvars']['revcount'] = revcount
692 tmpl.defaults['sessionvars']['revcount'] = revcount
693
693
694 lessvars = copy.copy(tmpl.defaults['sessionvars'])
694 lessvars = copy.copy(tmpl.defaults['sessionvars'])
695 lessvars['revcount'] = revcount / 2
695 lessvars['revcount'] = revcount / 2
696 morevars = copy.copy(tmpl.defaults['sessionvars'])
696 morevars = copy.copy(tmpl.defaults['sessionvars'])
697 morevars['revcount'] = revcount * 2
697 morevars['revcount'] = revcount * 2
698
698
699 max_rev = len(web.repo) - 1
699 max_rev = len(web.repo) - 1
700 revcount = min(max_rev, revcount)
700 revcount = min(max_rev, revcount)
701 revnode = web.repo.changelog.node(rev)
701 revnode = web.repo.changelog.node(rev)
702 revnode_hex = hex(revnode)
702 revnode_hex = hex(revnode)
703 uprev = min(max_rev, rev + revcount)
703 uprev = min(max_rev, rev + revcount)
704 downrev = max(0, rev - revcount)
704 downrev = max(0, rev - revcount)
705 count = len(web.repo)
705 count = len(web.repo)
706 changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
706 changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
707
707
708 dag = graphmod.revisions(web.repo, rev, downrev)
708 dag = graphmod.revisions(web.repo, rev, downrev)
709 tree = list(graphmod.colored(dag))
709 tree = list(graphmod.colored(dag))
710 canvasheight = (len(tree) + 1) * bg_height - 27;
710 canvasheight = (len(tree) + 1) * bg_height - 27
711 data = []
711 data = []
712 for (id, type, ctx, vtx, edges) in tree:
712 for (id, type, ctx, vtx, edges) in tree:
713 if type != graphmod.CHANGESET:
713 if type != graphmod.CHANGESET:
714 continue
714 continue
715 node = short(ctx.node())
715 node = short(ctx.node())
716 age = templatefilters.age(ctx.date())
716 age = templatefilters.age(ctx.date())
717 desc = templatefilters.firstline(ctx.description())
717 desc = templatefilters.firstline(ctx.description())
718 desc = cgi.escape(templatefilters.nonempty(desc))
718 desc = cgi.escape(templatefilters.nonempty(desc))
719 user = cgi.escape(templatefilters.person(ctx.user()))
719 user = cgi.escape(templatefilters.person(ctx.user()))
720 branch = ctx.branch()
720 branch = ctx.branch()
721 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
721 branch = branch, web.repo.branchtags().get(branch) == ctx.node()
722 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
722 data.append((node, vtx, edges, desc, user, age, branch, ctx.tags()))
723
723
724 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
724 return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
725 lessvars=lessvars, morevars=morevars, downrev=downrev,
725 lessvars=lessvars, morevars=morevars, downrev=downrev,
726 canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
726 canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
727 node=revnode_hex, changenav=changenav)
727 node=revnode_hex, changenav=changenav)
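The only change in this webcommands.py hunk is line 710 of graph(): the trailing semicolon on the canvasheight assignment is dropped, the kind of stray semicolon pylint reports (typically as W0301, unnecessary-semicolon). A self-contained illustration with stand-in values; tree and bg_height here are placeholders, not the real graph data:

tree = [None, None, None]   # stand-in for the colored DAG rows built above
bg_height = 39              # same per-row height constant the graph view uses

canvasheight = (len(tree) + 1) * bg_height - 27   # fixed form, no trailing ';'
print(canvasheight)                               # 129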
@@ -1,765 +1,765 b''
1 # This library is free software; you can redistribute it and/or
1 # This library is free software; you can redistribute it and/or
2 # modify it under the terms of the GNU Lesser General Public
2 # modify it under the terms of the GNU Lesser General Public
3 # License as published by the Free Software Foundation; either
3 # License as published by the Free Software Foundation; either
4 # version 2.1 of the License, or (at your option) any later version.
4 # version 2.1 of the License, or (at your option) any later version.
5 #
5 #
6 # This library is distributed in the hope that it will be useful,
6 # This library is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
9 # Lesser General Public License for more details.
9 # Lesser General Public License for more details.
10 #
10 #
11 # You should have received a copy of the GNU Lesser General Public
11 # You should have received a copy of the GNU Lesser General Public
12 # License along with this library; if not, write to the
12 # License along with this library; if not, write to the
13 # Free Software Foundation, Inc.,
13 # Free Software Foundation, Inc.,
14 # 59 Temple Place, Suite 330,
14 # 59 Temple Place, Suite 330,
15 # Boston, MA 02111-1307 USA
15 # Boston, MA 02111-1307 USA
16
16
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
17 # This file is part of urlgrabber, a high-level cross-protocol url-grabber
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
18 # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
19
19
20 # Modified by Benoit Boissinot:
20 # Modified by Benoit Boissinot:
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
21 # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
22 # Modified by Dirkjan Ochtman:
22 # Modified by Dirkjan Ochtman:
23 # - import md5 function from a local util module
23 # - import md5 function from a local util module
24 # Modified by Martin Geisler:
24 # Modified by Martin Geisler:
25 # - moved md5 function from local util module to this module
25 # - moved md5 function from local util module to this module
26 # Modified by Augie Fackler:
26 # Modified by Augie Fackler:
27 # - add safesend method and use it to prevent broken pipe errors
27 # - add safesend method and use it to prevent broken pipe errors
28 # on large POST requests
28 # on large POST requests
29
29
30 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
30 """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
31
31
32 >>> import urllib2
32 >>> import urllib2
33 >>> from keepalive import HTTPHandler
33 >>> from keepalive import HTTPHandler
34 >>> keepalive_handler = HTTPHandler()
34 >>> keepalive_handler = HTTPHandler()
35 >>> opener = urllib2.build_opener(keepalive_handler)
35 >>> opener = urllib2.build_opener(keepalive_handler)
36 >>> urllib2.install_opener(opener)
36 >>> urllib2.install_opener(opener)
37 >>>
37 >>>
38 >>> fo = urllib2.urlopen('http://www.python.org')
38 >>> fo = urllib2.urlopen('http://www.python.org')
39
39
40 If a connection to a given host is requested, and all of the existing
40 If a connection to a given host is requested, and all of the existing
41 connections are still in use, another connection will be opened. If
41 connections are still in use, another connection will be opened. If
42 the handler tries to use an existing connection but it fails in some
42 the handler tries to use an existing connection but it fails in some
43 way, it will be closed and removed from the pool.
43 way, it will be closed and removed from the pool.
44
44
45 To remove the handler, simply re-run build_opener with no arguments, and
45 To remove the handler, simply re-run build_opener with no arguments, and
46 install that opener.
46 install that opener.
47
47
48 You can explicitly close connections by using the close_connection()
48 You can explicitly close connections by using the close_connection()
49 method of the returned file-like object (described below) or you can
49 method of the returned file-like object (described below) or you can
50 use the handler methods:
50 use the handler methods:
51
51
52 close_connection(host)
52 close_connection(host)
53 close_all()
53 close_all()
54 open_connections()
54 open_connections()
55
55
56 NOTE: using the close_connection and close_all methods of the handler
56 NOTE: using the close_connection and close_all methods of the handler
57 should be done with care when using multiple threads.
57 should be done with care when using multiple threads.
58 * there is nothing that prevents another thread from creating new
58 * there is nothing that prevents another thread from creating new
59 connections immediately after connections are closed
59 connections immediately after connections are closed
60 * no checks are done to prevent in-use connections from being closed
60 * no checks are done to prevent in-use connections from being closed
61
61
62 >>> keepalive_handler.close_all()
62 >>> keepalive_handler.close_all()
63
63
64 EXTRA ATTRIBUTES AND METHODS
64 EXTRA ATTRIBUTES AND METHODS
65
65
66 Upon a status of 200, the object returned has a few additional
66 Upon a status of 200, the object returned has a few additional
67 attributes and methods, which should not be used if you want to
67 attributes and methods, which should not be used if you want to
68 remain consistent with the normal urllib2-returned objects:
68 remain consistent with the normal urllib2-returned objects:
69
69
70 close_connection() - close the connection to the host
70 close_connection() - close the connection to the host
71 readlines() - you know, readlines()
71 readlines() - you know, readlines()
72 status - the return status (i.e. 404)
72 status - the return status (i.e. 404)
73 reason - English translation of status (i.e. 'File not found')
73 reason - English translation of status (i.e. 'File not found')
74
74
75 If you want the best of both worlds, use this inside an
75 If you want the best of both worlds, use this inside an
76 AttributeError-catching try:
76 AttributeError-catching try:
77
77
78 >>> try: status = fo.status
78 >>> try: status = fo.status
79 >>> except AttributeError: status = None
79 >>> except AttributeError: status = None
80
80
81 Unfortunately, these are ONLY there if status == 200, so it's not
81 Unfortunately, these are ONLY there if status == 200, so it's not
82 easy to distinguish between non-200 responses. The reason is that
82 easy to distinguish between non-200 responses. The reason is that
83 urllib2 tries to do clever things with error codes 301, 302, 401,
83 urllib2 tries to do clever things with error codes 301, 302, 401,
84 and 407, and it wraps the object upon return.
84 and 407, and it wraps the object upon return.
85
85
86 For python versions earlier than 2.4, you can avoid this fancy error
86 For python versions earlier than 2.4, you can avoid this fancy error
87 handling by setting the module-level global HANDLE_ERRORS to zero.
87 handling by setting the module-level global HANDLE_ERRORS to zero.
88 You see, prior to 2.4, it's the HTTP Handler's job to determine what
88 You see, prior to 2.4, it's the HTTP Handler's job to determine what
89 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
89 to handle specially, and what to just pass up. HANDLE_ERRORS == 0
90 means "pass everything up". In python 2.4, however, this job no
90 means "pass everything up". In python 2.4, however, this job no
91 longer belongs to the HTTP Handler and is now done by a NEW handler,
91 longer belongs to the HTTP Handler and is now done by a NEW handler,
92 HTTPErrorProcessor. Here's the bottom line:
92 HTTPErrorProcessor. Here's the bottom line:
93
93
94 python version < 2.4
94 python version < 2.4
95 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
95 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
96 errors
96 errors
97 HANDLE_ERRORS == 0 pass everything up, error processing is
97 HANDLE_ERRORS == 0 pass everything up, error processing is
98 left to the calling code
98 left to the calling code
99 python version >= 2.4
99 python version >= 2.4
100 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
100 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
101 HANDLE_ERRORS == 0 (default) pass everything up, let the
101 HANDLE_ERRORS == 0 (default) pass everything up, let the
102 other handlers (specifically,
102 other handlers (specifically,
103 HTTPErrorProcessor) decide what to do
103 HTTPErrorProcessor) decide what to do
104
104
105 In practice, setting the variable either way makes little difference
105 In practice, setting the variable either way makes little difference
106 in python 2.4, so for the most consistent behavior across versions,
106 in python 2.4, so for the most consistent behavior across versions,
107 you probably just want to use the defaults, which will give you
107 you probably just want to use the defaults, which will give you
108 exceptions on errors.
108 exceptions on errors.
109
109
110 """
110 """
111
111
112 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
112 # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
113
113
114 import errno
114 import errno
115 import httplib
115 import httplib
116 import socket
116 import socket
117 import thread
117 import thread
118 import urllib2
118 import urllib2
119
119
120 DEBUG = None
120 DEBUG = None
121
121
122 import sys
122 import sys
123 if sys.version_info < (2, 4):
123 if sys.version_info < (2, 4):
124 HANDLE_ERRORS = 1
124 HANDLE_ERRORS = 1
125 else: HANDLE_ERRORS = 0
125 else: HANDLE_ERRORS = 0
126
126
127 class ConnectionManager:
127 class ConnectionManager:
128 """
128 """
129 The connection manager must be able to:
129 The connection manager must be able to:
130 * keep track of all existing connections
130 * keep track of all existing connections
131 """
131 """
132 def __init__(self):
132 def __init__(self):
133 self._lock = thread.allocate_lock()
133 self._lock = thread.allocate_lock()
134 self._hostmap = {} # map hosts to a list of connections
134 self._hostmap = {} # map hosts to a list of connections
135 self._connmap = {} # map connections to host
135 self._connmap = {} # map connections to host
136 self._readymap = {} # map connection to ready state
136 self._readymap = {} # map connection to ready state
137
137
138 def add(self, host, connection, ready):
138 def add(self, host, connection, ready):
139 self._lock.acquire()
139 self._lock.acquire()
140 try:
140 try:
141 if not host in self._hostmap:
141 if not host in self._hostmap:
142 self._hostmap[host] = []
142 self._hostmap[host] = []
143 self._hostmap[host].append(connection)
143 self._hostmap[host].append(connection)
144 self._connmap[connection] = host
144 self._connmap[connection] = host
145 self._readymap[connection] = ready
145 self._readymap[connection] = ready
146 finally:
146 finally:
147 self._lock.release()
147 self._lock.release()
148
148
149 def remove(self, connection):
149 def remove(self, connection):
150 self._lock.acquire()
150 self._lock.acquire()
151 try:
151 try:
152 try:
152 try:
153 host = self._connmap[connection]
153 host = self._connmap[connection]
154 except KeyError:
154 except KeyError:
155 pass
155 pass
156 else:
156 else:
157 del self._connmap[connection]
157 del self._connmap[connection]
158 del self._readymap[connection]
158 del self._readymap[connection]
159 self._hostmap[host].remove(connection)
159 self._hostmap[host].remove(connection)
160 if not self._hostmap[host]: del self._hostmap[host]
160 if not self._hostmap[host]: del self._hostmap[host]
161 finally:
161 finally:
162 self._lock.release()
162 self._lock.release()
163
163
164 def set_ready(self, connection, ready):
164 def set_ready(self, connection, ready):
165 try:
165 try:
166 self._readymap[connection] = ready
166 self._readymap[connection] = ready
167 except KeyError:
167 except KeyError:
168 pass
168 pass
169
169
170 def get_ready_conn(self, host):
170 def get_ready_conn(self, host):
171 conn = None
171 conn = None
172 self._lock.acquire()
172 self._lock.acquire()
173 try:
173 try:
174 if host in self._hostmap:
174 if host in self._hostmap:
175 for c in self._hostmap[host]:
175 for c in self._hostmap[host]:
176 if self._readymap[c]:
176 if self._readymap[c]:
177 self._readymap[c] = 0
177 self._readymap[c] = 0
178 conn = c
178 conn = c
179 break
179 break
180 finally:
180 finally:
181 self._lock.release()
181 self._lock.release()
182 return conn
182 return conn
183
183
184 def get_all(self, host=None):
184 def get_all(self, host=None):
185 if host:
185 if host:
186 return list(self._hostmap.get(host, []))
186 return list(self._hostmap.get(host, []))
187 else:
187 else:
188 return dict(self._hostmap)
188 return dict(self._hostmap)
189
189
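# A round-trip sketch of the bookkeeping above, not part of the original
# module; any object can stand in for a connection, since the manager never
# touches the socket itself.
def _connectionmanager_sketch():
    cm = ConnectionManager()
    conn = object()
    cm.add('example.com:80', conn, ready=1)
    assert cm.get_ready_conn('example.com:80') is conn  # now marked busy
    assert cm.get_ready_conn('example.com:80') is None  # busy until set_ready()
    cm.set_ready(conn, 1)       # request finished, connection reusable again
    cm.remove(conn)             # connection closed for good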
190 class KeepAliveHandler:
190 class KeepAliveHandler:
191 def __init__(self):
191 def __init__(self):
192 self._cm = ConnectionManager()
192 self._cm = ConnectionManager()
193
193
194 #### Connection Management
194 #### Connection Management
195 def open_connections(self):
195 def open_connections(self):
196 """return a list of connected hosts and the number of connections
196 """return a list of connected hosts and the number of connections
197 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
197 to each. [('foo.com:80', 2), ('bar.org', 1)]"""
198 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
198 return [(host, len(li)) for (host, li) in self._cm.get_all().items()]
199
199
200 def close_connection(self, host):
200 def close_connection(self, host):
201 """close connection(s) to <host>
201 """close connection(s) to <host>
202 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
202 host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
203 no error occurs if there is no connection to that host."""
203 no error occurs if there is no connection to that host."""
204 for h in self._cm.get_all(host):
204 for h in self._cm.get_all(host):
205 self._cm.remove(h)
205 self._cm.remove(h)
206 h.close()
206 h.close()
207
207
208 def close_all(self):
208 def close_all(self):
209 """close all open connections"""
209 """close all open connections"""
210 for host, conns in self._cm.get_all().iteritems():
210 for host, conns in self._cm.get_all().iteritems():
211 for h in conns:
211 for h in conns:
212 self._cm.remove(h)
212 self._cm.remove(h)
213 h.close()
213 h.close()
214
214
215 def _request_closed(self, request, host, connection):
215 def _request_closed(self, request, host, connection):
216 """tells us that this request is now closed and the
216 """tells us that this request is now closed and the
217 connection is ready for another request"""
217 connection is ready for another request"""
218 self._cm.set_ready(connection, 1)
218 self._cm.set_ready(connection, 1)
219
219
220 def _remove_connection(self, host, connection, close=0):
220 def _remove_connection(self, host, connection, close=0):
221 if close:
221 if close:
222 connection.close()
222 connection.close()
223 self._cm.remove(connection)
223 self._cm.remove(connection)
224
224
225 #### Transaction Execution
225 #### Transaction Execution
226 def http_open(self, req):
226 def http_open(self, req):
227 return self.do_open(HTTPConnection, req)
227 return self.do_open(HTTPConnection, req)
228
228
229 def do_open(self, http_class, req):
229 def do_open(self, http_class, req):
230 host = req.get_host()
230 host = req.get_host()
231 if not host:
231 if not host:
232 raise urllib2.URLError('no host given')
232 raise urllib2.URLError('no host given')
233
233
234 try:
234 try:
235 h = self._cm.get_ready_conn(host)
235 h = self._cm.get_ready_conn(host)
236 while h:
236 while h:
237 r = self._reuse_connection(h, req, host)
237 r = self._reuse_connection(h, req, host)
238
238
239 # if this response is non-None, then it worked and we're
239 # if this response is non-None, then it worked and we're
240 # done. Break out, skipping the else block.
240 # done. Break out, skipping the else block.
241 if r:
241 if r:
242 break
242 break
243
243
244 # connection is bad - possibly closed by server
244 # connection is bad - possibly closed by server
245 # discard it and ask for the next free connection
245 # discard it and ask for the next free connection
246 h.close()
246 h.close()
247 self._cm.remove(h)
247 self._cm.remove(h)
248 h = self._cm.get_ready_conn(host)
248 h = self._cm.get_ready_conn(host)
249 else:
249 else:
250 # no (working) free connections were found. Create a new one.
250 # no (working) free connections were found. Create a new one.
251 h = http_class(host)
251 h = http_class(host)
252 if DEBUG:
252 if DEBUG:
253 DEBUG.info("creating new connection to %s (%d)",
253 DEBUG.info("creating new connection to %s (%d)",
254 host, id(h))
254 host, id(h))
255 self._cm.add(host, h, 0)
255 self._cm.add(host, h, 0)
256 self._start_transaction(h, req)
256 self._start_transaction(h, req)
257 r = h.getresponse()
257 r = h.getresponse()
258 except (socket.error, httplib.HTTPException), err:
258 except (socket.error, httplib.HTTPException), err:
259 raise urllib2.URLError(err)
259 raise urllib2.URLError(err)
260
260
261 # if not a persistent connection, don't try to reuse it
261 # if not a persistent connection, don't try to reuse it
262 if r.will_close:
262 if r.will_close:
263 self._cm.remove(h)
263 self._cm.remove(h)
264
264
265 if DEBUG:
265 if DEBUG:
266 DEBUG.info("STATUS: %s, %s", r.status, r.reason)
266 DEBUG.info("STATUS: %s, %s", r.status, r.reason)
267 r._handler = self
267 r._handler = self
268 r._host = host
268 r._host = host
269 r._url = req.get_full_url()
269 r._url = req.get_full_url()
270 r._connection = h
270 r._connection = h
271 r.code = r.status
271 r.code = r.status
272 r.headers = r.msg
272 r.headers = r.msg
273 r.msg = r.reason
273 r.msg = r.reason
274
274
275 if r.status == 200 or not HANDLE_ERRORS:
275 if r.status == 200 or not HANDLE_ERRORS:
276 return r
276 return r
277 else:
277 else:
278 return self.parent.error('http', req, r,
278 return self.parent.error('http', req, r,
279 r.status, r.msg, r.headers)
279 r.status, r.msg, r.headers)
280
280
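# The `while ... else` in do_open() above is easy to misread: the else clause
# runs only when the loop exits because its condition went false, never after
# a break. A standalone sketch, not part of the module, mirroring that shape:
def _while_else_sketch(candidates=('stale', 'stale', 'good')):
    pool = list(candidates)
    conn = None
    while pool:
        c = pool.pop(0)
        if c == 'good':
            conn = c
            break               # skips the else clause below
    else:
        conn = 'newly created'  # runs only if the loop was never broken
    return conn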
281 def _reuse_connection(self, h, req, host):
281 def _reuse_connection(self, h, req, host):
282 """start the transaction with a re-used connection
282 """start the transaction with a re-used connection
283 return a response object (r) upon success or None on failure.
283 return a response object (r) upon success or None on failure.
284 This does NOT close or remove bad connections in cases where
284 This does NOT close or remove bad connections in cases where
285 it returns. However, if an unexpected exception occurs, it
285 it returns. However, if an unexpected exception occurs, it
286 will close and remove the connection before re-raising.
286 will close and remove the connection before re-raising.
287 """
287 """
288 try:
288 try:
289 self._start_transaction(h, req)
289 self._start_transaction(h, req)
290 r = h.getresponse()
290 r = h.getresponse()
291 # note: just because we got something back doesn't mean it
291 # note: just because we got something back doesn't mean it
292 # worked. We'll check the version below, too.
292 # worked. We'll check the version below, too.
293 except (socket.error, httplib.HTTPException):
293 except (socket.error, httplib.HTTPException):
294 r = None
294 r = None
295 except:
295 except:
296 # adding this block just in case we've missed
296 # adding this block just in case we've missed
297 # something. We will still raise the exception, but
297 # something. We will still raise the exception, but
298 # let's try to close the connection and remove it
298 # let's try to close the connection and remove it
299 # first. We previously got into a nasty loop
299 # first. We previously got into a nasty loop
300 # where an exception was uncaught, and so the
300 # where an exception was uncaught, and so the
301 # connection stayed open. On the next try, the
301 # connection stayed open. On the next try, the
302 # same exception was raised, etc. The tradeoff is
302 # same exception was raised, etc. The tradeoff is
303 # that it's now possible this call will raise
303 # that it's now possible this call will raise
304 # a DIFFERENT exception
304 # a DIFFERENT exception
305 if DEBUG:
305 if DEBUG:
306 DEBUG.error("unexpected exception - closing "
306 DEBUG.error("unexpected exception - closing "
307 "connection to %s (%d)", host, id(h))
307 "connection to %s (%d)", host, id(h))
308 self._cm.remove(h)
308 self._cm.remove(h)
309 h.close()
309 h.close()
310 raise
310 raise
311
311
312 if r is None or r.version == 9:
312 if r is None or r.version == 9:
313 # httplib falls back to assuming HTTP 0.9 if it gets a
313 # httplib falls back to assuming HTTP 0.9 if it gets a
314 # bad header back. This is most likely to happen if
314 # bad header back. This is most likely to happen if
315 # the socket has been closed by the server since we
315 # the socket has been closed by the server since we
316 # last used the connection.
316 # last used the connection.
317 if DEBUG:
317 if DEBUG:
318 DEBUG.info("failed to re-use connection to %s (%d)",
318 DEBUG.info("failed to re-use connection to %s (%d)",
319 host, id(h))
319 host, id(h))
320 r = None
320 r = None
321 else:
321 else:
322 if DEBUG:
322 if DEBUG:
323 DEBUG.info("re-using connection to %s (%d)", host, id(h))
323 DEBUG.info("re-using connection to %s (%d)", host, id(h))
324
324
325 return r
325 return r
326
326
327 def _start_transaction(self, h, req):
327 def _start_transaction(self, h, req):
328 # What follows mostly reimplements HTTPConnection.request()
328 # What follows mostly reimplements HTTPConnection.request()
329 # except it adds self.parent.addheaders in the mix.
329 # except it adds self.parent.addheaders in the mix.
330 headers = req.headers.copy()
330 headers = req.headers.copy()
331 if sys.version_info >= (2, 4):
331 if sys.version_info >= (2, 4):
332 headers.update(req.unredirected_hdrs)
332 headers.update(req.unredirected_hdrs)
333 headers.update(self.parent.addheaders)
333 headers.update(self.parent.addheaders)
334 headers = dict((n.lower(), v) for n, v in headers.items())
334 headers = dict((n.lower(), v) for n, v in headers.items())
335 skipheaders = {}
335 skipheaders = {}
336 for n in ('host', 'accept-encoding'):
336 for n in ('host', 'accept-encoding'):
337 if n in headers:
337 if n in headers:
338 skipheaders['skip_' + n.replace('-', '_')] = 1
338 skipheaders['skip_' + n.replace('-', '_')] = 1
339 try:
339 try:
340 if req.has_data():
340 if req.has_data():
341 data = req.get_data()
341 data = req.get_data()
342 h.putrequest('POST', req.get_selector(), **skipheaders)
342 h.putrequest('POST', req.get_selector(), **skipheaders)
343 if 'content-type' not in headers:
343 if 'content-type' not in headers:
344 h.putheader('Content-type',
344 h.putheader('Content-type',
345 'application/x-www-form-urlencoded')
345 'application/x-www-form-urlencoded')
346 if 'content-length' not in headers:
346 if 'content-length' not in headers:
347 h.putheader('Content-length', '%d' % len(data))
347 h.putheader('Content-length', '%d' % len(data))
348 else:
348 else:
349 h.putrequest('GET', req.get_selector(), **skipheaders)
349 h.putrequest('GET', req.get_selector(), **skipheaders)
350 except (socket.error), err:
350 except (socket.error), err:
351 raise urllib2.URLError(err)
351 raise urllib2.URLError(err)
352 for k, v in headers.items():
352 for k, v in headers.items():
353 h.putheader(k, v)
353 h.putheader(k, v)
354 h.endheaders()
354 h.endheaders()
355 if req.has_data():
355 if req.has_data():
356 h.send(data)
356 h.send(data)
357
357
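# What the skip_* keywords built above mean at the httplib layer: a sketch,
# not part of the module, that only buffers a request (endheaders() is left
# out because it would actually send the request over the network).
def _skipheaders_sketch():
    conn = httplib.HTTPConnection('example.com')
    conn.putrequest('GET', '/', skip_host=1, skip_accept_encoding=1)
    conn.putheader('Host', 'example.com')           # supplied by hand instead
    conn.putheader('Accept-Encoding', 'identity')
    return conn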
358 class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
358 class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
359 pass
359 pass
360
360
361 class HTTPResponse(httplib.HTTPResponse):
361 class HTTPResponse(httplib.HTTPResponse):
362 # we need to subclass HTTPResponse in order to
362 # we need to subclass HTTPResponse in order to
363 # 1) add readline() and readlines() methods
363 # 1) add readline() and readlines() methods
364 # 2) add close_connection() method
364 # 2) add close_connection() method
365 # 3) add info() and geturl() methods
365 # 3) add info() and geturl() methods
366
366
367 # in order to add readline(), read must be modified to deal with a
367 # in order to add readline(), read must be modified to deal with a
368 # buffer. example: readline must read a buffer and then spit back
368 # buffer. example: readline must read a buffer and then spit back
369 # one line at a time. The only real alternative is to read one
369 # one line at a time. The only real alternative is to read one
370 # BYTE at a time (ick). Once something has been read, it can't be
370 # BYTE at a time (ick). Once something has been read, it can't be
371 # put back (ok, maybe it can, but that's even uglier than this),
371 # put back (ok, maybe it can, but that's even uglier than this),
372 # so if you THEN do a normal read, you must first take stuff from
372 # so if you THEN do a normal read, you must first take stuff from
373 # the buffer.
373 # the buffer.
374
374
375 # the read method wraps the original to accommodate buffering,
375 # the read method wraps the original to accommodate buffering,
376 # although read() never adds to the buffer.
376 # although read() never adds to the buffer.
377 # Both readline and readlines have been stolen with almost no
377 # Both readline and readlines have been stolen with almost no
378 # modification from socket.py
378 # modification from socket.py
379
379
380
380
381 def __init__(self, sock, debuglevel=0, strict=0, method=None):
381 def __init__(self, sock, debuglevel=0, strict=0, method=None):
382 if method: # the httplib in python 2.3 uses the method arg
382 if method: # the httplib in python 2.3 uses the method arg
383 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
383 httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
384 else: # 2.2 doesn't
384 else: # 2.2 doesn't
385 httplib.HTTPResponse.__init__(self, sock, debuglevel)
385 httplib.HTTPResponse.__init__(self, sock, debuglevel)
386 self.fileno = sock.fileno
386 self.fileno = sock.fileno
387 self.code = None
387 self.code = None
388 self._rbuf = ''
388 self._rbuf = ''
389 self._rbufsize = 8096
389 self._rbufsize = 8096
390 self._handler = None # inserted by the handler later
390 self._handler = None # inserted by the handler later
391 self._host = None # (same)
391 self._host = None # (same)
392 self._url = None # (same)
392 self._url = None # (same)
393 self._connection = None # (same)
393 self._connection = None # (same)
394
394
395 _raw_read = httplib.HTTPResponse.read
395 _raw_read = httplib.HTTPResponse.read
396
396
397 def close(self):
397 def close(self):
398 if self.fp:
398 if self.fp:
399 self.fp.close()
399 self.fp.close()
400 self.fp = None
400 self.fp = None
401 if self._handler:
401 if self._handler:
402 self._handler._request_closed(self, self._host,
402 self._handler._request_closed(self, self._host,
403 self._connection)
403 self._connection)
404
404
405 def close_connection(self):
405 def close_connection(self):
406 self._handler._remove_connection(self._host, self._connection, close=1)
406 self._handler._remove_connection(self._host, self._connection, close=1)
407 self.close()
407 self.close()
408
408
409 def info(self):
409 def info(self):
410 return self.headers
410 return self.headers
411
411
412 def geturl(self):
412 def geturl(self):
413 return self._url
413 return self._url
414
414
415 def read(self, amt=None):
415 def read(self, amt=None):
416 # the _rbuf test is only in this first if for speed. It's not
416 # the _rbuf test is only in this first if for speed. It's not
417 # logically necessary
417 # logically necessary
418 if self._rbuf and amt is not None:
418 if self._rbuf and amt is not None:
419 L = len(self._rbuf)
419 L = len(self._rbuf)
420 if amt > L:
420 if amt > L:
421 amt -= L
421 amt -= L
422 else:
422 else:
423 s = self._rbuf[:amt]
423 s = self._rbuf[:amt]
424 self._rbuf = self._rbuf[amt:]
424 self._rbuf = self._rbuf[amt:]
425 return s
425 return s
426
426
427 s = self._rbuf + self._raw_read(amt)
427 s = self._rbuf + self._raw_read(amt)
428 self._rbuf = ''
428 self._rbuf = ''
429 return s
429 return s
430
430
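# The buffer bookkeeping above in miniature, not part of the module: plain
# strings stand in for the response state after readline() has buffered more
# than the caller then asks for.
def _rbuf_sketch():
    rbuf, amt = 'HTTP body already buffered', 9
    s, rbuf = rbuf[:amt], rbuf[amt:]   # same slicing as read() above
    return s, rbuf                     # ('HTTP body', ' already buffered')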
431 # stolen from Python SVN #68532 to fix issue1088
431 # stolen from Python SVN #68532 to fix issue1088
432 def _read_chunked(self, amt):
432 def _read_chunked(self, amt):
433 chunk_left = self.chunk_left
433 chunk_left = self.chunk_left
434 value = ''
434 value = ''
435
435
436 # XXX This accumulates chunks by repeated string concatenation,
436 # XXX This accumulates chunks by repeated string concatenation,
437 # which is not efficient as the number or size of chunks gets big.
437 # which is not efficient as the number or size of chunks gets big.
438 while True:
438 while True:
439 if chunk_left is None:
439 if chunk_left is None:
440 line = self.fp.readline()
440 line = self.fp.readline()
441 i = line.find(';')
441 i = line.find(';')
442 if i >= 0:
442 if i >= 0:
443 line = line[:i] # strip chunk-extensions
443 line = line[:i] # strip chunk-extensions
444 try:
444 try:
445 chunk_left = int(line, 16)
445 chunk_left = int(line, 16)
446 except ValueError:
446 except ValueError:
447 # close the connection as protocol synchronisation is
447 # close the connection as protocol synchronisation is
448 # probably lost
448 # probably lost
449 self.close()
449 self.close()
450 raise httplib.IncompleteRead(value)
450 raise httplib.IncompleteRead(value)
451 if chunk_left == 0:
451 if chunk_left == 0:
452 break
452 break
453 if amt is None:
453 if amt is None:
454 value += self._safe_read(chunk_left)
454 value += self._safe_read(chunk_left)
455 elif amt < chunk_left:
455 elif amt < chunk_left:
456 value += self._safe_read(amt)
456 value += self._safe_read(amt)
457 self.chunk_left = chunk_left - amt
457 self.chunk_left = chunk_left - amt
458 return value
458 return value
459 elif amt == chunk_left:
459 elif amt == chunk_left:
460 value += self._safe_read(amt)
460 value += self._safe_read(amt)
461 self._safe_read(2) # toss the CRLF at the end of the chunk
461 self._safe_read(2) # toss the CRLF at the end of the chunk
462 self.chunk_left = None
462 self.chunk_left = None
463 return value
463 return value
464 else:
464 else:
465 value += self._safe_read(chunk_left)
465 value += self._safe_read(chunk_left)
466 amt -= chunk_left
466 amt -= chunk_left
467
467
468 # we read the whole chunk, get another
468 # we read the whole chunk, get another
469 self._safe_read(2) # toss the CRLF at the end of the chunk
469 self._safe_read(2) # toss the CRLF at the end of the chunk
470 chunk_left = None
470 chunk_left = None
471
471
472 # read and discard trailer up to the CRLF terminator
472 # read and discard trailer up to the CRLF terminator
473 ### note: we shouldn't have any trailers!
473 ### note: we shouldn't have any trailers!
474 while True:
474 while True:
475 line = self.fp.readline()
475 line = self.fp.readline()
476 if not line:
476 if not line:
477 # a vanishingly small number of sites EOF without
477 # a vanishingly small number of sites EOF without
478 # sending the trailer
478 # sending the trailer
479 break
479 break
480 if line == '\r\n':
480 if line == '\r\n':
481 break
481 break
482
482
483 # we read everything; close the "file"
483 # we read everything; close the "file"
484 self.close()
484 self.close()
485
485
486 return value
486 return value
487
487
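# The wire format _read_chunked() parses, decoded by hand in a sketch that is
# not part of the module: each chunk is a hex size line, CRLF, the data, CRLF,
# and a zero-size chunk terminates the body (no trailers in this example).
def _chunked_sketch():
    buf, out = '4\r\nWiki\r\n6\r\npedia!\r\n0\r\n\r\n', ''
    while True:
        line, buf = buf.split('\r\n', 1)
        size = int(line, 16)
        if size == 0:
            break
        out = out + buf[:size]
        buf = buf[size + 2:]           # skip the chunk data and its CRLF
    return out                         # 'Wikipedia!'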
488 def readline(self, limit=-1):
488 def readline(self, limit=-1):
489 i = self._rbuf.find('\n')
489 i = self._rbuf.find('\n')
490 while i < 0 and not (0 < limit <= len(self._rbuf)):
490 while i < 0 and not (0 < limit <= len(self._rbuf)):
491 new = self._raw_read(self._rbufsize)
491 new = self._raw_read(self._rbufsize)
492 if not new:
492 if not new:
493 break
493 break
494 i = new.find('\n')
494 i = new.find('\n')
495 if i >= 0:
495 if i >= 0:
496 i = i + len(self._rbuf)
496 i = i + len(self._rbuf)
497 self._rbuf = self._rbuf + new
497 self._rbuf = self._rbuf + new
498 if i < 0:
498 if i < 0:
499 i = len(self._rbuf)
499 i = len(self._rbuf)
500 else:
500 else:
501 i = i + 1
501 i = i + 1
502 if 0 <= limit < len(self._rbuf):
502 if 0 <= limit < len(self._rbuf):
503 i = limit
503 i = limit
504 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
504 data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
505 return data
505 return data
506
506
507 def readlines(self, sizehint = 0):
507 def readlines(self, sizehint = 0):
508 total = 0
508 total = 0
509 list = []
509 list = []
510 while 1:
510 while 1:
511 line = self.readline()
511 line = self.readline()
512 if not line:
512 if not line:
513 break
513 break
514 list.append(line)
514 list.append(line)
515 total += len(line)
515 total += len(line)
516 if sizehint and total >= sizehint:
516 if sizehint and total >= sizehint:
517 break
517 break
518 return list
518 return list
519
519
520 def safesend(self, str):
520 def safesend(self, str):
521 """Send `str' to the server.
521 """Send `str' to the server.
522
522
523 Shamelessly ripped off from httplib to patch a bad behavior.
523 Shamelessly ripped off from httplib to patch a bad behavior.
524 """
524 """
525 # _broken_pipe_resp is an attribute we set in this function
525 # _broken_pipe_resp is an attribute we set in this function
526 # if the socket is closed while we're sending data but
526 # if the socket is closed while we're sending data but
527 # the server sent us a response before hanging up.
527 # the server sent us a response before hanging up.
528 # In that case, we want to pretend to send the rest of the
528 # In that case, we want to pretend to send the rest of the
529 # outgoing data, and then let the user use getresponse()
529 # outgoing data, and then let the user use getresponse()
530 # (which we wrap) to get this last response before
530 # (which we wrap) to get this last response before
531 # opening a new socket.
531 # opening a new socket.
532 if getattr(self, '_broken_pipe_resp', None) is not None:
532 if getattr(self, '_broken_pipe_resp', None) is not None:
533 return
533 return
534
534
535 if self.sock is None:
535 if self.sock is None:
536 if self.auto_open:
536 if self.auto_open:
537 self.connect()
537 self.connect()
538 else:
538 else:
539 raise httplib.NotConnected()
539 raise httplib.NotConnected()
540
540
541 # send the data to the server. if we get a broken pipe, then close
541 # send the data to the server. if we get a broken pipe, then close
542 # the socket. we want to reconnect when somebody tries to send again.
542 # the socket. we want to reconnect when somebody tries to send again.
543 #
543 #
544 # NOTE: we DO propagate the error, though, because we cannot simply
544 # NOTE: we DO propagate the error, though, because we cannot simply
545 # ignore the error... the caller will know if they can retry.
545 # ignore the error... the caller will know if they can retry.
546 if self.debuglevel > 0:
546 if self.debuglevel > 0:
547 print "send:", repr(str)
547 print "send:", repr(str)
548 try:
548 try:
549 blocksize = 8192
549 blocksize = 8192
550 if hasattr(str,'read') :
550 if hasattr(str,'read') :
551 if self.debuglevel > 0:
551 if self.debuglevel > 0:
552 print "sendIng a read()able"
552 print "sendIng a read()able"
553 data=str.read(blocksize)
553 data = str.read(blocksize)
554 while data:
554 while data:
555 self.sock.sendall(data)
555 self.sock.sendall(data)
556 data=str.read(blocksize)
556 data = str.read(blocksize)
557 else:
557 else:
558 self.sock.sendall(str)
558 self.sock.sendall(str)
559 except socket.error, v:
559 except socket.error, v:
560 reraise = True
560 reraise = True
561 if v[0] == errno.EPIPE: # Broken pipe
561 if v[0] == errno.EPIPE: # Broken pipe
562 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
562 if self._HTTPConnection__state == httplib._CS_REQ_SENT:
563 self._broken_pipe_resp = None
563 self._broken_pipe_resp = None
564 self._broken_pipe_resp = self.getresponse()
564 self._broken_pipe_resp = self.getresponse()
565 reraise = False
565 reraise = False
566 self.close()
566 self.close()
567 if reraise:
567 if reraise:
568 raise
568 raise
569
569
570 def wrapgetresponse(cls):
570 def wrapgetresponse(cls):
571 """Wraps getresponse in cls with a broken-pipe sane version.
571 """Wraps getresponse in cls with a broken-pipe sane version.
572 """
572 """
573 def safegetresponse(self):
573 def safegetresponse(self):
574 # In safesend() we might set the _broken_pipe_resp
574 # In safesend() we might set the _broken_pipe_resp
575 # attribute, in which case the socket has already
575 # attribute, in which case the socket has already
576 # been closed and we just need to give them the response
576 # been closed and we just need to give them the response
577 # back. Otherwise, we use the normal response path.
577 # back. Otherwise, we use the normal response path.
578 r = getattr(self, '_broken_pipe_resp', None)
578 r = getattr(self, '_broken_pipe_resp', None)
579 if r is not None:
579 if r is not None:
580 return r
580 return r
581 return cls.getresponse(self)
581 return cls.getresponse(self)
582 safegetresponse.__doc__ = cls.getresponse.__doc__
582 safegetresponse.__doc__ = cls.getresponse.__doc__
583 return safegetresponse
583 return safegetresponse
584
584
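# The wrapping trick above exercised on a toy class, not part of the module:
# the wrapper consults the cached attribute first and otherwise defers to the
# unwrapped method of the class it was built from.
def _wrapgetresponse_sketch():
    class base(object):
        def getresponse(self):
            return 'fresh response'
    class conn(base):
        getresponse = wrapgetresponse(base)
    c = conn()
    first = c.getresponse()               # 'fresh response'
    c._broken_pipe_resp = 'early response saved by send()'
    second = c.getresponse()              # the cached early response
    return first, second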
585 class HTTPConnection(httplib.HTTPConnection):
585 class HTTPConnection(httplib.HTTPConnection):
586 # use the modified response class
586 # use the modified response class
587 response_class = HTTPResponse
587 response_class = HTTPResponse
588 send = safesend
588 send = safesend
589 getresponse = wrapgetresponse(httplib.HTTPConnection)
589 getresponse = wrapgetresponse(httplib.HTTPConnection)
590
590
591
591
592 #########################################################################
592 #########################################################################
593 ##### TEST FUNCTIONS
593 ##### TEST FUNCTIONS
594 #########################################################################
594 #########################################################################
595
595
596 def error_handler(url):
596 def error_handler(url):
597 global HANDLE_ERRORS
597 global HANDLE_ERRORS
598 orig = HANDLE_ERRORS
598 orig = HANDLE_ERRORS
599 keepalive_handler = HTTPHandler()
599 keepalive_handler = HTTPHandler()
600 opener = urllib2.build_opener(keepalive_handler)
600 opener = urllib2.build_opener(keepalive_handler)
601 urllib2.install_opener(opener)
601 urllib2.install_opener(opener)
602 pos = {0: 'off', 1: 'on'}
602 pos = {0: 'off', 1: 'on'}
603 for i in (0, 1):
603 for i in (0, 1):
604 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
604 print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
605 HANDLE_ERRORS = i
605 HANDLE_ERRORS = i
606 try:
606 try:
607 fo = urllib2.urlopen(url)
607 fo = urllib2.urlopen(url)
608 fo.read()
608 fo.read()
609 fo.close()
609 fo.close()
610 try:
610 try:
611 status, reason = fo.status, fo.reason
611 status, reason = fo.status, fo.reason
612 except AttributeError:
612 except AttributeError:
613 status, reason = None, None
613 status, reason = None, None
614 except IOError, e:
614 except IOError, e:
615 print " EXCEPTION: %s" % e
615 print " EXCEPTION: %s" % e
616 raise
616 raise
617 else:
617 else:
618 print " status = %s, reason = %s" % (status, reason)
618 print " status = %s, reason = %s" % (status, reason)
619 HANDLE_ERRORS = orig
619 HANDLE_ERRORS = orig
620 hosts = keepalive_handler.open_connections()
620 hosts = keepalive_handler.open_connections()
621 print "open connections:", hosts
621 print "open connections:", hosts
622 keepalive_handler.close_all()
622 keepalive_handler.close_all()
623
623
624 def md5(s):
624 def md5(s):
625 try:
625 try:
626 from hashlib import md5 as _md5
626 from hashlib import md5 as _md5
627 except ImportError:
627 except ImportError:
628 from md5 import md5 as _md5
628 from md5 import md5 as _md5
629 global md5
629 global md5
630 md5 = _md5
630 md5 = _md5
631 return _md5(s)
631 return _md5(s)
632
632
633 def continuity(url):
633 def continuity(url):
634 format = '%25s: %s'
634 format = '%25s: %s'
635
635
636 # first fetch the file with the normal http handler
636 # first fetch the file with the normal http handler
637 opener = urllib2.build_opener()
637 opener = urllib2.build_opener()
638 urllib2.install_opener(opener)
638 urllib2.install_opener(opener)
639 fo = urllib2.urlopen(url)
639 fo = urllib2.urlopen(url)
640 foo = fo.read()
640 foo = fo.read()
641 fo.close()
641 fo.close()
642 m = md5(foo)
642 m = md5(foo)
643 print format % ('normal urllib', m.hexdigest())
643 print format % ('normal urllib', m.hexdigest())
644
644
645 # now install the keepalive handler and try again
645 # now install the keepalive handler and try again
646 opener = urllib2.build_opener(HTTPHandler())
646 opener = urllib2.build_opener(HTTPHandler())
647 urllib2.install_opener(opener)
647 urllib2.install_opener(opener)
648
648
649 fo = urllib2.urlopen(url)
649 fo = urllib2.urlopen(url)
650 foo = fo.read()
650 foo = fo.read()
651 fo.close()
651 fo.close()
652 m = md5(foo)
652 m = md5(foo)
653 print format % ('keepalive read', m.hexdigest())
653 print format % ('keepalive read', m.hexdigest())
654
654
655 fo = urllib2.urlopen(url)
655 fo = urllib2.urlopen(url)
656 foo = ''
656 foo = ''
657 while 1:
657 while 1:
658 f = fo.readline()
658 f = fo.readline()
659 if f:
659 if f:
660 foo = foo + f
660 foo = foo + f
661 else: break
661 else: break
662 fo.close()
662 fo.close()
663 m = md5(foo)
663 m = md5(foo)
664 print format % ('keepalive readline', m.hexdigest())
664 print format % ('keepalive readline', m.hexdigest())
665
665
666 def comp(N, url):
666 def comp(N, url):
667 print ' making %i connections to:\n %s' % (N, url)
667 print ' making %i connections to:\n %s' % (N, url)
668
668
669 sys.stdout.write(' first using the normal urllib handlers')
669 sys.stdout.write(' first using the normal urllib handlers')
670 # first use normal opener
670 # first use normal opener
671 opener = urllib2.build_opener()
671 opener = urllib2.build_opener()
672 urllib2.install_opener(opener)
672 urllib2.install_opener(opener)
673 t1 = fetch(N, url)
673 t1 = fetch(N, url)
674 print ' TIME: %.3f s' % t1
674 print ' TIME: %.3f s' % t1
675
675
676 sys.stdout.write(' now using the keepalive handler ')
676 sys.stdout.write(' now using the keepalive handler ')
677 # now install the keepalive handler and try again
677 # now install the keepalive handler and try again
678 opener = urllib2.build_opener(HTTPHandler())
678 opener = urllib2.build_opener(HTTPHandler())
679 urllib2.install_opener(opener)
679 urllib2.install_opener(opener)
680 t2 = fetch(N, url)
680 t2 = fetch(N, url)
681 print ' TIME: %.3f s' % t2
681 print ' TIME: %.3f s' % t2
682 print ' improvement factor: %.2f' % (t1 / t2)
682 print ' improvement factor: %.2f' % (t1 / t2)
683
683
684 def fetch(N, url, delay=0):
684 def fetch(N, url, delay=0):
685 import time
685 import time
686 lens = []
686 lens = []
687 starttime = time.time()
687 starttime = time.time()
688 for i in range(N):
688 for i in range(N):
689 if delay and i > 0:
689 if delay and i > 0:
690 time.sleep(delay)
690 time.sleep(delay)
691 fo = urllib2.urlopen(url)
691 fo = urllib2.urlopen(url)
692 foo = fo.read()
692 foo = fo.read()
693 fo.close()
693 fo.close()
694 lens.append(len(foo))
694 lens.append(len(foo))
695 diff = time.time() - starttime
695 diff = time.time() - starttime
696
696
697 j = 0
697 j = 0
698 for i in lens[1:]:
698 for i in lens[1:]:
699 j = j + 1
699 j = j + 1
700 if not i == lens[0]:
700 if not i == lens[0]:
701 print "WARNING: inconsistent length on read %i: %i" % (j, i)
701 print "WARNING: inconsistent length on read %i: %i" % (j, i)
702
702
703 return diff
703 return diff
704
704
705 def test_timeout(url):
705 def test_timeout(url):
706 global DEBUG
706 global DEBUG
707 dbbackup = DEBUG
707 dbbackup = DEBUG
708 class FakeLogger:
708 class FakeLogger:
709 def debug(self, msg, *args):
709 def debug(self, msg, *args):
710 print msg % args
710 print msg % args
711 info = warning = error = debug
711 info = warning = error = debug
712 DEBUG = FakeLogger()
712 DEBUG = FakeLogger()
713 print " fetching the file to establish a connection"
713 print " fetching the file to establish a connection"
714 fo = urllib2.urlopen(url)
714 fo = urllib2.urlopen(url)
715 data1 = fo.read()
715 data1 = fo.read()
716 fo.close()
716 fo.close()
717
717
718 i = 20
718 i = 20
719 print " waiting %i seconds for the server to close the connection" % i
719 print " waiting %i seconds for the server to close the connection" % i
720 while i > 0:
720 while i > 0:
721 sys.stdout.write('\r %2i' % i)
721 sys.stdout.write('\r %2i' % i)
722 sys.stdout.flush()
722 sys.stdout.flush()
723 time.sleep(1)
723 time.sleep(1)
724 i -= 1
724 i -= 1
725 sys.stderr.write('\r')
725 sys.stderr.write('\r')
726
726
727 print " fetching the file a second time"
727 print " fetching the file a second time"
728 fo = urllib2.urlopen(url)
728 fo = urllib2.urlopen(url)
729 data2 = fo.read()
729 data2 = fo.read()
730 fo.close()
730 fo.close()
731
731
732 if data1 == data2:
732 if data1 == data2:
733 print ' data are identical'
733 print ' data are identical'
734 else:
734 else:
735 print ' ERROR: DATA DIFFER'
735 print ' ERROR: DATA DIFFER'
736
736
737 DEBUG = dbbackup
737 DEBUG = dbbackup
738
738
739
739
740 def test(url, N=10):
740 def test(url, N=10):
741 print "checking error handler (do this on a non-200)"
741 print "checking error handler (do this on a non-200)"
742 try: error_handler(url)
742 try: error_handler(url)
743 except IOError:
743 except IOError:
744 print "exiting - exception will prevent further tests"
744 print "exiting - exception will prevent further tests"
745 sys.exit()
745 sys.exit()
746 print
746 print
747 print "performing continuity test (making sure stuff isn't corrupted)"
747 print "performing continuity test (making sure stuff isn't corrupted)"
748 continuity(url)
748 continuity(url)
749 print
749 print
750 print "performing speed comparison"
750 print "performing speed comparison"
751 comp(N, url)
751 comp(N, url)
752 print
752 print
753 print "performing dropped-connection check"
753 print "performing dropped-connection check"
754 test_timeout(url)
754 test_timeout(url)
755
755
756 if __name__ == '__main__':
756 if __name__ == '__main__':
757 import time
757 import time
758 import sys
758 import sys
759 try:
759 try:
760 N = int(sys.argv[1])
760 N = int(sys.argv[1])
761 url = sys.argv[2]
761 url = sys.argv[2]
762 except:
762 except:
763 print "%s <integer> <url>" % sys.argv[0]
763 print "%s <integer> <url>" % sys.argv[0]
764 else:
764 else:
765 test(url, N)
765 test(url, N)
@@ -1,265 +1,265 b''
1 # templatekw.py - common changeset template keywords
1 # templatekw.py - common changeset template keywords
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex
8 from node import hex
9 import encoding, patch, util, error
9 import encoding, patch, util, error
10
10
11 def showlist(name, values, plural=None, **args):
11 def showlist(name, values, plural=None, **args):
12 '''expand set of values.
12 '''expand set of values.
13 name is name of key in template map.
13 name is name of key in template map.
14 values is list of strings or dicts.
14 values is list of strings or dicts.
15 plural is plural of name, if not simply name + 's'.
15 plural is plural of name, if not simply name + 's'.
16
16
17 expansion works like this, given name 'foo'.
17 expansion works like this, given name 'foo'.
18
18
19 if values is empty, expand 'no_foos'.
19 if values is empty, expand 'no_foos'.
20
20
21 if 'foo' not in template map, return values as a string,
21 if 'foo' not in template map, return values as a string,
22 joined by space.
22 joined by space.
23
23
24 expand 'start_foos'.
24 expand 'start_foos'.
25
25
26 for each value, expand 'foo'. if 'last_foo' in template
26 for each value, expand 'foo'. if 'last_foo' in template
27 map, expand it instead of 'foo' for last key.
27 map, expand it instead of 'foo' for last key.
28
28
29 expand 'end_foos'.
29 expand 'end_foos'.
30 '''
30 '''
31 templ = args['templ']
31 templ = args['templ']
32 if plural:
32 if plural:
33 names = plural
33 names = plural
34 else: names = name + 's'
34 else: names = name + 's'
35 if not values:
35 if not values:
36 noname = 'no_' + names
36 noname = 'no_' + names
37 if noname in templ:
37 if noname in templ:
38 yield templ(noname, **args)
38 yield templ(noname, **args)
39 return
39 return
40 if name not in templ:
40 if name not in templ:
41 if isinstance(values[0], str):
41 if isinstance(values[0], str):
42 yield ' '.join(values)
42 yield ' '.join(values)
43 else:
43 else:
44 for v in values:
44 for v in values:
45 yield dict(v, **args)
45 yield dict(v, **args)
46 return
46 return
47 startname = 'start_' + names
47 startname = 'start_' + names
48 if startname in templ:
48 if startname in templ:
49 yield templ(startname, **args)
49 yield templ(startname, **args)
50 vargs = args.copy()
50 vargs = args.copy()
51 def one(v, tag=name):
51 def one(v, tag=name):
52 try:
52 try:
53 vargs.update(v)
53 vargs.update(v)
54 except (AttributeError, ValueError):
54 except (AttributeError, ValueError):
55 try:
55 try:
56 for a, b in v:
56 for a, b in v:
57 vargs[a] = b
57 vargs[a] = b
58 except ValueError:
58 except ValueError:
59 vargs[name] = v
59 vargs[name] = v
60 return templ(tag, **vargs)
60 return templ(tag, **vargs)
61 lastname = 'last_' + name
61 lastname = 'last_' + name
62 if lastname in templ:
62 if lastname in templ:
63 last = values.pop()
63 last = values.pop()
64 else:
64 else:
65 last = None
65 last = None
66 for v in values:
66 for v in values:
67 yield one(v)
67 yield one(v)
68 if last is not None:
68 if last is not None:
69 yield one(last, tag=lastname)
69 yield one(last, tag=lastname)
70 endname = 'end_' + names
70 endname = 'end_' + names
71 if endname in templ:
71 if endname in templ:
72 yield templ(endname, **args)
72 yield templ(endname, **args)
73
73
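# A toy expansion of showlist() above, not part of templatekw.py: the fake
# templater only needs to support `in` and call, here backed by a tiny
# hard-coded map using %-style substitution.
def _showlist_sketch():
    class faketempl(object):
        _map = {'file': '%(file)s ', 'start_files': 'files: '}
        def __contains__(self, name):
            return name in self._map
        def __call__(self, name, **map):
            return self._map[name] % map
    return ''.join(showlist('file', ['a.txt', 'b.txt'], templ=faketempl()))
    # -> 'files: a.txt b.txt '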
74 def getfiles(repo, ctx, revcache):
74 def getfiles(repo, ctx, revcache):
75 if 'files' not in revcache:
75 if 'files' not in revcache:
76 revcache['files'] = repo.status(ctx.parents()[0].node(),
76 revcache['files'] = repo.status(ctx.parents()[0].node(),
77 ctx.node())[:3]
77 ctx.node())[:3]
78 return revcache['files']
78 return revcache['files']
79
79
80 def getlatesttags(repo, ctx, cache):
80 def getlatesttags(repo, ctx, cache):
81 '''return date, distance and name for the latest tag of rev'''
81 '''return date, distance and name for the latest tag of rev'''
82
82
83 if 'latesttags' not in cache:
83 if 'latesttags' not in cache:
84 # Cache mapping from rev to a tuple with tag date, tag
84 # Cache mapping from rev to a tuple with tag date, tag
85 # distance and tag name
85 # distance and tag name
86 cache['latesttags'] = {-1: (0, 0, 'null')}
86 cache['latesttags'] = {-1: (0, 0, 'null')}
87 latesttags = cache['latesttags']
87 latesttags = cache['latesttags']
88
88
89 rev = ctx.rev()
89 rev = ctx.rev()
90 todo = [rev]
90 todo = [rev]
91 while todo:
91 while todo:
92 rev = todo.pop()
92 rev = todo.pop()
93 if rev in latesttags:
93 if rev in latesttags:
94 continue
94 continue
95 ctx = repo[rev]
95 ctx = repo[rev]
96 tags = [t for t in ctx.tags() if repo.tagtype(t) == 'global']
96 tags = [t for t in ctx.tags() if repo.tagtype(t) == 'global']
97 if tags:
97 if tags:
98 latesttags[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
98 latesttags[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
99 continue
99 continue
100 try:
100 try:
101 # The tuples are laid out so the right one can be found by
101 # The tuples are laid out so the right one can be found by
102 # comparison.
102 # comparison.
103 pdate, pdist, ptag = max(
103 pdate, pdist, ptag = max(
104 latesttags[p.rev()] for p in ctx.parents())
104 latesttags[p.rev()] for p in ctx.parents())
105 except KeyError:
105 except KeyError:
106 # Cache miss - recurse
106 # Cache miss - recurse
107 todo.append(rev)
107 todo.append(rev)
108 todo.extend(p.rev() for p in ctx.parents())
108 todo.extend(p.rev() for p in ctx.parents())
109 continue
109 continue
110 latesttags[rev] = pdate, pdist + 1, ptag
110 latesttags[rev] = pdate, pdist + 1, ptag
111 return latesttags[rev]
111 return latesttags[rev]
112
112
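# Why the (date, distance, name) layout above lets max() do the work, shown
# on made-up values (not part of the module): the candidate with the newest
# tag date wins, regardless of its distance.
def _latesttags_sketch():
    parents = [(1260000000, 3, 'v1.0'), (1260500000, 0, 'v1.1')]
    return max(parents)        # (1260500000, 0, 'v1.1')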
113 def getrenamedfn(repo, endrev=None):
113 def getrenamedfn(repo, endrev=None):
114 rcache = {}
114 rcache = {}
115 if endrev is None:
115 if endrev is None:
116 endrev = len(repo)
116 endrev = len(repo)
117
117
118 def getrenamed(fn, rev):
118 def getrenamed(fn, rev):
119 '''looks up all renames for a file (up to endrev) the first
119 '''looks up all renames for a file (up to endrev) the first
120 time the file is given. It indexes on the changerev and only
120 time the file is given. It indexes on the changerev and only
121 parses the manifest if linkrev != changerev.
121 parses the manifest if linkrev != changerev.
122 Returns rename info for fn at changerev rev.'''
122 Returns rename info for fn at changerev rev.'''
123 if fn not in rcache:
123 if fn not in rcache:
124 rcache[fn] = {}
124 rcache[fn] = {}
125 fl = repo.file(fn)
125 fl = repo.file(fn)
126 for i in fl:
126 for i in fl:
127 lr = fl.linkrev(i)
127 lr = fl.linkrev(i)
128 renamed = fl.renamed(fl.node(i))
128 renamed = fl.renamed(fl.node(i))
129 rcache[fn][lr] = renamed
129 rcache[fn][lr] = renamed
130 if lr >= endrev:
130 if lr >= endrev:
131 break
131 break
132 if rev in rcache[fn]:
132 if rev in rcache[fn]:
133 return rcache[fn][rev]
133 return rcache[fn][rev]
134
134
135 # If linkrev != rev (i.e. rev not found in rcache) fallback to
135 # If linkrev != rev (i.e. rev not found in rcache) fallback to
136 # filectx logic.
136 # filectx logic.
137 try:
137 try:
138 return repo[rev][fn].renamed()
138 return repo[rev][fn].renamed()
139 except error.LookupError:
139 except error.LookupError:
140 return None
140 return None
141
141
142 return getrenamed
142 return getrenamed
143
143
144
144
145 def showauthor(repo, ctx, templ, **args):
145 def showauthor(repo, ctx, templ, **args):
146 return ctx.user()
146 return ctx.user()
147
147
148 def showbranches(**args):
148 def showbranches(**args):
149 branch = args['ctx'].branch()
149 branch = args['ctx'].branch()
150 if branch != 'default':
150 if branch != 'default':
151 branch = encoding.tolocal(branch)
151 branch = encoding.tolocal(branch)
152 return showlist('branch', [branch], plural='branches', **args)
152 return showlist('branch', [branch], plural='branches', **args)
153
153
154 def showdate(repo, ctx, templ, **args):
154 def showdate(repo, ctx, templ, **args):
155 return ctx.date()
155 return ctx.date()
156
156
157 def showdescription(repo, ctx, templ, **args):
157 def showdescription(repo, ctx, templ, **args):
158 return ctx.description().strip()
158 return ctx.description().strip()
159
159
160 def showdiffstat(repo, ctx, templ, **args):
160 def showdiffstat(repo, ctx, templ, **args):
161 diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node())
161 diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node())
162 files, adds, removes = 0, 0, 0
162 files, adds, removes = 0, 0, 0
163 for i in patch.diffstatdata(util.iterlines(diff)):
163 for i in patch.diffstatdata(util.iterlines(diff)):
164 files += 1
164 files += 1
165 adds += i[1]
165 adds += i[1]
166 removes += i[2]
166 removes += i[2]
167 return '%s: +%s/-%s' % (files, adds, removes)
167 return '%s: +%s/-%s' % (files, adds, removes)
168
168
169 def showextras(**args):
169 def showextras(**args):
170 templ = args['templ']
170 templ = args['templ']
171 for key, value in sorted(args['ctx'].extra().items()):
171 for key, value in sorted(args['ctx'].extra().items()):
172 args = args.copy()
172 args = args.copy()
173 args.update(dict(key=key, value=value))
173 args.update(dict(key=key, value=value))
174 yield templ('extra', **args)
174 yield templ('extra', **args)
175
175
176 def showfileadds(**args):
176 def showfileadds(**args):
177 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
177 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
178 return showlist('file_add', getfiles(repo, ctx, revcache)[1], **args)
178 return showlist('file_add', getfiles(repo, ctx, revcache)[1], **args)
179
179
180 def showfilecopies(**args):
180 def showfilecopies(**args):
181 cache, ctx= args['cache'], args['ctx']
181 cache, ctx = args['cache'], args['ctx']
182 copies = args['revcache'].get('copies')
182 copies = args['revcache'].get('copies')
183 if copies is None:
183 if copies is None:
184 if 'getrenamed' not in cache:
184 if 'getrenamed' not in cache:
185 cache['getrenamed'] = getrenamedfn(args['repo'])
185 cache['getrenamed'] = getrenamedfn(args['repo'])
186 copies = []
186 copies = []
187 getrenamed = cache['getrenamed']
187 getrenamed = cache['getrenamed']
188 for fn in ctx.files():
188 for fn in ctx.files():
189 rename = getrenamed(fn, ctx.rev())
189 rename = getrenamed(fn, ctx.rev())
190 if rename:
190 if rename:
191 copies.append((fn, rename[0]))
191 copies.append((fn, rename[0]))
192
192
193 c = [{'name': x[0], 'source': x[1]} for x in copies]
193 c = [{'name': x[0], 'source': x[1]} for x in copies]
194 return showlist('file_copy', c, plural='file_copies', **args)
194 return showlist('file_copy', c, plural='file_copies', **args)
195
195
196 # showfilecopiesswitch() displays file copies only if copy records are
196 # showfilecopiesswitch() displays file copies only if copy records are
197 # provided before calling the templater, usually with a --copies
197 # provided before calling the templater, usually with a --copies
198 # command line switch.
198 # command line switch.
199 def showfilecopiesswitch(**args):
199 def showfilecopiesswitch(**args):
200 copies = args['revcache'].get('copies') or []
200 copies = args['revcache'].get('copies') or []
201 c = [{'name': x[0], 'source': x[1]} for x in copies]
201 c = [{'name': x[0], 'source': x[1]} for x in copies]
202 return showlist('file_copy', c, plural='file_copies', **args)
202 return showlist('file_copy', c, plural='file_copies', **args)
203
203
204 def showfiledels(**args):
204 def showfiledels(**args):
205 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
205 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
206 return showlist('file_del', getfiles(repo, ctx, revcache)[2], **args)
206 return showlist('file_del', getfiles(repo, ctx, revcache)[2], **args)
207
207
208 def showfilemods(**args):
208 def showfilemods(**args):
209 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
209 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
210 return showlist('file_mod', getfiles(repo, ctx, revcache)[0], **args)
210 return showlist('file_mod', getfiles(repo, ctx, revcache)[0], **args)
211
211
212 def showfiles(**args):
212 def showfiles(**args):
213 return showlist('file', args['ctx'].files(), **args)
213 return showlist('file', args['ctx'].files(), **args)
214
214
215 def showlatesttag(repo, ctx, templ, cache, **args):
215 def showlatesttag(repo, ctx, templ, cache, **args):
216 return getlatesttags(repo, ctx, cache)[2]
216 return getlatesttags(repo, ctx, cache)[2]
217
217
218 def showlatesttagdistance(repo, ctx, templ, cache, **args):
218 def showlatesttagdistance(repo, ctx, templ, cache, **args):
219 return getlatesttags(repo, ctx, cache)[1]
219 return getlatesttags(repo, ctx, cache)[1]
220
220
221 def showmanifest(**args):
221 def showmanifest(**args):
222 repo, ctx, templ = args['repo'], args['ctx'], args['templ']
222 repo, ctx, templ = args['repo'], args['ctx'], args['templ']
223 args = args.copy()
223 args = args.copy()
224 args.update(dict(rev=repo.manifest.rev(ctx.changeset()[0]),
224 args.update(dict(rev=repo.manifest.rev(ctx.changeset()[0]),
225 node=hex(ctx.changeset()[0])))
225 node=hex(ctx.changeset()[0])))
226 return templ('manifest', **args)
226 return templ('manifest', **args)
227
227
228 def shownode(repo, ctx, templ, **args):
228 def shownode(repo, ctx, templ, **args):
229 return ctx.hex()
229 return ctx.hex()
230
230
231 def showrev(repo, ctx, templ, **args):
231 def showrev(repo, ctx, templ, **args):
232 return ctx.rev()
232 return ctx.rev()
233
233
234 def showtags(**args):
234 def showtags(**args):
235 return showlist('tag', args['ctx'].tags(), **args)
235 return showlist('tag', args['ctx'].tags(), **args)
236
236
237 # keywords are callables like:
237 # keywords are callables like:
238 # fn(repo, ctx, templ, cache, revcache, **args)
238 # fn(repo, ctx, templ, cache, revcache, **args)
239 # with:
239 # with:
240 # repo - current repository instance
240 # repo - current repository instance
241 # ctx - the changectx being displayed
241 # ctx - the changectx being displayed
242 # templ - the templater instance
242 # templ - the templater instance
243 # cache - a cache dictionary for the whole templater run
243 # cache - a cache dictionary for the whole templater run
244 # revcache - a cache dictionary for the current revision
244 # revcache - a cache dictionary for the current revision
245 keywords = {
245 keywords = {
246 'author': showauthor,
246 'author': showauthor,
247 'branches': showbranches,
247 'branches': showbranches,
248 'date': showdate,
248 'date': showdate,
249 'desc': showdescription,
249 'desc': showdescription,
250 'diffstat': showdiffstat,
250 'diffstat': showdiffstat,
251 'extras': showextras,
251 'extras': showextras,
252 'file_adds': showfileadds,
252 'file_adds': showfileadds,
253 'file_copies': showfilecopies,
253 'file_copies': showfilecopies,
254 'file_copies_switch': showfilecopiesswitch,
254 'file_copies_switch': showfilecopiesswitch,
255 'file_dels': showfiledels,
255 'file_dels': showfiledels,
256 'file_mods': showfilemods,
256 'file_mods': showfilemods,
257 'files': showfiles,
257 'files': showfiles,
258 'latesttag': showlatesttag,
258 'latesttag': showlatesttag,
259 'latesttagdistance': showlatesttagdistance,
259 'latesttagdistance': showlatesttagdistance,
260 'manifest': showmanifest,
260 'manifest': showmanifest,
261 'node': shownode,
261 'node': shownode,
262 'rev': showrev,
262 'rev': showrev,
263 'tags': showtags,
263 'tags': showtags,
264 }
264 }
265
265
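# A hedged sketch of how an extension could register its own keyword using
# the calling convention documented above; the keyword name 'firstline' is
# hypothetical, not something defined here.
def _addkeyword_sketch():
    def showfirstline(repo, ctx, templ, **args):
        return ctx.description().split('\n')[0]
    keywords['firstline'] = showfirstline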
@@ -1,366 +1,365 b''
1 # windows.py - Windows utility function implementations for Mercurial
1 # windows.py - Windows utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, error
9 import osutil, error
10 import errno, msvcrt, os, re, sys, random, subprocess
10 import errno, msvcrt, os, re, sys, random, subprocess
11
11
12 nulldev = 'NUL:'
12 nulldev = 'NUL:'
13 umask = 002
13 umask = 002
14
14
15 # wrap osutil.posixfile to provide friendlier exceptions
15 # wrap osutil.posixfile to provide friendlier exceptions
16 def posixfile(name, mode='r', buffering=-1):
16 def posixfile(name, mode='r', buffering=-1):
17 try:
17 try:
18 return osutil.posixfile(name, mode, buffering)
18 return osutil.posixfile(name, mode, buffering)
19 except WindowsError, err:
19 except WindowsError, err:
20 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
20 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
21 posixfile.__doc__ = osutil.posixfile.__doc__
21 posixfile.__doc__ = osutil.posixfile.__doc__
22
22
23 class winstdout(object):
23 class winstdout(object):
24 '''stdout on windows misbehaves if sent through a pipe'''
24 '''stdout on windows misbehaves if sent through a pipe'''
25
25
26 def __init__(self, fp):
26 def __init__(self, fp):
27 self.fp = fp
27 self.fp = fp
28
28
29 def __getattr__(self, key):
29 def __getattr__(self, key):
30 return getattr(self.fp, key)
30 return getattr(self.fp, key)
31
31
32 def close(self):
32 def close(self):
33 try:
33 try:
34 self.fp.close()
34 self.fp.close()
35 except: pass
35 except: pass
36
36
37 def write(self, s):
37 def write(self, s):
38 try:
38 try:
39 # This is a workaround for the "Not enough space" error on
39 # This is a workaround for the "Not enough space" error on
40 # writing large size of data to console.
40 # writing large size of data to console.
41 limit = 16000
41 limit = 16000
42 l = len(s)
42 l = len(s)
43 start = 0
43 start = 0
44 self.softspace = 0;
44 self.softspace = 0
45 while start < l:
45 while start < l:
46 end = start + limit
46 end = start + limit
47 self.fp.write(s[start:end])
47 self.fp.write(s[start:end])
48 start = end
48 start = end
49 except IOError, inst:
49 except IOError, inst:
50 if inst.errno != 0:
50 if inst.errno != 0:
51 raise
51 raise
52 self.close()
52 self.close()
53 raise IOError(errno.EPIPE, 'Broken pipe')
53 raise IOError(errno.EPIPE, 'Broken pipe')
54
54
55 def flush(self):
55 def flush(self):
56 try:
56 try:
57 return self.fp.flush()
57 return self.fp.flush()
58 except IOError, inst:
58 except IOError, inst:
59 if inst.errno != errno.EINVAL:
59 if inst.errno != errno.EINVAL:
60 raise
60 raise
61 self.close()
61 self.close()
62 raise IOError(errno.EPIPE, 'Broken pipe')
62 raise IOError(errno.EPIPE, 'Broken pipe')
63
63
64 sys.stdout = winstdout(sys.stdout)
64 sys.stdout = winstdout(sys.stdout)
65
65
66 def _is_win_9x():
66 def _is_win_9x():
67 '''return true if run on windows 95, 98 or me.'''
67 '''return true if run on windows 95, 98 or me.'''
68 try:
68 try:
69 return sys.getwindowsversion()[3] == 1
69 return sys.getwindowsversion()[3] == 1
70 except AttributeError:
70 except AttributeError:
71 return 'command' in os.environ.get('comspec', '')
71 return 'command' in os.environ.get('comspec', '')
72
72
73 def openhardlinks():
73 def openhardlinks():
74 return not _is_win_9x() and "win32api" in globals()
74 return not _is_win_9x() and "win32api" in globals()
75
75
76 def system_rcpath():
76 def system_rcpath():
77 try:
77 try:
78 return system_rcpath_win32()
78 return system_rcpath_win32()
79 except:
79 except:
80 return [r'c:\mercurial\mercurial.ini']
80 return [r'c:\mercurial\mercurial.ini']
81
81
82 def user_rcpath():
82 def user_rcpath():
83 '''return os-specific hgrc search path to the user dir'''
83 '''return os-specific hgrc search path to the user dir'''
84 try:
84 try:
85 path = user_rcpath_win32()
85 path = user_rcpath_win32()
86 except:
86 except:
87 home = os.path.expanduser('~')
87 home = os.path.expanduser('~')
88 path = [os.path.join(home, 'mercurial.ini'),
88 path = [os.path.join(home, 'mercurial.ini'),
89 os.path.join(home, '.hgrc')]
89 os.path.join(home, '.hgrc')]
90 userprofile = os.environ.get('USERPROFILE')
90 userprofile = os.environ.get('USERPROFILE')
91 if userprofile:
91 if userprofile:
92 path.append(os.path.join(userprofile, 'mercurial.ini'))
92 path.append(os.path.join(userprofile, 'mercurial.ini'))
93 path.append(os.path.join(userprofile, '.hgrc'))
93 path.append(os.path.join(userprofile, '.hgrc'))
94 return path
94 return path
95
95
96 def parse_patch_output(output_line):
96 def parse_patch_output(output_line):
97 """parses the output produced by patch and returns the filename"""
97 """parses the output produced by patch and returns the filename"""
98 pf = output_line[14:]
98 pf = output_line[14:]
99 if pf[0] == '`':
99 if pf[0] == '`':
100 pf = pf[1:-1] # Remove the quotes
100 pf = pf[1:-1] # Remove the quotes
101 return pf
101 return pf
102
102
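# Two worked calls, not part of the module, showing the fixed 14-character
# offset ("patching file ") and the backtick quoting GNU patch uses for
# unusual names:
def _parse_patch_output_sketch():
    plain = parse_patch_output("patching file foo/bar.c")      # 'foo/bar.c'
    quoted = parse_patch_output("patching file `foo bar.c'")   # 'foo bar.c'
    return plain, quoted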
103 def sshargs(sshcmd, host, user, port):
103 def sshargs(sshcmd, host, user, port):
104 '''Build argument list for ssh or Plink'''
104 '''Build argument list for ssh or Plink'''
105 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
105 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
106 args = user and ("%s@%s" % (user, host)) or host
106 args = user and ("%s@%s" % (user, host)) or host
107 return port and ("%s %s %s" % (args, pflag, port)) or args
107 return port and ("%s %s %s" % (args, pflag, port)) or args
108
108
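# Worked calls, not part of the module: plink gets the capital -P port flag,
# and user/port are simply omitted when not given.
def _sshargs_sketch():
    a = sshargs('ssh', 'example.com', 'bob', '2222')      # 'bob@example.com -p 2222'
    b = sshargs('plink.exe', 'example.com', None, None)   # 'example.com'
    return a, b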
def testpid(pid):
    '''return False if pid dead, True if running or not known'''
    return True

def set_flags(f, l, x):
    pass

def set_binary(fd):
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    if hasattr(fd, 'fileno') and fd.fileno() >= 0:
        msvcrt.setmode(fd.fileno(), os.O_BINARY)

def pconvert(path):
    return '/'.join(path.split(os.sep))

def localpath(path):
    return path.replace('/', '\\')

def normpath(path):
    return pconvert(os.path.normpath(path))

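# Illustration only (not part of windows.py): pconvert() normalizes to
# forward slashes and localpath() back to backslashes.  The pconvert and
# normpath examples assume they run on Windows, where os.sep is '\\'.
def _demo_paths():
    assert localpath('foo/bar/baz') == 'foo\\bar\\baz'
    if os.sep == '\\':
        assert pconvert('foo\\bar\\baz') == 'foo/bar/baz'
        assert normpath('foo\\.\\bar\\..\\baz') == 'foo/baz'
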
def realpath(path):
    '''
    Returns the true, canonical file system path equivalent to the given
    path.
    '''
    # TODO: There may be a more clever way to do this that also handles other,
    # less common file systems.
    return os.path.normpath(os.path.normcase(os.path.realpath(path)))

def samestat(s1, s2):
    return False

# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
#   quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
#   backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
def shellquote(s):
    global _quotere
    if _quotere is None:
        _quotere = re.compile(r'(\\*)("|\\$)')
    return '"%s"' % _quotere.sub(r'\1\1\\\2', s)

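# Illustration only (not part of windows.py): shellquote() wraps its argument
# in double quotes and escapes embedded quotes and trailing backslashes per
# the rules described above; _demo_shellquote is a hypothetical helper.
def _demo_shellquote():
    assert shellquote('foo bar') == '"foo bar"'
    assert shellquote('say "hi"') == '"say \\"hi\\""'
    # a trailing backslash is doubled so it cannot escape the closing quote
    assert shellquote('dir\\') == '"dir\\\\"'
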
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # The extra quotes are needed because popen* runs the command
    # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
    return '"' + cmd + '"'

def popen(command, mode='r'):
    # Work around "popen spawned process may not write to stdout
    # under windows"
    # http://bugs.python.org/issue1366
    command += " 2> %s" % nulldev
    return os.popen(quotecommand(command), mode)

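# Illustration only (not part of windows.py): the string handed to os.popen
# ends up wrapped in an extra pair of quotes with stderr redirected to the
# null device.  nulldev is expected to be defined earlier in this module;
# its exact value is not shown here, so the helper below is only a sketch
# of what popen() builds before the os.popen call.
def _demo_popen_cmdline(command):
    return quotecommand(command + " 2> %s" % nulldev)
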
def explain_exit(code):
    return _("exited with status %d") % code, code

# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
def isowner(st):
    return True

def find_exe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
    if os.path.splitext(command)[1].lower() in pathexts:
        pathexts = ['']

    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            executable = pathcommand + ext
            if os.path.exists(executable):
                return executable
        return None

    if os.sep in command:
        return findexisting(command)

    for path in os.environ.get('PATH', '').split(os.pathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    return findexisting(os.path.expanduser(os.path.expandvars(command)))

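# Illustration only (not part of windows.py): find_exe() resolves a bare
# command name through PATH and PATHEXT, much like typing it at a cmd.exe
# prompt.  _demo_find_exe is a hypothetical helper; the printed results
# depend on the local installation.
def _demo_find_exe():
    for cmd in ('cmd', 'notepad', 'no-such-command'):
        print '%s -> %s' % (cmd, find_exe(cmd))
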
def set_signal_handler():
    try:
        set_signal_handler_win32()
    except NameError:
        pass

def statfiles(files):
    '''Stat each file in files and yield stat or None if file does not exist.
    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    ncase = os.path.normcase
    # (the unused "sep = os.sep" assignment that used to sit here is the
    # line removed by this changeset)
    dircache = {} # dirname -> filename -> status | None if file does not exist
    for nf in files:
        nf = ncase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = '.'
        cache = dircache.get(dir, None)
        if cache is None:
            try:
                dmap = dict([(ncase(n), s)
                             for n, k, s in osutil.listdir(dir, True)])
            except OSError, err:
                # handle directory not found in Python versions prior to 2.5
                # Python <= 2.4 returns native Windows code 3 in errno
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (3, errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)

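# Illustration only (not part of windows.py): statfiles() yields one result
# per requested name, None for missing files, while stating each directory
# only once.  It relies on Mercurial's osutil extension being importable;
# the file names below are made up for this sketch.
def _demo_statfiles():
    names = ['setup.py', 'mercurial/util.py', 'does-not-exist.txt']
    for name, st in zip(names, statfiles(names)):
        print name, (st is None and 'missing' or 'exists')
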
def getuser():
    '''return name of current user'''
    raise error.Abort(_('user name not available - set USERNAME '
                        'environment variable'))

def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    return None

def _removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    if osutil.listdir(name):
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    while head and tail:
        try:
            if osutil.listdir(head):
                return
            os.rmdir(head)
        except:
            break
        head, tail = os.path.split(head)

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        _removedirs(os.path.dirname(f))
    except OSError:
        pass

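# Illustration only (not part of windows.py): unlink() prunes parent
# directories that become empty, walking upward until it hits a non-empty
# one.  _demo_unlink is a hypothetical helper; it builds a throw-away tree
# and relies on Mercurial's osutil extension being importable.
def _demo_unlink():
    import tempfile
    base = tempfile.mkdtemp()
    nested = os.path.join(base, 'a', 'b')
    os.makedirs(nested)
    target = os.path.join(nested, 'f.txt')
    open(target, 'w').close()
    unlink(target)
    # the now-empty parents (including base itself) have been pruned
    print os.path.exists(os.path.join(base, 'a'))
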
def rename(src, dst):
    '''atomically rename file src to dst, replacing dst if it exists'''
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)

        # On windows, rename to existing file is not allowed, so we
        # must delete destination first. But if a file is open, unlink
        # schedules it for delete but does not delete it. Rename
        # happens immediately even for open files, so we rename
        # destination to a temporary name, then delete that. Then
        # rename is safe to do.
        # The temporary name is chosen at random to avoid the situation
        # where a file is left lying around from a previous aborted run.
        # The usual race condition this introduces can't be avoided as
        # we need the name to rename into, and not the file itself. Due
        # to the nature of the operation however, any races will at worst
        # lead to the rename failing and the current operation aborting.

        def tempname(prefix):
            for tries in xrange(10):
                temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
                if not os.path.exists(temp):
                    return temp
            raise IOError, (errno.EEXIST, "No usable temporary filename found")

        temp = tempname(dst)
        os.rename(dst, temp)
        try:
            os.unlink(temp)
        except:
            # Some rude AV-scanners on Windows may cause the unlink to
            # fail. Not aborting here just leaks the temp file, whereas
            # aborting at this point may leave serious inconsistencies.
            # Ideally, we would notify the user here.
            pass
        os.rename(src, dst)

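# Illustration only (not part of windows.py): rename() silently replaces an
# existing destination, which plain os.rename() refuses to do on Windows.
# _demo_rename is a hypothetical, self-contained check of that behaviour.
def _demo_rename():
    import tempfile
    d = tempfile.mkdtemp()
    src = os.path.join(d, 'src.txt')
    dst = os.path.join(d, 'dst.txt')
    f = open(src, 'w')
    f.write('new')
    f.close()
    f = open(dst, 'w')
    f.write('old')
    f.close()
    rename(src, dst)
    assert open(dst).read() == 'new'
    assert not os.path.exists(src)
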
def spawndetached(args):
    # No standard library function really spawns a fully detached
    # process under win32 because they allocate pipes or other objects
    # to handle standard streams communications. Passing these objects
    # to the child process requires handle inheritance to be enabled
    # which makes really detached processes impossible.
    class STARTUPINFO:
        dwFlags = subprocess.STARTF_USESHOWWINDOW
        hStdInput = None
        hStdOutput = None
        hStdError = None
        wShowWindow = subprocess.SW_HIDE

    args = subprocess.list2cmdline(args)
    # Not running the command in shell mode makes python26 hang when
    # writing to hgweb output socket.
    comspec = os.environ.get("COMSPEC", "cmd.exe")
    args = comspec + " /c " + args
    hp, ht, pid, tid = subprocess.CreateProcess(
        None, args,
        # no special security
        None, None,
        # Do not inherit handles
        0,
        # DETACHED_PROCESS
        0x00000008,
        os.environ,
        os.getcwd(),
        STARTUPINFO())
    return pid

def gethgcmd():
    return [sys.executable] + sys.argv[:1]

try:
    # override functions with win32 versions if possible
    from win32 import *
except ImportError:
    pass

expandglobs = True