##// END OF EJS Templates
i18n: mark strings for translation in Mercurial
Martin Geisler -
r6953:63b5f4c7 default
parent child Browse files
Show More
@@ -1,294 +1,294 b''
1 """
1 """
2 bundlerepo.py - repository class for viewing uncompressed bundles
2 bundlerepo.py - repository class for viewing uncompressed bundles
3
3
4 This provides a read-only repository interface to bundles as if
4 This provides a read-only repository interface to bundles as if
5 they were part of the actual repository.
5 they were part of the actual repository.
6
6
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
7 Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import hex, nullid, short
13 from node import hex, nullid, short
14 from i18n import _
14 from i18n import _
15 import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff
15 import changegroup, util, os, struct, bz2, zlib, tempfile, shutil, mdiff
16 import repo, localrepo, changelog, manifest, filelog, revlog
16 import repo, localrepo, changelog, manifest, filelog, revlog
17
17
class bundlerevlog(revlog.revlog):
    """A revlog that serves revisions directly out of an uncompressed bundle.

    On construction, the changegroup chunks found in bundlefile are appended
    to the in-memory index; their revision data is later read back from the
    bundle file on demand instead of from a revlog data file.  The revlog is
    strictly read-only: all mutators raise NotImplementedError.
    """
    def __init__(self, opener, indexfile, bundlefile,
                 linkmapper=None):
        # How it works:
        # to retrieve a revision, we need to know the offset of
        # the revision in the bundlefile (an opened file).
        #
        # We store this offset in the index (start), to differentiate a
        # rev in the bundle and from a rev in the revlog, we check
        # len(index[r]). If the tuple is bigger than 7, it is a bundle
        # (it is bigger since we store the node to which the delta is)
        #
        revlog.revlog.__init__(self, opener, indexfile)
        self.bundlefile = bundlefile
        # rev -> node the bundled delta is based on
        self.basemap = {}
        def chunkpositer():
            # yield each changegroup chunk with its start offset in the file
            for chunk in changegroup.chunkiter(bundlefile):
                pos = bundlefile.tell()
                yield chunk, pos - len(chunk)
        n = len(self)
        prev = None
        for chunk, start in chunkpositer():
            size = len(chunk)
            if size < 80:
                # each chunk starts with an 80-byte node header
                raise util.Abort(_("invalid changegroup"))
            start += 80
            size -= 80
            node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
            if node in self.nodemap:
                prev = node
                continue
            for p in (p1, p2):
                if not p in self.nodemap:
                    # fixed: report the parent that is actually missing,
                    # not unconditionally p1
                    raise revlog.LookupError(p, self.indexfile,
                                             _("unknown parent"))
            if linkmapper is None:
                link = n
            else:
                link = linkmapper(cs)

            # first delta in a group is against the last node of the
            # previous group (or p1 at the very start)
            if not prev:
                prev = p1
            # start, size, full unc. size, base (unused), link, p1, p2, node
            e = (revlog.offset_type(start, 0), size, -1, -1, link,
                 self.rev(p1), self.rev(p2), node)
            self.basemap[n] = prev
            self.index.insert(-1, e)
            self.nodemap[node] = n
            prev = node
            n += 1

    def bundle(self, rev):
        """is rev from the bundle"""
        if rev < 0:
            return False
        return rev in self.basemap
    def bundlebase(self, rev):
        """return the node this bundled rev's delta is based on"""
        return self.basemap[rev]
    def chunk(self, rev, df=None, cachelen=4096):
        # Warning: in case of bundle, the diff is against bundlebase,
        # not against rev - 1
        # XXX: could use some caching
        if not self.bundle(rev):
            return revlog.revlog.chunk(self, rev, df)
        self.bundlefile.seek(self.start(rev))
        return self.bundlefile.read(self.length(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if self.bundle(rev1) and self.bundle(rev2):
            # hot path for bundle: the stored chunk already is the delta
            revb = self.rev(self.bundlebase(rev2))
            if revb == rev1:
                return self.chunk(rev2)
        elif not self.bundle(rev1) and not self.bundle(rev2):
            return revlog.revlog.revdiff(self, rev1, rev2)

        # mixed case: compute the diff from the full texts
        return mdiff.textdiff(self.revision(self.node(rev1)),
                              self.revision(self.node(rev2)))

    def revision(self, node):
        """return an uncompressed revision of a given node"""
        if node == nullid: return ""

        text = None
        chain = []
        iter_node = node
        rev = self.rev(iter_node)
        # reconstruct the revision if it is from a changegroup: walk the
        # delta chain back until we hit a cached or on-disk revision
        while self.bundle(rev):
            if self._cache and self._cache[0] == iter_node:
                text = self._cache[2]
                break
            chain.append(rev)
            iter_node = self.bundlebase(rev)
            rev = self.rev(iter_node)
        if text is None:
            text = revlog.revlog.revision(self, iter_node)

        # apply the collected deltas, innermost base first
        while chain:
            delta = self.chunk(chain.pop())
            text = mdiff.patches(text, [delta])

        p1, p2 = self.parents(node)
        if node != revlog.hash(text, p1, p2):
            raise revlog.RevlogError(_("integrity check failed on %s:%d")
                                     % (self.datafile, self.rev(node)))

        self._cache = (node, self.rev(node), text)
        return text

    # bundle repositories are read-only: refuse all mutation
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
136
136
class bundlechangelog(bundlerevlog, changelog.changelog):
    # changelog whose revision data comes from a bundle file
    def __init__(self, opener, bundlefile):
        # changelog.__init__ must run first: it sets self.indexfile,
        # which bundlerevlog.__init__ then reads
        changelog.changelog.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile)
141
141
class bundlemanifest(bundlerevlog, manifest.manifest):
    # manifest whose revision data comes from a bundle file
    def __init__(self, opener, bundlefile, linkmapper):
        # manifest.__init__ must run first so self.indexfile is set
        # before bundlerevlog parses the bundle; linkmapper maps
        # changeset nodes to linkrevs
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
147
147
class bundlefilelog(bundlerevlog, filelog.filelog):
    # filelog whose revision data comes from a bundle file
    def __init__(self, opener, path, bundlefile, linkmapper):
        # filelog.__init__ must run first so self.indexfile is set
        # before bundlerevlog parses the bundle
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, bundlefile,
                              linkmapper)
153
153
class bundlerepository(localrepo.localrepository):
    """A read-only repository formed by a local repository plus a bundle.

    If no repository exists at *path*, an empty temporary parent
    repository is created (and removed again in __del__).  Compressed
    bundles are decompressed into a temporary uncompressed bundle file.
    """
    def __init__(self, ui, path, bundlename):
        self._tempparent = None
        try:
            localrepo.localrepository.__init__(self, ui, path)
        except repo.RepoError:
            # no repo at path: back the bundle with a throwaway empty
            # repository in a temporary directory
            self._tempparent = tempfile.mkdtemp()
            tmprepo = localrepo.instance(ui,self._tempparent,1)
            localrepo.localrepository.__init__(self, ui, self._tempparent)

        if path:
            self._url = 'bundle:' + path + '+' + bundlename
        else:
            self._url = 'bundle:' + bundlename

        self.tempfile = None
        self.bundlefile = open(bundlename, "rb")
        # 6-byte magic: "HG10" + 2-byte compression tag
        header = self.bundlefile.read(6)
        if not header.startswith("HG"):
            raise util.Abort(_("%s: not a Mercurial bundle file") % bundlename)
        elif not header.startswith("HG10"):
            raise util.Abort(_("%s: unknown bundle version") % bundlename)
        elif (header == "HG10BZ") or (header == "HG10GZ"):
            # compressed bundle: decompress the payload into a temporary
            # uncompressed (HG10UN) file and read from that instead
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            def generator(f):
                if header == "HG10BZ":
                    zd = bz2.BZ2Decompressor()
                    # the stored stream lacks the "BZ" magic; feed it back
                    zd.decompress("BZ")
                elif header == "HG10GZ":
                    zd = zlib.decompressobj()
                for chunk in f:
                    yield zd.decompress(chunk)
            gen = generator(util.filechunkiter(self.bundlefile, 4096))

            try:
                fptemp.write("HG10UN")
                for chunk in gen:
                    fptemp.write(chunk)
            finally:
                fptemp.close()
                self.bundlefile.close()

            self.bundlefile = open(self.tempfile, "rb")
            # seek right after the header
            self.bundlefile.seek(6)
        elif header == "HG10UN":
            # nothing to do
            pass
        else:
            raise util.Abort(_("%s: unknown bundle compression type")
                             % bundlename)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}

    def __getattr__(self, name):
        # lazily create changelog/manifest and record the bundle offsets;
        # the bundle is read sequentially, so the changelog section must
        # be consumed before the manifest, and that before the filelogs
        if name == 'changelog':
            self.changelog = bundlechangelog(self.sopener, self.bundlefile)
            self.manstart = self.bundlefile.tell()
            return self.changelog
        if name == 'manifest':
            self.bundlefile.seek(self.manstart)
            self.manifest = bundlemanifest(self.sopener, self.bundlefile,
                                           self.changelog.rev)
            self.filestart = self.bundlefile.tell()
            return self.manifest
        if name == 'manstart':
            # force changelog creation, which sets self.manstart
            self.changelog
            return self.manstart
        if name == 'filestart':
            # force manifest creation, which sets self.filestart
            self.manifest
            return self.filestart
        return localrepo.localrepository.__getattr__(self, name)

    def url(self):
        return self._url

    def file(self, f):
        if not self.bundlefilespos:
            # first call: index the start offset of every per-file group
            self.bundlefile.seek(self.filestart)
            while 1:
                chunk = changegroup.getchunk(self.bundlefile)
                if not chunk:
                    break
                # chunk here is the filename; position follows it
                self.bundlefilespos[chunk] = self.bundlefile.tell()
                # skip the file's delta chunks to reach the next filename
                for c in changegroup.chunkiter(self.bundlefile):
                    pass

        if f[0] == '/':
            f = f[1:]
        if f in self.bundlefilespos:
            self.bundlefile.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundlefile,
                                 self.changelog.rev)
        else:
            # file untouched by the bundle: serve it from the parent repo
            return filelog.filelog(self.sopener, f)

    def close(self):
        """Close assigned bundle file immediately."""
        self.bundlefile.close()

    def __del__(self):
        # best-effort cleanup: close the bundle file, remove the
        # decompressed temp file and any temporary parent repository
        bundlefile = getattr(self, 'bundlefile', None)
        if bundlefile and not bundlefile.closed:
            bundlefile.close()
        tempfile = getattr(self, 'tempfile', None)
        if tempfile is not None:
            os.unlink(tempfile)
        if self._tempparent:
            shutil.rmtree(self._tempparent, True)

    def cancopy(self):
        # a bundle repo is virtual; hardlink-based copies cannot work
        return False
269
269
def instance(ui, path, create):
    """Open a bundlerepository for *path*.

    *path* is either a plain bundle file name or a 'bundle:repo+file'
    URL.  Creating a new bundle repository is not supported.
    """
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    parentpath = ui.config("bundle", "mainreporoot", "")
    if parentpath:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if parentpath == cwd:
            parentpath = ''
        else:
            prefix = os.path.join(cwd, '')
            if parentpath.startswith(prefix):
                parentpath = parentpath[len(prefix):]
    path = util.drop_scheme('file', path)
    if not path.startswith('bundle:'):
        # bare bundle file name
        return bundlerepository(ui, parentpath, path)
    # bundle:... URL, optionally 'repopath+bundlename'
    path = util.drop_scheme('bundle', path)
    parts = path.split("+", 1)
    if len(parts) == 2:
        repopath, bundlename = parts
    else:
        repopath, bundlename = parentpath, parts[0]
    return bundlerepository(ui, repopath, bundlename)
@@ -1,1183 +1,1183 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
class UnknownCommand(Exception):
    """Exception raised if command is not in the command table."""
    # raised with the unknown command name as its only argument
17 """Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
    """Exception raised if command shortcut matches more than one command."""
    # raised with (command_prefix, sorted_list_of_matches)
19 """Exception raised if command shortcut matches more than one command."""
20
20
def findpossible(ui, cmd, table):
    """Map each command matching *cmd* to (aliases, command table entry).

    An exact alias match wins; otherwise, unless ui.strict is set, any
    alias having *cmd* as a prefix matches.  Debug commands (and their
    aliases) are returned only when no normal command matches.
    """
    normal = {}
    debug = {}
    for entry in table.keys():
        aliases = entry.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        elif not ui.config("ui", "strict"):
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debug[found] = (aliases, table[entry])
        else:
            normal[found] = (aliases, table[entry])

    if not normal and debug:
        return debug
    return normal
49
49
def findcmd(ui, cmd, table):
    """Return (aliases, command table entry) for command string."""
    matches = findpossible(ui, cmd, table)

    if cmd in matches:
        # an exact name always wins over prefix matches
        return matches[cmd]

    nmatches = len(matches)
    if nmatches > 1:
        names = matches.keys()
        names.sort()
        raise AmbiguousCommand(cmd, names)
    if nmatches == 1:
        return matches.values()[0]

    raise UnknownCommand(cmd)
66
66
def bail_if_changed(repo):
    """Abort when the working directory is not clean.

    A second dirstate parent means an uncommitted merge; any modified,
    added, removed or deleted files mean uncommitted changes.
    """
    parents = repo.dirstate.parents()
    if parents[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    changes = repo.status()[:4]
    if any(changes):
        raise util.Abort(_("outstanding uncommitted changes"))
73
73
def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts['message']
    logfile = opts['logfile']

    # -m and -l cannot be combined
    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            # '-' means read the message from standard input
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message
92
92
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # no limit requested: effectively unlimited
        return sys.maxint
    try:
        limit = int(limit)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
105
105
def setremoteconfig(ui, opts):
    "copy remote options to ui tree"
    # forward each remote-related command line option into the ui config
    for opt in ('ssh', 'remotecmd'):
        value = opts.get(opt)
        if value:
            ui.setconfig("ui", opt, value)
112
112
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # empty spec part falls back to defval; 0 is a real revision
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # no revisions given: first working dir parent, working dir
        return repo.dirstate.parents()[0], None

    end = None
    nrevs = len(revs)
    if nrevs == 1:
        spec = revs[0]
        if revrangesep in spec:
            first, second = spec.split(revrangesep, 1)
            start = revfix(repo, first, 0)
            end = revfix(repo, second, len(repo) - 1)
        else:
            start = revfix(repo, spec, None)
    elif nrevs == 2:
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
140
140
def revrange(repo, revs):
    """Return revision numbers for a list of revision specifications,
    in the order given, with duplicates dropped."""

    def revfix(repo, val, defval):
        # empty spec part falls back to defval; 0 is a real revision
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen = {}
    result = []
    def record(rev):
        # append rev unless it has already been produced
        if rev not in seen:
            seen[rev] = 1
            result.append(rev)

    for spec in revs:
        if revrangesep not in spec:
            record(revfix(repo, spec, None))
            continue
        # 'a:b' range, either end may be empty
        first, last = spec.split(revrangesep, 1)
        first = revfix(repo, first, 0)
        last = revfix(repo, last, len(repo) - 1)
        if first > last:
            step = -1
        else:
            step = 1
        for rev in xrange(first, last + step, step):
            record(rev)

    return result
169
169
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output file name pattern *pat*.

    Supported escapes (each only when its data is available):
      %% literal '%'          %b basename of the repo root
      %H full hex node        %h short hex node   %R revision number
      %r revision number zero-padded to *revwidth*
      %N *total*              %n *seqno*, zero-padded when total is known
      %s basename, %d dirname ('.' if empty), %p full path of *pathname*

    Aborts on an escape that is not defined for the given arguments.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        # when both are known, pad the sequence number to the total's width
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the escape character following '%'
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output file name") %
                         inst.args[0])
214
214
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file (or file-like object) for pattern *pat*.

    An empty pattern or '-' maps to stdout/stdin depending on *mode*;
    a file-like object with a matching read/write capability is passed
    through unchanged; anything else is treated as a make_filename()
    pattern and opened with *mode*.
    """
    if not pat or pat == '-':
        if 'w' in mode:
            return sys.stdout
        return sys.stdin
    if hasattr(pat, 'write') and 'w' in mode:
        return pat
    if hasattr(pat, 'read') and 'r' in mode:
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
226
226
def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    """Build a matcher for pats that warns (rather than aborts) on bad files."""
    if default == 'relpath' and not globbed:
        pats = util.expand_glob(pats or [])
    matcher = _match.match(repo.root, repo.getcwd(), pats,
                           opts.get('include'), opts.get('exclude'),
                           default)
    def warnbad(f, msg):
        # report the problem on the ui but keep walking
        repo.ui.warn("%s: %s\n" % (matcher.rel(f), msg))
        return False
    matcher.bad = warnbad
    return matcher
237
237
def matchall(repo):
    """Return a matcher accepting every file in the repository."""
    cwd = repo.getcwd()
    return _match.always(repo.root, cwd)
240
240
def matchfiles(repo, files):
    """Return a matcher accepting exactly the given list of files."""
    cwd = repo.getcwd()
    return _match.exact(repo.root, cwd, files)
243
243
def findrenames(repo, added=None, removed=None, threshold=0.5):
    '''find renamed files -- yields (before, after, score) tuples

    For each added file, compare its working-directory contents against
    every removed file's contents at the dirstate parent; the removed
    file with the highest similarity score at or above threshold is
    reported as the rename source.  When added/removed are not supplied,
    they are taken from repo.status().
    '''
    if added is None or removed is None:
        added, removed = repo.status()[1:3]
    ctx = repo['.']
    for a in added:
        aa = repo.wread(a)
        # splitting the added file into lines depends only on aa, so
        # hoist it out of the per-removed-candidate loop
        alines = mdiff.splitnewlines(aa)
        bestname, bestscore = None, threshold
        for r in removed:
            rr = ctx.filectx(r).data()

            # bdiff.blocks() returns blocks of matching lines
            # count the number of bytes in each
            equal = 0
            matches = bdiff.blocks(aa, rr)
            for x1, x2, y1, y2 in matches:
                for line in alines[x1:x2]:
                    equal += len(line)

            lengths = len(aa) + len(rr)
            if lengths:
                # ratio of matched bytes to total bytes in both files
                myscore = equal * 2.0 / lengths
                if myscore >= bestscore:
                    bestname, bestscore = r, myscore
        if bestname:
            yield bestname, a, bestscore
271
271
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule addition of unknown files and removal of missing ones.

    Files matched by pats that are not in the dirstate are added;
    tracked files whose working copy is gone (or has turned into a
    directory) are removed.  With similarity > 0, removed files that
    closely match an added file are recorded as renames via
    findrenames().  dry_run/similarity default to the values in opts.
    """
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    add, remove = [], []
    mapping = {}  # abs path -> (relative path, matched-exactly flag)
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except:
            # NOTE(review): bare except treats any audit failure as a
            # bad path; it also swallows KeyboardInterrupt/SystemExit --
            # consider narrowing to the exceptions audit_path raises.
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            add.append(abs)
            # reuse exact computed above instead of calling m.exact again
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            remove.append(abs)
            mapping[abs] = rel, exact
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
    if not dry_run:
        repo.remove(remove)
        repo.add(add)
    if similarity > 0:
        for old, new, score in findrenames(repo, add, remove, similarity):
            oldrel, oldexact = mapping[old]
            newrel, newexact = mapping[new]
            if repo.ui.verbose or not oldexact or not newexact:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (oldrel, newrel, score * 100))
            if not dry_run:
                repo.copy(old, new)
314
314
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) the files matched by pats.

    The last element of pats is the destination; the rest are sources.
    Returns the number of failed copies.  Honors opts: 'after' (record
    copies already done in the working dir), 'dry_run', and 'force'.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abs target -> abs source, used to detect collisions
    after = opts.get("after")
    dryrun = opts.get("dry_run")

    def walkpat(pat):
        # collect copyable sources matching pat, warning about files
        # that are unknown ('?') or marked removed ('r')
        srcs = []
        m = match(repo, [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in '?r':
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy and fix up dirstate; returns True on failure
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if (not after and exists or after and state in 'mn'):
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after: only record copies whose target already exists
            if not exists:
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            action = rename and "moving" or "copying"
            ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a':
                # source was only added; no copy data can be recorded
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
                if abstarget not in repo.dirstate and not dryrun:
                    repo.add([abstarget])
            elif not dryrun:
                repo.copy(origsrc, abstarget)

        if rename and not dryrun:
            repo.remove([abssrc], not after)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                # keep the source directory's own name under dest
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if util.patkind(pat, None)[0]:
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest
                    # when stripped at striplen -- used to guess which
                    # layout the earlier copy actually produced
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = util.expand_glob(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or util.patkind(pats[0], None)[0]:
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    # with --after, targets are derived from what already happened in
    # the working directory rather than from the source layout
    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors
517
517
def service(opts, parentfn=None, initfn=None, runfn=None):
    '''Run a command as a service.

    With --daemon (and no --daemon-pipefds yet), re-spawn ourselves as a
    detached child carrying --daemon-pipefds, then wait on a pipe for
    the child's ready byte before returning parentfn(pid) or exiting.
    In the child (--daemon-pipefds set): run initfn, write the pid file,
    detach from the session, signal readiness on the pipe, redirect the
    standard fds to the null device, and finally call runfn.
    '''

    if opts['daemon'] and not opts['daemon_pipefds']:
        # parent side: spawn the real daemon and hand it the pipe fds
        rfd, wfd = os.pipe()
        args = sys.argv[:]
        args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
        # Don't pass --cwd to the child process, because we've already
        # changed directory.
        for i in xrange(1,len(args)):
            if args[i].startswith('--cwd='):
                del args[i]
                break
            elif args[i].startswith('--cwd'):
                # separate-argument form: drop the flag and its value
                del args[i:i+2]
                break
        pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
                         args[0], args)
        os.close(wfd)
        # block until the child writes its ready byte
        os.read(rfd, 1)
        if parentfn:
            return parentfn(pid)
        else:
            os._exit(0)

    if initfn:
        initfn()

    if opts['pid_file']:
        fp = open(opts['pid_file'], 'w')
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    if opts['daemon_pipefds']:
        # child side: detach and tell the waiting parent we are ready
        rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
        os.close(rfd)
        try:
            os.setsid()
        except AttributeError:
            # setsid is not available on this platform (e.g. Windows)
            pass
        os.write(wfd, 'y')
        os.close(wfd)
        sys.stdout.flush()
        sys.stderr.flush()
        # point stdin/stdout/stderr at the null device
        fd = os.open(util.nulldev, os.O_RDWR)
        if fd != 0: os.dup2(fd, 0)
        if fd != 1: os.dup2(fd, 1)
        if fd != 2: os.dup2(fd, 2)
        if fd not in (0, 1, 2): os.close(fd)

    if runfn:
        return runfn()
570
570
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, show() captures output per-rev in self.hunk
        # instead of writing it immediately; flush() emits it later
        self.buffered = buffered
        self.patch = patch
        self.header = {}  # rev -> buffered header text
        self.hunk = {}    # rev -> buffered changeset text
        self.lastheader = None

    def flush(self, rev):
        """Emit buffered output for rev; return 1 if anything was written."""
        if rev in self.header:
            h = self.header[rev]
            # only write a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def show(self, rev=0, changenode=None, copies=(), **props):
        # buffered mode captures the rendered changeset for flush();
        # otherwise render straight to the ui
        if self.buffered:
            self.ui.pushbuffer()
            self._show(rev, changenode, copies, props)
            self.hunk[rev] = self.ui.popbuffer()
        else:
            self._show(rev, changenode, copies, props)

    def _show(self, rev, changenode, copies, props):
        '''show a single changeset or file revision'''
        log = self.repo.changelog
        # callers may pass either the rev or the node; derive the other
        if changenode is None:
            changenode = log.node(rev)
        elif not rev:
            rev = log.rev(changenode)

        if self.ui.quiet:
            # quiet mode: just "rev:shortnode"
            self.ui.write("%d:%s\n" % (rev, short(changenode)))
            return

        changes = log.read(changenode)
        date = util.datestr(changes[2])
        extra = changes[5]
        branch = extra.get("branch")

        # full hashes in debug mode, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))

        # don't show the default branch name
        if branch != 'default':
            branch = util.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch)
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag)
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent)

        if self.ui.debugflag:
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(changes[0]), hex(changes[0])))
        self.ui.write(_("user: %s\n") % changes[1])
        self.ui.write(_("date: %s\n") % date)

        if self.ui.debugflag:
            # debug: show modified/added/removed file lists separately
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)))
        elif changes[3] and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(changes[3]))
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies))

        if extra and self.ui.debugflag:
            for key, value in util.sort(extra.items()):
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')))

        description = changes[4].strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"))
                self.ui.write(description)
                self.ui.write("\n\n")
            else:
                # non-verbose: first line of the description only
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0])
        self.ui.write("\n")

        self.showpatch(changenode)

    def showpatch(self, node):
        # append the diff against the first parent when --patch was given
        if self.patch:
            prev = self.repo.changelog.parents(node)[0]
            patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
                       opts=patch.diffopts(self.ui))
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
694
694
695
695
696 class changeset_templater(changeset_printer):
696 class changeset_templater(changeset_printer):
697 '''format changeset information.'''
697 '''format changeset information.'''
698
698
699 def __init__(self, ui, repo, patch, mapfile, buffered):
699 def __init__(self, ui, repo, patch, mapfile, buffered):
700 changeset_printer.__init__(self, ui, repo, patch, buffered)
700 changeset_printer.__init__(self, ui, repo, patch, buffered)
701 filters = templatefilters.filters.copy()
701 filters = templatefilters.filters.copy()
702 filters['formatnode'] = (ui.debugflag and (lambda x: x)
702 filters['formatnode'] = (ui.debugflag and (lambda x: x)
703 or (lambda x: x[:12]))
703 or (lambda x: x[:12]))
704 self.t = templater.templater(mapfile, filters,
704 self.t = templater.templater(mapfile, filters,
705 cache={
705 cache={
706 'parent': '{rev}:{node|formatnode} ',
706 'parent': '{rev}:{node|formatnode} ',
707 'manifest': '{rev}:{node|formatnode}',
707 'manifest': '{rev}:{node|formatnode}',
708 'filecopy': '{name} ({source})'})
708 'filecopy': '{name} ({source})'})
709
709
710 def use_template(self, t):
710 def use_template(self, t):
711 '''set template string to use'''
711 '''set template string to use'''
712 self.t.cache['changeset'] = t
712 self.t.cache['changeset'] = t
713
713
714 def _show(self, rev, changenode, copies, props):
714 def _show(self, rev, changenode, copies, props):
715 '''show a single changeset or file revision'''
715 '''show a single changeset or file revision'''
716 log = self.repo.changelog
716 log = self.repo.changelog
717 if changenode is None:
717 if changenode is None:
718 changenode = log.node(rev)
718 changenode = log.node(rev)
719 elif not rev:
719 elif not rev:
720 rev = log.rev(changenode)
720 rev = log.rev(changenode)
721
721
722 changes = log.read(changenode)
722 changes = log.read(changenode)
723
723
724 def showlist(name, values, plural=None, **args):
724 def showlist(name, values, plural=None, **args):
725 '''expand set of values.
725 '''expand set of values.
726 name is name of key in template map.
726 name is name of key in template map.
727 values is list of strings or dicts.
727 values is list of strings or dicts.
728 plural is plural of name, if not simply name + 's'.
728 plural is plural of name, if not simply name + 's'.
729
729
730 expansion works like this, given name 'foo'.
730 expansion works like this, given name 'foo'.
731
731
732 if values is empty, expand 'no_foos'.
732 if values is empty, expand 'no_foos'.
733
733
734 if 'foo' not in template map, return values as a string,
734 if 'foo' not in template map, return values as a string,
735 joined by space.
735 joined by space.
736
736
737 expand 'start_foos'.
737 expand 'start_foos'.
738
738
739 for each value, expand 'foo'. if 'last_foo' in template
739 for each value, expand 'foo'. if 'last_foo' in template
740 map, expand it instead of 'foo' for last key.
740 map, expand it instead of 'foo' for last key.
741
741
742 expand 'end_foos'.
742 expand 'end_foos'.
743 '''
743 '''
744 if plural: names = plural
744 if plural: names = plural
745 else: names = name + 's'
745 else: names = name + 's'
746 if not values:
746 if not values:
747 noname = 'no_' + names
747 noname = 'no_' + names
748 if noname in self.t:
748 if noname in self.t:
749 yield self.t(noname, **args)
749 yield self.t(noname, **args)
750 return
750 return
751 if name not in self.t:
751 if name not in self.t:
752 if isinstance(values[0], str):
752 if isinstance(values[0], str):
753 yield ' '.join(values)
753 yield ' '.join(values)
754 else:
754 else:
755 for v in values:
755 for v in values:
756 yield dict(v, **args)
756 yield dict(v, **args)
757 return
757 return
758 startname = 'start_' + names
758 startname = 'start_' + names
759 if startname in self.t:
759 if startname in self.t:
760 yield self.t(startname, **args)
760 yield self.t(startname, **args)
761 vargs = args.copy()
761 vargs = args.copy()
762 def one(v, tag=name):
762 def one(v, tag=name):
763 try:
763 try:
764 vargs.update(v)
764 vargs.update(v)
765 except (AttributeError, ValueError):
765 except (AttributeError, ValueError):
766 try:
766 try:
767 for a, b in v:
767 for a, b in v:
768 vargs[a] = b
768 vargs[a] = b
769 except ValueError:
769 except ValueError:
770 vargs[name] = v
770 vargs[name] = v
771 return self.t(tag, **vargs)
771 return self.t(tag, **vargs)
772 lastname = 'last_' + name
772 lastname = 'last_' + name
773 if lastname in self.t:
773 if lastname in self.t:
774 last = values.pop()
774 last = values.pop()
775 else:
775 else:
776 last = None
776 last = None
777 for v in values:
777 for v in values:
778 yield one(v)
778 yield one(v)
779 if last is not None:
779 if last is not None:
780 yield one(last, tag=lastname)
780 yield one(last, tag=lastname)
781 endname = 'end_' + names
781 endname = 'end_' + names
782 if endname in self.t:
782 if endname in self.t:
783 yield self.t(endname, **args)
783 yield self.t(endname, **args)
784
784
785 def showbranches(**args):
785 def showbranches(**args):
786 branch = changes[5].get("branch")
786 branch = changes[5].get("branch")
787 if branch != 'default':
787 if branch != 'default':
788 branch = util.tolocal(branch)
788 branch = util.tolocal(branch)
789 return showlist('branch', [branch], plural='branches', **args)
789 return showlist('branch', [branch], plural='branches', **args)
790
790
791 def showparents(**args):
791 def showparents(**args):
792 parents = [[('rev', p), ('node', hex(log.node(p)))]
792 parents = [[('rev', p), ('node', hex(log.node(p)))]
793 for p in self._meaningful_parentrevs(log, rev)]
793 for p in self._meaningful_parentrevs(log, rev)]
794 return showlist('parent', parents, **args)
794 return showlist('parent', parents, **args)
795
795
796 def showtags(**args):
796 def showtags(**args):
797 return showlist('tag', self.repo.nodetags(changenode), **args)
797 return showlist('tag', self.repo.nodetags(changenode), **args)
798
798
799 def showextras(**args):
799 def showextras(**args):
800 for key, value in util.sort(changes[5].items()):
800 for key, value in util.sort(changes[5].items()):
801 args = args.copy()
801 args = args.copy()
802 args.update(dict(key=key, value=value))
802 args.update(dict(key=key, value=value))
803 yield self.t('extra', **args)
803 yield self.t('extra', **args)
804
804
805 def showcopies(**args):
805 def showcopies(**args):
806 c = [{'name': x[0], 'source': x[1]} for x in copies]
806 c = [{'name': x[0], 'source': x[1]} for x in copies]
807 return showlist('file_copy', c, plural='file_copies', **args)
807 return showlist('file_copy', c, plural='file_copies', **args)
808
808
809 files = []
809 files = []
810 def getfiles():
810 def getfiles():
811 if not files:
811 if not files:
812 files[:] = self.repo.status(
812 files[:] = self.repo.status(
813 log.parents(changenode)[0], changenode)[:3]
813 log.parents(changenode)[0], changenode)[:3]
814 return files
814 return files
815 def showfiles(**args):
815 def showfiles(**args):
816 return showlist('file', changes[3], **args)
816 return showlist('file', changes[3], **args)
817 def showmods(**args):
817 def showmods(**args):
818 return showlist('file_mod', getfiles()[0], **args)
818 return showlist('file_mod', getfiles()[0], **args)
819 def showadds(**args):
819 def showadds(**args):
820 return showlist('file_add', getfiles()[1], **args)
820 return showlist('file_add', getfiles()[1], **args)
821 def showdels(**args):
821 def showdels(**args):
822 return showlist('file_del', getfiles()[2], **args)
822 return showlist('file_del', getfiles()[2], **args)
823 def showmanifest(**args):
823 def showmanifest(**args):
824 args = args.copy()
824 args = args.copy()
825 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
825 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
826 node=hex(changes[0])))
826 node=hex(changes[0])))
827 return self.t('manifest', **args)
827 return self.t('manifest', **args)
828
828
829 defprops = {
829 defprops = {
830 'author': changes[1],
830 'author': changes[1],
831 'branches': showbranches,
831 'branches': showbranches,
832 'date': changes[2],
832 'date': changes[2],
833 'desc': changes[4].strip(),
833 'desc': changes[4].strip(),
834 'file_adds': showadds,
834 'file_adds': showadds,
835 'file_dels': showdels,
835 'file_dels': showdels,
836 'file_mods': showmods,
836 'file_mods': showmods,
837 'files': showfiles,
837 'files': showfiles,
838 'file_copies': showcopies,
838 'file_copies': showcopies,
839 'manifest': showmanifest,
839 'manifest': showmanifest,
840 'node': hex(changenode),
840 'node': hex(changenode),
841 'parents': showparents,
841 'parents': showparents,
842 'rev': rev,
842 'rev': rev,
843 'tags': showtags,
843 'tags': showtags,
844 'extras': showextras,
844 'extras': showextras,
845 }
845 }
846 props = props.copy()
846 props = props.copy()
847 props.update(defprops)
847 props.update(defprops)
848
848
849 try:
849 try:
850 if self.ui.debugflag and 'header_debug' in self.t:
850 if self.ui.debugflag and 'header_debug' in self.t:
851 key = 'header_debug'
851 key = 'header_debug'
852 elif self.ui.quiet and 'header_quiet' in self.t:
852 elif self.ui.quiet and 'header_quiet' in self.t:
853 key = 'header_quiet'
853 key = 'header_quiet'
854 elif self.ui.verbose and 'header_verbose' in self.t:
854 elif self.ui.verbose and 'header_verbose' in self.t:
855 key = 'header_verbose'
855 key = 'header_verbose'
856 elif 'header' in self.t:
856 elif 'header' in self.t:
857 key = 'header'
857 key = 'header'
858 else:
858 else:
859 key = ''
859 key = ''
860 if key:
860 if key:
861 h = templater.stringify(self.t(key, **props))
861 h = templater.stringify(self.t(key, **props))
862 if self.buffered:
862 if self.buffered:
863 self.header[rev] = h
863 self.header[rev] = h
864 else:
864 else:
865 self.ui.write(h)
865 self.ui.write(h)
866 if self.ui.debugflag and 'changeset_debug' in self.t:
866 if self.ui.debugflag and 'changeset_debug' in self.t:
867 key = 'changeset_debug'
867 key = 'changeset_debug'
868 elif self.ui.quiet and 'changeset_quiet' in self.t:
868 elif self.ui.quiet and 'changeset_quiet' in self.t:
869 key = 'changeset_quiet'
869 key = 'changeset_quiet'
870 elif self.ui.verbose and 'changeset_verbose' in self.t:
870 elif self.ui.verbose and 'changeset_verbose' in self.t:
871 key = 'changeset_verbose'
871 key = 'changeset_verbose'
872 else:
872 else:
873 key = 'changeset'
873 key = 'changeset'
874 self.ui.write(templater.stringify(self.t(key, **props)))
874 self.ui.write(templater.stringify(self.t(key, **props)))
875 self.showpatch(changenode)
875 self.showpatch(changenode)
876 except KeyError, inst:
876 except KeyError, inst:
877 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
877 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
878 inst.args[0]))
878 inst.args[0]))
879 except SyntaxError, inst:
879 except SyntaxError, inst:
880 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
880 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
881
881
882 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
882 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
883 """show one changeset using template or regular display.
883 """show one changeset using template or regular display.
884
884
885 Display format will be the first non-empty hit of:
885 Display format will be the first non-empty hit of:
886 1. option 'template'
886 1. option 'template'
887 2. option 'style'
887 2. option 'style'
888 3. [ui] setting 'logtemplate'
888 3. [ui] setting 'logtemplate'
889 4. [ui] setting 'style'
889 4. [ui] setting 'style'
890 If all of these values are either the unset or the empty string,
890 If all of these values are either the unset or the empty string,
891 regular display via changeset_printer() is done.
891 regular display via changeset_printer() is done.
892 """
892 """
893 # options
893 # options
894 patch = False
894 patch = False
895 if opts.get('patch'):
895 if opts.get('patch'):
896 patch = matchfn or matchall(repo)
896 patch = matchfn or matchall(repo)
897
897
898 tmpl = opts.get('template')
898 tmpl = opts.get('template')
899 mapfile = None
899 mapfile = None
900 if tmpl:
900 if tmpl:
901 tmpl = templater.parsestring(tmpl, quoted=False)
901 tmpl = templater.parsestring(tmpl, quoted=False)
902 else:
902 else:
903 mapfile = opts.get('style')
903 mapfile = opts.get('style')
904 # ui settings
904 # ui settings
905 if not mapfile:
905 if not mapfile:
906 tmpl = ui.config('ui', 'logtemplate')
906 tmpl = ui.config('ui', 'logtemplate')
907 if tmpl:
907 if tmpl:
908 tmpl = templater.parsestring(tmpl)
908 tmpl = templater.parsestring(tmpl)
909 else:
909 else:
910 mapfile = ui.config('ui', 'style')
910 mapfile = ui.config('ui', 'style')
911
911
912 if tmpl or mapfile:
912 if tmpl or mapfile:
913 if mapfile:
913 if mapfile:
914 if not os.path.split(mapfile)[0]:
914 if not os.path.split(mapfile)[0]:
915 mapname = (templater.templatepath('map-cmdline.' + mapfile)
915 mapname = (templater.templatepath('map-cmdline.' + mapfile)
916 or templater.templatepath(mapfile))
916 or templater.templatepath(mapfile))
917 if mapname: mapfile = mapname
917 if mapname: mapfile = mapname
918 try:
918 try:
919 t = changeset_templater(ui, repo, patch, mapfile, buffered)
919 t = changeset_templater(ui, repo, patch, mapfile, buffered)
920 except SyntaxError, inst:
920 except SyntaxError, inst:
921 raise util.Abort(inst.args[0])
921 raise util.Abort(inst.args[0])
922 if tmpl: t.use_template(tmpl)
922 if tmpl: t.use_template(tmpl)
923 return t
923 return t
924 return changeset_printer(ui, repo, patch, buffered)
924 return changeset_printer(ui, repo, patch, buffered)
925
925
926 def finddate(ui, repo, date):
926 def finddate(ui, repo, date):
927 """Find the tipmost changeset that matches the given date spec"""
927 """Find the tipmost changeset that matches the given date spec"""
928 df = util.matchdate(date)
928 df = util.matchdate(date)
929 get = util.cachefunc(lambda r: repo[r].changeset())
929 get = util.cachefunc(lambda r: repo[r].changeset())
930 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
930 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
931 results = {}
931 results = {}
932 for st, rev, fns in changeiter:
932 for st, rev, fns in changeiter:
933 if st == 'add':
933 if st == 'add':
934 d = get(rev)[2]
934 d = get(rev)[2]
935 if df(d[0]):
935 if df(d[0]):
936 results[rev] = d
936 results[rev] = d
937 elif st == 'iter':
937 elif st == 'iter':
938 if rev in results:
938 if rev in results:
939 ui.status("Found revision %s from %s\n" %
939 ui.status(_("Found revision %s from %s\n") %
940 (rev, util.datestr(results[rev])))
940 (rev, util.datestr(results[rev])))
941 return str(rev)
941 return str(rev)
942
942
943 raise util.Abort(_("revision matching date not found"))
943 raise util.Abort(_("revision matching date not found"))
944
944
945 def walkchangerevs(ui, repo, pats, change, opts):
945 def walkchangerevs(ui, repo, pats, change, opts):
946 '''Iterate over files and the revs they changed in.
946 '''Iterate over files and the revs they changed in.
947
947
948 Callers most commonly need to iterate backwards over the history
948 Callers most commonly need to iterate backwards over the history
949 it is interested in. Doing so has awful (quadratic-looking)
949 it is interested in. Doing so has awful (quadratic-looking)
950 performance, so we use iterators in a "windowed" way.
950 performance, so we use iterators in a "windowed" way.
951
951
952 We walk a window of revisions in the desired order. Within the
952 We walk a window of revisions in the desired order. Within the
953 window, we first walk forwards to gather data, then in the desired
953 window, we first walk forwards to gather data, then in the desired
954 order (usually backwards) to display it.
954 order (usually backwards) to display it.
955
955
956 This function returns an (iterator, matchfn) tuple. The iterator
956 This function returns an (iterator, matchfn) tuple. The iterator
957 yields 3-tuples. They will be of one of the following forms:
957 yields 3-tuples. They will be of one of the following forms:
958
958
959 "window", incrementing, lastrev: stepping through a window,
959 "window", incrementing, lastrev: stepping through a window,
960 positive if walking forwards through revs, last rev in the
960 positive if walking forwards through revs, last rev in the
961 sequence iterated over - use to reset state for the current window
961 sequence iterated over - use to reset state for the current window
962
962
963 "add", rev, fns: out-of-order traversal of the given file names
963 "add", rev, fns: out-of-order traversal of the given file names
964 fns, which changed during revision rev - use to gather data for
964 fns, which changed during revision rev - use to gather data for
965 possible display
965 possible display
966
966
967 "iter", rev, None: in-order traversal of the revs earlier iterated
967 "iter", rev, None: in-order traversal of the revs earlier iterated
968 over with "add" - use to display data'''
968 over with "add" - use to display data'''
969
969
970 def increasing_windows(start, end, windowsize=8, sizelimit=512):
970 def increasing_windows(start, end, windowsize=8, sizelimit=512):
971 if start < end:
971 if start < end:
972 while start < end:
972 while start < end:
973 yield start, min(windowsize, end-start)
973 yield start, min(windowsize, end-start)
974 start += windowsize
974 start += windowsize
975 if windowsize < sizelimit:
975 if windowsize < sizelimit:
976 windowsize *= 2
976 windowsize *= 2
977 else:
977 else:
978 while start > end:
978 while start > end:
979 yield start, min(windowsize, start-end-1)
979 yield start, min(windowsize, start-end-1)
980 start -= windowsize
980 start -= windowsize
981 if windowsize < sizelimit:
981 if windowsize < sizelimit:
982 windowsize *= 2
982 windowsize *= 2
983
983
984 m = match(repo, pats, opts)
984 m = match(repo, pats, opts)
985 follow = opts.get('follow') or opts.get('follow_first')
985 follow = opts.get('follow') or opts.get('follow_first')
986
986
987 if not len(repo):
987 if not len(repo):
988 return [], m
988 return [], m
989
989
990 if follow:
990 if follow:
991 defrange = '%s:0' % repo['.'].rev()
991 defrange = '%s:0' % repo['.'].rev()
992 else:
992 else:
993 defrange = '-1:0'
993 defrange = '-1:0'
994 revs = revrange(repo, opts['rev'] or [defrange])
994 revs = revrange(repo, opts['rev'] or [defrange])
995 wanted = {}
995 wanted = {}
996 slowpath = m.anypats() or opts.get('removed')
996 slowpath = m.anypats() or opts.get('removed')
997 fncache = {}
997 fncache = {}
998
998
999 if not slowpath and not m.files():
999 if not slowpath and not m.files():
1000 # No files, no patterns. Display all revs.
1000 # No files, no patterns. Display all revs.
1001 wanted = dict.fromkeys(revs)
1001 wanted = dict.fromkeys(revs)
1002 copies = []
1002 copies = []
1003 if not slowpath:
1003 if not slowpath:
1004 # Only files, no patterns. Check the history of each file.
1004 # Only files, no patterns. Check the history of each file.
1005 def filerevgen(filelog, node):
1005 def filerevgen(filelog, node):
1006 cl_count = len(repo)
1006 cl_count = len(repo)
1007 if node is None:
1007 if node is None:
1008 last = len(filelog) - 1
1008 last = len(filelog) - 1
1009 else:
1009 else:
1010 last = filelog.rev(node)
1010 last = filelog.rev(node)
1011 for i, window in increasing_windows(last, nullrev):
1011 for i, window in increasing_windows(last, nullrev):
1012 revs = []
1012 revs = []
1013 for j in xrange(i - window, i + 1):
1013 for j in xrange(i - window, i + 1):
1014 n = filelog.node(j)
1014 n = filelog.node(j)
1015 revs.append((filelog.linkrev(n),
1015 revs.append((filelog.linkrev(n),
1016 follow and filelog.renamed(n)))
1016 follow and filelog.renamed(n)))
1017 revs.reverse()
1017 revs.reverse()
1018 for rev in revs:
1018 for rev in revs:
1019 # only yield rev for which we have the changelog, it can
1019 # only yield rev for which we have the changelog, it can
1020 # happen while doing "hg log" during a pull or commit
1020 # happen while doing "hg log" during a pull or commit
1021 if rev[0] < cl_count:
1021 if rev[0] < cl_count:
1022 yield rev
1022 yield rev
1023 def iterfiles():
1023 def iterfiles():
1024 for filename in m.files():
1024 for filename in m.files():
1025 yield filename, None
1025 yield filename, None
1026 for filename_node in copies:
1026 for filename_node in copies:
1027 yield filename_node
1027 yield filename_node
1028 minrev, maxrev = min(revs), max(revs)
1028 minrev, maxrev = min(revs), max(revs)
1029 for file_, node in iterfiles():
1029 for file_, node in iterfiles():
1030 filelog = repo.file(file_)
1030 filelog = repo.file(file_)
1031 if not len(filelog):
1031 if not len(filelog):
1032 if node is None:
1032 if node is None:
1033 # A zero count may be a directory or deleted file, so
1033 # A zero count may be a directory or deleted file, so
1034 # try to find matching entries on the slow path.
1034 # try to find matching entries on the slow path.
1035 slowpath = True
1035 slowpath = True
1036 break
1036 break
1037 else:
1037 else:
1038 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1038 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1039 % (file_, short(node)))
1039 % (file_, short(node)))
1040 continue
1040 continue
1041 for rev, copied in filerevgen(filelog, node):
1041 for rev, copied in filerevgen(filelog, node):
1042 if rev <= maxrev:
1042 if rev <= maxrev:
1043 if rev < minrev:
1043 if rev < minrev:
1044 break
1044 break
1045 fncache.setdefault(rev, [])
1045 fncache.setdefault(rev, [])
1046 fncache[rev].append(file_)
1046 fncache[rev].append(file_)
1047 wanted[rev] = 1
1047 wanted[rev] = 1
1048 if follow and copied:
1048 if follow and copied:
1049 copies.append(copied)
1049 copies.append(copied)
1050 if slowpath:
1050 if slowpath:
1051 if follow:
1051 if follow:
1052 raise util.Abort(_('can only follow copies/renames for explicit '
1052 raise util.Abort(_('can only follow copies/renames for explicit '
1053 'file names'))
1053 'file names'))
1054
1054
1055 # The slow path checks files modified in every changeset.
1055 # The slow path checks files modified in every changeset.
1056 def changerevgen():
1056 def changerevgen():
1057 for i, window in increasing_windows(len(repo) - 1, nullrev):
1057 for i, window in increasing_windows(len(repo) - 1, nullrev):
1058 for j in xrange(i - window, i + 1):
1058 for j in xrange(i - window, i + 1):
1059 yield j, change(j)[3]
1059 yield j, change(j)[3]
1060
1060
1061 for rev, changefiles in changerevgen():
1061 for rev, changefiles in changerevgen():
1062 matches = filter(m, changefiles)
1062 matches = filter(m, changefiles)
1063 if matches:
1063 if matches:
1064 fncache[rev] = matches
1064 fncache[rev] = matches
1065 wanted[rev] = 1
1065 wanted[rev] = 1
1066
1066
1067 class followfilter:
1067 class followfilter:
1068 def __init__(self, onlyfirst=False):
1068 def __init__(self, onlyfirst=False):
1069 self.startrev = nullrev
1069 self.startrev = nullrev
1070 self.roots = []
1070 self.roots = []
1071 self.onlyfirst = onlyfirst
1071 self.onlyfirst = onlyfirst
1072
1072
1073 def match(self, rev):
1073 def match(self, rev):
1074 def realparents(rev):
1074 def realparents(rev):
1075 if self.onlyfirst:
1075 if self.onlyfirst:
1076 return repo.changelog.parentrevs(rev)[0:1]
1076 return repo.changelog.parentrevs(rev)[0:1]
1077 else:
1077 else:
1078 return filter(lambda x: x != nullrev,
1078 return filter(lambda x: x != nullrev,
1079 repo.changelog.parentrevs(rev))
1079 repo.changelog.parentrevs(rev))
1080
1080
1081 if self.startrev == nullrev:
1081 if self.startrev == nullrev:
1082 self.startrev = rev
1082 self.startrev = rev
1083 return True
1083 return True
1084
1084
1085 if rev > self.startrev:
1085 if rev > self.startrev:
1086 # forward: all descendants
1086 # forward: all descendants
1087 if not self.roots:
1087 if not self.roots:
1088 self.roots.append(self.startrev)
1088 self.roots.append(self.startrev)
1089 for parent in realparents(rev):
1089 for parent in realparents(rev):
1090 if parent in self.roots:
1090 if parent in self.roots:
1091 self.roots.append(rev)
1091 self.roots.append(rev)
1092 return True
1092 return True
1093 else:
1093 else:
1094 # backwards: all parents
1094 # backwards: all parents
1095 if not self.roots:
1095 if not self.roots:
1096 self.roots.extend(realparents(self.startrev))
1096 self.roots.extend(realparents(self.startrev))
1097 if rev in self.roots:
1097 if rev in self.roots:
1098 self.roots.remove(rev)
1098 self.roots.remove(rev)
1099 self.roots.extend(realparents(rev))
1099 self.roots.extend(realparents(rev))
1100 return True
1100 return True
1101
1101
1102 return False
1102 return False
1103
1103
1104 # it might be worthwhile to do this in the iterator if the rev range
1104 # it might be worthwhile to do this in the iterator if the rev range
1105 # is descending and the prune args are all within that range
1105 # is descending and the prune args are all within that range
1106 for rev in opts.get('prune', ()):
1106 for rev in opts.get('prune', ()):
1107 rev = repo.changelog.rev(repo.lookup(rev))
1107 rev = repo.changelog.rev(repo.lookup(rev))
1108 ff = followfilter()
1108 ff = followfilter()
1109 stop = min(revs[0], revs[-1])
1109 stop = min(revs[0], revs[-1])
1110 for x in xrange(rev, stop-1, -1):
1110 for x in xrange(rev, stop-1, -1):
1111 if ff.match(x) and x in wanted:
1111 if ff.match(x) and x in wanted:
1112 del wanted[x]
1112 del wanted[x]
1113
1113
1114 def iterate():
1114 def iterate():
1115 if follow and not m.files():
1115 if follow and not m.files():
1116 ff = followfilter(onlyfirst=opts.get('follow_first'))
1116 ff = followfilter(onlyfirst=opts.get('follow_first'))
1117 def want(rev):
1117 def want(rev):
1118 if ff.match(rev) and rev in wanted:
1118 if ff.match(rev) and rev in wanted:
1119 return True
1119 return True
1120 return False
1120 return False
1121 else:
1121 else:
1122 def want(rev):
1122 def want(rev):
1123 return rev in wanted
1123 return rev in wanted
1124
1124
1125 for i, window in increasing_windows(0, len(revs)):
1125 for i, window in increasing_windows(0, len(revs)):
1126 yield 'window', revs[0] < revs[-1], revs[-1]
1126 yield 'window', revs[0] < revs[-1], revs[-1]
1127 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1127 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1128 for rev in util.sort(list(nrevs)):
1128 for rev in util.sort(list(nrevs)):
1129 fns = fncache.get(rev)
1129 fns = fncache.get(rev)
1130 if not fns:
1130 if not fns:
1131 def fns_generator():
1131 def fns_generator():
1132 for f in change(rev)[3]:
1132 for f in change(rev)[3]:
1133 if m(f):
1133 if m(f):
1134 yield f
1134 yield f
1135 fns = fns_generator()
1135 fns = fns_generator()
1136 yield 'add', rev, fns
1136 yield 'add', rev, fns
1137 for rev in nrevs:
1137 for rev in nrevs:
1138 yield 'iter', rev, None
1138 yield 'iter', rev, None
1139 return iterate(), m
1139 return iterate(), m
1140
1140
1141 def commit(ui, repo, commitfunc, pats, opts):
1141 def commit(ui, repo, commitfunc, pats, opts):
1142 '''commit the specified files or all outstanding changes'''
1142 '''commit the specified files or all outstanding changes'''
1143 date = opts.get('date')
1143 date = opts.get('date')
1144 if date:
1144 if date:
1145 opts['date'] = util.parsedate(date)
1145 opts['date'] = util.parsedate(date)
1146 message = logmessage(opts)
1146 message = logmessage(opts)
1147
1147
1148 # extract addremove carefully -- this function can be called from a command
1148 # extract addremove carefully -- this function can be called from a command
1149 # that doesn't support addremove
1149 # that doesn't support addremove
1150 if opts.get('addremove'):
1150 if opts.get('addremove'):
1151 addremove(repo, pats, opts)
1151 addremove(repo, pats, opts)
1152
1152
1153 m = match(repo, pats, opts)
1153 m = match(repo, pats, opts)
1154 if pats:
1154 if pats:
1155 modified, added, removed = repo.status(match=m)[:3]
1155 modified, added, removed = repo.status(match=m)[:3]
1156 files = util.sort(modified + added + removed)
1156 files = util.sort(modified + added + removed)
1157 slist = None
1157 slist = None
1158 for f in m.files():
1158 for f in m.files():
1159 if f == '.':
1159 if f == '.':
1160 continue
1160 continue
1161 if f not in files:
1161 if f not in files:
1162 rf = repo.wjoin(f)
1162 rf = repo.wjoin(f)
1163 rel = repo.pathto(f)
1163 rel = repo.pathto(f)
1164 try:
1164 try:
1165 mode = os.lstat(rf)[stat.ST_MODE]
1165 mode = os.lstat(rf)[stat.ST_MODE]
1166 except OSError:
1166 except OSError:
1167 raise util.Abort(_("file %s not found!") % rel)
1167 raise util.Abort(_("file %s not found!") % rel)
1168 if stat.S_ISDIR(mode):
1168 if stat.S_ISDIR(mode):
1169 name = f + '/'
1169 name = f + '/'
1170 i = bisect.bisect(files, name)
1170 i = bisect.bisect(files, name)
1171 if i >= len(files) or not files[i].startswith(name):
1171 if i >= len(files) or not files[i].startswith(name):
1172 raise util.Abort(_("no match under directory %s!")
1172 raise util.Abort(_("no match under directory %s!")
1173 % rel)
1173 % rel)
1174 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1174 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1175 raise util.Abort(_("can't commit %s: "
1175 raise util.Abort(_("can't commit %s: "
1176 "unsupported file type!") % rel)
1176 "unsupported file type!") % rel)
1177 elif f not in repo.dirstate:
1177 elif f not in repo.dirstate:
1178 raise util.Abort(_("file %s not tracked!") % rel)
1178 raise util.Abort(_("file %s not tracked!") % rel)
1179 m = matchfiles(repo, files)
1179 m = matchfiles(repo, files)
1180 try:
1180 try:
1181 return commitfunc(ui, repo, message, m, opts)
1181 return commitfunc(ui, repo, message, m, opts)
1182 except ValueError, inst:
1182 except ValueError, inst:
1183 raise util.Abort(str(inst))
1183 raise util.Abort(str(inst))
@@ -1,3320 +1,3320 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from repo import RepoError, NoCapability
9 from repo import RepoError, NoCapability
10 from i18n import _
10 from i18n import _
11 import os, re, sys, urllib
11 import os, re, sys, urllib
12 import hg, util, revlog, bundlerepo, extensions, copies
12 import hg, util, revlog, bundlerepo, extensions, copies
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import version, socket
14 import version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """
    rejected = None
    exact = {}
    names = []
    matcher = cmdutil.match(repo, pats, opts)
    # Suppress the matcher's own complaints about unmatched patterns;
    # this command reports results itself.
    matcher.bad = lambda x, y: True
    for f in repo.walk(matcher):
        if matcher.exact(f):
            # Explicitly named file: always add, announce only when verbose.
            if ui.verbose:
                ui.status(_('adding %s\n') % matcher.rel(f))
            names.append(f)
            exact[f] = 1
        elif f not in repo.dirstate:
            # Pattern match on an untracked file: add and announce.
            ui.status(_('adding %s\n') % matcher.rel(f))
            names.append(f)
    if not opts.get('dry_run'):
        rejected = repo.add(names)
        # Only explicitly named files that were rejected affect the exit code.
        rejected = [f for f in rejected if f in exact]
    return rejected and 1 or 0
48 return rejected and 1 or 0
49
49
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    # Validate the similarity threshold before delegating to cmdutil.
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects the ratio as a fraction in [0, 1].
    return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.)
71
71
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # Map of option name -> formatter for one annotated line's context tuple.
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
             ]

    # Default to revision numbers when no annotation field was requested.
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # Append ":<lineno>" to the output of the last selected field.
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts['rev']]

    m = cmdutil.match(repo, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                # BUG FIX: this used to be assigned to 'm', clobbering the
                # matcher; a subsequent binary file would then crash on
                # m.rel(abs). Use a dedicated name for the column width.
                ml = max(map(len, l))
                pieces.append(["%*s" % (ml, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
132
132
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''
    ctx = repo[opts['rev']]
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # Expand any format-string placeholders in the destination name.
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    matcher = cmdutil.match(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # Streaming to stdout only makes sense for single-file formats.
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matcher, prefix)
175
175
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See \'hg help dates\' for a list of formats valid for -d/--date.
    '''
    # 'node' (positional) and '-r' are aliases; exactly one must be given.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # The target must be an ancestor of the working directory's first parent.
    ancestor = repo.changelog.ancestor(op1, node)
    if ancestor != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # Backing out a merge requires picking which parent to revert to.
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # The backout should appear on the same branch as the working directory;
    # hg.clean resets it, so remember and restore.
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)

    # Revert everything back to the chosen parent, then commit that state.
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)

    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))

    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # Return the working directory to where it was before the backout.
        hg.clean(repo, op1, show_stats=False)
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
262
262
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing (unless the --noupdate option is specified).
    Once you have performed tests, mark the working directory as bad
    or good and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.
    """
    # Backward compatibility: accept the old "hg bisect <cmd>" spelling,
    # where the first positional argument was the subcommand name.
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset > 1:
        raise util.Abort(_('incompatible arguments'))
    elif not (good or bad or skip or reset):
        ui.status(_('(no action selected)\n'))
        return

    if reset:
        statepath = repo.join("bisect.state")
        if os.path.exists(statepath):
            os.unlink(statepath)
        return

    # Load any previously recorded good/bad/skip marks.
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for line in repo.opener("bisect.state"):
            kind, node = line[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # Record the new mark for the given revision (default: working parent).
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # Persist the updated state atomically under the working-dir lock.
    sfile = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                sfile.write("%s %s\n" % (kind, hex(node)))
        sfile.rename()
    finally:
        del wlock

    # Nothing to search until both ends of the range are marked.
    if not state['good'] or not state['bad']:
        return

    # Actually bisect.
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        # Search has converged; report the culprit(s).
        displayer = cmdutil.show_changeset(ui, repo, {})
        transition = (good and "good" or "bad")
        if len(nodes) == 1:
            # Narrowed it down to a single revision.
            ui.write(_("The first %s revision is:\n") % transition)
            displayer.show(changenode=nodes[0])
        else:
            # Skips left several candidates.
            ui.write(_("Due to skipped revisions, the first "
                       "%s revision could be any of:\n") % transition)
            for n in nodes:
                displayer.show(changenode=n)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # Estimate remaining tests: ceil(log2(changesets)).
        tests, size = 0, 2
        while size <= changesets:
            tests += 1
            size *= 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
363
363
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """
    if not label:
        # Query mode: just print the working directory's branch.
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return
    if not opts.get('force') and label in repo.branchtags():
        # Allow re-marking with a parent's branch; refuse to shadow others.
        if label not in [p.branch() for p in repo.parents()]:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
386
386
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    hexfunc = ui.debugflag and hex or short
    activebranches = [util.tolocal(repo[n].branch())
                      for n in repo.heads()]
    # Sort by (active flag, revision, name) and show newest/active first.
    entries = util.sort([(tag in activebranches, repo.changelog.rev(node), tag)
                         for tag, node in repo.branchtags().items()])
    entries.reverse()

    for isactive, node, tag in entries:
        if active and not isactive:
            continue
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            # Right-align the revision so name + rev occupy a fixed width.
            rev = str(node).rjust(31 - util.locallen(tag))
            isinactive = ((not isactive) and " (inactive)") or ''
            data = tag, rev, hexfunc(repo.lookup(node)), isinactive
            ui.write("%s %s:%s%s\n" % data)
413
413
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # BUG FIX: corrected typo "specifiying" in the error message.
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # Create the right base.
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        has = {nullid: None}
        # Everything reachable from a --base node is assumed present remotely.
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # Walk ancestors until we hit the "has" frontier; roots of the
        # missing region (all parents present) become the outgoing bases.
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # No --base: compare against the destination repository.
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
486
486
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    # Exit status starts at failure; printing at least one file clears it.
    exitcode = 1
    ctx = repo[opts['rev']]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        # make_file honours the --output format string (or stdout).
        out = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=path)
        contents = ctx[path].data()
        if opts.get('decode'):
            # Run the repository's decode filters before emitting.
            contents = repo.wwritedata(path, contents)
        out.write(contents)
        exitcode = 0
    return exitcode
513
513
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    # Translate the command-line options into hg.clone() keyword
    # arguments; all the real work happens in hg.clone.
    cloneargs = {
        'pull': opts['pull'],
        'stream': opts['uncompressed'],
        'rev': opts['rev'],
        'update': not opts['noupdate'],
    }
    hg.clone(ui, source, dest, **cloneargs)
564
564
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If you are committing the result of a merge, do not provide any
    file names or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    def commitfunc(ui, repo, message, match, opts):
        # Callback handed to cmdutil.commit: performs the actual commit
        # of the matched files with the user/date options.
        return repo.commit(match.files(), message, opts['user'], opts['date'],
                           match, force_editor=opts.get('force_editor'))

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        # Nothing was committed (e.g. no changes); stay silent.
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    if rev - 1 in parents:
        # one of the parents was the old tip
        pass
    elif (parents == (nullrev, nullrev) or
          len(cl.heads(cl.node(parents[0]))) > 1 and
          (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
        # The commit is either a brand-new root (both parents null) or
        # each real parent already had other heads, so this commit
        # introduced an additional head.
        ui.status(_('created new head\n'))

    # Report the new changeset: full hash with --debug, short hash with
    # --verbose, nothing otherwise.
    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
603
603
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # Hold the working-directory lock (non-waiting acquire semantics
    # come from wlock(False)) while recording the copies.
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        # Dropping the last reference releases the lock (py2-era idiom).
        del wlock
623
623
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = r.lookup
    elif nargs == 2:
        # No index file: use the changelog of the local repository.
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    ancestor = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(ancestor), hex(ancestor)))
641
641
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # --options: list the flags of the command (always including the
        # global options) rather than candidate command names.
        opt_tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd, table)
            opt_tables.append(entry[1])
        flags = []
        for opt_table in opt_tables:
            for opt in opt_table:
                if opt[0]:
                    # Short form only exists when the first field is set.
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # Default: print the sorted list of commands matching the prefix.
    ui.write("%s\n" % "\n".join(util.sort(cmdutil.findpossible(ui, cmd, table))))
660
660
def debugfsinfo(ui, path = "."):
    # Probe filesystem capabilities by creating a scratch file in the
    # current directory.  The original used the Python-2-only file()
    # builtin and never closed the handle; use open() and close it
    # explicitly so the descriptor is not leaked.
    fp = open('.debugfsinfo', 'w')
    try:
        fp.write('')
    finally:
        fp.close()
    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
    ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
                                       and 'yes' or 'no'))
    # Remove the scratch file again.
    os.unlink('.debugfsinfo')
668
668
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    target = repo[rev]
    # Hold the working-directory lock for the duration of the rewrite.
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(target.node(), target.manifest())
    finally:
        # Dropping the reference releases the lock (py2-era idiom).
        del wlock
677
677
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Forward check: every dirstate entry must be consistent with the
    # parent manifests for its state ('n'ormal, 'r'emoved, 'a'dded,
    # 'm'erged).
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        if state in "nr" and fname not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (fname, state))
            errors += 1
        if state in "a" and fname in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (fname, state))
            errors += 1
        if state in "m" and fname not in m1 and fname not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, state))
            errors += 1
    # Reverse check: everything in the first parent's manifest must be
    # tracked by the dirstate.
    for fname in m1:
        state = repo.dirstate[fname]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (fname, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
704
704
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values:
        # Only a single fully-qualified section.name item is allowed.
        dotted = [v for v in values if '.' in v]
        if len(dotted) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            # No filters: dump everything.
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # A bare section name matches all its items.
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # An exact section.name match prints only the value.
                ui.write(value, '\n')
730
730
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # A missing second parent means the null revision.
    if not rev2:
        rev2 = hex(nullid)

    # Rewrite the dirstate parents under the working-directory lock.
    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        # Dropping the reference releases the lock (py2-era idiom).
        del wlock
746
746
def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    # Each entry is indexed as ent[0]=state char, ent[1]=mode,
    # ent[2]=size, ent[3]=mtime — inferred from the usage below, where
    # an mtime of -1 means "unset"; confirm against dirstate internals.
    timestr = ""
    showdate = not nodates
    for file_, ent in util.sort(repo.dirstate._map.items()):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        # 020000 is the S_IFLNK bit (Python 2 octal literal): symlinks
        # are shown as 'lnk' instead of a permission triplet.
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # Finally dump any recorded copy/rename sources.
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
767
767
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # Derive the index file name from the data file name: "foo.d" -> "foo.i".
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
    try:
        node = rlog.lookup(rev)
        ui.write(rlog.revision(node))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
775
775
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the extra date formats accepted by parsedate.
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a (timestamp, offset) pair — unpacked by the %-format.
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
787
787
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    # NOTE(review): the header's column spacing may have been collapsed
    # by the diff rendering this was recovered from — verify alignment
    # against the padded row format below.
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except Exception:
            # A damaged entry may make parents() fail; show null parents
            # instead of aborting the dump.  (Was a bare "except:", which
            # also swallowed KeyboardInterrupt/SystemExit.)
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
802
802
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in rlog:
        node = rlog.node(rev)
        parents = rlog.parents(node)
        # Always emit the first-parent edge; the second-parent edge only
        # when it is a real (non-null) parent.
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
814
814
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # Write contents to a fresh temp file and return its path; the
        # caller is responsible for unlinking it.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # Total count of detected problems; doubles as the return value.
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    # Exercise the configured patch machinery end-to-end: diff two small
    # texts, apply the diff, and verify the result round-trips.
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        # The patch must have touched exactly the one file, and the
        # patched content must equal the expected text.
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            # A user-configured patch tool is in play; blame it first.
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    # Try the full editor string first, then just its first word, in
    # case the configured editor carries arguments.
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    # Probe the same sources ui.username() consults so we can warn
    # before letting it raise/report.
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
926
926
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields the (source path, source node) pair for a
        # copied/renamed file, or a false value otherwise.
        origin = fctx.filelog().renamed(fctx.filenode())
        rel = matcher.rel(path)
        if not origin:
            ui.write(_("%s not renamed\n") % rel)
        else:
            ui.write(_("%s renamed from %s:%s\n") % (rel, origin[0],
                                                     hex(origin[1])))
940
940
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = cmdutil.match(repo, pats, opts)
    files = list(repo.walk(matcher))
    if not files:
        return
    # Column widths are sized to the longest absolute and relative names.
    abs_width = max([len(f) for f in files])
    rel_width = max([len(matcher.rel(f)) for f in files])
    fmt = 'f %%-%ds %%-%ds %%s' % (abs_width, rel_width)
    for f in files:
        # Mark patterns that matched the file exactly (not via a glob).
        tag = matcher.exact(f) and 'exact' or ''
        line = fmt % (f, matcher.rel(f), tag)
        ui.write("%s\n" % line.rstrip())
953
953
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # Resolve --rev into the two endpoints of the comparison.
    rev1, rev2 = cmdutil.revpair(repo, opts['rev'])
    matcher = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, rev1, rev2, match=matcher, opts=diffopts)
979
979
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # Pick singular/plural wording for the verbose progress note.
    if len(revs) > 1:
        msg = _('exporting patches:\n')
    else:
        msg = _('exporting patch:\n')
    ui.note(msg)
    diffopts = patch.diffopts(ui, opts)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'], opts=diffopts)
1020
1020
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which
    a match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.
    """
    # Build the regexp, honoring --ignore-case.
    reflags = 0
    if opts['ignore_case']:
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # --print0 switches both field and line separators to NUL.
    sep, eol = ':', '\n'
    if opts['print0']:
        sep = eol = '\0'

    # Cache filelog objects: the same file is read at many revisions.
    fcache = {}
    def getfile(fn):
        if fn not in fcache:
            fcache[fn] = repo.file(fn)
        return fcache[fn]

    def matchlines(body):
        # Yield (line number, match start col, match end col, line text)
        # for every regexp hit in the file body.
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            # Start of the line containing the match (0 at file start).
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    class linestate(object):
        # One matched line; hashed/compared so difflib can diff match
        # sets between revisions for --all.
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            # Equality intentionally ignores line number and columns:
            # only the text matters when diffing match states.
            return self.line == other.line

    matches = {}  # rev -> {filename -> [linestate, ...]}
    copies = {}   # rev -> {filename -> copy source}
    def grepbody(fn, rev, body):
        # Record every matching line of fn at rev into matches.
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # Yield ('+'/'-', linestate) pairs for match-state changes
        # between two revisions' match lists (used by --all).
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    prev = {}  # filename -> last revision displayed for it
    def display(fn, rev, states, prevstates):
        # Print the matches for one file; returns True if anything
        # was written.
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        r = prev.get(fn, -1)
        if opts['all']:
            iter = difflinestates(states, prevstates)
        else:
            iter = [('', l) for l in prevstates]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts['line_number']:
                cols.append(str(l.linenum))
            if opts['all']:
                cols.append(change)
            if opts['user']:
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts['files_with_matches']:
                # Emit each (file, rev) pair only once for -l.
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    fstate = {}  # filename -> match list carried to the next revision
    skip = {}    # filenames already reported (stop after first hit)
    # Memoized changelog lookup; walkchangerevs drives the revision walk.
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs yields windows newest-first; within a window,
    # 'add' collects matches and 'iter' replays revisions for display.
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
        elif st == 'add':
            ctx = repo[rev]
            matches[rev] = {}
            for fn in fns:
                if fn in skip:
                    continue
                try:
                    grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
                    fstate.setdefault(fn, [])
                    if follow:
                        # Track rename sources so --follow can chase them.
                        copied = getfile(fn).renamed(ctx.filenode(fn))
                        if copied:
                            copies.setdefault(rev, {})[fn] = copied[0]
                except revlog.LookupError:
                    # File not present at this revision: nothing to grep.
                    pass
        elif st == 'iter':
            for fn, m in util.sort(matches[rev].items()):
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    # Propagate the skip through renames.
                    if copy:
                        skip[copy] = True
                    continue
                if fn in prev or fstate[fn]:
                    r = display(fn, rev, m, fstate[fn])
                    found = found or r
                    if r and not opts['all']:
                        # Without --all, stop after the first revision
                        # that produced output for this file.
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
                fstate[fn] = m
                if copy:
                    fstate[copy] = m
                prev[fn] = rev

    # Flush files whose last recorded state was never displayed.
    for fn, state in util.sort(fstate.items()):
        if fn in skip:
            continue
        if fn not in copies.get(prev[fn], {}):
            found = display(fn, rev, {}, state) or found
    # Shell convention: 0 when something matched, 1 otherwise.
    return (not found and 1) or 0
1186
1186
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # --rev restricts the search to heads reachable from this node.
    start = None
    if opts['rev']:
        start = repo.lookup(opts['rev'])

    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        seen = util.set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            # Each branch is reported once even if named several times.
            if branch in seen:
                continue
            seen.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # Warn differently depending on whether the argument was
                # a branch name itself or a revision on that branch.
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)

    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in heads:
        displayer.show(changenode=node)
1234
1234
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # Accumulates (title, options) pairs; rendered at the end.
    option_lists = []

    def addglobalopts(aliases):
        # Append either the full global option table (verbose) or a
        # one-line hint about how to see more.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Show help for a single command; raises UnknownCommand if the
        # name does not resolve (caller falls through to topics/extensions).
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(ui, name, table)
        except cmdutil.AmbiguousCommand, inst:
            # Ambiguous prefix: list every command it could mean.
            select = lambda c: c.lstrip('^').startswith(inst.args[0])
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description (first docstring line only in quiet mode)
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # List commands from the global table, optionally filtered by
        # select(); 'shortlist' restricts to the '^'-marked basics.
        h = {}      # command name -> first docstring line
        cmds = {}   # command name -> full "name|alias" spec
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            # debug* commands are hidden unless --debug is active.
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = util.sort(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Show a general help topic from help.helptable.
        v = None
        for i, d in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
                doc = d
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description (topic text may be a callable producing the text)
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Show an extension's docstring and the commands it adds.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            # Extension defines no commands of its own.
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; re-raise the last
        # UnknownCommand only if all three fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            # Deprecated options stay hidden unless verbose.
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if ui.verbose:
        ui.write(_("\nspecial help topics:\n"))
        topics = []
        for i, d in help.helptable:
            l = i.split('|')
            topics.append((", ".join(l[:-1]), l[-1]))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        # Align option descriptions on the widest option column.
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1431
1431
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # full 40-char node hashes with --debug, short form otherwise
    hexfunc = ui.debugflag and hex or short
    # no explicit output selector given: print the default summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # lookup in another repository: only the id can be queried remotely
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            # fix: mark the message for translation like the other aborts
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # no revision given: describe the working directory parent(s)
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            # non-empty list of modified+deleted files => dirty, shown as "+"
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # only mention the branch when it is not the default one
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1501
1501
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --exact must start from a clean working dir; otherwise only require
    # cleanliness unless the user forced with -f
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    # working-dir lock before store lock (Mercurial's required lock order)
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    # not a local file: treat the argument as a URL
                    data = patch.extract(ui, urllib.urlopen(pf))
            # tmpname is a temp file holding the diff body; the rest is
            # metadata recovered from the patch/email headers (may be None)
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message priority: -m/-l option > patch header > editor
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    # reposition the working dir on the patch's recorded
                    # parents so the resulting node can be compared below
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # best effort: adopt the patch's second parent when the
                    # first one matches the working dir; unknown nodes are
                    # silently ignored
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record adds/removes even if patching partially failed
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        # the rebuilt changeset must hash to the recorded id
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1617
1617
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # best-effort removal of a stale --bundle file; fix: catch only
        # OSError instead of a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit
        try:
            os.unlink(opts["bundle"])
        except OSError:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        # remove the temporary bundle unless the user asked to keep it
        if cleanup:
            os.unlink(cleanup)
1683
1683
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # apply --ssh/--remotecmd settings first: dest may be a remote URL
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1698
1698
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0 separates entries with NUL bytes for safe consumption by xargs
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts.get('rev') or None

    found = False
    matcher = cmdutil.match(repo, pats, opts, default='relglob')
    # suppress complaints about patterns that match nothing
    matcher.bad = lambda x, y: False
    for path in repo[rev].walk(matcher):
        # without an explicit revision, skip files not tracked in the
        # working directory
        if not rev and path not in repo.dirstate:
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, path), end)
        else:
            ui.write(((pats and matcher.rel(path)) or path), end)
        found = True

    # exit status 0 when at least one file matched, 1 otherwise
    if found:
        return 0
    return 1
1733
1733
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized access to the raw changeset tuple for a revision
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # --copies only needs rename data up to the highest requested revision
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = len(repo)
    # caches shared by all getrenamed() calls: rename info keyed by
    # linkrev (rcache) and by filelog node (ncache)
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first query for this file: scan its filelog once
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except revlog.LookupError:
            pass
        return None

    # df becomes a date-matching predicate when --date is given
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # changeiter yields ('add', ...) when a revision enters the window and
    # ('iter', ...) when it should actually be emitted
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                # changeset tuple index 5 is the extra dict holding 'branch'
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                # index 2 is the (timestamp, tzoffset) date pair
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # case-insensitive match against user, description and files
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                # index 3 is the list of files touched by this changeset
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1856
1856
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    # the positional argument and --rev are aliases; reject both at once
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # -v prefix: mode plus a marker for symlinks (@) and executables (*)
    modemap = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
    ctx = repo[node]
    for fname in ctx:
        if ui.debugflag:
            # --debug: prepend the file revision hash
            ui.write("%40s " % hex(ctx.manifest()[fname]))
        if ui.verbose:
            ui.write(modemap[ctx.flags(fname)])
        ui.write("%s\n" % fname)
1885
1885
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    # the positional argument and --rev are aliases; reject both at once
    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # no revision given: try to pick the one other head of this branch
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            # more than two heads: the choice would be ambiguous
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # single branch head: nothing on this branch to merge with
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                # working dir is not on the branch tip: suggest update
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # merge with whichever of the two heads is not the current parent
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
1929
1929
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        # resolve the requested revisions in the local repository
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1

    # expand the outgoing roots into the full list of nodes to display
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for candidate in o:
        if shown >= limit:
            break
        real_parents = [p for p in repo.changelog.parents(candidate)
                        if p != nullid]
        # --no-merges: skip changesets with two real parents
        if opts['no_merges'] and len(real_parents) == 2:
            continue
        shown += 1
        displayer.show(changenode=candidate)
1965
1965
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    # fall back to the working directory context when --rev is absent
    ctx = repo[opts.get('rev') or None]

    if not file_:
        nodes = [cp.node() for cp in ctx.parents()]
    else:
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # the file does not exist in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        nodes = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(changenode=node)
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no name given: list every configured path
        for name, path in ui.configitems("paths"):
            ui.write("%s = %s\n" % (name, util.hidepassword(path)))
        return

    # look up a single name; complain if it is not configured
    for name, path in ui.configitems("paths"):
        if name == search:
            ui.write("%s\n" % util.hidepassword(path))
            return
    ui.warn(_("not found!\n"))
    return 1
def postincoming(ui, repo, modheads, optupdate, checkout):
    # After changesets have come in, either update the working dir
    # (when requested and unambiguous) or tell the user what to run next.
    if modheads == 0:
        return
    if optupdate:
        if modheads <= 1 or checkout:
            return hg.update(repo, checkout)
        # multiple new heads and no explicit checkout: refuse to pick one
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]
      ssh://[user[:pass]@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            revs = [other.lookup(rev) for rev in revs]
        # NOTE(review): NoCapability is presumably imported at module
        # level (not visible here) — confirm against the file's imports.
        except NoCapability:
            msg = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(msg)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user[:pass]@]host[:port]/[path]
      http://[user[:pass]@]host[:port]/[path]
      https://[user[:pass]@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # mark the status message for translation, consistent with
    # pull ('pulling from %s') and outgoing ('comparing with %s')
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    # repo.push returns 0 on success with changes pushed
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2138 def rawcommit(ui, repo, *pats, **opts):
2138 def rawcommit(ui, repo, *pats, **opts):
2139 """raw commit interface (DEPRECATED)
2139 """raw commit interface (DEPRECATED)
2140
2140
2141 (DEPRECATED)
2141 (DEPRECATED)
2142 Lowlevel commit, for use in helper scripts.
2142 Lowlevel commit, for use in helper scripts.
2143
2143
2144 This command is not intended to be used by normal users, as it is
2144 This command is not intended to be used by normal users, as it is
2145 primarily useful for importing from other SCMs.
2145 primarily useful for importing from other SCMs.
2146
2146
2147 This command is now deprecated and will be removed in a future
2147 This command is now deprecated and will be removed in a future
2148 release, please use debugsetparents and commit instead.
2148 release, please use debugsetparents and commit instead.
2149 """
2149 """
2150
2150
2151 ui.warn(_("(the rawcommit command is deprecated)\n"))
2151 ui.warn(_("(the rawcommit command is deprecated)\n"))
2152
2152
2153 message = cmdutil.logmessage(opts)
2153 message = cmdutil.logmessage(opts)
2154
2154
2155 files = cmdutil.match(repo, pats, opts).files()
2155 files = cmdutil.match(repo, pats, opts).files()
2156 if opts['files']:
2156 if opts['files']:
2157 files += open(opts['files']).read().splitlines()
2157 files += open(opts['files']).read().splitlines()
2158
2158
2159 parents = [repo.lookup(p) for p in opts['parent']]
2159 parents = [repo.lookup(p) for p in opts['parent']]
2160
2160
2161 try:
2161 try:
2162 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2162 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2163 except ValueError, inst:
2163 except ValueError, inst:
2164 raise util.Abort(str(inst))
2164 raise util.Abort(str(inst))
2165
2165
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # nothing to recover: report failure to the caller
    if not repo.recover():
        return 1
    # recovery succeeded; verify the repository as a sanity check
    return hg.verify(repo)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    st = repo.status(match=m, clean=True)
    modified, added, deleted, clean = st[0], st[1], st[3], st[6]

    def complain(files, reason):
        # explain why each file in 'files' is being left alone
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # pick the remove/forget sets per the table in the docstring
    if force:
        to_remove, to_forget = modified + deleted + clean, added
    elif after:
        to_remove, to_forget = deleted, []
        complain(modified + added + clean, _('still exists'))
    else:
        to_remove, to_forget = deleted + clean, []
        complain(modified, _('is modified'))
        complain(added, _('has been marked for add'))

    for f in util.sort(to_remove + to_forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(to_forget)
    repo.remove(to_remove, unlink=not after)
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # hold the working-dir lock for the duration of the copy+remove
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # dropping the last reference releases the lock
        del lock
def resolve(ui, repo, *pats, **opts):
    """resolve file merges from a branch merge or update

    This command will attempt to resolve unresolved merges from the
    last update or merge command. This will use the local file
    revision preserved at the last update or merge to cleanly retry
    the file merge attempt. With no file or options specified, this
    command will attempt to resolve all unresolved files.

    The codes used to show the status of files are:
     U = unresolved
     R = resolved
    """

    # --list/--mark/--unmark are mutually exclusive
    if len([x for x in opts if opts[x]]) > 1:
        raise util.Abort(_("too many options specified"))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if not m(f):
            continue
        if opts.get("list"):
            ui.write("%s %s\n" % (ms[f].upper(), f))
        elif opts.get("mark"):
            ms.mark(f, "r")
        elif opts.get("unmark"):
            ms.mark(f, "u")
        else:
            # default action: retry the file merge against the
            # working context and its last parent
            wctx = repo[None]
            mctx = wctx.parents()[-1]
            ms.resolve(f, wctx, mctx)
2286 def revert(ui, repo, *pats, **opts):
2286 def revert(ui, repo, *pats, **opts):
2287 """restore individual files or dirs to an earlier state
2287 """restore individual files or dirs to an earlier state
2288
2288
2289 (use update -r to check out earlier revisions, revert does not
2289 (use update -r to check out earlier revisions, revert does not
2290 change the working dir parents)
2290 change the working dir parents)
2291
2291
2292 With no revision specified, revert the named files or directories
2292 With no revision specified, revert the named files or directories
2293 to the contents they had in the parent of the working directory.
2293 to the contents they had in the parent of the working directory.
2294 This restores the contents of the affected files to an unmodified
2294 This restores the contents of the affected files to an unmodified
2295 state and unschedules adds, removes, copies, and renames. If the
2295 state and unschedules adds, removes, copies, and renames. If the
2296 working directory has two parents, you must explicitly specify the
2296 working directory has two parents, you must explicitly specify the
2297 revision to revert to.
2297 revision to revert to.
2298
2298
2299 Using the -r option, revert the given files or directories to their
2299 Using the -r option, revert the given files or directories to their
2300 contents as of a specific revision. This can be helpful to "roll
2300 contents as of a specific revision. This can be helpful to "roll
2301 back" some or all of an earlier change.
2301 back" some or all of an earlier change.
2302 See 'hg help dates' for a list of formats valid for -d/--date.
2302 See 'hg help dates' for a list of formats valid for -d/--date.
2303
2303
2304 Revert modifies the working directory. It does not commit any
2304 Revert modifies the working directory. It does not commit any
2305 changes, or change the parent of the working directory. If you
2305 changes, or change the parent of the working directory. If you
2306 revert to a revision other than the parent of the working
2306 revert to a revision other than the parent of the working
2307 directory, the reverted files will thus appear modified
2307 directory, the reverted files will thus appear modified
2308 afterwards.
2308 afterwards.
2309
2309
2310 If a file has been deleted, it is restored. If the executable
2310 If a file has been deleted, it is restored. If the executable
2311 mode of a file was changed, it is reset.
2311 mode of a file was changed, it is reset.
2312
2312
2313 If names are given, all files matching the names are reverted.
2313 If names are given, all files matching the names are reverted.
2314 If no arguments are given, no files are reverted.
2314 If no arguments are given, no files are reverted.
2315
2315
2316 Modified files are saved with a .orig suffix before reverting.
2316 Modified files are saved with a .orig suffix before reverting.
2317 To disable these backups, use --no-backup.
2317 To disable these backups, use --no-backup.
2318 """
2318 """
2319
2319
2320 if opts["date"]:
2320 if opts["date"]:
2321 if opts["rev"]:
2321 if opts["rev"]:
2322 raise util.Abort(_("you can't specify a revision and a date"))
2322 raise util.Abort(_("you can't specify a revision and a date"))
2323 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2323 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2324
2324
2325 if not pats and not opts['all']:
2325 if not pats and not opts['all']:
2326 raise util.Abort(_('no files or directories specified; '
2326 raise util.Abort(_('no files or directories specified; '
2327 'use --all to revert the whole repo'))
2327 'use --all to revert the whole repo'))
2328
2328
2329 parent, p2 = repo.dirstate.parents()
2329 parent, p2 = repo.dirstate.parents()
2330 if not opts['rev'] and p2 != nullid:
2330 if not opts['rev'] and p2 != nullid:
2331 raise util.Abort(_('uncommitted merge - please provide a '
2331 raise util.Abort(_('uncommitted merge - please provide a '
2332 'specific revision'))
2332 'specific revision'))
2333 ctx = repo[opts['rev']]
2333 ctx = repo[opts['rev']]
2334 node = ctx.node()
2334 node = ctx.node()
2335 mf = ctx.manifest()
2335 mf = ctx.manifest()
2336 if node == parent:
2336 if node == parent:
2337 pmf = mf
2337 pmf = mf
2338 else:
2338 else:
2339 pmf = None
2339 pmf = None
2340
2340
2341 # need all matching names in dirstate and manifest of target rev,
2341 # need all matching names in dirstate and manifest of target rev,
2342 # so have to walk both. do not print errors if files exist in one
2342 # so have to walk both. do not print errors if files exist in one
2343 # but not other.
2343 # but not other.
2344
2344
2345 names = {}
2345 names = {}
2346
2346
2347 wlock = repo.wlock()
2347 wlock = repo.wlock()
2348 try:
2348 try:
2349 # walk dirstate.
2349 # walk dirstate.
2350 files = []
2350 files = []
2351
2351
2352 m = cmdutil.match(repo, pats, opts)
2352 m = cmdutil.match(repo, pats, opts)
2353 m.bad = lambda x,y: False
2353 m.bad = lambda x,y: False
2354 for abs in repo.walk(m):
2354 for abs in repo.walk(m):
2355 names[abs] = m.rel(abs), m.exact(abs)
2355 names[abs] = m.rel(abs), m.exact(abs)
2356
2356
2357 # walk target manifest.
2357 # walk target manifest.
2358
2358
2359 def badfn(path, msg):
2359 def badfn(path, msg):
2360 if path in names:
2360 if path in names:
2361 return False
2361 return False
2362 path_ = path + '/'
2362 path_ = path + '/'
2363 for f in names:
2363 for f in names:
2364 if f.startswith(path_):
2364 if f.startswith(path_):
2365 return False
2365 return False
2366 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2366 repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
2367 return False
2367 return False
2368
2368
2369 m = cmdutil.match(repo, pats, opts)
2369 m = cmdutil.match(repo, pats, opts)
2370 m.bad = badfn
2370 m.bad = badfn
2371 for abs in repo[node].walk(m):
2371 for abs in repo[node].walk(m):
2372 if abs not in names:
2372 if abs not in names:
2373 names[abs] = m.rel(abs), m.exact(abs)
2373 names[abs] = m.rel(abs), m.exact(abs)
2374
2374
2375 m = cmdutil.matchfiles(repo, names)
2375 m = cmdutil.matchfiles(repo, names)
2376 changes = repo.status(match=m)[:4]
2376 changes = repo.status(match=m)[:4]
2377 modified, added, removed, deleted = map(dict.fromkeys, changes)
2377 modified, added, removed, deleted = map(dict.fromkeys, changes)
2378
2378
2379 # if f is a rename, also revert the source
2379 # if f is a rename, also revert the source
2380 cwd = repo.getcwd()
2380 cwd = repo.getcwd()
2381 for f in added:
2381 for f in added:
2382 src = repo.dirstate.copied(f)
2382 src = repo.dirstate.copied(f)
2383 if src and src not in names and repo.dirstate[src] == 'r':
2383 if src and src not in names and repo.dirstate[src] == 'r':
2384 removed[src] = None
2384 removed[src] = None
2385 names[src] = (repo.pathto(src, cwd), True)
2385 names[src] = (repo.pathto(src, cwd), True)
2386
2386
2387 def removeforget(abs):
2387 def removeforget(abs):
2388 if repo.dirstate[abs] == 'a':
2388 if repo.dirstate[abs] == 'a':
2389 return _('forgetting %s\n')
2389 return _('forgetting %s\n')
2390 return _('removing %s\n')
2390 return _('removing %s\n')
2391
2391
2392 revert = ([], _('reverting %s\n'))
2392 revert = ([], _('reverting %s\n'))
2393 add = ([], _('adding %s\n'))
2393 add = ([], _('adding %s\n'))
2394 remove = ([], removeforget)
2394 remove = ([], removeforget)
2395 undelete = ([], _('undeleting %s\n'))
2395 undelete = ([], _('undeleting %s\n'))
2396
2396
2397 disptable = (
2397 disptable = (
2398 # dispatch table:
2398 # dispatch table:
2399 # file state
2399 # file state
2400 # action if in target manifest
2400 # action if in target manifest
2401 # action if not in target manifest
2401 # action if not in target manifest
2402 # make backup if in target manifest
2402 # make backup if in target manifest
2403 # make backup if not in target manifest
2403 # make backup if not in target manifest
2404 (modified, revert, remove, True, True),
2404 (modified, revert, remove, True, True),
2405 (added, revert, remove, True, False),
2405 (added, revert, remove, True, False),
2406 (removed, undelete, None, False, False),
2406 (removed, undelete, None, False, False),
2407 (deleted, revert, remove, False, False),
2407 (deleted, revert, remove, False, False),
2408 )
2408 )
2409
2409
2410 for abs, (rel, exact) in util.sort(names.items()):
2410 for abs, (rel, exact) in util.sort(names.items()):
2411 mfentry = mf.get(abs)
2411 mfentry = mf.get(abs)
2412 target = repo.wjoin(abs)
2412 target = repo.wjoin(abs)
2413 def handle(xlist, dobackup):
2413 def handle(xlist, dobackup):
2414 xlist[0].append(abs)
2414 xlist[0].append(abs)
2415 if dobackup and not opts['no_backup'] and util.lexists(target):
2415 if dobackup and not opts['no_backup'] and util.lexists(target):
2416 bakname = "%s.orig" % rel
2416 bakname = "%s.orig" % rel
2417 ui.note(_('saving current version of %s as %s\n') %
2417 ui.note(_('saving current version of %s as %s\n') %
2418 (rel, bakname))
2418 (rel, bakname))
2419 if not opts.get('dry_run'):
2419 if not opts.get('dry_run'):
2420 util.copyfile(target, bakname)
2420 util.copyfile(target, bakname)
2421 if ui.verbose or not exact:
2421 if ui.verbose or not exact:
2422 msg = xlist[1]
2422 msg = xlist[1]
2423 if not isinstance(msg, basestring):
2423 if not isinstance(msg, basestring):
2424 msg = msg(abs)
2424 msg = msg(abs)
2425 ui.status(msg % rel)
2425 ui.status(msg % rel)
2426 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2426 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2427 if abs not in table: continue
2427 if abs not in table: continue
2428 # file has changed in dirstate
2428 # file has changed in dirstate
2429 if mfentry:
2429 if mfentry:
2430 handle(hitlist, backuphit)
2430 handle(hitlist, backuphit)
2431 elif misslist is not None:
2431 elif misslist is not None:
2432 handle(misslist, backupmiss)
2432 handle(misslist, backupmiss)
2433 break
2433 break
2434 else:
2434 else:
2435 if abs not in repo.dirstate:
2435 if abs not in repo.dirstate:
2436 if mfentry:
2436 if mfentry:
2437 handle(add, True)
2437 handle(add, True)
2438 elif exact:
2438 elif exact:
2439 ui.warn(_('file not managed: %s\n') % rel)
2439 ui.warn(_('file not managed: %s\n') % rel)
2440 continue
2440 continue
2441 # file has not changed in dirstate
2441 # file has not changed in dirstate
2442 if node == parent:
2442 if node == parent:
2443 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2443 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2444 continue
2444 continue
2445 if pmf is None:
2445 if pmf is None:
2446 # only need parent manifest in this unlikely case,
2446 # only need parent manifest in this unlikely case,
2447 # so do not read by default
2447 # so do not read by default
2448 pmf = repo[parent].manifest()
2448 pmf = repo[parent].manifest()
2449 if abs in pmf:
2449 if abs in pmf:
2450 if mfentry:
2450 if mfentry:
2451 # if version of file is same in parent and target
2451 # if version of file is same in parent and target
2452 # manifests, do nothing
2452 # manifests, do nothing
2453 if (pmf[abs] != mfentry or
2453 if (pmf[abs] != mfentry or
2454 pmf.flags(abs) != mf.flags(abs)):
2454 pmf.flags(abs) != mf.flags(abs)):
2455 handle(revert, False)
2455 handle(revert, False)
2456 else:
2456 else:
2457 handle(remove, False)
2457 handle(remove, False)
2458
2458
2459 if not opts.get('dry_run'):
2459 if not opts.get('dry_run'):
2460 def checkout(f):
2460 def checkout(f):
2461 fc = ctx[f]
2461 fc = ctx[f]
2462 repo.wwrite(f, fc.data(), fc.flags())
2462 repo.wwrite(f, fc.data(), fc.flags())
2463
2463
2464 audit_path = util.path_auditor(repo.root)
2464 audit_path = util.path_auditor(repo.root)
2465 for f in remove[0]:
2465 for f in remove[0]:
2466 if repo.dirstate[f] == 'a':
2466 if repo.dirstate[f] == 'a':
2467 repo.dirstate.forget(f)
2467 repo.dirstate.forget(f)
2468 continue
2468 continue
2469 audit_path(f)
2469 audit_path(f)
2470 try:
2470 try:
2471 util.unlink(repo.wjoin(f))
2471 util.unlink(repo.wjoin(f))
2472 except OSError:
2472 except OSError:
2473 pass
2473 pass
2474 repo.dirstate.remove(f)
2474 repo.dirstate.remove(f)
2475
2475
2476 normal = None
2476 normal = None
2477 if node == parent:
2477 if node == parent:
2478 # We're reverting to our parent. If possible, we'd like status
2478 # We're reverting to our parent. If possible, we'd like status
2479 # to report the file as clean. We have to use normallookup for
2479 # to report the file as clean. We have to use normallookup for
2480 # merges to avoid losing information about merged/dirty files.
2480 # merges to avoid losing information about merged/dirty files.
2481 if p2 != nullid:
2481 if p2 != nullid:
2482 normal = repo.dirstate.normallookup
2482 normal = repo.dirstate.normallookup
2483 else:
2483 else:
2484 normal = repo.dirstate.normal
2484 normal = repo.dirstate.normal
2485 for f in revert[0]:
2485 for f in revert[0]:
2486 checkout(f)
2486 checkout(f)
2487 if normal:
2487 if normal:
2488 normal(f)
2488 normal(f)
2489
2489
2490 for f in add[0]:
2490 for f in add[0]:
2491 checkout(f)
2491 checkout(f)
2492 repo.dirstate.add(f)
2492 repo.dirstate.add(f)
2493
2493
2494 normal = repo.dirstate.normallookup
2494 normal = repo.dirstate.normallookup
2495 if node == parent and p2 == nullid:
2495 if node == parent and p2 == nullid:
2496 normal = repo.dirstate.normal
2496 normal = repo.dirstate.normal
2497 for f in undelete[0]:
2497 for f in undelete[0]:
2498 checkout(f)
2498 checkout(f)
2499 normal(f)
2499 normal(f)
2500
2500
2501 finally:
2501 finally:
2502 del wlock
2502 del wlock
2503
2503
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All of the work is delegated to the repository object.
    repo.rollback()
2531
2531
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    # repo.root is an absolute path; emit it followed by a newline.
    ui.write(repo.root + "\n")
2538
2538
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    # --stdio mode: speak the ssh wire protocol on stdin/stdout instead
    # of starting an HTTP server.
    if opts["stdio"]:
        if repo is None:
            raise RepoError(_("There is no Mercurial repository here"
                              " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Copy the relevant command-line options into the [web] config
    # section so the web server picks them up.
    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise RepoError(_("There is no Mercurial repository here"
                          " (.hg not found)"))

    class service:
        def init(self):
            # Install signal handlers, then create (but do not yet run)
            # the HTTP server.
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            if not ui.verbose:
                return

            # Build a human-readable URL for the status message below.
            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''  # default HTTP port needs no suffix

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr:  # IPv6 literal needs brackets
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonization / pid files as requested.
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2602
2602
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    end = opts['print0'] and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()

    # Which state classes should be displayed?
    show = [k for k in states if opts[k]]
    if opts['all']:
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # Default: -mard (+unknown unless quiet).
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    # Compute copy information when -A or -C was requested.
    if (opts['all'] or opts['copies']) and not opts['no_status']:
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1]  # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts['no_status']:
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    # NOTE(review): padding of the copy-source line was
                    # collapsed in the rendered source — confirm width
                    # against upstream.
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2673
2673
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names

    # Reject duplicate and reserved names up front.
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']

    message = opts['message']
    if opts['remove']:
        # Removing a tag: each name must exist and be of the expected
        # (local vs global) kind.  A tag is removed by re-tagging nullid.
        expectedtype = opts['local'] and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts['force']:
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)

    # Refuse to tag an uncommitted merge implicitly.
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts['local'], opts['user'], date)
2737
2737
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """

    l = repo.tagslist()
    l.reverse()  # newest first
    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # Node unknown to the changelog: no revision number.
            # NOTE(review): padding here was collapsed in the rendered
            # source — confirm alignment width against upstream.
            r = "    ?:%s" % hn
        else:
            # NOTE(review): the write appears to be on the success path
            # only; on LookupError nothing is printed — confirm against
            # upstream before relying on this.
            spaces = " " * (30 - util.locallen(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2770
2770
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # The tip is always the highest revision number: len(repo) - 1.
    cmdutil.show_changeset(ui, repo, opts).show(len(repo) - 1)
2784
2784
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = None
    try:
        lock = repo.lock()
        for fname in fnames:
            # Local file takes priority; otherwise treat the name as a URL.
            if os.path.exists(fname):
                f = open(fname, "rb")
            else:
                f = urllib.urlopen(fname)
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        del lock  # releases the repository lock

    # modheads from the last bundle applied decides the post-pull action.
    return postincoming(ui, repo, modheads, opts['update'], None)
2807
2807
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the repository's working directory to the specified revision,
    or the tip of the current branch if none is specified.

    If the requested revision is a descendant of the working
    directory, any outstanding changes in the working directory will
    be merged into the result. If it is not directly descended but is
    on the same named branch, update aborts with a suggestion to use
    merge or update -C instead.

    If the requested revision is on a different named branch and the
    working directory is clean, update quietly switches branches.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for --date.
    """
    # 'node' (positional) and 'rev' (option) are two spellings of the
    # same thing; at most one may be supplied.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)
2842
2842
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # Delegate entirely to the hg module's verification routine.
    return hg.verify(repo)
2854
2854
def version_(ui):
    """output version and copyright information"""
    # Version goes through ui.write (always shown); the copyright blurb
    # goes through ui.status (suppressed by --quiet).
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2865
2865
2866 # Command options and aliases are listed here, alphabetically
2866 # Command options and aliases are listed here, alphabetically
2867
2867
# Global options accepted by every hg command.  The shared option lists
# defined below are spliced into per-command option lists in `table`.
# Each entry is (short flag, long name, default value, help text).
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]
2888
2888
# Shared option fragment: -n/--dry-run.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]
2891
2891
# Shared option fragment for commands that talk to a remote repository.
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]
2896
2896
# Shared option fragment for commands that walk the working directory.
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]
2901
2901
# Shared option fragment for supplying a commit message.
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2906
2906
# Shared option fragment for overriding commit metadata (date/user).
commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record user as committer')),
]
2911
2911
# Shared option fragment for template-driven output.
templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]
2916
2916
# Shared option fragment for history-listing commands; extends templateopts.
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts
2922
2922
# Shared option fragment for diff generation.
diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]
2928
2928
# Additional diff-formatting options (whitespace handling, context size).
diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]
2939
2939
# Command table: maps a command name (with '|'-separated aliases, e.g.
# "copy|cp") to a (function, option list, usage synopsis) triple.  A
# leading '^' on the key presumably flags the command for the short help
# listing -- TODO confirm against the help machinery.
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate|blame":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts + commitopts2,
         _('hg backout [OPTION]... [-r] REV')),
    "bisect":
        (bisect,
         [('r', 'reset', False, _('reset bisect state')),
          ('g', 'good', False, _('mark changeset good')),
          ('b', 'bad', False, _('mark changeset bad')),
          ('s', 'skip', False, _('skip testing changeset')),
          ('U', 'noupdate', False, _('do not update to target'))],
         _("hg bisect [-gbsr] [REV]")),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [-f] [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset up to which you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
          ('a', 'all', None, _('bundle all changesets in the repository')),
          ('t', 'type', 'bzip2', _('bundle compression type to use')),
         ] + remoteopts,
         _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('', 'decode', None, _('apply any matching decode filter')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None,
           _('the clone will only contain a repository (no working copy)')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
         ] + walkopts + commitopts + commitopts2,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [],
                      _('hg debugancestor [INDEX] REV1 REV2')),
    "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('hg debugcomplete [-o] CMD')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('hg debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
    "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
    "debugindex": (debugindex, [], _('hg debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
    "debuginstall": (debuginstall, [], _('hg debuginstall')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('F', 'files', '', _('file list'))
         ] + commitopts + commitopts2,
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('hg debugrebuildstate [-r REV] [REV]')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('hg debugrename [-r REV] FILE')),
    "debugsetparents":
        (debugsetparents,
         [],
         _('hg debugsetparents REV1 [REV2]')),
    "debugstate":
        (debugstate,
         [('', 'nodates', None, _('do not display the saved mtime'))],
         _('hg debugstate [OPTS]')),
    "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision'))
         ] + diffopts + diffopts2 + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('', 'switch-parent', None, _('diff against the second parent'))
         ] + diffopts,
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('list the author (long with -v)')),
          ('d', 'date', None, _('list the date (short with -q)')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('r', 'rev', '', _('show only heads which are descendants of rev')),
         ] + templateopts,
         _('hg heads [-r REV] [REV]...')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id":
        (identify,
         [('r', 'rev', '', _('identify the specified rev')),
          ('n', 'num', None, _('show local revision number')),
          ('i', 'id', None, _('show global revision id')),
          ('b', 'branch', None, _('show branch')),
          ('t', 'tags', None, _('show tags'))],
         _('hg identify [-nibt] [-r REV] [SOURCE]')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes')),
          ('', 'no-commit', None, _("don't commit, just update the working directory")),
          ('', 'exact', None,
           _('apply patch to the nodes from which it was generated')),
          ('', 'import-branch', None,
           _('Use any branch information in patch (implied by --exact)'))] +
         commitopts + commitopts2,
         _('hg import [OPTION]... PATCH...')),
    "incoming|in":
        (incoming,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + logopts + remoteopts,
         _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         remoteopts,
         _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
         ] + walkopts,
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('d', 'date', '', _('show revs matching date spec')),
          ('C', 'copies', None, _('show copied files')),
          ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('', 'removed', None, _('include revs where files were removed')),
          ('m', 'only-merges', None, _('show only merges')),
          ('b', 'only-branch', [],
           _('show only changesets within the given named branch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
         ] + logopts + walkopts,
         _('hg log [OPTION]... [FILE]')),
    "manifest":
        (manifest,
         [('r', 'rev', '', _('revision to display'))],
         _('hg manifest [-r REV]')),
    "^merge":
        (merge,
         [('f', 'force', None, _('force a merge with outstanding changes')),
          ('r', 'rev', '', _('revision to merge')),
         ],
         _('hg merge [-f] [[-r] REV]')),
    "outgoing|out":
        (outgoing,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
         ] + logopts + remoteopts,
         _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('r', 'rev', '', _('show parents from the specified rev')),
         ] + templateopts,
         _('hg parents [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update to new tip if changesets were pulled')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to pull')),
         ] + remoteopts,
         _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('r', 'rev', [],
           _('a specific revision up to which you would like to push')),
         ] + remoteopts,
         _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record delete for missing files')),
          ('f', 'force', None,
           _('remove (and delete) file even if added or modified')),
         ] + walkopts,
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg rename [OPTION]... SOURCE... DEST')),
    "resolve":
        (resolve,
         [('l', 'list', None, _('list state of files needing merge')),
          ('m', 'mark', None, _('mark files as resolved')),
          ('u', 'unmark', None, _('unmark files as resolved'))],
         # i18n fix: this synopsis was the only one not wrapped in _()
         _('hg resolve [OPTION] [FILES...]')),
    "revert":
        (revert,
         [('a', 'all', None, _('revert all changes when no arguments given')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
         ] + walkopts + dryrunopts,
         _('hg revert [OPTION]... [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to listen on (default: 8000)')),
          ('a', 'address', '', _('address to listen on (default: all interfaces)')),
          ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
          ('', 'certificate', '', _('SSL certificate file'))],
         _('hg serve [OPTION]...')),
    "showconfig|debugconfig":
        (showconfig,
         [('u', 'untrusted', None, _('show untrusted configuration options'))],
         _('hg showconfig [-u] [NAME]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision')),
         ] + walkopts,
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('f', 'force', None, _('replace existing tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('m', 'message', '', _('use <text> as commit message')),
         ] + commitopts2,
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('p', 'patch', None, _('show patch')),
         ] + templateopts,
         _('hg tip [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update to new tip if changesets were unbundled'))],
         _('hg unbundle [-u] FILE...')),
    "^update|up|checkout|co":
        (update,
         [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
          ('d', 'date', '', _('tipmost revision matching date')),
          ('r', 'rev', '', _('revision'))],
         _('hg update [-C] [-d DATE] [[-r] REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (version_, [], _('hg version')),
}
3317
3317
# Space-separated lists of command names.  Judging by the names, `norepo`
# commands run without a repository and `optionalrepo` commands accept one
# when present -- behavior is enforced by the dispatcher, not here.
norepo = ("clone init version help debugcomplete debugdata"
          " debugindex debugindexdot debugdate debuginstall debugfsinfo")
optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,294 +1,294 b''
1 # hgweb/server.py - The standalone hg web server.
1 # hgweb/server.py - The standalone hg web server.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
9 import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
10 from mercurial import hg, util
10 from mercurial import hg, util
11 from mercurial.repo import RepoError
11 from mercurial.repo import RepoError
12 from hgweb_mod import hgweb
12 from hgweb_mod import hgweb
13 from hgwebdir_mod import hgwebdir
13 from hgwebdir_mod import hgwebdir
14 from mercurial.i18n import gettext as _
14 from mercurial.i18n import gettext as _
15
15
16 def _splitURI(uri):
16 def _splitURI(uri):
17 """ Return path and query splited from uri
17 """ Return path and query splited from uri
18
18
19 Just like CGI environment, the path is unquoted, the query is
19 Just like CGI environment, the path is unquoted, the query is
20 not.
20 not.
21 """
21 """
22 if '?' in uri:
22 if '?' in uri:
23 path, query = uri.split('?', 1)
23 path, query = uri.split('?', 1)
24 else:
24 else:
25 path, query = uri, ''
25 path, query = uri, ''
26 return urllib.unquote(path), query
26 return urllib.unquote(path), query
27
27
28 class _error_logger(object):
28 class _error_logger(object):
29 def __init__(self, handler):
29 def __init__(self, handler):
30 self.handler = handler
30 self.handler = handler
31 def flush(self):
31 def flush(self):
32 pass
32 pass
33 def write(self, str):
33 def write(self, str):
34 self.writelines(str.split('\n'))
34 self.writelines(str.split('\n'))
35 def writelines(self, seq):
35 def writelines(self, seq):
36 for msg in seq:
36 for msg in seq:
37 self.handler.log_error("HG error: %s", msg)
37 self.handler.log_error("HG error: %s", msg)
38
38
39 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
39 class _hgwebhandler(object, BaseHTTPServer.BaseHTTPRequestHandler):
40
40
41 url_scheme = 'http'
41 url_scheme = 'http'
42
42
43 def __init__(self, *args, **kargs):
43 def __init__(self, *args, **kargs):
44 self.protocol_version = 'HTTP/1.1'
44 self.protocol_version = 'HTTP/1.1'
45 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
45 BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)
46
46
47 def _log_any(self, fp, format, *args):
47 def _log_any(self, fp, format, *args):
48 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
48 fp.write("%s - - [%s] %s\n" % (self.client_address[0],
49 self.log_date_time_string(),
49 self.log_date_time_string(),
50 format % args))
50 format % args))
51 fp.flush()
51 fp.flush()
52
52
53 def log_error(self, format, *args):
53 def log_error(self, format, *args):
54 self._log_any(self.server.errorlog, format, *args)
54 self._log_any(self.server.errorlog, format, *args)
55
55
56 def log_message(self, format, *args):
56 def log_message(self, format, *args):
57 self._log_any(self.server.accesslog, format, *args)
57 self._log_any(self.server.accesslog, format, *args)
58
58
59 def do_write(self):
59 def do_write(self):
60 try:
60 try:
61 self.do_hgweb()
61 self.do_hgweb()
62 except socket.error, inst:
62 except socket.error, inst:
63 if inst[0] != errno.EPIPE:
63 if inst[0] != errno.EPIPE:
64 raise
64 raise
65
65
66 def do_POST(self):
66 def do_POST(self):
67 try:
67 try:
68 self.do_write()
68 self.do_write()
69 except StandardError, inst:
69 except StandardError, inst:
70 self._start_response("500 Internal Server Error", [])
70 self._start_response("500 Internal Server Error", [])
71 self._write("Internal Server Error")
71 self._write("Internal Server Error")
72 tb = "".join(traceback.format_exception(*sys.exc_info()))
72 tb = "".join(traceback.format_exception(*sys.exc_info()))
73 self.log_error("Exception happened during processing request '%s':\n%s",
73 self.log_error("Exception happened during processing request '%s':\n%s",
74 self.path, tb)
74 self.path, tb)
75
75
76 def do_GET(self):
76 def do_GET(self):
77 self.do_POST()
77 self.do_POST()
78
78
79 def do_hgweb(self):
79 def do_hgweb(self):
80 path, query = _splitURI(self.path)
80 path, query = _splitURI(self.path)
81
81
82 env = {}
82 env = {}
83 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
83 env['GATEWAY_INTERFACE'] = 'CGI/1.1'
84 env['REQUEST_METHOD'] = self.command
84 env['REQUEST_METHOD'] = self.command
85 env['SERVER_NAME'] = self.server.server_name
85 env['SERVER_NAME'] = self.server.server_name
86 env['SERVER_PORT'] = str(self.server.server_port)
86 env['SERVER_PORT'] = str(self.server.server_port)
87 env['REQUEST_URI'] = self.path
87 env['REQUEST_URI'] = self.path
88 env['SCRIPT_NAME'] = self.server.prefix
88 env['SCRIPT_NAME'] = self.server.prefix
89 env['PATH_INFO'] = path[len(self.server.prefix):]
89 env['PATH_INFO'] = path[len(self.server.prefix):]
90 env['REMOTE_HOST'] = self.client_address[0]
90 env['REMOTE_HOST'] = self.client_address[0]
91 env['REMOTE_ADDR'] = self.client_address[0]
91 env['REMOTE_ADDR'] = self.client_address[0]
92 if query:
92 if query:
93 env['QUERY_STRING'] = query
93 env['QUERY_STRING'] = query
94
94
95 if self.headers.typeheader is None:
95 if self.headers.typeheader is None:
96 env['CONTENT_TYPE'] = self.headers.type
96 env['CONTENT_TYPE'] = self.headers.type
97 else:
97 else:
98 env['CONTENT_TYPE'] = self.headers.typeheader
98 env['CONTENT_TYPE'] = self.headers.typeheader
99 length = self.headers.getheader('content-length')
99 length = self.headers.getheader('content-length')
100 if length:
100 if length:
101 env['CONTENT_LENGTH'] = length
101 env['CONTENT_LENGTH'] = length
102 for header in [h for h in self.headers.keys()
102 for header in [h for h in self.headers.keys()
103 if h not in ('content-type', 'content-length')]:
103 if h not in ('content-type', 'content-length')]:
104 hkey = 'HTTP_' + header.replace('-', '_').upper()
104 hkey = 'HTTP_' + header.replace('-', '_').upper()
105 hval = self.headers.getheader(header)
105 hval = self.headers.getheader(header)
106 hval = hval.replace('\n', '').strip()
106 hval = hval.replace('\n', '').strip()
107 if hval:
107 if hval:
108 env[hkey] = hval
108 env[hkey] = hval
109 env['SERVER_PROTOCOL'] = self.request_version
109 env['SERVER_PROTOCOL'] = self.request_version
110 env['wsgi.version'] = (1, 0)
110 env['wsgi.version'] = (1, 0)
111 env['wsgi.url_scheme'] = self.url_scheme
111 env['wsgi.url_scheme'] = self.url_scheme
112 env['wsgi.input'] = self.rfile
112 env['wsgi.input'] = self.rfile
113 env['wsgi.errors'] = _error_logger(self)
113 env['wsgi.errors'] = _error_logger(self)
114 env['wsgi.multithread'] = isinstance(self.server,
114 env['wsgi.multithread'] = isinstance(self.server,
115 SocketServer.ThreadingMixIn)
115 SocketServer.ThreadingMixIn)
116 env['wsgi.multiprocess'] = isinstance(self.server,
116 env['wsgi.multiprocess'] = isinstance(self.server,
117 SocketServer.ForkingMixIn)
117 SocketServer.ForkingMixIn)
118 env['wsgi.run_once'] = 0
118 env['wsgi.run_once'] = 0
119
119
120 self.close_connection = True
120 self.close_connection = True
121 self.saved_status = None
121 self.saved_status = None
122 self.saved_headers = []
122 self.saved_headers = []
123 self.sent_headers = False
123 self.sent_headers = False
124 self.length = None
124 self.length = None
125 for chunk in self.server.application(env, self._start_response):
125 for chunk in self.server.application(env, self._start_response):
126 self._write(chunk)
126 self._write(chunk)
127
127
128 def send_headers(self):
128 def send_headers(self):
129 if not self.saved_status:
129 if not self.saved_status:
130 raise AssertionError("Sending headers before start_response() called")
130 raise AssertionError("Sending headers before start_response() called")
131 saved_status = self.saved_status.split(None, 1)
131 saved_status = self.saved_status.split(None, 1)
132 saved_status[0] = int(saved_status[0])
132 saved_status[0] = int(saved_status[0])
133 self.send_response(*saved_status)
133 self.send_response(*saved_status)
134 should_close = True
134 should_close = True
135 for h in self.saved_headers:
135 for h in self.saved_headers:
136 self.send_header(*h)
136 self.send_header(*h)
137 if h[0].lower() == 'content-length':
137 if h[0].lower() == 'content-length':
138 should_close = False
138 should_close = False
139 self.length = int(h[1])
139 self.length = int(h[1])
140 # The value of the Connection header is a list of case-insensitive
140 # The value of the Connection header is a list of case-insensitive
141 # tokens separated by commas and optional whitespace.
141 # tokens separated by commas and optional whitespace.
142 if 'close' in [token.strip().lower() for token in
142 if 'close' in [token.strip().lower() for token in
143 self.headers.get('connection', '').split(',')]:
143 self.headers.get('connection', '').split(',')]:
144 should_close = True
144 should_close = True
145 if should_close:
145 if should_close:
146 self.send_header('Connection', 'close')
146 self.send_header('Connection', 'close')
147 self.close_connection = should_close
147 self.close_connection = should_close
148 self.end_headers()
148 self.end_headers()
149 self.sent_headers = True
149 self.sent_headers = True
150
150
151 def _start_response(self, http_status, headers, exc_info=None):
151 def _start_response(self, http_status, headers, exc_info=None):
152 code, msg = http_status.split(None, 1)
152 code, msg = http_status.split(None, 1)
153 code = int(code)
153 code = int(code)
154 self.saved_status = http_status
154 self.saved_status = http_status
155 bad_headers = ('connection', 'transfer-encoding')
155 bad_headers = ('connection', 'transfer-encoding')
156 self.saved_headers = [h for h in headers
156 self.saved_headers = [h for h in headers
157 if h[0].lower() not in bad_headers]
157 if h[0].lower() not in bad_headers]
158 return self._write
158 return self._write
159
159
160 def _write(self, data):
160 def _write(self, data):
161 if not self.saved_status:
161 if not self.saved_status:
162 raise AssertionError("data written before start_response() called")
162 raise AssertionError("data written before start_response() called")
163 elif not self.sent_headers:
163 elif not self.sent_headers:
164 self.send_headers()
164 self.send_headers()
165 if self.length is not None:
165 if self.length is not None:
166 if len(data) > self.length:
166 if len(data) > self.length:
167 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
167 raise AssertionError("Content-length header sent, but more bytes than specified are being written.")
168 self.length = self.length - len(data)
168 self.length = self.length - len(data)
169 self.wfile.write(data)
169 self.wfile.write(data)
170 self.wfile.flush()
170 self.wfile.flush()
171
171
172 class _shgwebhandler(_hgwebhandler):
172 class _shgwebhandler(_hgwebhandler):
173
173
174 url_scheme = 'https'
174 url_scheme = 'https'
175
175
176 def setup(self):
176 def setup(self):
177 self.connection = self.request
177 self.connection = self.request
178 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
178 self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
179 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
179 self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
180
180
181 def do_write(self):
181 def do_write(self):
182 from OpenSSL.SSL import SysCallError
182 from OpenSSL.SSL import SysCallError
183 try:
183 try:
184 super(_shgwebhandler, self).do_write()
184 super(_shgwebhandler, self).do_write()
185 except SysCallError, inst:
185 except SysCallError, inst:
186 if inst.args[0] != errno.EPIPE:
186 if inst.args[0] != errno.EPIPE:
187 raise
187 raise
188
188
189 def handle_one_request(self):
189 def handle_one_request(self):
190 from OpenSSL.SSL import SysCallError, ZeroReturnError
190 from OpenSSL.SSL import SysCallError, ZeroReturnError
191 try:
191 try:
192 super(_shgwebhandler, self).handle_one_request()
192 super(_shgwebhandler, self).handle_one_request()
193 except (SysCallError, ZeroReturnError):
193 except (SysCallError, ZeroReturnError):
194 self.close_connection = True
194 self.close_connection = True
195 pass
195 pass
196
196
197 def create_server(ui, repo):
197 def create_server(ui, repo):
198 use_threads = True
198 use_threads = True
199
199
200 def openlog(opt, default):
200 def openlog(opt, default):
201 if opt and opt != '-':
201 if opt and opt != '-':
202 return open(opt, 'a')
202 return open(opt, 'a')
203 return default
203 return default
204
204
205 if repo is None:
205 if repo is None:
206 myui = ui
206 myui = ui
207 else:
207 else:
208 myui = repo.ui
208 myui = repo.ui
209 address = myui.config("web", "address", "")
209 address = myui.config("web", "address", "")
210 port = int(myui.config("web", "port", 8000))
210 port = int(myui.config("web", "port", 8000))
211 prefix = myui.config("web", "prefix", "")
211 prefix = myui.config("web", "prefix", "")
212 if prefix:
212 if prefix:
213 prefix = "/" + prefix.strip("/")
213 prefix = "/" + prefix.strip("/")
214 use_ipv6 = myui.configbool("web", "ipv6")
214 use_ipv6 = myui.configbool("web", "ipv6")
215 webdir_conf = myui.config("web", "webdir_conf")
215 webdir_conf = myui.config("web", "webdir_conf")
216 ssl_cert = myui.config("web", "certificate")
216 ssl_cert = myui.config("web", "certificate")
217 accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
217 accesslog = openlog(myui.config("web", "accesslog", "-"), sys.stdout)
218 errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
218 errorlog = openlog(myui.config("web", "errorlog", "-"), sys.stderr)
219
219
220 if use_threads:
220 if use_threads:
221 try:
221 try:
222 from threading import activeCount
222 from threading import activeCount
223 except ImportError:
223 except ImportError:
224 use_threads = False
224 use_threads = False
225
225
226 if use_threads:
226 if use_threads:
227 _mixin = SocketServer.ThreadingMixIn
227 _mixin = SocketServer.ThreadingMixIn
228 else:
228 else:
229 if hasattr(os, "fork"):
229 if hasattr(os, "fork"):
230 _mixin = SocketServer.ForkingMixIn
230 _mixin = SocketServer.ForkingMixIn
231 else:
231 else:
232 class _mixin:
232 class _mixin:
233 pass
233 pass
234
234
235 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
235 class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
236
236
237 # SO_REUSEADDR has broken semantics on windows
237 # SO_REUSEADDR has broken semantics on windows
238 if os.name == 'nt':
238 if os.name == 'nt':
239 allow_reuse_address = 0
239 allow_reuse_address = 0
240
240
241 def __init__(self, *args, **kargs):
241 def __init__(self, *args, **kargs):
242 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
242 BaseHTTPServer.HTTPServer.__init__(self, *args, **kargs)
243 self.accesslog = accesslog
243 self.accesslog = accesslog
244 self.errorlog = errorlog
244 self.errorlog = errorlog
245 self.daemon_threads = True
245 self.daemon_threads = True
246 def make_handler():
246 def make_handler():
247 if webdir_conf:
247 if webdir_conf:
248 hgwebobj = hgwebdir(webdir_conf, ui)
248 hgwebobj = hgwebdir(webdir_conf, ui)
249 elif repo is not None:
249 elif repo is not None:
250 hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
250 hgwebobj = hgweb(hg.repository(repo.ui, repo.root))
251 else:
251 else:
252 raise RepoError(_("There is no Mercurial repository here"
252 raise RepoError(_("There is no Mercurial repository here"
253 " (.hg not found)"))
253 " (.hg not found)"))
254 return hgwebobj
254 return hgwebobj
255 self.application = make_handler()
255 self.application = make_handler()
256
256
257 if ssl_cert:
257 if ssl_cert:
258 try:
258 try:
259 from OpenSSL import SSL
259 from OpenSSL import SSL
260 ctx = SSL.Context(SSL.SSLv23_METHOD)
260 ctx = SSL.Context(SSL.SSLv23_METHOD)
261 except ImportError:
261 except ImportError:
262 raise util.Abort("SSL support is unavailable")
262 raise util.Abort(_("SSL support is unavailable"))
263 ctx.use_privatekey_file(ssl_cert)
263 ctx.use_privatekey_file(ssl_cert)
264 ctx.use_certificate_file(ssl_cert)
264 ctx.use_certificate_file(ssl_cert)
265 sock = socket.socket(self.address_family, self.socket_type)
265 sock = socket.socket(self.address_family, self.socket_type)
266 self.socket = SSL.Connection(ctx, sock)
266 self.socket = SSL.Connection(ctx, sock)
267 self.server_bind()
267 self.server_bind()
268 self.server_activate()
268 self.server_activate()
269
269
270 self.addr, self.port = self.socket.getsockname()[0:2]
270 self.addr, self.port = self.socket.getsockname()[0:2]
271 self.prefix = prefix
271 self.prefix = prefix
272 self.fqaddr = socket.getfqdn(address)
272 self.fqaddr = socket.getfqdn(address)
273
273
274 class IPv6HTTPServer(MercurialHTTPServer):
274 class IPv6HTTPServer(MercurialHTTPServer):
275 address_family = getattr(socket, 'AF_INET6', None)
275 address_family = getattr(socket, 'AF_INET6', None)
276
276
277 def __init__(self, *args, **kwargs):
277 def __init__(self, *args, **kwargs):
278 if self.address_family is None:
278 if self.address_family is None:
279 raise RepoError(_('IPv6 not available on this system'))
279 raise RepoError(_('IPv6 not available on this system'))
280 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
280 super(IPv6HTTPServer, self).__init__(*args, **kwargs)
281
281
282 if ssl_cert:
282 if ssl_cert:
283 handler = _shgwebhandler
283 handler = _shgwebhandler
284 else:
284 else:
285 handler = _hgwebhandler
285 handler = _hgwebhandler
286
286
287 try:
287 try:
288 if use_ipv6:
288 if use_ipv6:
289 return IPv6HTTPServer((address, port), handler)
289 return IPv6HTTPServer((address, port), handler)
290 else:
290 else:
291 return MercurialHTTPServer((address, port), handler)
291 return MercurialHTTPServer((address, port), handler)
292 except socket.error, inst:
292 except socket.error, inst:
293 raise util.Abort(_("cannot start server at '%s:%d': %s")
293 raise util.Abort(_("cannot start server at '%s:%d': %s")
294 % (address, port, inst.args[1]))
294 % (address, port, inst.args[1]))
@@ -1,2070 +1,2070 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context, weakref
11 import changelog, dirstate, filelog, manifest, context, weakref
12 import lock, transaction, stat, errno, ui, store
12 import lock, transaction, stat, errno, ui, store
13 import os, revlog, time, util, extensions, hook, inspect
13 import os, revlog, time, util, extensions, hook, inspect
14 import match as match_
14 import match as match_
15 import merge as merge_
15 import merge as merge_
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = util.set(('lookup', 'changegroupsubset'))
18 capabilities = util.set(('lookup', 'changegroupsubset'))
19 supported = ('revlogv1', 'store')
19 supported = ('revlogv1', 'store')
20
20
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.root = os.path.realpath(path)
23 self.root = os.path.realpath(path)
24 self.path = os.path.join(self.root, ".hg")
24 self.path = os.path.join(self.root, ".hg")
25 self.origroot = path
25 self.origroot = path
26 self.opener = util.opener(self.path)
26 self.opener = util.opener(self.path)
27 self.wopener = util.opener(self.root)
27 self.wopener = util.opener(self.root)
28
28
29 if not os.path.isdir(self.path):
29 if not os.path.isdir(self.path):
30 if create:
30 if create:
31 if not os.path.exists(path):
31 if not os.path.exists(path):
32 os.mkdir(path)
32 os.mkdir(path)
33 os.mkdir(self.path)
33 os.mkdir(self.path)
34 requirements = ["revlogv1"]
34 requirements = ["revlogv1"]
35 if parentui.configbool('format', 'usestore', True):
35 if parentui.configbool('format', 'usestore', True):
36 os.mkdir(os.path.join(self.path, "store"))
36 os.mkdir(os.path.join(self.path, "store"))
37 requirements.append("store")
37 requirements.append("store")
38 # create an invalid changelog
38 # create an invalid changelog
39 self.opener("00changelog.i", "a").write(
39 self.opener("00changelog.i", "a").write(
40 '\0\0\0\2' # represents revlogv2
40 '\0\0\0\2' # represents revlogv2
41 ' dummy changelog to prevent using the old repo layout'
41 ' dummy changelog to prevent using the old repo layout'
42 )
42 )
43 reqfile = self.opener("requires", "w")
43 reqfile = self.opener("requires", "w")
44 for r in requirements:
44 for r in requirements:
45 reqfile.write("%s\n" % r)
45 reqfile.write("%s\n" % r)
46 reqfile.close()
46 reqfile.close()
47 else:
47 else:
48 raise repo.RepoError(_("repository %s not found") % path)
48 raise repo.RepoError(_("repository %s not found") % path)
49 elif create:
49 elif create:
50 raise repo.RepoError(_("repository %s already exists") % path)
50 raise repo.RepoError(_("repository %s already exists") % path)
51 else:
51 else:
52 # find requirements
52 # find requirements
53 requirements = []
53 requirements = []
54 try:
54 try:
55 requirements = self.opener("requires").read().splitlines()
55 requirements = self.opener("requires").read().splitlines()
56 for r in requirements:
56 for r in requirements:
57 if r not in self.supported:
57 if r not in self.supported:
58 raise repo.RepoError(_("requirement '%s' not supported") % r)
58 raise repo.RepoError(_("requirement '%s' not supported") % r)
59 except IOError, inst:
59 except IOError, inst:
60 if inst.errno != errno.ENOENT:
60 if inst.errno != errno.ENOENT:
61 raise
61 raise
62
62
63 self.store = store.store(requirements, self.path, util.opener)
63 self.store = store.store(requirements, self.path, util.opener)
64 self.spath = self.store.path
64 self.spath = self.store.path
65 self.sopener = self.store.opener
65 self.sopener = self.store.opener
66 self.sjoin = self.store.join
66 self.sjoin = self.store.join
67 self.opener.createmode = self.store.createmode
67 self.opener.createmode = self.store.createmode
68
68
69 self.ui = ui.ui(parentui=parentui)
69 self.ui = ui.ui(parentui=parentui)
70 try:
70 try:
71 self.ui.readconfig(self.join("hgrc"), self.root)
71 self.ui.readconfig(self.join("hgrc"), self.root)
72 extensions.loadall(self.ui)
72 extensions.loadall(self.ui)
73 except IOError:
73 except IOError:
74 pass
74 pass
75
75
76 self.tagscache = None
76 self.tagscache = None
77 self._tagstypecache = None
77 self._tagstypecache = None
78 self.branchcache = None
78 self.branchcache = None
79 self._ubranchcache = None # UTF-8 version of branchcache
79 self._ubranchcache = None # UTF-8 version of branchcache
80 self._branchcachetip = None
80 self._branchcachetip = None
81 self.nodetagscache = None
81 self.nodetagscache = None
82 self.filterpats = {}
82 self.filterpats = {}
83 self._datafilters = {}
83 self._datafilters = {}
84 self._transref = self._lockref = self._wlockref = None
84 self._transref = self._lockref = self._wlockref = None
85
85
86 def __getattr__(self, name):
86 def __getattr__(self, name):
87 if name == 'changelog':
87 if name == 'changelog':
88 self.changelog = changelog.changelog(self.sopener)
88 self.changelog = changelog.changelog(self.sopener)
89 self.sopener.defversion = self.changelog.version
89 self.sopener.defversion = self.changelog.version
90 return self.changelog
90 return self.changelog
91 if name == 'manifest':
91 if name == 'manifest':
92 self.changelog
92 self.changelog
93 self.manifest = manifest.manifest(self.sopener)
93 self.manifest = manifest.manifest(self.sopener)
94 return self.manifest
94 return self.manifest
95 if name == 'dirstate':
95 if name == 'dirstate':
96 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
96 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
97 return self.dirstate
97 return self.dirstate
98 else:
98 else:
99 raise AttributeError, name
99 raise AttributeError, name
100
100
101 def __getitem__(self, changeid):
101 def __getitem__(self, changeid):
102 if changeid == None:
102 if changeid == None:
103 return context.workingctx(self)
103 return context.workingctx(self)
104 return context.changectx(self, changeid)
104 return context.changectx(self, changeid)
105
105
106 def __nonzero__(self):
106 def __nonzero__(self):
107 return True
107 return True
108
108
109 def __len__(self):
109 def __len__(self):
110 return len(self.changelog)
110 return len(self.changelog)
111
111
112 def __iter__(self):
112 def __iter__(self):
113 for i in xrange(len(self)):
113 for i in xrange(len(self)):
114 yield i
114 yield i
115
115
116 def url(self):
116 def url(self):
117 return 'file:' + self.root
117 return 'file:' + self.root
118
118
119 def hook(self, name, throw=False, **args):
119 def hook(self, name, throw=False, **args):
120 return hook.hook(self.ui, self, name, throw, **args)
120 return hook.hook(self.ui, self, name, throw, **args)
121
121
122 tag_disallowed = ':\r\n'
122 tag_disallowed = ':\r\n'
123
123
124 def _tag(self, names, node, message, local, user, date, parent=None,
124 def _tag(self, names, node, message, local, user, date, parent=None,
125 extra={}):
125 extra={}):
126 use_dirstate = parent is None
126 use_dirstate = parent is None
127
127
128 if isinstance(names, str):
128 if isinstance(names, str):
129 allchars = names
129 allchars = names
130 names = (names,)
130 names = (names,)
131 else:
131 else:
132 allchars = ''.join(names)
132 allchars = ''.join(names)
133 for c in self.tag_disallowed:
133 for c in self.tag_disallowed:
134 if c in allchars:
134 if c in allchars:
135 raise util.Abort(_('%r cannot be used in a tag name') % c)
135 raise util.Abort(_('%r cannot be used in a tag name') % c)
136
136
137 for name in names:
137 for name in names:
138 self.hook('pretag', throw=True, node=hex(node), tag=name,
138 self.hook('pretag', throw=True, node=hex(node), tag=name,
139 local=local)
139 local=local)
140
140
141 def writetags(fp, names, munge, prevtags):
141 def writetags(fp, names, munge, prevtags):
142 fp.seek(0, 2)
142 fp.seek(0, 2)
143 if prevtags and prevtags[-1] != '\n':
143 if prevtags and prevtags[-1] != '\n':
144 fp.write('\n')
144 fp.write('\n')
145 for name in names:
145 for name in names:
146 m = munge and munge(name) or name
146 m = munge and munge(name) or name
147 if self._tagstypecache and name in self._tagstypecache:
147 if self._tagstypecache and name in self._tagstypecache:
148 old = self.tagscache.get(name, nullid)
148 old = self.tagscache.get(name, nullid)
149 fp.write('%s %s\n' % (hex(old), m))
149 fp.write('%s %s\n' % (hex(old), m))
150 fp.write('%s %s\n' % (hex(node), m))
150 fp.write('%s %s\n' % (hex(node), m))
151 fp.close()
151 fp.close()
152
152
153 prevtags = ''
153 prevtags = ''
154 if local:
154 if local:
155 try:
155 try:
156 fp = self.opener('localtags', 'r+')
156 fp = self.opener('localtags', 'r+')
157 except IOError, err:
157 except IOError, err:
158 fp = self.opener('localtags', 'a')
158 fp = self.opener('localtags', 'a')
159 else:
159 else:
160 prevtags = fp.read()
160 prevtags = fp.read()
161
161
162 # local tags are stored in the current charset
162 # local tags are stored in the current charset
163 writetags(fp, names, None, prevtags)
163 writetags(fp, names, None, prevtags)
164 for name in names:
164 for name in names:
165 self.hook('tag', node=hex(node), tag=name, local=local)
165 self.hook('tag', node=hex(node), tag=name, local=local)
166 return
166 return
167
167
168 if use_dirstate:
168 if use_dirstate:
169 try:
169 try:
170 fp = self.wfile('.hgtags', 'rb+')
170 fp = self.wfile('.hgtags', 'rb+')
171 except IOError, err:
171 except IOError, err:
172 fp = self.wfile('.hgtags', 'ab')
172 fp = self.wfile('.hgtags', 'ab')
173 else:
173 else:
174 prevtags = fp.read()
174 prevtags = fp.read()
175 else:
175 else:
176 try:
176 try:
177 prevtags = self.filectx('.hgtags', parent).data()
177 prevtags = self.filectx('.hgtags', parent).data()
178 except revlog.LookupError:
178 except revlog.LookupError:
179 pass
179 pass
180 fp = self.wfile('.hgtags', 'wb')
180 fp = self.wfile('.hgtags', 'wb')
181 if prevtags:
181 if prevtags:
182 fp.write(prevtags)
182 fp.write(prevtags)
183
183
184 # committed tags are stored in UTF-8
184 # committed tags are stored in UTF-8
185 writetags(fp, names, util.fromlocal, prevtags)
185 writetags(fp, names, util.fromlocal, prevtags)
186
186
187 if use_dirstate and '.hgtags' not in self.dirstate:
187 if use_dirstate and '.hgtags' not in self.dirstate:
188 self.add(['.hgtags'])
188 self.add(['.hgtags'])
189
189
190 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
190 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
191 extra=extra)
191 extra=extra)
192
192
193 for name in names:
193 for name in names:
194 self.hook('tag', node=hex(node), tag=name, local=local)
194 self.hook('tag', node=hex(node), tag=name, local=local)
195
195
196 return tagnode
196 return tagnode
197
197
198 def tag(self, names, node, message, local, user, date):
198 def tag(self, names, node, message, local, user, date):
199 '''tag a revision with one or more symbolic names.
199 '''tag a revision with one or more symbolic names.
200
200
201 names is a list of strings or, when adding a single tag, names may be a
201 names is a list of strings or, when adding a single tag, names may be a
202 string.
202 string.
203
203
204 if local is True, the tags are stored in a per-repository file.
204 if local is True, the tags are stored in a per-repository file.
205 otherwise, they are stored in the .hgtags file, and a new
205 otherwise, they are stored in the .hgtags file, and a new
206 changeset is committed with the change.
206 changeset is committed with the change.
207
207
208 keyword arguments:
208 keyword arguments:
209
209
210 local: whether to store tags in non-version-controlled file
210 local: whether to store tags in non-version-controlled file
211 (default False)
211 (default False)
212
212
213 message: commit message to use if committing
213 message: commit message to use if committing
214
214
215 user: name of user to use if committing
215 user: name of user to use if committing
216
216
217 date: date tuple to use if committing'''
217 date: date tuple to use if committing'''
218
218
219 for x in self.status()[:5]:
219 for x in self.status()[:5]:
220 if '.hgtags' in x:
220 if '.hgtags' in x:
221 raise util.Abort(_('working copy of .hgtags is changed '
221 raise util.Abort(_('working copy of .hgtags is changed '
222 '(please commit .hgtags manually)'))
222 '(please commit .hgtags manually)'))
223
223
224 self._tag(names, node, message, local, user, date)
224 self._tag(names, node, message, local, user, date)
225
225
    def tags(self):
        '''return a mapping of tag to node'''
        # serve from the cache when it has already been computed
        if self.tagscache:
            return self.tagscache

        globaltags = {}   # tag name -> (node, list of superceded nodes)
        tagtypes = {}     # tag name -> 'global' or 'local'

        def readtags(lines, fn, tagtype):
            # parse one .hgtags-format file and merge it into globaltags
            filetags = {}
            count = 0

            def warn(msg):
                self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))

            for l in lines:
                count += 1
                if not l:
                    continue
                # each entry is "<hex node> <tag name>"
                s = l.split(" ", 1)
                if len(s) != 2:
                    warn(_("cannot parse entry"))
                    continue
                node, key = s
                key = util.tolocal(key.strip()) # stored in UTF-8
                try:
                    bin_n = bin(node)
                except TypeError:
                    warn(_("node '%s' is not well formed") % node)
                    continue
                if bin_n not in self.changelog.nodemap:
                    warn(_("tag '%s' refers to unknown node") % key)
                    continue

                # a later entry for the same tag supercedes earlier
                # ones; keep the list of overridden nodes in h
                h = []
                if key in filetags:
                    n, h = filetags[key]
                    h.append(n)
                filetags[key] = (bin_n, h)

            for k, nh in filetags.items():
                if k not in globaltags:
                    globaltags[k] = nh
                    tagtypes[k] = tagtype
                    continue

                # we prefer the global tag if:
                #  it supercedes us OR
                #  mutual supercedes and it has a higher rank
                # otherwise we win because we're tip-most
                an, ah = nh
                bn, bh = globaltags[k]
                if (bn != an and an in bh and
                    (bn not in ah or len(bh) > len(ah))):
                    an = bn
                ah.extend([n for n in bh if n not in ah])
                globaltags[k] = an, ah
                tagtypes[k] = tagtype

        # read the tags file from each head, ending with the tip
        f = None
        for rev, node, fnode in self._hgtagsnodes():
            # reuse the previous filectx when possible to share parsing state
            f = (f and f.filectx(fnode) or
                 self.filectx('.hgtags', fileid=fnode))
            readtags(f.data().splitlines(), f, "global")

        try:
            data = util.fromlocal(self.opener("localtags").read())
            # localtags are stored in the local character set
            # while the internal tag table is stored in UTF-8
            readtags(data.splitlines(), "localtags", "local")
        except IOError:
            # no localtags file: nothing to merge
            pass

        self.tagscache = {}
        self._tagstypecache = {}
        for k,nh in globaltags.items():
            n = nh[0]
            # a nullid node means the tag was deleted; it is dropped
            # from the tag map but its type is still recorded
            if n != nullid:
                self.tagscache[k] = n
            self._tagstypecache[k] = tagtypes[k]
        # 'tip' is implicit and always present
        self.tagscache['tip'] = self.changelog.tip()
        return self.tagscache
309
309
310 def tagtype(self, tagname):
310 def tagtype(self, tagname):
311 '''
311 '''
312 return the type of the given tag. result can be:
312 return the type of the given tag. result can be:
313
313
314 'local' : a local tag
314 'local' : a local tag
315 'global' : a global tag
315 'global' : a global tag
316 None : tag does not exist
316 None : tag does not exist
317 '''
317 '''
318
318
319 self.tags()
319 self.tags()
320
320
321 return self._tagstypecache.get(tagname)
321 return self._tagstypecache.get(tagname)
322
322
    def _hgtagsnodes(self):
        """Return (rev, node, fnode) for the .hgtags filenode of each
        head, in oldest-to-tip order, with duplicate filenodes removed
        (only the newest occurrence of each .hgtags revision is kept)."""
        heads = self.heads()
        heads.reverse()   # heads() is tip-first; we want the tip last
        last = {}         # .hgtags filenode -> index of its last entry in ret
        ret = []
        for node in heads:
            c = self[node]
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except revlog.LookupError:
                # this head has no .hgtags file at all
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same .hgtags revision was seen on an older head:
                # null out the earlier entry, keep only this one
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        # drop the entries nulled out above
        return [item for item in ret if item]
340
340
341 def tagslist(self):
341 def tagslist(self):
342 '''return a list of tags ordered by revision'''
342 '''return a list of tags ordered by revision'''
343 l = []
343 l = []
344 for t, n in self.tags().items():
344 for t, n in self.tags().items():
345 try:
345 try:
346 r = self.changelog.rev(n)
346 r = self.changelog.rev(n)
347 except:
347 except:
348 r = -2 # sort to the beginning of the list if unknown
348 r = -2 # sort to the beginning of the list if unknown
349 l.append((r, t, n))
349 l.append((r, t, n))
350 return [(t, n) for r, t, n in util.sort(l)]
350 return [(t, n) for r, t, n in util.sort(l)]
351
351
352 def nodetags(self, node):
352 def nodetags(self, node):
353 '''return the tags associated with a node'''
353 '''return the tags associated with a node'''
354 if not self.nodetagscache:
354 if not self.nodetagscache:
355 self.nodetagscache = {}
355 self.nodetagscache = {}
356 for t, n in self.tags().items():
356 for t, n in self.tags().items():
357 self.nodetagscache.setdefault(n, []).append(t)
357 self.nodetagscache.setdefault(n, []).append(t)
358 return self.nodetagscache.get(node, [])
358 return self.nodetagscache.get(node, [])
359
359
360 def _branchtags(self, partial, lrev):
360 def _branchtags(self, partial, lrev):
361 tiprev = len(self) - 1
361 tiprev = len(self) - 1
362 if lrev != tiprev:
362 if lrev != tiprev:
363 self._updatebranchcache(partial, lrev+1, tiprev+1)
363 self._updatebranchcache(partial, lrev+1, tiprev+1)
364 self._writebranchcache(partial, self.changelog.tip(), tiprev)
364 self._writebranchcache(partial, self.changelog.tip(), tiprev)
365
365
366 return partial
366 return partial
367
367
    def branchtags(self):
        """Return a dict mapping branch name (in the local charset) to
        the node of that branch's tip, maintaining the on-disk cache."""
        tip = self.changelog.tip()
        # the cache stays valid as long as the tip is unchanged
        if self.branchcache is not None and self._branchcachetip == tip:
            return self.branchcache

        oldtip = self._branchcachetip
        self._branchcachetip = tip
        if self.branchcache is None:
            self.branchcache = {} # avoid recursion in changectx
        else:
            self.branchcache.clear() # keep using the same dict
        if oldtip is None or oldtip not in self.changelog.nodemap:
            # previous tip is gone (e.g. after strip/rollback):
            # reload the cache from disk
            partial, last, lrev = self._readbranchcache()
        else:
            # previous tip still exists: update incrementally from it
            lrev = self.changelog.rev(oldtip)
            partial = self._ubranchcache

        self._branchtags(partial, lrev)

        # the branch cache is stored on disk as UTF-8, but in the local
        # charset internally
        for k, v in partial.items():
            self.branchcache[util.tolocal(k)] = v
        self._ubranchcache = partial
        return self.branchcache
393
393
    def _readbranchcache(self):
        """Read the branch cache file from disk.

        Returns (partial, last, lrev): partial maps branch name
        (UTF-8) -> node, and last/lrev are the tip node/rev the cache
        was written against.  Any failure yields an empty, invalid
        cache (({}, nullid, nullrev)) rather than an error, since the
        cache is only an optimization."""
        partial = {}
        try:
            f = self.opener("branch.cache")
            lines = f.read().split('\n')
            f.close()
        except (IOError, OSError):
            # no cache file: start from scratch
            return {}, nullid, nullrev

        try:
            # first line is "<tip hex> <tip rev>"
            last, lrev = lines.pop(0).split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if lrev >= len(self) or self[lrev].node() != last:
                # invalidate the cache
                raise ValueError('invalidating branch cache (tip differs)')
            # remaining lines are "<node hex> <branch name>"
            for l in lines:
                if not l: continue
                node, label = l.split(" ", 1)
                partial[label.strip()] = bin(node)
        except (KeyboardInterrupt, util.SignalInterrupt):
            raise
        except Exception, inst:
            # corrupt or stale cache: mention it in debug mode,
            # then fall back to an empty cache
            if self.ui.debugflag:
                self.ui.warn(str(inst), '\n')
            partial, last, lrev = {}, nullid, nullrev
        return partial, last, lrev
420
420
421 def _writebranchcache(self, branches, tip, tiprev):
421 def _writebranchcache(self, branches, tip, tiprev):
422 try:
422 try:
423 f = self.opener("branch.cache", "w", atomictemp=True)
423 f = self.opener("branch.cache", "w", atomictemp=True)
424 f.write("%s %s\n" % (hex(tip), tiprev))
424 f.write("%s %s\n" % (hex(tip), tiprev))
425 for label, node in branches.iteritems():
425 for label, node in branches.iteritems():
426 f.write("%s %s\n" % (hex(node), label))
426 f.write("%s %s\n" % (hex(node), label))
427 f.rename()
427 f.rename()
428 except (IOError, OSError):
428 except (IOError, OSError):
429 pass
429 pass
430
430
431 def _updatebranchcache(self, partial, start, end):
431 def _updatebranchcache(self, partial, start, end):
432 for r in xrange(start, end):
432 for r in xrange(start, end):
433 c = self[r]
433 c = self[r]
434 b = c.branch()
434 b = c.branch()
435 partial[b] = c.node()
435 partial[b] = c.node()
436
436
437 def lookup(self, key):
437 def lookup(self, key):
438 if key == '.':
438 if key == '.':
439 return self.dirstate.parents()[0]
439 return self.dirstate.parents()[0]
440 elif key == 'null':
440 elif key == 'null':
441 return nullid
441 return nullid
442 n = self.changelog._match(key)
442 n = self.changelog._match(key)
443 if n:
443 if n:
444 return n
444 return n
445 if key in self.tags():
445 if key in self.tags():
446 return self.tags()[key]
446 return self.tags()[key]
447 if key in self.branchtags():
447 if key in self.branchtags():
448 return self.branchtags()[key]
448 return self.branchtags()[key]
449 n = self.changelog._partialmatch(key)
449 n = self.changelog._partialmatch(key)
450 if n:
450 if n:
451 return n
451 return n
452 try:
452 try:
453 if len(key) == 20:
453 if len(key) == 20:
454 key = hex(key)
454 key = hex(key)
455 except:
455 except:
456 pass
456 pass
457 raise repo.RepoError(_("unknown revision '%s'") % key)
457 raise repo.RepoError(_("unknown revision '%s'") % key)
458
458
459 def local(self):
459 def local(self):
460 return True
460 return True
461
461
462 def join(self, f):
462 def join(self, f):
463 return os.path.join(self.path, f)
463 return os.path.join(self.path, f)
464
464
465 def wjoin(self, f):
465 def wjoin(self, f):
466 return os.path.join(self.root, f)
466 return os.path.join(self.root, f)
467
467
468 def rjoin(self, f):
468 def rjoin(self, f):
469 return os.path.join(self.root, util.pconvert(f))
469 return os.path.join(self.root, util.pconvert(f))
470
470
471 def file(self, f):
471 def file(self, f):
472 if f[0] == '/':
472 if f[0] == '/':
473 f = f[1:]
473 f = f[1:]
474 return filelog.filelog(self.sopener, f)
474 return filelog.filelog(self.sopener, f)
475
475
476 def changectx(self, changeid):
476 def changectx(self, changeid):
477 return self[changeid]
477 return self[changeid]
478
478
479 def parents(self, changeid=None):
479 def parents(self, changeid=None):
480 '''get list of changectxs for parents of changeid'''
480 '''get list of changectxs for parents of changeid'''
481 return self[changeid].parents()
481 return self[changeid].parents()
482
482
483 def filectx(self, path, changeid=None, fileid=None):
483 def filectx(self, path, changeid=None, fileid=None):
484 """changeid can be a changeset revision, node, or tag.
484 """changeid can be a changeset revision, node, or tag.
485 fileid can be a file revision or node."""
485 fileid can be a file revision or node."""
486 return context.filectx(self, path, changeid, fileid)
486 return context.filectx(self, path, changeid, fileid)
487
487
488 def getcwd(self):
488 def getcwd(self):
489 return self.dirstate.getcwd()
489 return self.dirstate.getcwd()
490
490
491 def pathto(self, f, cwd=None):
491 def pathto(self, f, cwd=None):
492 return self.dirstate.pathto(f, cwd)
492 return self.dirstate.pathto(f, cwd)
493
493
494 def wfile(self, f, mode='r'):
494 def wfile(self, f, mode='r'):
495 return self.wopener(f, mode)
495 return self.wopener(f, mode)
496
496
497 def _link(self, f):
497 def _link(self, f):
498 return os.path.islink(self.wjoin(f))
498 return os.path.islink(self.wjoin(f))
499
499
    def _filter(self, filter, filename, data):
        """Run *data* for *filename* through the filters configured in
        the config section named by *filter* ('encode' or 'decode').

        Compiled (matcher, function, params) triples are cached per
        section in self.filterpats; the first pattern matching
        *filename* wins."""
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                mf = util.matcher(self.root, "", [pat], [], [])[1]
                fn = None
                params = cmd
                # a command starting with a registered filter name uses
                # that in-process filter; the rest of the command line
                # becomes its parameters
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # otherwise run the command as an external shell filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        # apply the first matching filter only
        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
528
528
529 def adddatafilter(self, name, filter):
529 def adddatafilter(self, name, filter):
530 self._datafilters[name] = filter
530 self._datafilters[name] = filter
531
531
532 def wread(self, filename):
532 def wread(self, filename):
533 if self._link(filename):
533 if self._link(filename):
534 data = os.readlink(self.wjoin(filename))
534 data = os.readlink(self.wjoin(filename))
535 else:
535 else:
536 data = self.wopener(filename, 'r').read()
536 data = self.wopener(filename, 'r').read()
537 return self._filter("encode", filename, data)
537 return self._filter("encode", filename, data)
538
538
539 def wwrite(self, filename, data, flags):
539 def wwrite(self, filename, data, flags):
540 data = self._filter("decode", filename, data)
540 data = self._filter("decode", filename, data)
541 try:
541 try:
542 os.unlink(self.wjoin(filename))
542 os.unlink(self.wjoin(filename))
543 except OSError:
543 except OSError:
544 pass
544 pass
545 if 'l' in flags:
545 if 'l' in flags:
546 self.wopener.symlink(data, filename)
546 self.wopener.symlink(data, filename)
547 else:
547 else:
548 self.wopener(filename, 'w').write(data)
548 self.wopener(filename, 'w').write(data)
549 if 'x' in flags:
549 if 'x' in flags:
550 util.set_flags(self.wjoin(filename), False, True)
550 util.set_flags(self.wjoin(filename), False, True)
551
551
552 def wwritedata(self, filename, data):
552 def wwritedata(self, filename, data):
553 return self._filter("decode", filename, data)
553 return self._filter("decode", filename, data)
554
554
555 def transaction(self):
555 def transaction(self):
556 if self._transref and self._transref():
556 if self._transref and self._transref():
557 return self._transref().nest()
557 return self._transref().nest()
558
558
559 # abort here if the journal already exists
559 # abort here if the journal already exists
560 if os.path.exists(self.sjoin("journal")):
560 if os.path.exists(self.sjoin("journal")):
561 raise repo.RepoError(_("journal already exists - run hg recover"))
561 raise repo.RepoError(_("journal already exists - run hg recover"))
562
562
563 # save dirstate for rollback
563 # save dirstate for rollback
564 try:
564 try:
565 ds = self.opener("dirstate").read()
565 ds = self.opener("dirstate").read()
566 except IOError:
566 except IOError:
567 ds = ""
567 ds = ""
568 self.opener("journal.dirstate", "w").write(ds)
568 self.opener("journal.dirstate", "w").write(ds)
569 self.opener("journal.branch", "w").write(self.dirstate.branch())
569 self.opener("journal.branch", "w").write(self.dirstate.branch())
570
570
571 renames = [(self.sjoin("journal"), self.sjoin("undo")),
571 renames = [(self.sjoin("journal"), self.sjoin("undo")),
572 (self.join("journal.dirstate"), self.join("undo.dirstate")),
572 (self.join("journal.dirstate"), self.join("undo.dirstate")),
573 (self.join("journal.branch"), self.join("undo.branch"))]
573 (self.join("journal.branch"), self.join("undo.branch"))]
574 tr = transaction.transaction(self.ui.warn, self.sopener,
574 tr = transaction.transaction(self.ui.warn, self.sopener,
575 self.sjoin("journal"),
575 self.sjoin("journal"),
576 aftertrans(renames),
576 aftertrans(renames),
577 self.store.createmode)
577 self.store.createmode)
578 self._transref = weakref.ref(tr)
578 self._transref = weakref.ref(tr)
579 return tr
579 return tr
580
580
581 def recover(self):
581 def recover(self):
582 l = self.lock()
582 l = self.lock()
583 try:
583 try:
584 if os.path.exists(self.sjoin("journal")):
584 if os.path.exists(self.sjoin("journal")):
585 self.ui.status(_("rolling back interrupted transaction\n"))
585 self.ui.status(_("rolling back interrupted transaction\n"))
586 transaction.rollback(self.sopener, self.sjoin("journal"))
586 transaction.rollback(self.sopener, self.sjoin("journal"))
587 self.invalidate()
587 self.invalidate()
588 return True
588 return True
589 else:
589 else:
590 self.ui.warn(_("no interrupted transaction available\n"))
590 self.ui.warn(_("no interrupted transaction available\n"))
591 return False
591 return False
592 finally:
592 finally:
593 del l
593 del l
594
594
    def rollback(self):
        """Undo the last committed transaction, restoring the saved
        dirstate and branch.  Holds both the working-directory lock
        and the store lock for the duration."""
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"))
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo.branch missing: keep whatever branch is set
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % util.tolocal(self.dirstate.branch()))
                # all in-memory caches are stale after the rollback
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            del lock, wlock
617
617
618 def invalidate(self):
618 def invalidate(self):
619 for a in "changelog manifest".split():
619 for a in "changelog manifest".split():
620 if a in self.__dict__:
620 if a in self.__dict__:
621 delattr(self, a)
621 delattr(self, a)
622 self.tagscache = None
622 self.tagscache = None
623 self._tagstypecache = None
623 self._tagstypecache = None
624 self.nodetagscache = None
624 self.nodetagscache = None
625 self.branchcache = None
625 self.branchcache = None
626 self._ubranchcache = None
626 self._ubranchcache = None
627 self._branchcachetip = None
627 self._branchcachetip = None
628
628
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file *lockname*.

        First tries a non-blocking acquire (timeout 0); if the lock is
        held by someone else and *wait* is true, warns and retries
        with the configured ui.timeout (default 600 seconds).
        *releasefn* runs when the lock is released; *acquirefn* runs
        once the lock is held.  Returns the lock object."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
643
643
644 def lock(self, wait=True):
644 def lock(self, wait=True):
645 if self._lockref and self._lockref():
645 if self._lockref and self._lockref():
646 return self._lockref()
646 return self._lockref()
647
647
648 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
648 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
649 _('repository %s') % self.origroot)
649 _('repository %s') % self.origroot)
650 self._lockref = weakref.ref(l)
650 self._lockref = weakref.ref(l)
651 return l
651 return l
652
652
653 def wlock(self, wait=True):
653 def wlock(self, wait=True):
654 if self._wlockref and self._wlockref():
654 if self._wlockref and self._wlockref():
655 return self._wlockref()
655 return self._wlockref()
656
656
657 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
657 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
658 self.dirstate.invalidate, _('working directory of %s') %
658 self.dirstate.invalidate, _('working directory of %s') %
659 self.origroot)
659 self.origroot)
660 self._wlockref = weakref.ref(l)
660 self._wlockref = weakref.ref(l)
661 return l
661 return l
662
662
    def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx: file context for the file being committed
        manifest1, manifest2: manifests of the two parent changesets
        linkrev: changelog revision the new filelog entry links to
        tr: the running transaction
        changelist: list collecting names of files actually changed

        Returns the new filenode, or the existing parent filenode when
        the file turns out to be unmodified.
        """
        fn = fctx.path()
        t = fctx.data()
        fl = self.file(fn)
        # file parents from the two manifests (nullid when absent)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = fctx.renamed()
        if cp and cp[0] != fn:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #  \       /      merging rev3 and rev4 should use bar@rev2
            #   \- 2 --- 4    as the merge base
            #

            cf = cp[0]           # copy source path
            cr = manifest1.get(cf)  # copy source filenode
            nfp = fp2            # filenode that becomes the second parent

            if manifest2: # branch merge
                if fp2 == nullid: # copied on remote side
                    if fp1 != nullid or cf in manifest2:
                        cr = manifest2[cf]
                        nfp = fp1

            # find source in nearest ancestor if we've lost track
            if not cr:
                self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                              (fn, cf))
                for a in self['.'].ancestors():
                    if cf in a:
                        cr = a[cf].filenode()
                        break

            self.ui.debug(_(" %s: copy %s:%s\n") % (fn, cf, hex(cr)))
            meta["copy"] = cf
            meta["copyrev"] = hex(cr)
            fp1, fp2 = nullid, nfp
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, tr, linkrev, fp1, fp2)
733
733
734 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
734 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
735 if p1 is None:
735 if p1 is None:
736 p1, p2 = self.dirstate.parents()
736 p1, p2 = self.dirstate.parents()
737 return self.commit(files=files, text=text, user=user, date=date,
737 return self.commit(files=files, text=text, user=user, date=date,
738 p1=p1, p2=p2, extra=extra, empty_ok=True)
738 p1=p1, p2=p2, extra=extra, empty_ok=True)
739
739
    def commit(self, files=None, text="", user=None, date=None,
               match=None, force=False, force_editor=False,
               p1=None, p2=None, extra={}, empty_ok=False):
        """Commit changes and return the new changeset node.

        With p1 unset (the normal case) the dirstate parents are
        committed; with p1 given this is a rawcommit-style commit that
        bypasses the dirstate.  files/match restrict what is
        committed; force permits otherwise-refused commits.  Both the
        working-directory lock and the store lock are held for the
        duration."""
        wlock = lock = None
        if files:
            files = util.unique(files)
        try:
            wlock = self.wlock()
            lock = self.lock()
            use_dirstate = (p1 is None) # not rawcommit

            if use_dirstate:
                p1, p2 = self.dirstate.parents()
                update_dirstate = True

                # a partial commit of a merge would lose the other
                # parent's changes, so refuse file lists/patterns here
                if (not force and p2 != nullid and
                    (match and (match.files() or match.anypats()))):
                    raise util.Abort(_('cannot partially commit a merge '
                                       '(do not specify files or patterns)'))

                if files:
                    # explicit file list: classify each file by its
                    # dirstate status; n/m/a are committed as modified,
                    # r as removed, anything else is not tracked
                    modified, removed = [], []
                    for f in files:
                        s = self.dirstate[f]
                        if s in 'nma':
                            modified.append(f)
                        elif s == 'r':
                            removed.append(f)
                        else:
                            self.ui.warn(_("%s not tracked!\n") % f)
                    changes = [modified, [], removed, [], []]
                else:
                    changes = self.status(match=match)
            else:
                # rawcommit: trust the caller's parents and file list
                p1, p2 = p1, p2 or nullid
                update_dirstate = (self.dirstate.parents()[0] == p1)
                changes = [files, [], [], [], []]

            # refuse to commit files that still have unresolved
            # merge conflicts recorded
            ms = merge_.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))
            wctx = context.workingctx(self, (p1, p2), text, user, date,
                                      extra, changes)
            return self._commitctx(wctx, force, force_editor, empty_ok,
                                   use_dirstate, update_dirstate)
        finally:
            del lock, wlock
789
789
790 def commitctx(self, ctx):
790 def commitctx(self, ctx):
791 wlock = lock = None
791 wlock = lock = None
792 try:
792 try:
793 wlock = self.wlock()
793 wlock = self.wlock()
794 lock = self.lock()
794 lock = self.lock()
795 return self._commitctx(ctx, force=True, force_editor=False,
795 return self._commitctx(ctx, force=True, force_editor=False,
796 empty_ok=True, use_dirstate=False,
796 empty_ok=True, use_dirstate=False,
797 update_dirstate=False)
797 update_dirstate=False)
798 finally:
798 finally:
799 del lock, wlock
799 del lock, wlock
800
800
801 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
801 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
802 use_dirstate=True, update_dirstate=True):
802 use_dirstate=True, update_dirstate=True):
803 tr = None
803 tr = None
804 valid = 0 # don't save the dirstate if this isn't set
804 valid = 0 # don't save the dirstate if this isn't set
805 try:
805 try:
806 commit = util.sort(wctx.modified() + wctx.added())
806 commit = util.sort(wctx.modified() + wctx.added())
807 remove = wctx.removed()
807 remove = wctx.removed()
808 extra = wctx.extra().copy()
808 extra = wctx.extra().copy()
809 branchname = extra['branch']
809 branchname = extra['branch']
810 user = wctx.user()
810 user = wctx.user()
811 text = wctx.description()
811 text = wctx.description()
812
812
813 p1, p2 = [p.node() for p in wctx.parents()]
813 p1, p2 = [p.node() for p in wctx.parents()]
814 c1 = self.changelog.read(p1)
814 c1 = self.changelog.read(p1)
815 c2 = self.changelog.read(p2)
815 c2 = self.changelog.read(p2)
816 m1 = self.manifest.read(c1[0]).copy()
816 m1 = self.manifest.read(c1[0]).copy()
817 m2 = self.manifest.read(c2[0])
817 m2 = self.manifest.read(c2[0])
818
818
819 if use_dirstate:
819 if use_dirstate:
820 oldname = c1[5].get("branch") # stored in UTF-8
820 oldname = c1[5].get("branch") # stored in UTF-8
821 if (not commit and not remove and not force and p2 == nullid
821 if (not commit and not remove and not force and p2 == nullid
822 and branchname == oldname):
822 and branchname == oldname):
823 self.ui.status(_("nothing changed\n"))
823 self.ui.status(_("nothing changed\n"))
824 return None
824 return None
825
825
826 xp1 = hex(p1)
826 xp1 = hex(p1)
827 if p2 == nullid: xp2 = ''
827 if p2 == nullid: xp2 = ''
828 else: xp2 = hex(p2)
828 else: xp2 = hex(p2)
829
829
830 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
830 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
831
831
832 tr = self.transaction()
832 tr = self.transaction()
833 trp = weakref.proxy(tr)
833 trp = weakref.proxy(tr)
834
834
835 # check in files
835 # check in files
836 new = {}
836 new = {}
837 changed = []
837 changed = []
838 linkrev = len(self)
838 linkrev = len(self)
839 for f in commit:
839 for f in commit:
840 self.ui.note(f + "\n")
840 self.ui.note(f + "\n")
841 try:
841 try:
842 fctx = wctx.filectx(f)
842 fctx = wctx.filectx(f)
843 newflags = fctx.flags()
843 newflags = fctx.flags()
844 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
844 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
845 if ((not changed or changed[-1] != f) and
845 if ((not changed or changed[-1] != f) and
846 m2.get(f) != new[f]):
846 m2.get(f) != new[f]):
847 # mention the file in the changelog if some
847 # mention the file in the changelog if some
848 # flag changed, even if there was no content
848 # flag changed, even if there was no content
849 # change.
849 # change.
850 if m1.flags(f) != newflags:
850 if m1.flags(f) != newflags:
851 changed.append(f)
851 changed.append(f)
852 m1.set(f, newflags)
852 m1.set(f, newflags)
853 if use_dirstate:
853 if use_dirstate:
854 self.dirstate.normal(f)
854 self.dirstate.normal(f)
855
855
856 except (OSError, IOError):
856 except (OSError, IOError):
857 if use_dirstate:
857 if use_dirstate:
858 self.ui.warn(_("trouble committing %s!\n") % f)
858 self.ui.warn(_("trouble committing %s!\n") % f)
859 raise
859 raise
860 else:
860 else:
861 remove.append(f)
861 remove.append(f)
862
862
863 # update manifest
863 # update manifest
864 m1.update(new)
864 m1.update(new)
865 removed = []
865 removed = []
866
866
867 for f in util.sort(remove):
867 for f in util.sort(remove):
868 if f in m1:
868 if f in m1:
869 del m1[f]
869 del m1[f]
870 removed.append(f)
870 removed.append(f)
871 elif f in m2:
871 elif f in m2:
872 removed.append(f)
872 removed.append(f)
873 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
873 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
874 (new, removed))
874 (new, removed))
875
875
876 # add changeset
876 # add changeset
877 if (not empty_ok and not text) or force_editor:
877 if (not empty_ok and not text) or force_editor:
878 edittext = []
878 edittext = []
879 if text:
879 if text:
880 edittext.append(text)
880 edittext.append(text)
881 edittext.append("")
881 edittext.append("")
882 edittext.append("") # Empty line between message and comments.
882 edittext.append("") # Empty line between message and comments.
883 edittext.append(_("HG: Enter commit message."
883 edittext.append(_("HG: Enter commit message."
884 " Lines beginning with 'HG:' are removed."))
884 " Lines beginning with 'HG:' are removed."))
885 edittext.append("HG: --")
885 edittext.append("HG: --")
886 edittext.append("HG: user: %s" % user)
886 edittext.append("HG: user: %s" % user)
887 if p2 != nullid:
887 if p2 != nullid:
888 edittext.append("HG: branch merge")
888 edittext.append("HG: branch merge")
889 if branchname:
889 if branchname:
890 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
890 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
891 edittext.extend(["HG: changed %s" % f for f in changed])
891 edittext.extend(["HG: changed %s" % f for f in changed])
892 edittext.extend(["HG: removed %s" % f for f in removed])
892 edittext.extend(["HG: removed %s" % f for f in removed])
893 if not changed and not remove:
893 if not changed and not remove:
894 edittext.append("HG: no files changed")
894 edittext.append("HG: no files changed")
895 edittext.append("")
895 edittext.append("")
896 # run editor in the repository root
896 # run editor in the repository root
897 olddir = os.getcwd()
897 olddir = os.getcwd()
898 os.chdir(self.root)
898 os.chdir(self.root)
899 text = self.ui.edit("\n".join(edittext), user)
899 text = self.ui.edit("\n".join(edittext), user)
900 os.chdir(olddir)
900 os.chdir(olddir)
901
901
902 lines = [line.rstrip() for line in text.rstrip().splitlines()]
902 lines = [line.rstrip() for line in text.rstrip().splitlines()]
903 while lines and not lines[0]:
903 while lines and not lines[0]:
904 del lines[0]
904 del lines[0]
905 if not lines and use_dirstate:
905 if not lines and use_dirstate:
906 raise util.Abort(_("empty commit message"))
906 raise util.Abort(_("empty commit message"))
907 text = '\n'.join(lines)
907 text = '\n'.join(lines)
908
908
909 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
909 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
910 user, wctx.date(), extra)
910 user, wctx.date(), extra)
911 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
911 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
912 parent2=xp2)
912 parent2=xp2)
913 tr.close()
913 tr.close()
914
914
915 if self.branchcache:
915 if self.branchcache:
916 self.branchtags()
916 self.branchtags()
917
917
918 if use_dirstate or update_dirstate:
918 if use_dirstate or update_dirstate:
919 self.dirstate.setparents(n)
919 self.dirstate.setparents(n)
920 if use_dirstate:
920 if use_dirstate:
921 for f in removed:
921 for f in removed:
922 self.dirstate.forget(f)
922 self.dirstate.forget(f)
923 valid = 1 # our dirstate updates are complete
923 valid = 1 # our dirstate updates are complete
924
924
925 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
925 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
926 return n
926 return n
927 finally:
927 finally:
928 if not valid: # don't save our updated dirstate
928 if not valid: # don't save our updated dirstate
929 self.dirstate.invalidate()
929 self.dirstate.invalidate()
930 del tr
930 del tr
931
931
932 def walk(self, match, node=None):
932 def walk(self, match, node=None):
933 '''
933 '''
934 walk recursively through the directory tree or a given
934 walk recursively through the directory tree or a given
935 changeset, finding all files matched by the match
935 changeset, finding all files matched by the match
936 function
936 function
937 '''
937 '''
938 return self[node].walk(match)
938 return self[node].walk(match)
939
939
940 def status(self, node1='.', node2=None, match=None,
940 def status(self, node1='.', node2=None, match=None,
941 ignored=False, clean=False, unknown=False):
941 ignored=False, clean=False, unknown=False):
942 """return status of files between two nodes or node and working directory
942 """return status of files between two nodes or node and working directory
943
943
944 If node1 is None, use the first dirstate parent instead.
944 If node1 is None, use the first dirstate parent instead.
945 If node2 is None, compare node1 with working directory.
945 If node2 is None, compare node1 with working directory.
946 """
946 """
947
947
948 def mfmatches(ctx):
948 def mfmatches(ctx):
949 mf = ctx.manifest().copy()
949 mf = ctx.manifest().copy()
950 for fn in mf.keys():
950 for fn in mf.keys():
951 if not match(fn):
951 if not match(fn):
952 del mf[fn]
952 del mf[fn]
953 return mf
953 return mf
954
954
955 ctx1 = self[node1]
955 ctx1 = self[node1]
956 ctx2 = self[node2]
956 ctx2 = self[node2]
957 working = ctx2 == self[None]
957 working = ctx2 == self[None]
958 parentworking = working and ctx1 == self['.']
958 parentworking = working and ctx1 == self['.']
959 match = match or match_.always(self.root, self.getcwd())
959 match = match or match_.always(self.root, self.getcwd())
960 listignored, listclean, listunknown = ignored, clean, unknown
960 listignored, listclean, listunknown = ignored, clean, unknown
961
961
962 if working: # we need to scan the working dir
962 if working: # we need to scan the working dir
963 s = self.dirstate.status(match, listignored, listclean, listunknown)
963 s = self.dirstate.status(match, listignored, listclean, listunknown)
964 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
964 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
965
965
966 # check for any possibly clean files
966 # check for any possibly clean files
967 if parentworking and cmp:
967 if parentworking and cmp:
968 fixup = []
968 fixup = []
969 # do a full compare of any files that might have changed
969 # do a full compare of any files that might have changed
970 for f in cmp:
970 for f in cmp:
971 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
971 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
972 or ctx1[f].cmp(ctx2[f].data())):
972 or ctx1[f].cmp(ctx2[f].data())):
973 modified.append(f)
973 modified.append(f)
974 else:
974 else:
975 fixup.append(f)
975 fixup.append(f)
976
976
977 if listclean:
977 if listclean:
978 clean += fixup
978 clean += fixup
979
979
980 # update dirstate for files that are actually clean
980 # update dirstate for files that are actually clean
981 if fixup:
981 if fixup:
982 wlock = None
982 wlock = None
983 try:
983 try:
984 try:
984 try:
985 wlock = self.wlock(False)
985 wlock = self.wlock(False)
986 for f in fixup:
986 for f in fixup:
987 self.dirstate.normal(f)
987 self.dirstate.normal(f)
988 except lock.LockException:
988 except lock.LockException:
989 pass
989 pass
990 finally:
990 finally:
991 del wlock
991 del wlock
992
992
993 if not parentworking:
993 if not parentworking:
994 mf1 = mfmatches(ctx1)
994 mf1 = mfmatches(ctx1)
995 if working:
995 if working:
996 # we are comparing working dir against non-parent
996 # we are comparing working dir against non-parent
997 # generate a pseudo-manifest for the working dir
997 # generate a pseudo-manifest for the working dir
998 mf2 = mfmatches(self['.'])
998 mf2 = mfmatches(self['.'])
999 for f in cmp + modified + added:
999 for f in cmp + modified + added:
1000 mf2[f] = None
1000 mf2[f] = None
1001 mf2.set(f, ctx2.flags(f))
1001 mf2.set(f, ctx2.flags(f))
1002 for f in removed:
1002 for f in removed:
1003 if f in mf2:
1003 if f in mf2:
1004 del mf2[f]
1004 del mf2[f]
1005 else:
1005 else:
1006 # we are comparing two revisions
1006 # we are comparing two revisions
1007 deleted, unknown, ignored = [], [], []
1007 deleted, unknown, ignored = [], [], []
1008 mf2 = mfmatches(ctx2)
1008 mf2 = mfmatches(ctx2)
1009
1009
1010 modified, added, clean = [], [], []
1010 modified, added, clean = [], [], []
1011 for fn in mf2:
1011 for fn in mf2:
1012 if fn in mf1:
1012 if fn in mf1:
1013 if (mf1.flags(fn) != mf2.flags(fn) or
1013 if (mf1.flags(fn) != mf2.flags(fn) or
1014 (mf1[fn] != mf2[fn] and
1014 (mf1[fn] != mf2[fn] and
1015 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1015 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1016 modified.append(fn)
1016 modified.append(fn)
1017 elif listclean:
1017 elif listclean:
1018 clean.append(fn)
1018 clean.append(fn)
1019 del mf1[fn]
1019 del mf1[fn]
1020 else:
1020 else:
1021 added.append(fn)
1021 added.append(fn)
1022 removed = mf1.keys()
1022 removed = mf1.keys()
1023
1023
1024 r = modified, added, removed, deleted, unknown, ignored, clean
1024 r = modified, added, removed, deleted, unknown, ignored, clean
1025 [l.sort() for l in r]
1025 [l.sort() for l in r]
1026 return r
1026 return r
1027
1027
1028 def add(self, list):
1028 def add(self, list):
1029 wlock = self.wlock()
1029 wlock = self.wlock()
1030 try:
1030 try:
1031 rejected = []
1031 rejected = []
1032 for f in list:
1032 for f in list:
1033 p = self.wjoin(f)
1033 p = self.wjoin(f)
1034 try:
1034 try:
1035 st = os.lstat(p)
1035 st = os.lstat(p)
1036 except:
1036 except:
1037 self.ui.warn(_("%s does not exist!\n") % f)
1037 self.ui.warn(_("%s does not exist!\n") % f)
1038 rejected.append(f)
1038 rejected.append(f)
1039 continue
1039 continue
1040 if st.st_size > 10000000:
1040 if st.st_size > 10000000:
1041 self.ui.warn(_("%s: files over 10MB may cause memory and"
1041 self.ui.warn(_("%s: files over 10MB may cause memory and"
1042 " performance problems\n"
1042 " performance problems\n"
1043 "(use 'hg revert %s' to unadd the file)\n")
1043 "(use 'hg revert %s' to unadd the file)\n")
1044 % (f, f))
1044 % (f, f))
1045 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1045 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1046 self.ui.warn(_("%s not added: only files and symlinks "
1046 self.ui.warn(_("%s not added: only files and symlinks "
1047 "supported currently\n") % f)
1047 "supported currently\n") % f)
1048 rejected.append(p)
1048 rejected.append(p)
1049 elif self.dirstate[f] in 'amn':
1049 elif self.dirstate[f] in 'amn':
1050 self.ui.warn(_("%s already tracked!\n") % f)
1050 self.ui.warn(_("%s already tracked!\n") % f)
1051 elif self.dirstate[f] == 'r':
1051 elif self.dirstate[f] == 'r':
1052 self.dirstate.normallookup(f)
1052 self.dirstate.normallookup(f)
1053 else:
1053 else:
1054 self.dirstate.add(f)
1054 self.dirstate.add(f)
1055 return rejected
1055 return rejected
1056 finally:
1056 finally:
1057 del wlock
1057 del wlock
1058
1058
1059 def forget(self, list):
1059 def forget(self, list):
1060 wlock = self.wlock()
1060 wlock = self.wlock()
1061 try:
1061 try:
1062 for f in list:
1062 for f in list:
1063 if self.dirstate[f] != 'a':
1063 if self.dirstate[f] != 'a':
1064 self.ui.warn(_("%s not added!\n") % f)
1064 self.ui.warn(_("%s not added!\n") % f)
1065 else:
1065 else:
1066 self.dirstate.forget(f)
1066 self.dirstate.forget(f)
1067 finally:
1067 finally:
1068 del wlock
1068 del wlock
1069
1069
1070 def remove(self, list, unlink=False):
1070 def remove(self, list, unlink=False):
1071 wlock = None
1071 wlock = None
1072 try:
1072 try:
1073 if unlink:
1073 if unlink:
1074 for f in list:
1074 for f in list:
1075 try:
1075 try:
1076 util.unlink(self.wjoin(f))
1076 util.unlink(self.wjoin(f))
1077 except OSError, inst:
1077 except OSError, inst:
1078 if inst.errno != errno.ENOENT:
1078 if inst.errno != errno.ENOENT:
1079 raise
1079 raise
1080 wlock = self.wlock()
1080 wlock = self.wlock()
1081 for f in list:
1081 for f in list:
1082 if unlink and os.path.exists(self.wjoin(f)):
1082 if unlink and os.path.exists(self.wjoin(f)):
1083 self.ui.warn(_("%s still exists!\n") % f)
1083 self.ui.warn(_("%s still exists!\n") % f)
1084 elif self.dirstate[f] == 'a':
1084 elif self.dirstate[f] == 'a':
1085 self.dirstate.forget(f)
1085 self.dirstate.forget(f)
1086 elif f not in self.dirstate:
1086 elif f not in self.dirstate:
1087 self.ui.warn(_("%s not tracked!\n") % f)
1087 self.ui.warn(_("%s not tracked!\n") % f)
1088 else:
1088 else:
1089 self.dirstate.remove(f)
1089 self.dirstate.remove(f)
1090 finally:
1090 finally:
1091 del wlock
1091 del wlock
1092
1092
1093 def undelete(self, list):
1093 def undelete(self, list):
1094 wlock = None
1094 wlock = None
1095 try:
1095 try:
1096 manifests = [self.manifest.read(self.changelog.read(p)[0])
1096 manifests = [self.manifest.read(self.changelog.read(p)[0])
1097 for p in self.dirstate.parents() if p != nullid]
1097 for p in self.dirstate.parents() if p != nullid]
1098 wlock = self.wlock()
1098 wlock = self.wlock()
1099 for f in list:
1099 for f in list:
1100 if self.dirstate[f] != 'r':
1100 if self.dirstate[f] != 'r':
1101 self.ui.warn("%s not removed!\n" % f)
1101 self.ui.warn(_("%s not removed!\n") % f)
1102 else:
1102 else:
1103 m = f in manifests[0] and manifests[0] or manifests[1]
1103 m = f in manifests[0] and manifests[0] or manifests[1]
1104 t = self.file(f).read(m[f])
1104 t = self.file(f).read(m[f])
1105 self.wwrite(f, t, m.flags(f))
1105 self.wwrite(f, t, m.flags(f))
1106 self.dirstate.normal(f)
1106 self.dirstate.normal(f)
1107 finally:
1107 finally:
1108 del wlock
1108 del wlock
1109
1109
1110 def copy(self, source, dest):
1110 def copy(self, source, dest):
1111 wlock = None
1111 wlock = None
1112 try:
1112 try:
1113 p = self.wjoin(dest)
1113 p = self.wjoin(dest)
1114 if not (os.path.exists(p) or os.path.islink(p)):
1114 if not (os.path.exists(p) or os.path.islink(p)):
1115 self.ui.warn(_("%s does not exist!\n") % dest)
1115 self.ui.warn(_("%s does not exist!\n") % dest)
1116 elif not (os.path.isfile(p) or os.path.islink(p)):
1116 elif not (os.path.isfile(p) or os.path.islink(p)):
1117 self.ui.warn(_("copy failed: %s is not a file or a "
1117 self.ui.warn(_("copy failed: %s is not a file or a "
1118 "symbolic link\n") % dest)
1118 "symbolic link\n") % dest)
1119 else:
1119 else:
1120 wlock = self.wlock()
1120 wlock = self.wlock()
1121 if dest not in self.dirstate:
1121 if dest not in self.dirstate:
1122 self.dirstate.add(dest)
1122 self.dirstate.add(dest)
1123 self.dirstate.copy(source, dest)
1123 self.dirstate.copy(source, dest)
1124 finally:
1124 finally:
1125 del wlock
1125 del wlock
1126
1126
1127 def heads(self, start=None):
1127 def heads(self, start=None):
1128 heads = self.changelog.heads(start)
1128 heads = self.changelog.heads(start)
1129 # sort the output in rev descending order
1129 # sort the output in rev descending order
1130 heads = [(-self.changelog.rev(h), h) for h in heads]
1130 heads = [(-self.changelog.rev(h), h) for h in heads]
1131 return [n for (r, n) in util.sort(heads)]
1131 return [n for (r, n) in util.sort(heads)]
1132
1132
1133 def branchheads(self, branch=None, start=None):
1133 def branchheads(self, branch=None, start=None):
1134 if branch is None:
1134 if branch is None:
1135 branch = self[None].branch()
1135 branch = self[None].branch()
1136 branches = self.branchtags()
1136 branches = self.branchtags()
1137 if branch not in branches:
1137 if branch not in branches:
1138 return []
1138 return []
1139 # The basic algorithm is this:
1139 # The basic algorithm is this:
1140 #
1140 #
1141 # Start from the branch tip since there are no later revisions that can
1141 # Start from the branch tip since there are no later revisions that can
1142 # possibly be in this branch, and the tip is a guaranteed head.
1142 # possibly be in this branch, and the tip is a guaranteed head.
1143 #
1143 #
1144 # Remember the tip's parents as the first ancestors, since these by
1144 # Remember the tip's parents as the first ancestors, since these by
1145 # definition are not heads.
1145 # definition are not heads.
1146 #
1146 #
1147 # Step backwards from the brach tip through all the revisions. We are
1147 # Step backwards from the brach tip through all the revisions. We are
1148 # guaranteed by the rules of Mercurial that we will now be visiting the
1148 # guaranteed by the rules of Mercurial that we will now be visiting the
1149 # nodes in reverse topological order (children before parents).
1149 # nodes in reverse topological order (children before parents).
1150 #
1150 #
1151 # If a revision is one of the ancestors of a head then we can toss it
1151 # If a revision is one of the ancestors of a head then we can toss it
1152 # out of the ancestors set (we've already found it and won't be
1152 # out of the ancestors set (we've already found it and won't be
1153 # visiting it again) and put its parents in the ancestors set.
1153 # visiting it again) and put its parents in the ancestors set.
1154 #
1154 #
1155 # Otherwise, if a revision is in the branch it's another head, since it
1155 # Otherwise, if a revision is in the branch it's another head, since it
1156 # wasn't in the ancestor list of an existing head. So add it to the
1156 # wasn't in the ancestor list of an existing head. So add it to the
1157 # head list, and add its parents to the ancestor list.
1157 # head list, and add its parents to the ancestor list.
1158 #
1158 #
1159 # If it is not in the branch ignore it.
1159 # If it is not in the branch ignore it.
1160 #
1160 #
1161 # Once we have a list of heads, use nodesbetween to filter out all the
1161 # Once we have a list of heads, use nodesbetween to filter out all the
1162 # heads that cannot be reached from startrev. There may be a more
1162 # heads that cannot be reached from startrev. There may be a more
1163 # efficient way to do this as part of the previous algorithm.
1163 # efficient way to do this as part of the previous algorithm.
1164
1164
1165 set = util.set
1165 set = util.set
1166 heads = [self.changelog.rev(branches[branch])]
1166 heads = [self.changelog.rev(branches[branch])]
1167 # Don't care if ancestors contains nullrev or not.
1167 # Don't care if ancestors contains nullrev or not.
1168 ancestors = set(self.changelog.parentrevs(heads[0]))
1168 ancestors = set(self.changelog.parentrevs(heads[0]))
1169 for rev in xrange(heads[0] - 1, nullrev, -1):
1169 for rev in xrange(heads[0] - 1, nullrev, -1):
1170 if rev in ancestors:
1170 if rev in ancestors:
1171 ancestors.update(self.changelog.parentrevs(rev))
1171 ancestors.update(self.changelog.parentrevs(rev))
1172 ancestors.remove(rev)
1172 ancestors.remove(rev)
1173 elif self[rev].branch() == branch:
1173 elif self[rev].branch() == branch:
1174 heads.append(rev)
1174 heads.append(rev)
1175 ancestors.update(self.changelog.parentrevs(rev))
1175 ancestors.update(self.changelog.parentrevs(rev))
1176 heads = [self.changelog.node(rev) for rev in heads]
1176 heads = [self.changelog.node(rev) for rev in heads]
1177 if start is not None:
1177 if start is not None:
1178 heads = self.changelog.nodesbetween([start], heads)[2]
1178 heads = self.changelog.nodesbetween([start], heads)[2]
1179 return heads
1179 return heads
1180
1180
1181 def branches(self, nodes):
1181 def branches(self, nodes):
1182 if not nodes:
1182 if not nodes:
1183 nodes = [self.changelog.tip()]
1183 nodes = [self.changelog.tip()]
1184 b = []
1184 b = []
1185 for n in nodes:
1185 for n in nodes:
1186 t = n
1186 t = n
1187 while 1:
1187 while 1:
1188 p = self.changelog.parents(n)
1188 p = self.changelog.parents(n)
1189 if p[1] != nullid or p[0] == nullid:
1189 if p[1] != nullid or p[0] == nullid:
1190 b.append((t, n, p[0], p[1]))
1190 b.append((t, n, p[0], p[1]))
1191 break
1191 break
1192 n = p[0]
1192 n = p[0]
1193 return b
1193 return b
1194
1194
1195 def between(self, pairs):
1195 def between(self, pairs):
1196 r = []
1196 r = []
1197
1197
1198 for top, bottom in pairs:
1198 for top, bottom in pairs:
1199 n, l, i = top, [], 0
1199 n, l, i = top, [], 0
1200 f = 1
1200 f = 1
1201
1201
1202 while n != bottom:
1202 while n != bottom:
1203 p = self.changelog.parents(n)[0]
1203 p = self.changelog.parents(n)[0]
1204 if i == f:
1204 if i == f:
1205 l.append(n)
1205 l.append(n)
1206 f = f * 2
1206 f = f * 2
1207 n = p
1207 n = p
1208 i += 1
1208 i += 1
1209
1209
1210 r.append(l)
1210 r.append(l)
1211
1211
1212 return r
1212 return r
1213
1213
1214 def findincoming(self, remote, base=None, heads=None, force=False):
1214 def findincoming(self, remote, base=None, heads=None, force=False):
1215 """Return list of roots of the subsets of missing nodes from remote
1215 """Return list of roots of the subsets of missing nodes from remote
1216
1216
1217 If base dict is specified, assume that these nodes and their parents
1217 If base dict is specified, assume that these nodes and their parents
1218 exist on the remote side and that no child of a node of base exists
1218 exist on the remote side and that no child of a node of base exists
1219 in both remote and self.
1219 in both remote and self.
1220 Furthermore base will be updated to include the nodes that exists
1220 Furthermore base will be updated to include the nodes that exists
1221 in self and remote but no children exists in self and remote.
1221 in self and remote but no children exists in self and remote.
1222 If a list of heads is specified, return only nodes which are heads
1222 If a list of heads is specified, return only nodes which are heads
1223 or ancestors of these heads.
1223 or ancestors of these heads.
1224
1224
1225 All the ancestors of base are in self and in remote.
1225 All the ancestors of base are in self and in remote.
1226 All the descendants of the list returned are missing in self.
1226 All the descendants of the list returned are missing in self.
1227 (and so we know that the rest of the nodes are missing in remote, see
1227 (and so we know that the rest of the nodes are missing in remote, see
1228 outgoing)
1228 outgoing)
1229 """
1229 """
1230 m = self.changelog.nodemap
1230 m = self.changelog.nodemap
1231 search = []
1231 search = []
1232 fetch = {}
1232 fetch = {}
1233 seen = {}
1233 seen = {}
1234 seenbranch = {}
1234 seenbranch = {}
1235 if base == None:
1235 if base == None:
1236 base = {}
1236 base = {}
1237
1237
1238 if not heads:
1238 if not heads:
1239 heads = remote.heads()
1239 heads = remote.heads()
1240
1240
1241 if self.changelog.tip() == nullid:
1241 if self.changelog.tip() == nullid:
1242 base[nullid] = 1
1242 base[nullid] = 1
1243 if heads != [nullid]:
1243 if heads != [nullid]:
1244 return [nullid]
1244 return [nullid]
1245 return []
1245 return []
1246
1246
1247 # assume we're closer to the tip than the root
1247 # assume we're closer to the tip than the root
1248 # and start by examining the heads
1248 # and start by examining the heads
1249 self.ui.status(_("searching for changes\n"))
1249 self.ui.status(_("searching for changes\n"))
1250
1250
1251 unknown = []
1251 unknown = []
1252 for h in heads:
1252 for h in heads:
1253 if h not in m:
1253 if h not in m:
1254 unknown.append(h)
1254 unknown.append(h)
1255 else:
1255 else:
1256 base[h] = 1
1256 base[h] = 1
1257
1257
1258 if not unknown:
1258 if not unknown:
1259 return []
1259 return []
1260
1260
1261 req = dict.fromkeys(unknown)
1261 req = dict.fromkeys(unknown)
1262 reqcnt = 0
1262 reqcnt = 0
1263
1263
1264 # search through remote branches
1264 # search through remote branches
1265 # a 'branch' here is a linear segment of history, with four parts:
1265 # a 'branch' here is a linear segment of history, with four parts:
1266 # head, root, first parent, second parent
1266 # head, root, first parent, second parent
1267 # (a branch always has two parents (or none) by definition)
1267 # (a branch always has two parents (or none) by definition)
1268 unknown = remote.branches(unknown)
1268 unknown = remote.branches(unknown)
1269 while unknown:
1269 while unknown:
1270 r = []
1270 r = []
1271 while unknown:
1271 while unknown:
1272 n = unknown.pop(0)
1272 n = unknown.pop(0)
1273 if n[0] in seen:
1273 if n[0] in seen:
1274 continue
1274 continue
1275
1275
1276 self.ui.debug(_("examining %s:%s\n")
1276 self.ui.debug(_("examining %s:%s\n")
1277 % (short(n[0]), short(n[1])))
1277 % (short(n[0]), short(n[1])))
1278 if n[0] == nullid: # found the end of the branch
1278 if n[0] == nullid: # found the end of the branch
1279 pass
1279 pass
1280 elif n in seenbranch:
1280 elif n in seenbranch:
1281 self.ui.debug(_("branch already found\n"))
1281 self.ui.debug(_("branch already found\n"))
1282 continue
1282 continue
1283 elif n[1] and n[1] in m: # do we know the base?
1283 elif n[1] and n[1] in m: # do we know the base?
1284 self.ui.debug(_("found incomplete branch %s:%s\n")
1284 self.ui.debug(_("found incomplete branch %s:%s\n")
1285 % (short(n[0]), short(n[1])))
1285 % (short(n[0]), short(n[1])))
1286 search.append(n) # schedule branch range for scanning
1286 search.append(n) # schedule branch range for scanning
1287 seenbranch[n] = 1
1287 seenbranch[n] = 1
1288 else:
1288 else:
1289 if n[1] not in seen and n[1] not in fetch:
1289 if n[1] not in seen and n[1] not in fetch:
1290 if n[2] in m and n[3] in m:
1290 if n[2] in m and n[3] in m:
1291 self.ui.debug(_("found new changeset %s\n") %
1291 self.ui.debug(_("found new changeset %s\n") %
1292 short(n[1]))
1292 short(n[1]))
1293 fetch[n[1]] = 1 # earliest unknown
1293 fetch[n[1]] = 1 # earliest unknown
1294 for p in n[2:4]:
1294 for p in n[2:4]:
1295 if p in m:
1295 if p in m:
1296 base[p] = 1 # latest known
1296 base[p] = 1 # latest known
1297
1297
1298 for p in n[2:4]:
1298 for p in n[2:4]:
1299 if p not in req and p not in m:
1299 if p not in req and p not in m:
1300 r.append(p)
1300 r.append(p)
1301 req[p] = 1
1301 req[p] = 1
1302 seen[n[0]] = 1
1302 seen[n[0]] = 1
1303
1303
1304 if r:
1304 if r:
1305 reqcnt += 1
1305 reqcnt += 1
1306 self.ui.debug(_("request %d: %s\n") %
1306 self.ui.debug(_("request %d: %s\n") %
1307 (reqcnt, " ".join(map(short, r))))
1307 (reqcnt, " ".join(map(short, r))))
1308 for p in xrange(0, len(r), 10):
1308 for p in xrange(0, len(r), 10):
1309 for b in remote.branches(r[p:p+10]):
1309 for b in remote.branches(r[p:p+10]):
1310 self.ui.debug(_("received %s:%s\n") %
1310 self.ui.debug(_("received %s:%s\n") %
1311 (short(b[0]), short(b[1])))
1311 (short(b[0]), short(b[1])))
1312 unknown.append(b)
1312 unknown.append(b)
1313
1313
1314 # do binary search on the branches we found
1314 # do binary search on the branches we found
1315 while search:
1315 while search:
1316 n = search.pop(0)
1316 n = search.pop(0)
1317 reqcnt += 1
1317 reqcnt += 1
1318 l = remote.between([(n[0], n[1])])[0]
1318 l = remote.between([(n[0], n[1])])[0]
1319 l.append(n[1])
1319 l.append(n[1])
1320 p = n[0]
1320 p = n[0]
1321 f = 1
1321 f = 1
1322 for i in l:
1322 for i in l:
1323 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1323 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1324 if i in m:
1324 if i in m:
1325 if f <= 2:
1325 if f <= 2:
1326 self.ui.debug(_("found new branch changeset %s\n") %
1326 self.ui.debug(_("found new branch changeset %s\n") %
1327 short(p))
1327 short(p))
1328 fetch[p] = 1
1328 fetch[p] = 1
1329 base[i] = 1
1329 base[i] = 1
1330 else:
1330 else:
1331 self.ui.debug(_("narrowed branch search to %s:%s\n")
1331 self.ui.debug(_("narrowed branch search to %s:%s\n")
1332 % (short(p), short(i)))
1332 % (short(p), short(i)))
1333 search.append((p, i))
1333 search.append((p, i))
1334 break
1334 break
1335 p, f = i, f * 2
1335 p, f = i, f * 2
1336
1336
1337 # sanity check our fetch list
1337 # sanity check our fetch list
1338 for f in fetch.keys():
1338 for f in fetch.keys():
1339 if f in m:
1339 if f in m:
1340 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1340 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1341
1341
1342 if base.keys() == [nullid]:
1342 if base.keys() == [nullid]:
1343 if force:
1343 if force:
1344 self.ui.warn(_("warning: repository is unrelated\n"))
1344 self.ui.warn(_("warning: repository is unrelated\n"))
1345 else:
1345 else:
1346 raise util.Abort(_("repository is unrelated"))
1346 raise util.Abort(_("repository is unrelated"))
1347
1347
1348 self.ui.debug(_("found new changesets starting at ") +
1348 self.ui.debug(_("found new changesets starting at ") +
1349 " ".join([short(f) for f in fetch]) + "\n")
1349 " ".join([short(f) for f in fetch]) + "\n")
1350
1350
1351 self.ui.debug(_("%d total queries\n") % reqcnt)
1351 self.ui.debug(_("%d total queries\n") % reqcnt)
1352
1352
1353 return fetch.keys()
1353 return fetch.keys()
1354
1354
1355 def findoutgoing(self, remote, base=None, heads=None, force=False):
1355 def findoutgoing(self, remote, base=None, heads=None, force=False):
1356 """Return list of nodes that are roots of subsets not in remote
1356 """Return list of nodes that are roots of subsets not in remote
1357
1357
1358 If base dict is specified, assume that these nodes and their parents
1358 If base dict is specified, assume that these nodes and their parents
1359 exist on the remote side.
1359 exist on the remote side.
1360 If a list of heads is specified, return only nodes which are heads
1360 If a list of heads is specified, return only nodes which are heads
1361 or ancestors of these heads, and return a second element which
1361 or ancestors of these heads, and return a second element which
1362 contains all remote heads which get new children.
1362 contains all remote heads which get new children.
1363 """
1363 """
1364 if base == None:
1364 if base == None:
1365 base = {}
1365 base = {}
1366 self.findincoming(remote, base, heads, force=force)
1366 self.findincoming(remote, base, heads, force=force)
1367
1367
1368 self.ui.debug(_("common changesets up to ")
1368 self.ui.debug(_("common changesets up to ")
1369 + " ".join(map(short, base.keys())) + "\n")
1369 + " ".join(map(short, base.keys())) + "\n")
1370
1370
1371 remain = dict.fromkeys(self.changelog.nodemap)
1371 remain = dict.fromkeys(self.changelog.nodemap)
1372
1372
1373 # prune everything remote has from the tree
1373 # prune everything remote has from the tree
1374 del remain[nullid]
1374 del remain[nullid]
1375 remove = base.keys()
1375 remove = base.keys()
1376 while remove:
1376 while remove:
1377 n = remove.pop(0)
1377 n = remove.pop(0)
1378 if n in remain:
1378 if n in remain:
1379 del remain[n]
1379 del remain[n]
1380 for p in self.changelog.parents(n):
1380 for p in self.changelog.parents(n):
1381 remove.append(p)
1381 remove.append(p)
1382
1382
1383 # find every node whose parents have been pruned
1383 # find every node whose parents have been pruned
1384 subset = []
1384 subset = []
1385 # find every remote head that will get new children
1385 # find every remote head that will get new children
1386 updated_heads = {}
1386 updated_heads = {}
1387 for n in remain:
1387 for n in remain:
1388 p1, p2 = self.changelog.parents(n)
1388 p1, p2 = self.changelog.parents(n)
1389 if p1 not in remain and p2 not in remain:
1389 if p1 not in remain and p2 not in remain:
1390 subset.append(n)
1390 subset.append(n)
1391 if heads:
1391 if heads:
1392 if p1 in heads:
1392 if p1 in heads:
1393 updated_heads[p1] = True
1393 updated_heads[p1] = True
1394 if p2 in heads:
1394 if p2 in heads:
1395 updated_heads[p2] = True
1395 updated_heads[p2] = True
1396
1396
1397 # this is the set of all roots we have to push
1397 # this is the set of all roots we have to push
1398 if heads:
1398 if heads:
1399 return subset, updated_heads.keys()
1399 return subset, updated_heads.keys()
1400 else:
1400 else:
1401 return subset
1401 return subset
1402
1402
1403 def pull(self, remote, heads=None, force=False):
1403 def pull(self, remote, heads=None, force=False):
1404 lock = self.lock()
1404 lock = self.lock()
1405 try:
1405 try:
1406 fetch = self.findincoming(remote, heads=heads, force=force)
1406 fetch = self.findincoming(remote, heads=heads, force=force)
1407 if fetch == [nullid]:
1407 if fetch == [nullid]:
1408 self.ui.status(_("requesting all changes\n"))
1408 self.ui.status(_("requesting all changes\n"))
1409
1409
1410 if not fetch:
1410 if not fetch:
1411 self.ui.status(_("no changes found\n"))
1411 self.ui.status(_("no changes found\n"))
1412 return 0
1412 return 0
1413
1413
1414 if heads is None:
1414 if heads is None:
1415 cg = remote.changegroup(fetch, 'pull')
1415 cg = remote.changegroup(fetch, 'pull')
1416 else:
1416 else:
1417 if 'changegroupsubset' not in remote.capabilities:
1417 if 'changegroupsubset' not in remote.capabilities:
1418 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1418 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1419 cg = remote.changegroupsubset(fetch, heads, 'pull')
1419 cg = remote.changegroupsubset(fetch, heads, 'pull')
1420 return self.addchangegroup(cg, 'pull', remote.url())
1420 return self.addchangegroup(cg, 'pull', remote.url())
1421 finally:
1421 finally:
1422 del lock
1422 del lock
1423
1423
1424 def push(self, remote, force=False, revs=None):
1424 def push(self, remote, force=False, revs=None):
1425 # there are two ways to push to remote repo:
1425 # there are two ways to push to remote repo:
1426 #
1426 #
1427 # addchangegroup assumes local user can lock remote
1427 # addchangegroup assumes local user can lock remote
1428 # repo (local filesystem, old ssh servers).
1428 # repo (local filesystem, old ssh servers).
1429 #
1429 #
1430 # unbundle assumes local user cannot lock remote repo (new ssh
1430 # unbundle assumes local user cannot lock remote repo (new ssh
1431 # servers, http servers).
1431 # servers, http servers).
1432
1432
1433 if remote.capable('unbundle'):
1433 if remote.capable('unbundle'):
1434 return self.push_unbundle(remote, force, revs)
1434 return self.push_unbundle(remote, force, revs)
1435 return self.push_addchangegroup(remote, force, revs)
1435 return self.push_addchangegroup(remote, force, revs)
1436
1436
    def prepush(self, remote, force, revs):
        """Prepare a changegroup for pushing to remote.

        Returns a 2-tuple: (changegroup, remote heads) on success, or
        (None, status) when there is nothing to push (status 1) or the
        push was refused because it would create new remote heads
        (status 0).
        """
        base = {}
        # remote.heads() returns the remote's current head nodes; base is
        # filled in by findincoming with the common nodes as a side effect.
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            # Restrict the outgoing set to ancestors of the requested revs.
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # Remote is empty: pushing anything cannot add extra heads.
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                # Pushing everything and we have more heads than remote:
                # the head count must grow.
                warn = 1
            else:
                # Simulate the remote's head list after the push.
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        # A known remote head survives only if none of our
                        # outgoing heads descends from it.
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        # Unknown locally: it stays a head on the remote.
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote heads!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 0
            elif inc:
                # Incoming changes exist but do not block the push.
                self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1492
1492
1493 def push_addchangegroup(self, remote, force, revs):
1493 def push_addchangegroup(self, remote, force, revs):
1494 lock = remote.lock()
1494 lock = remote.lock()
1495 try:
1495 try:
1496 ret = self.prepush(remote, force, revs)
1496 ret = self.prepush(remote, force, revs)
1497 if ret[0] is not None:
1497 if ret[0] is not None:
1498 cg, remote_heads = ret
1498 cg, remote_heads = ret
1499 return remote.addchangegroup(cg, 'push', self.url())
1499 return remote.addchangegroup(cg, 'push', self.url())
1500 return ret[1]
1500 return ret[1]
1501 finally:
1501 finally:
1502 del lock
1502 del lock
1503
1503
1504 def push_unbundle(self, remote, force, revs):
1504 def push_unbundle(self, remote, force, revs):
1505 # local repo finds heads on server, finds out what revs it
1505 # local repo finds heads on server, finds out what revs it
1506 # must push. once revs transferred, if server finds it has
1506 # must push. once revs transferred, if server finds it has
1507 # different heads (someone else won commit/push race), server
1507 # different heads (someone else won commit/push race), server
1508 # aborts.
1508 # aborts.
1509
1509
1510 ret = self.prepush(remote, force, revs)
1510 ret = self.prepush(remote, force, revs)
1511 if ret[0] is not None:
1511 if ret[0] is not None:
1512 cg, remote_heads = ret
1512 cg, remote_heads = ret
1513 if force: remote_heads = ['force']
1513 if force: remote_heads = ['force']
1514 return remote.unbundle(cg, remote_heads, 'push')
1514 return remote.unbundle(cg, remote_heads, 'push')
1515 return ret[1]
1515 return ret[1]
1516
1516
1517 def changegroupinfo(self, nodes, source):
1517 def changegroupinfo(self, nodes, source):
1518 if self.ui.verbose or source == 'bundle':
1518 if self.ui.verbose or source == 'bundle':
1519 self.ui.status(_("%d changesets found\n") % len(nodes))
1519 self.ui.status(_("%d changesets found\n") % len(nodes))
1520 if self.ui.debugflag:
1520 if self.ui.debugflag:
1521 self.ui.debug(_("List of changesets:\n"))
1521 self.ui.debug(_("List of changesets:\n"))
1522 for node in nodes:
1522 for node in nodes:
1523 self.ui.debug("%s\n" % hex(node))
1523 self.ui.debug("%s\n" % hex(node))
1524
1524
1525 def changegroupsubset(self, bases, heads, source, extranodes=None):
1525 def changegroupsubset(self, bases, heads, source, extranodes=None):
1526 """This function generates a changegroup consisting of all the nodes
1526 """This function generates a changegroup consisting of all the nodes
1527 that are descendents of any of the bases, and ancestors of any of
1527 that are descendents of any of the bases, and ancestors of any of
1528 the heads.
1528 the heads.
1529
1529
1530 It is fairly complex as determining which filenodes and which
1530 It is fairly complex as determining which filenodes and which
1531 manifest nodes need to be included for the changeset to be complete
1531 manifest nodes need to be included for the changeset to be complete
1532 is non-trivial.
1532 is non-trivial.
1533
1533
1534 Another wrinkle is doing the reverse, figuring out which changeset in
1534 Another wrinkle is doing the reverse, figuring out which changeset in
1535 the changegroup a particular filenode or manifestnode belongs to.
1535 the changegroup a particular filenode or manifestnode belongs to.
1536
1536
1537 The caller can specify some nodes that must be included in the
1537 The caller can specify some nodes that must be included in the
1538 changegroup using the extranodes argument. It should be a dict
1538 changegroup using the extranodes argument. It should be a dict
1539 where the keys are the filenames (or 1 for the manifest), and the
1539 where the keys are the filenames (or 1 for the manifest), and the
1540 values are lists of (node, linknode) tuples, where node is a wanted
1540 values are lists of (node, linknode) tuples, where node is a wanted
1541 node and linknode is the changelog node that should be transmitted as
1541 node and linknode is the changelog node that should be transmitted as
1542 the linkrev.
1542 the linkrev.
1543 """
1543 """
1544
1544
1545 self.hook('preoutgoing', throw=True, source=source)
1545 self.hook('preoutgoing', throw=True, source=source)
1546
1546
1547 # Set up some initial variables
1547 # Set up some initial variables
1548 # Make it easy to refer to self.changelog
1548 # Make it easy to refer to self.changelog
1549 cl = self.changelog
1549 cl = self.changelog
1550 # msng is short for missing - compute the list of changesets in this
1550 # msng is short for missing - compute the list of changesets in this
1551 # changegroup.
1551 # changegroup.
1552 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1552 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1553 self.changegroupinfo(msng_cl_lst, source)
1553 self.changegroupinfo(msng_cl_lst, source)
1554 # Some bases may turn out to be superfluous, and some heads may be
1554 # Some bases may turn out to be superfluous, and some heads may be
1555 # too. nodesbetween will return the minimal set of bases and heads
1555 # too. nodesbetween will return the minimal set of bases and heads
1556 # necessary to re-create the changegroup.
1556 # necessary to re-create the changegroup.
1557
1557
1558 # Known heads are the list of heads that it is assumed the recipient
1558 # Known heads are the list of heads that it is assumed the recipient
1559 # of this changegroup will know about.
1559 # of this changegroup will know about.
1560 knownheads = {}
1560 knownheads = {}
1561 # We assume that all parents of bases are known heads.
1561 # We assume that all parents of bases are known heads.
1562 for n in bases:
1562 for n in bases:
1563 for p in cl.parents(n):
1563 for p in cl.parents(n):
1564 if p != nullid:
1564 if p != nullid:
1565 knownheads[p] = 1
1565 knownheads[p] = 1
1566 knownheads = knownheads.keys()
1566 knownheads = knownheads.keys()
1567 if knownheads:
1567 if knownheads:
1568 # Now that we know what heads are known, we can compute which
1568 # Now that we know what heads are known, we can compute which
1569 # changesets are known. The recipient must know about all
1569 # changesets are known. The recipient must know about all
1570 # changesets required to reach the known heads from the null
1570 # changesets required to reach the known heads from the null
1571 # changeset.
1571 # changeset.
1572 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1572 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1573 junk = None
1573 junk = None
1574 # Transform the list into an ersatz set.
1574 # Transform the list into an ersatz set.
1575 has_cl_set = dict.fromkeys(has_cl_set)
1575 has_cl_set = dict.fromkeys(has_cl_set)
1576 else:
1576 else:
1577 # If there were no known heads, the recipient cannot be assumed to
1577 # If there were no known heads, the recipient cannot be assumed to
1578 # know about any changesets.
1578 # know about any changesets.
1579 has_cl_set = {}
1579 has_cl_set = {}
1580
1580
1581 # Make it easy to refer to self.manifest
1581 # Make it easy to refer to self.manifest
1582 mnfst = self.manifest
1582 mnfst = self.manifest
1583 # We don't know which manifests are missing yet
1583 # We don't know which manifests are missing yet
1584 msng_mnfst_set = {}
1584 msng_mnfst_set = {}
1585 # Nor do we know which filenodes are missing.
1585 # Nor do we know which filenodes are missing.
1586 msng_filenode_set = {}
1586 msng_filenode_set = {}
1587
1587
1588 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1588 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1589 junk = None
1589 junk = None
1590
1590
1591 # A changeset always belongs to itself, so the changenode lookup
1591 # A changeset always belongs to itself, so the changenode lookup
1592 # function for a changenode is identity.
1592 # function for a changenode is identity.
1593 def identity(x):
1593 def identity(x):
1594 return x
1594 return x
1595
1595
1596 # A function generating function. Sets up an environment for the
1596 # A function generating function. Sets up an environment for the
1597 # inner function.
1597 # inner function.
1598 def cmp_by_rev_func(revlog):
1598 def cmp_by_rev_func(revlog):
1599 # Compare two nodes by their revision number in the environment's
1599 # Compare two nodes by their revision number in the environment's
1600 # revision history. Since the revision number both represents the
1600 # revision history. Since the revision number both represents the
1601 # most efficient order to read the nodes in, and represents a
1601 # most efficient order to read the nodes in, and represents a
1602 # topological sorting of the nodes, this function is often useful.
1602 # topological sorting of the nodes, this function is often useful.
1603 def cmp_by_rev(a, b):
1603 def cmp_by_rev(a, b):
1604 return cmp(revlog.rev(a), revlog.rev(b))
1604 return cmp(revlog.rev(a), revlog.rev(b))
1605 return cmp_by_rev
1605 return cmp_by_rev
1606
1606
1607 # If we determine that a particular file or manifest node must be a
1607 # If we determine that a particular file or manifest node must be a
1608 # node that the recipient of the changegroup will already have, we can
1608 # node that the recipient of the changegroup will already have, we can
1609 # also assume the recipient will have all the parents. This function
1609 # also assume the recipient will have all the parents. This function
1610 # prunes them from the set of missing nodes.
1610 # prunes them from the set of missing nodes.
1611 def prune_parents(revlog, hasset, msngset):
1611 def prune_parents(revlog, hasset, msngset):
1612 haslst = hasset.keys()
1612 haslst = hasset.keys()
1613 haslst.sort(cmp_by_rev_func(revlog))
1613 haslst.sort(cmp_by_rev_func(revlog))
1614 for node in haslst:
1614 for node in haslst:
1615 parentlst = [p for p in revlog.parents(node) if p != nullid]
1615 parentlst = [p for p in revlog.parents(node) if p != nullid]
1616 while parentlst:
1616 while parentlst:
1617 n = parentlst.pop()
1617 n = parentlst.pop()
1618 if n not in hasset:
1618 if n not in hasset:
1619 hasset[n] = 1
1619 hasset[n] = 1
1620 p = [p for p in revlog.parents(n) if p != nullid]
1620 p = [p for p in revlog.parents(n) if p != nullid]
1621 parentlst.extend(p)
1621 parentlst.extend(p)
1622 for n in hasset:
1622 for n in hasset:
1623 msngset.pop(n, None)
1623 msngset.pop(n, None)
1624
1624
1625 # This is a function generating function used to set up an environment
1625 # This is a function generating function used to set up an environment
1626 # for the inner function to execute in.
1626 # for the inner function to execute in.
1627 def manifest_and_file_collector(changedfileset):
1627 def manifest_and_file_collector(changedfileset):
1628 # This is an information gathering function that gathers
1628 # This is an information gathering function that gathers
1629 # information from each changeset node that goes out as part of
1629 # information from each changeset node that goes out as part of
1630 # the changegroup. The information gathered is a list of which
1630 # the changegroup. The information gathered is a list of which
1631 # manifest nodes are potentially required (the recipient may
1631 # manifest nodes are potentially required (the recipient may
1632 # already have them) and total list of all files which were
1632 # already have them) and total list of all files which were
1633 # changed in any changeset in the changegroup.
1633 # changed in any changeset in the changegroup.
1634 #
1634 #
1635 # We also remember the first changenode we saw any manifest
1635 # We also remember the first changenode we saw any manifest
1636 # referenced by so we can later determine which changenode 'owns'
1636 # referenced by so we can later determine which changenode 'owns'
1637 # the manifest.
1637 # the manifest.
1638 def collect_manifests_and_files(clnode):
1638 def collect_manifests_and_files(clnode):
1639 c = cl.read(clnode)
1639 c = cl.read(clnode)
1640 for f in c[3]:
1640 for f in c[3]:
1641 # This is to make sure we only have one instance of each
1641 # This is to make sure we only have one instance of each
1642 # filename string for each filename.
1642 # filename string for each filename.
1643 changedfileset.setdefault(f, f)
1643 changedfileset.setdefault(f, f)
1644 msng_mnfst_set.setdefault(c[0], clnode)
1644 msng_mnfst_set.setdefault(c[0], clnode)
1645 return collect_manifests_and_files
1645 return collect_manifests_and_files
1646
1646
1647 # Figure out which manifest nodes (of the ones we think might be part
1647 # Figure out which manifest nodes (of the ones we think might be part
1648 # of the changegroup) the recipient must know about and remove them
1648 # of the changegroup) the recipient must know about and remove them
1649 # from the changegroup.
1649 # from the changegroup.
1650 def prune_manifests():
1650 def prune_manifests():
1651 has_mnfst_set = {}
1651 has_mnfst_set = {}
1652 for n in msng_mnfst_set:
1652 for n in msng_mnfst_set:
1653 # If a 'missing' manifest thinks it belongs to a changenode
1653 # If a 'missing' manifest thinks it belongs to a changenode
1654 # the recipient is assumed to have, obviously the recipient
1654 # the recipient is assumed to have, obviously the recipient
1655 # must have that manifest.
1655 # must have that manifest.
1656 linknode = cl.node(mnfst.linkrev(n))
1656 linknode = cl.node(mnfst.linkrev(n))
1657 if linknode in has_cl_set:
1657 if linknode in has_cl_set:
1658 has_mnfst_set[n] = 1
1658 has_mnfst_set[n] = 1
1659 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1659 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1660
1660
1661 # Use the information collected in collect_manifests_and_files to say
1661 # Use the information collected in collect_manifests_and_files to say
1662 # which changenode any manifestnode belongs to.
1662 # which changenode any manifestnode belongs to.
1663 def lookup_manifest_link(mnfstnode):
1663 def lookup_manifest_link(mnfstnode):
1664 return msng_mnfst_set[mnfstnode]
1664 return msng_mnfst_set[mnfstnode]
1665
1665
1666 # A function generating function that sets up the initial environment
1666 # A function generating function that sets up the initial environment
1667 # the inner function.
1667 # the inner function.
1668 def filenode_collector(changedfiles):
1668 def filenode_collector(changedfiles):
1669 next_rev = [0]
1669 next_rev = [0]
1670 # This gathers information from each manifestnode included in the
1670 # This gathers information from each manifestnode included in the
1671 # changegroup about which filenodes the manifest node references
1671 # changegroup about which filenodes the manifest node references
1672 # so we can include those in the changegroup too.
1672 # so we can include those in the changegroup too.
1673 #
1673 #
1674 # It also remembers which changenode each filenode belongs to. It
1674 # It also remembers which changenode each filenode belongs to. It
1675 # does this by assuming the a filenode belongs to the changenode
1675 # does this by assuming the a filenode belongs to the changenode
1676 # the first manifest that references it belongs to.
1676 # the first manifest that references it belongs to.
1677 def collect_msng_filenodes(mnfstnode):
1677 def collect_msng_filenodes(mnfstnode):
1678 r = mnfst.rev(mnfstnode)
1678 r = mnfst.rev(mnfstnode)
1679 if r == next_rev[0]:
1679 if r == next_rev[0]:
1680 # If the last rev we looked at was the one just previous,
1680 # If the last rev we looked at was the one just previous,
1681 # we only need to see a diff.
1681 # we only need to see a diff.
1682 deltamf = mnfst.readdelta(mnfstnode)
1682 deltamf = mnfst.readdelta(mnfstnode)
1683 # For each line in the delta
1683 # For each line in the delta
1684 for f, fnode in deltamf.items():
1684 for f, fnode in deltamf.items():
1685 f = changedfiles.get(f, None)
1685 f = changedfiles.get(f, None)
1686 # And if the file is in the list of files we care
1686 # And if the file is in the list of files we care
1687 # about.
1687 # about.
1688 if f is not None:
1688 if f is not None:
1689 # Get the changenode this manifest belongs to
1689 # Get the changenode this manifest belongs to
1690 clnode = msng_mnfst_set[mnfstnode]
1690 clnode = msng_mnfst_set[mnfstnode]
1691 # Create the set of filenodes for the file if
1691 # Create the set of filenodes for the file if
1692 # there isn't one already.
1692 # there isn't one already.
1693 ndset = msng_filenode_set.setdefault(f, {})
1693 ndset = msng_filenode_set.setdefault(f, {})
1694 # And set the filenode's changelog node to the
1694 # And set the filenode's changelog node to the
1695 # manifest's if it hasn't been set already.
1695 # manifest's if it hasn't been set already.
1696 ndset.setdefault(fnode, clnode)
1696 ndset.setdefault(fnode, clnode)
1697 else:
1697 else:
1698 # Otherwise we need a full manifest.
1698 # Otherwise we need a full manifest.
1699 m = mnfst.read(mnfstnode)
1699 m = mnfst.read(mnfstnode)
1700 # For every file in we care about.
1700 # For every file in we care about.
1701 for f in changedfiles:
1701 for f in changedfiles:
1702 fnode = m.get(f, None)
1702 fnode = m.get(f, None)
1703 # If it's in the manifest
1703 # If it's in the manifest
1704 if fnode is not None:
1704 if fnode is not None:
1705 # See comments above.
1705 # See comments above.
1706 clnode = msng_mnfst_set[mnfstnode]
1706 clnode = msng_mnfst_set[mnfstnode]
1707 ndset = msng_filenode_set.setdefault(f, {})
1707 ndset = msng_filenode_set.setdefault(f, {})
1708 ndset.setdefault(fnode, clnode)
1708 ndset.setdefault(fnode, clnode)
1709 # Remember the revision we hope to see next.
1709 # Remember the revision we hope to see next.
1710 next_rev[0] = r + 1
1710 next_rev[0] = r + 1
1711 return collect_msng_filenodes
1711 return collect_msng_filenodes
1712
1712
1713 # We have a list of filenodes we think we need for a file, lets remove
1713 # We have a list of filenodes we think we need for a file, lets remove
1714 # all those we now the recipient must have.
1714 # all those we now the recipient must have.
1715 def prune_filenodes(f, filerevlog):
1715 def prune_filenodes(f, filerevlog):
1716 msngset = msng_filenode_set[f]
1716 msngset = msng_filenode_set[f]
1717 hasset = {}
1717 hasset = {}
1718 # If a 'missing' filenode thinks it belongs to a changenode we
1718 # If a 'missing' filenode thinks it belongs to a changenode we
1719 # assume the recipient must have, then the recipient must have
1719 # assume the recipient must have, then the recipient must have
1720 # that filenode.
1720 # that filenode.
1721 for n in msngset:
1721 for n in msngset:
1722 clnode = cl.node(filerevlog.linkrev(n))
1722 clnode = cl.node(filerevlog.linkrev(n))
1723 if clnode in has_cl_set:
1723 if clnode in has_cl_set:
1724 hasset[n] = 1
1724 hasset[n] = 1
1725 prune_parents(filerevlog, hasset, msngset)
1725 prune_parents(filerevlog, hasset, msngset)
1726
1726
1727 # A function generator function that sets up the a context for the
1727 # A function generator function that sets up the a context for the
1728 # inner function.
1728 # inner function.
1729 def lookup_filenode_link_func(fname):
1729 def lookup_filenode_link_func(fname):
1730 msngset = msng_filenode_set[fname]
1730 msngset = msng_filenode_set[fname]
1731 # Lookup the changenode the filenode belongs to.
1731 # Lookup the changenode the filenode belongs to.
1732 def lookup_filenode_link(fnode):
1732 def lookup_filenode_link(fnode):
1733 return msngset[fnode]
1733 return msngset[fnode]
1734 return lookup_filenode_link
1734 return lookup_filenode_link
1735
1735
1736 # Add the nodes that were explicitly requested.
1736 # Add the nodes that were explicitly requested.
1737 def add_extra_nodes(name, nodes):
1737 def add_extra_nodes(name, nodes):
1738 if not extranodes or name not in extranodes:
1738 if not extranodes or name not in extranodes:
1739 return
1739 return
1740
1740
1741 for node, linknode in extranodes[name]:
1741 for node, linknode in extranodes[name]:
1742 if node not in nodes:
1742 if node not in nodes:
1743 nodes[node] = linknode
1743 nodes[node] = linknode
1744
1744
1745 # Now that we have all theses utility functions to help out and
1745 # Now that we have all theses utility functions to help out and
1746 # logically divide up the task, generate the group.
1746 # logically divide up the task, generate the group.
1747 def gengroup():
1747 def gengroup():
1748 # The set of changed files starts empty.
1748 # The set of changed files starts empty.
1749 changedfiles = {}
1749 changedfiles = {}
1750 # Create a changenode group generator that will call our functions
1750 # Create a changenode group generator that will call our functions
1751 # back to lookup the owning changenode and collect information.
1751 # back to lookup the owning changenode and collect information.
1752 group = cl.group(msng_cl_lst, identity,
1752 group = cl.group(msng_cl_lst, identity,
1753 manifest_and_file_collector(changedfiles))
1753 manifest_and_file_collector(changedfiles))
1754 for chnk in group:
1754 for chnk in group:
1755 yield chnk
1755 yield chnk
1756
1756
1757 # The list of manifests has been collected by the generator
1757 # The list of manifests has been collected by the generator
1758 # calling our functions back.
1758 # calling our functions back.
1759 prune_manifests()
1759 prune_manifests()
1760 add_extra_nodes(1, msng_mnfst_set)
1760 add_extra_nodes(1, msng_mnfst_set)
1761 msng_mnfst_lst = msng_mnfst_set.keys()
1761 msng_mnfst_lst = msng_mnfst_set.keys()
1762 # Sort the manifestnodes by revision number.
1762 # Sort the manifestnodes by revision number.
1763 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1763 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1764 # Create a generator for the manifestnodes that calls our lookup
1764 # Create a generator for the manifestnodes that calls our lookup
1765 # and data collection functions back.
1765 # and data collection functions back.
1766 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1766 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1767 filenode_collector(changedfiles))
1767 filenode_collector(changedfiles))
1768 for chnk in group:
1768 for chnk in group:
1769 yield chnk
1769 yield chnk
1770
1770
1771 # These are no longer needed, dereference and toss the memory for
1771 # These are no longer needed, dereference and toss the memory for
1772 # them.
1772 # them.
1773 msng_mnfst_lst = None
1773 msng_mnfst_lst = None
1774 msng_mnfst_set.clear()
1774 msng_mnfst_set.clear()
1775
1775
1776 if extranodes:
1776 if extranodes:
1777 for fname in extranodes:
1777 for fname in extranodes:
1778 if isinstance(fname, int):
1778 if isinstance(fname, int):
1779 continue
1779 continue
1780 add_extra_nodes(fname,
1780 add_extra_nodes(fname,
1781 msng_filenode_set.setdefault(fname, {}))
1781 msng_filenode_set.setdefault(fname, {}))
1782 changedfiles[fname] = 1
1782 changedfiles[fname] = 1
1783 # Go through all our files in order sorted by name.
1783 # Go through all our files in order sorted by name.
1784 for fname in util.sort(changedfiles):
1784 for fname in util.sort(changedfiles):
1785 filerevlog = self.file(fname)
1785 filerevlog = self.file(fname)
1786 if not len(filerevlog):
1786 if not len(filerevlog):
1787 raise util.Abort(_("empty or missing revlog for %s") % fname)
1787 raise util.Abort(_("empty or missing revlog for %s") % fname)
1788 # Toss out the filenodes that the recipient isn't really
1788 # Toss out the filenodes that the recipient isn't really
1789 # missing.
1789 # missing.
1790 if fname in msng_filenode_set:
1790 if fname in msng_filenode_set:
1791 prune_filenodes(fname, filerevlog)
1791 prune_filenodes(fname, filerevlog)
1792 msng_filenode_lst = msng_filenode_set[fname].keys()
1792 msng_filenode_lst = msng_filenode_set[fname].keys()
1793 else:
1793 else:
1794 msng_filenode_lst = []
1794 msng_filenode_lst = []
1795 # If any filenodes are left, generate the group for them,
1795 # If any filenodes are left, generate the group for them,
1796 # otherwise don't bother.
1796 # otherwise don't bother.
1797 if len(msng_filenode_lst) > 0:
1797 if len(msng_filenode_lst) > 0:
1798 yield changegroup.chunkheader(len(fname))
1798 yield changegroup.chunkheader(len(fname))
1799 yield fname
1799 yield fname
1800 # Sort the filenodes by their revision #
1800 # Sort the filenodes by their revision #
1801 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1801 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1802 # Create a group generator and only pass in a changenode
1802 # Create a group generator and only pass in a changenode
1803 # lookup function as we need to collect no information
1803 # lookup function as we need to collect no information
1804 # from filenodes.
1804 # from filenodes.
1805 group = filerevlog.group(msng_filenode_lst,
1805 group = filerevlog.group(msng_filenode_lst,
1806 lookup_filenode_link_func(fname))
1806 lookup_filenode_link_func(fname))
1807 for chnk in group:
1807 for chnk in group:
1808 yield chnk
1808 yield chnk
1809 if fname in msng_filenode_set:
1809 if fname in msng_filenode_set:
1810 # Don't need this anymore, toss it to free memory.
1810 # Don't need this anymore, toss it to free memory.
1811 del msng_filenode_set[fname]
1811 del msng_filenode_set[fname]
1812 # Signal that no more groups are left.
1812 # Signal that no more groups are left.
1813 yield changegroup.closechunk()
1813 yield changegroup.closechunk()
1814
1814
1815 if msng_cl_lst:
1815 if msng_cl_lst:
1816 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1816 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1817
1817
1818 return util.chunkbuffer(gengroup())
1818 return util.chunkbuffer(gengroup())
1819
1819
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        basenodes: changelog nodes the recipient already has; everything
                   between them and the heads is sent.
        source:    opaque tag passed to the 'preoutgoing'/'outgoing' hooks.
        Returns a util.chunkbuffer streaming the group lazily.
        """

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # every changeset from basenodes up to the heads is outgoing
        nodes = cl.nodesbetween(basenodes, None)[0]
        # dict used as a set of outgoing changelog revision numbers,
        # for O(1) membership tests in gennodelst below
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes, source)

        def identity(x):
            # changelog nodes are their own link nodes
            return x

        def gennodelst(log):
            # yield nodes of `log` whose linked changeset is outgoing
            for r in log:
                n = log.node(r)
                if log.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            # return a callback that, for each changeset sent, records the
            # files it touched (field 3 of the changelog entry)
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            # return a callback mapping a node of `revlog` to the
            # changelog node it is linked to
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            # 1. changelog chunks, collecting changed files as a side effect
            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk

            # 2. manifest chunks
            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            # 3. one group per changed file (sorted by name): filename
            #    header chunk, then the revision chunks
            for fname in util.sort(changedfiles):
                filerevlog = self.file(fname)
                if not len(filerevlog):
                    raise util.Abort(_("empty or missing revlog for %s") % fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.chunkheader(len(fname))
                    yield fname
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            # end-of-stream marker
            yield changegroup.closechunk()

            if nodes:
                self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1887
1887
    def addchangegroup(self, source, srctype, url, emptyok=False):
        """add changegroup to repo.

        source:  chunk stream to read the group from
        srctype: origin tag passed through to the hooks
        url:     origin URL passed through to the hooks
        emptyok: if True, an empty changelog group is not an error

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        def csmap(x):
            # linkrev lookup for incoming changesets: the next changeset
            # will be stored at revision len(cl)
            self.ui.debug(_("add changeset %s\n") % short(x))
            return len(cl)

        def revmap(x):
            # map a changelog node to its revision number (linkrev for
            # incoming manifest/file revisions)
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = self.changelog
        cl.delayupdate()
        oldheads = len(cl.heads())

        tr = self.transaction()
        try:
            # hand callees a weak proxy so they cannot keep the
            # transaction alive through reference cycles (see the
            # unconditional 'del tr' in the finally clause)
            trp = weakref.proxy(tr)
            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = len(cl) - 1          # last revision before the pull
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
                raise util.Abort(_("received changelog group is empty"))
            cnr = len(cl) - 1          # last revision after the pull
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, trp)

            # process the files: a zero-size chunk (falsy f) terminates
            # the sequence of per-file groups
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = len(fl)
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, trp) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += len(fl) - o
                files += 1

            # make changelog see real files again
            cl.finalize(trp)

            newheads = len(self.changelog.heads())
            heads = ""
            if oldheads and newheads != oldheads:
                heads = _(" (%+d heads)") % (newheads - oldheads)

            self.ui.status(_("added %d changesets"
                             " with %d changes to %d files%s\n")
                           % (changesets, revisions, files, heads))

            if changesets > 0:
                # pretxnchangegroup may veto the whole transaction by raising
                self.hook('pretxnchangegroup', throw=True,
                          node=hex(self.changelog.node(cor+1)), source=srctype,
                          url=url)

            tr.close()
        finally:
            # release the transaction (rollback if close() was not reached)
            del tr

        if changesets > 0:
            # forcefully update the on-disk branch cache
            self.ui.debug(_("updating the branch cache\n"))
            self.branchtags()
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1990
1990
1991
1991
1992 def stream_in(self, remote):
1992 def stream_in(self, remote):
1993 fp = remote.stream_out()
1993 fp = remote.stream_out()
1994 l = fp.readline()
1994 l = fp.readline()
1995 try:
1995 try:
1996 resp = int(l)
1996 resp = int(l)
1997 except ValueError:
1997 except ValueError:
1998 raise util.UnexpectedOutput(
1998 raise util.UnexpectedOutput(
1999 _('Unexpected response from remote server:'), l)
1999 _('Unexpected response from remote server:'), l)
2000 if resp == 1:
2000 if resp == 1:
2001 raise util.Abort(_('operation forbidden by server'))
2001 raise util.Abort(_('operation forbidden by server'))
2002 elif resp == 2:
2002 elif resp == 2:
2003 raise util.Abort(_('locking the remote repository failed'))
2003 raise util.Abort(_('locking the remote repository failed'))
2004 elif resp != 0:
2004 elif resp != 0:
2005 raise util.Abort(_('the server sent an unknown error code'))
2005 raise util.Abort(_('the server sent an unknown error code'))
2006 self.ui.status(_('streaming all changes\n'))
2006 self.ui.status(_('streaming all changes\n'))
2007 l = fp.readline()
2007 l = fp.readline()
2008 try:
2008 try:
2009 total_files, total_bytes = map(int, l.split(' ', 1))
2009 total_files, total_bytes = map(int, l.split(' ', 1))
2010 except (ValueError, TypeError):
2010 except (ValueError, TypeError):
2011 raise util.UnexpectedOutput(
2011 raise util.UnexpectedOutput(
2012 _('Unexpected response from remote server:'), l)
2012 _('Unexpected response from remote server:'), l)
2013 self.ui.status(_('%d files to transfer, %s of data\n') %
2013 self.ui.status(_('%d files to transfer, %s of data\n') %
2014 (total_files, util.bytecount(total_bytes)))
2014 (total_files, util.bytecount(total_bytes)))
2015 start = time.time()
2015 start = time.time()
2016 for i in xrange(total_files):
2016 for i in xrange(total_files):
2017 # XXX doesn't support '\n' or '\r' in filenames
2017 # XXX doesn't support '\n' or '\r' in filenames
2018 l = fp.readline()
2018 l = fp.readline()
2019 try:
2019 try:
2020 name, size = l.split('\0', 1)
2020 name, size = l.split('\0', 1)
2021 size = int(size)
2021 size = int(size)
2022 except ValueError, TypeError:
2022 except ValueError, TypeError:
2023 raise util.UnexpectedOutput(
2023 raise util.UnexpectedOutput(
2024 _('Unexpected response from remote server:'), l)
2024 _('Unexpected response from remote server:'), l)
2025 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2025 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2026 ofp = self.sopener(name, 'w')
2026 ofp = self.sopener(name, 'w')
2027 for chunk in util.filechunkiter(fp, limit=size):
2027 for chunk in util.filechunkiter(fp, limit=size):
2028 ofp.write(chunk)
2028 ofp.write(chunk)
2029 ofp.close()
2029 ofp.close()
2030 elapsed = time.time() - start
2030 elapsed = time.time() - start
2031 if elapsed <= 0:
2031 if elapsed <= 0:
2032 elapsed = 0.001
2032 elapsed = 0.001
2033 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2033 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2034 (util.bytecount(total_bytes), elapsed,
2034 (util.bytecount(total_bytes), elapsed,
2035 util.bytecount(total_bytes / elapsed)))
2035 util.bytecount(total_bytes / elapsed)))
2036 self.invalidate()
2036 self.invalidate()
2037 return len(self.heads()) + 1
2037 return len(self.heads()) + 1
2038
2038
2039 def clone(self, remote, heads=[], stream=False):
2039 def clone(self, remote, heads=[], stream=False):
2040 '''clone remote repository.
2040 '''clone remote repository.
2041
2041
2042 keyword arguments:
2042 keyword arguments:
2043 heads: list of revs to clone (forces use of pull)
2043 heads: list of revs to clone (forces use of pull)
2044 stream: use streaming clone if possible'''
2044 stream: use streaming clone if possible'''
2045
2045
2046 # now, all clients that can request uncompressed clones can
2046 # now, all clients that can request uncompressed clones can
2047 # read repo formats supported by all servers that can serve
2047 # read repo formats supported by all servers that can serve
2048 # them.
2048 # them.
2049
2049
2050 # if revlog format changes, client will have to check version
2050 # if revlog format changes, client will have to check version
2051 # and format flags on "stream" capability, and use
2051 # and format flags on "stream" capability, and use
2052 # uncompressed only if compatible.
2052 # uncompressed only if compatible.
2053
2053
2054 if stream and not heads and remote.capable('stream'):
2054 if stream and not heads and remote.capable('stream'):
2055 return self.stream_in(remote)
2055 return self.stream_in(remote)
2056 return self.pull(remote, heads)
2056 return self.pull(remote, heads)
2057
2057
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (src, dest) pair in *files*.

    The pairs are snapshotted as tuples up front, so the returned closure
    holds no reference back to the original sequence.
    """
    pending = [tuple(pair) for pair in files]
    def run_renames():
        for src, dest in pending:
            util.rename(src, dest)
    return run_renames
2065
2065
def instance(ui, path, create):
    """Instantiate a localrepository for *path* (a 'file' scheme may be
    present and is stripped)."""
    localpath = util.drop_scheme('file', path)
    return localrepository(ui, localpath, create)
2068
2068
def islocal(path):
    """Paths handled by this module are always local repositories."""
    # unconditionally true for the localrepo scheme
    return True
@@ -1,499 +1,499 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev, hex, bin
8 from node import nullid, nullrev, hex, bin
9 from i18n import _
9 from i18n import _
10 import errno, util, os, filemerge, copies, shutil
10 import errno, util, os, filemerge, copies, shutil
11
11
class mergestate(object):
    '''track 3-way merge state of individual files

    State is saved under .hg/merge: a "state" file holding one line with
    the hex node of the local parent, then one NUL-separated record per
    file: name, state character ('u' unresolved / 'r' resolved), hash,
    local path, ancestor path, ancestor node, other path, flags.  A copy
    of each local file is stashed under .hg/merge/<hash> for re-merging.
    '''
    def __init__(self, repo):
        self._repo = repo
        self._read()
    def reset(self, node):
        # start tracking a merge against `node`; drop all recorded state
        # and the stashed pre-merge file copies
        self._state = {}
        self._local = node
        shutil.rmtree(self._repo.join("merge"), True)
    def _read(self):
        # load persisted state; a missing state file (ENOENT) just means
        # no merge is in progress
        self._state = {}
        try:
            localnode = None
            f = self._repo.opener("merge/state")
            for i, l in enumerate(f):
                if i == 0:
                    # first line: hex node of the local parent
                    localnode = l[:-1]
                else:
                    # remaining lines: NUL-separated fields, name first
                    bits = l[:-1].split("\0")
                    self._state[bits[0]] = bits[1:]
            self._local = bin(localnode)
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
    def _write(self):
        # persist the current state (format: see class docstring)
        f = self._repo.opener("merge/state", "w")
        f.write(hex(self._local) + "\n")
        for d, v in self._state.items():
            f.write("\0".join([d] + v) + "\n")
    def add(self, fcl, fco, fca, fd, flags):
        """Record file `fd` as unresolved and stash fcl's data for later.

        fcl/fco/fca: local, other and ancestor file contexts.
        """
        hash = util.sha1(fcl.path()).hexdigest()
        self._repo.opener("merge/" + hash, "w").write(fcl.data())
        self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                           hex(fca.filenode()), fco.path(), flags]
        self._write()
    def __contains__(self, dfile):
        return dfile in self._state
    def __getitem__(self, dfile):
        # the merge state character for `dfile` ('u' or 'r')
        return self._state[dfile][0]
    def __iter__(self):
        # iterate tracked file names in sorted order
        l = self._state.keys()
        l.sort()
        for f in l:
            yield f
    def mark(self, dfile, state):
        # set the state character for `dfile` and persist immediately
        self._state[dfile][0] = state
        self._write()
    def resolve(self, dfile, wctx, octx):
        """Re-run the file merge for `dfile`.

        Returns 0 if already resolved or the merge succeeds (the file is
        then marked 'r'); otherwise returns filemerge's nonzero result.
        """
        if self[dfile] == 'r':
            return 0
        state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
        # restore the stashed pre-merge local version into the working dir
        f = self._repo.opener("merge/" + hash)
        self._repo.wwrite(dfile, f.read(), flags)
        fcd = wctx[dfile]
        fco = octx[ofile]
        fca = self._repo.filectx(afile, fileid=anode)
        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
        if not r:
            self.mark(dfile, 'r')
        return r
72
73 def _checkunknown(wctx, mctx):
73 def _checkunknown(wctx, mctx):
74 "check for collisions between unknown files and files in mctx"
74 "check for collisions between unknown files and files in mctx"
75 for f in wctx.unknown():
75 for f in wctx.unknown():
76 if f in mctx and mctx[f].cmp(wctx[f].data()):
76 if f in mctx and mctx[f].cmp(wctx[f].data()):
77 raise util.Abort(_("untracked file in working directory differs"
77 raise util.Abort(_("untracked file in working directory differs"
78 " from file in requested revision: '%s'") % f)
78 " from file in requested revision: '%s'") % f)
79
79
80 def _checkcollision(mctx):
80 def _checkcollision(mctx):
81 "check for case folding collisions in the destination context"
81 "check for case folding collisions in the destination context"
82 folded = {}
82 folded = {}
83 for fn in mctx:
83 for fn in mctx:
84 fold = fn.lower()
84 fold = fn.lower()
85 if fold in folded:
85 if fold in folded:
86 raise util.Abort(_("case-folding collision between %s and %s")
86 raise util.Abort(_("case-folding collision between %s and %s")
87 % (fn, folded[fold]))
87 % (fn, folded[fold]))
88 folded[fold] = fn
88 folded[fold] = fn
89
89
90 def _forgetremoved(wctx, mctx, branchmerge):
90 def _forgetremoved(wctx, mctx, branchmerge):
91 """
91 """
92 Forget removed files
92 Forget removed files
93
93
94 If we're jumping between revisions (as opposed to merging), and if
94 If we're jumping between revisions (as opposed to merging), and if
95 neither the working directory nor the target rev has the file,
95 neither the working directory nor the target rev has the file,
96 then we need to remove it from the dirstate, to prevent the
96 then we need to remove it from the dirstate, to prevent the
97 dirstate from listing the file when it is no longer in the
97 dirstate from listing the file when it is no longer in the
98 manifest.
98 manifest.
99
99
100 If we're merging, and the other revision has removed a file
100 If we're merging, and the other revision has removed a file
101 that is not present in the working directory, we need to mark it
101 that is not present in the working directory, we need to mark it
102 as removed.
102 as removed.
103 """
103 """
104
104
105 action = []
105 action = []
106 state = branchmerge and 'r' or 'f'
106 state = branchmerge and 'r' or 'f'
107 for f in wctx.deleted():
107 for f in wctx.deleted():
108 if f not in mctx:
108 if f not in mctx:
109 action.append((f, state))
109 action.append((f, state))
110
110
111 if not branchmerge:
111 if not branchmerge:
112 for f in wctx.removed():
112 for f in wctx.removed():
113 if f not in mctx:
113 if f not in mctx:
114 action.append((f, "f"))
114 action.append((f, "f"))
115
115
116 return action
116 return action
117
117
118 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
118 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
119 """
119 """
120 Merge p1 and p2 with ancestor ma and generate merge action list
120 Merge p1 and p2 with ancestor ma and generate merge action list
121
121
122 overwrite = whether we clobber working files
122 overwrite = whether we clobber working files
123 partial = function to filter file lists
123 partial = function to filter file lists
124 """
124 """
125
125
126 repo.ui.note(_("resolving manifests\n"))
126 repo.ui.note(_("resolving manifests\n"))
127 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
127 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
128 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
128 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
129
129
130 m1 = p1.manifest()
130 m1 = p1.manifest()
131 m2 = p2.manifest()
131 m2 = p2.manifest()
132 ma = pa.manifest()
132 ma = pa.manifest()
133 backwards = (pa == p2)
133 backwards = (pa == p2)
134 action = []
134 action = []
135 copy, copied, diverge = {}, {}, {}
135 copy, copied, diverge = {}, {}, {}
136
136
137 def fmerge(f, f2=None, fa=None):
137 def fmerge(f, f2=None, fa=None):
138 """merge flags"""
138 """merge flags"""
139 if not f2:
139 if not f2:
140 f2 = f
140 f2 = f
141 fa = f
141 fa = f
142 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
142 a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2)
143 if m == n: # flags agree
143 if m == n: # flags agree
144 return m # unchanged
144 return m # unchanged
145 if m and n: # flags are set but don't agree
145 if m and n: # flags are set but don't agree
146 if not a: # both differ from parent
146 if not a: # both differ from parent
147 r = repo.ui.prompt(
147 r = repo.ui.prompt(
148 _(" conflicting flags for %s\n"
148 _(" conflicting flags for %s\n"
149 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
149 "(n)one, e(x)ec or sym(l)ink?") % f, "[nxl]", "n")
150 return r != "n" and r or ''
150 return r != "n" and r or ''
151 if m == a:
151 if m == a:
152 return n # changed from m to n
152 return n # changed from m to n
153 return m # changed from n to m
153 return m # changed from n to m
154 if m and m != a: # changed from a to m
154 if m and m != a: # changed from a to m
155 return m
155 return m
156 if n and n != a: # changed from a to n
156 if n and n != a: # changed from a to n
157 return n
157 return n
158 return '' # flag was cleared
158 return '' # flag was cleared
159
159
160 def act(msg, m, f, *args):
160 def act(msg, m, f, *args):
161 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
161 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
162 action.append((f, m) + args)
162 action.append((f, m) + args)
163
163
164 if pa and not (backwards or overwrite):
164 if pa and not (backwards or overwrite):
165 if repo.ui.configbool("merge", "followcopies", True):
165 if repo.ui.configbool("merge", "followcopies", True):
166 dirs = repo.ui.configbool("merge", "followdirs", True)
166 dirs = repo.ui.configbool("merge", "followdirs", True)
167 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
167 copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
168 copied = dict.fromkeys(copy.values())
168 copied = dict.fromkeys(copy.values())
169 for of, fl in diverge.items():
169 for of, fl in diverge.items():
170 act("divergent renames", "dr", of, fl)
170 act("divergent renames", "dr", of, fl)
171
171
172 # Compare manifests
172 # Compare manifests
173 for f, n in m1.iteritems():
173 for f, n in m1.iteritems():
174 if partial and not partial(f):
174 if partial and not partial(f):
175 continue
175 continue
176 if f in m2:
176 if f in m2:
177 if overwrite or backwards:
177 if overwrite or backwards:
178 rflags = m2.flags(f)
178 rflags = m2.flags(f)
179 else:
179 else:
180 rflags = fmerge(f)
180 rflags = fmerge(f)
181 # are files different?
181 # are files different?
182 if n != m2[f]:
182 if n != m2[f]:
183 a = ma.get(f, nullid)
183 a = ma.get(f, nullid)
184 # are we clobbering?
184 # are we clobbering?
185 if overwrite:
185 if overwrite:
186 act("clobbering", "g", f, rflags)
186 act("clobbering", "g", f, rflags)
187 # or are we going back in time and clean?
187 # or are we going back in time and clean?
188 elif backwards and not n[20:]:
188 elif backwards and not n[20:]:
189 act("reverting", "g", f, rflags)
189 act("reverting", "g", f, rflags)
190 # are both different from the ancestor?
190 # are both different from the ancestor?
191 elif n != a and m2[f] != a:
191 elif n != a and m2[f] != a:
192 act("versions differ", "m", f, f, f, rflags, False)
192 act("versions differ", "m", f, f, f, rflags, False)
193 # is remote's version newer?
193 # is remote's version newer?
194 elif m2[f] != a:
194 elif m2[f] != a:
195 act("remote is newer", "g", f, rflags)
195 act("remote is newer", "g", f, rflags)
196 # local is newer, not overwrite, check mode bits
196 # local is newer, not overwrite, check mode bits
197 elif m1.flags(f) != rflags:
197 elif m1.flags(f) != rflags:
198 act("update permissions", "e", f, rflags)
198 act("update permissions", "e", f, rflags)
199 # contents same, check mode bits
199 # contents same, check mode bits
200 elif m1.flags(f) != rflags:
200 elif m1.flags(f) != rflags:
201 act("update permissions", "e", f, rflags)
201 act("update permissions", "e", f, rflags)
202 elif f in copied:
202 elif f in copied:
203 continue
203 continue
204 elif f in copy:
204 elif f in copy:
205 f2 = copy[f]
205 f2 = copy[f]
206 if f2 not in m2: # directory rename
206 if f2 not in m2: # directory rename
207 act("remote renamed directory to " + f2, "d",
207 act("remote renamed directory to " + f2, "d",
208 f, None, f2, m1.flags(f))
208 f, None, f2, m1.flags(f))
209 elif f2 in m1: # case 2 A,B/B/B
209 elif f2 in m1: # case 2 A,B/B/B
210 act("local copied to " + f2, "m",
210 act("local copied to " + f2, "m",
211 f, f2, f, fmerge(f, f2, f2), False)
211 f, f2, f, fmerge(f, f2, f2), False)
212 else: # case 4,21 A/B/B
212 else: # case 4,21 A/B/B
213 act("local moved to " + f2, "m",
213 act("local moved to " + f2, "m",
214 f, f2, f, fmerge(f, f2, f2), False)
214 f, f2, f, fmerge(f, f2, f2), False)
215 elif f in ma:
215 elif f in ma:
216 if n != ma[f] and not overwrite:
216 if n != ma[f] and not overwrite:
217 if repo.ui.prompt(
217 if repo.ui.prompt(
218 _(" local changed %s which remote deleted\n"
218 _(" local changed %s which remote deleted\n"
219 "use (c)hanged version or (d)elete?") % f,
219 "use (c)hanged version or (d)elete?") % f,
220 _("[cd]"), _("c")) == _("d"):
220 _("[cd]"), _("c")) == _("d"):
221 act("prompt delete", "r", f)
221 act("prompt delete", "r", f)
222 else:
222 else:
223 act("other deleted", "r", f)
223 act("other deleted", "r", f)
224 else:
224 else:
225 # file is created on branch or in working directory
225 # file is created on branch or in working directory
226 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
226 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
227 act("remote deleted", "r", f)
227 act("remote deleted", "r", f)
228
228
229 for f, n in m2.iteritems():
229 for f, n in m2.iteritems():
230 if partial and not partial(f):
230 if partial and not partial(f):
231 continue
231 continue
232 if f in m1:
232 if f in m1:
233 continue
233 continue
234 if f in copied:
234 if f in copied:
235 continue
235 continue
236 if f in copy:
236 if f in copy:
237 f2 = copy[f]
237 f2 = copy[f]
238 if f2 not in m1: # directory rename
238 if f2 not in m1: # directory rename
239 act("local renamed directory to " + f2, "d",
239 act("local renamed directory to " + f2, "d",
240 None, f, f2, m2.flags(f))
240 None, f, f2, m2.flags(f))
241 elif f2 in m2: # rename case 1, A/A,B/A
241 elif f2 in m2: # rename case 1, A/A,B/A
242 act("remote copied to " + f, "m",
242 act("remote copied to " + f, "m",
243 f2, f, f, fmerge(f2, f, f2), False)
243 f2, f, f, fmerge(f2, f, f2), False)
244 else: # case 3,20 A/B/A
244 else: # case 3,20 A/B/A
245 act("remote moved to " + f, "m",
245 act("remote moved to " + f, "m",
246 f2, f, f, fmerge(f2, f, f2), True)
246 f2, f, f, fmerge(f2, f, f2), True)
247 elif f in ma:
247 elif f in ma:
248 if overwrite or backwards:
248 if overwrite or backwards:
249 act("recreating", "g", f, m2.flags(f))
249 act("recreating", "g", f, m2.flags(f))
250 elif n != ma[f]:
250 elif n != ma[f]:
251 if repo.ui.prompt(
251 if repo.ui.prompt(
252 _("remote changed %s which local deleted\n"
252 _("remote changed %s which local deleted\n"
253 "use (c)hanged version or leave (d)eleted?") % f,
253 "use (c)hanged version or leave (d)eleted?") % f,
254 _("[cd]"), _("c")) == _("c"):
254 _("[cd]"), _("c")) == _("c"):
255 act("prompt recreating", "g", f, m2.flags(f))
255 act("prompt recreating", "g", f, m2.flags(f))
256 else:
256 else:
257 act("remote created", "g", f, m2.flags(f))
257 act("remote created", "g", f, m2.flags(f))
258
258
259 return action
259 return action
260
260
def actioncmp(a1, a2):
    """Sort comparator for merge action tuples: removals sort first.

    Each action is a tuple whose second element is the action type
    character; "r" (remove) actions are ordered before everything else
    so files are deleted before gets/merges that may reuse their paths.
    Ties fall back to plain tuple comparison for a deterministic order.
    """
    m1 = a1[1]
    m2 = a2[1]
    if m1 == m2:
        return cmp(a1, a2)
    if m1 == 'r':
        return -1
    if m2 == 'r':
        return 1
    return cmp(a1, a2)
271
271
272 def applyupdates(repo, action, wctx, mctx):
272 def applyupdates(repo, action, wctx, mctx):
273 "apply the merge action list to the working directory"
273 "apply the merge action list to the working directory"
274
274
275 updated, merged, removed, unresolved = 0, 0, 0, 0
275 updated, merged, removed, unresolved = 0, 0, 0, 0
276 ms = mergestate(repo)
276 ms = mergestate(repo)
277 ms.reset(wctx.parents()[0].node())
277 ms.reset(wctx.parents()[0].node())
278 moves = []
278 moves = []
279 action.sort(actioncmp)
279 action.sort(actioncmp)
280
280
281 # prescan for merges
281 # prescan for merges
282 for a in action:
282 for a in action:
283 f, m = a[:2]
283 f, m = a[:2]
284 if m == 'm': # merge
284 if m == 'm': # merge
285 f2, fd, flags, move = a[2:]
285 f2, fd, flags, move = a[2:]
286 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
286 repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
287 fcl = wctx[f]
287 fcl = wctx[f]
288 fco = mctx[f2]
288 fco = mctx[f2]
289 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
289 fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
290 ms.add(fcl, fco, fca, fd, flags)
290 ms.add(fcl, fco, fca, fd, flags)
291 if f != fd and move:
291 if f != fd and move:
292 moves.append(f)
292 moves.append(f)
293
293
294 # remove renamed files after safely stored
294 # remove renamed files after safely stored
295 for f in moves:
295 for f in moves:
296 if util.lexists(repo.wjoin(f)):
296 if util.lexists(repo.wjoin(f)):
297 repo.ui.debug(_("removing %s\n") % f)
297 repo.ui.debug(_("removing %s\n") % f)
298 os.unlink(repo.wjoin(f))
298 os.unlink(repo.wjoin(f))
299
299
300 audit_path = util.path_auditor(repo.root)
300 audit_path = util.path_auditor(repo.root)
301
301
302 for a in action:
302 for a in action:
303 f, m = a[:2]
303 f, m = a[:2]
304 if f and f[0] == "/":
304 if f and f[0] == "/":
305 continue
305 continue
306 if m == "r": # remove
306 if m == "r": # remove
307 repo.ui.note(_("removing %s\n") % f)
307 repo.ui.note(_("removing %s\n") % f)
308 audit_path(f)
308 audit_path(f)
309 try:
309 try:
310 util.unlink(repo.wjoin(f))
310 util.unlink(repo.wjoin(f))
311 except OSError, inst:
311 except OSError, inst:
312 if inst.errno != errno.ENOENT:
312 if inst.errno != errno.ENOENT:
313 repo.ui.warn(_("update failed to remove %s: %s!\n") %
313 repo.ui.warn(_("update failed to remove %s: %s!\n") %
314 (f, inst.strerror))
314 (f, inst.strerror))
315 removed += 1
315 removed += 1
316 elif m == "m": # merge
316 elif m == "m": # merge
317 f2, fd, flags, move = a[2:]
317 f2, fd, flags, move = a[2:]
318 r = ms.resolve(fd, wctx, mctx)
318 r = ms.resolve(fd, wctx, mctx)
319 if r > 0:
319 if r > 0:
320 unresolved += 1
320 unresolved += 1
321 else:
321 else:
322 if r is None:
322 if r is None:
323 updated += 1
323 updated += 1
324 else:
324 else:
325 merged += 1
325 merged += 1
326 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
326 util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
327 if f != fd and move and util.lexists(repo.wjoin(f)):
327 if f != fd and move and util.lexists(repo.wjoin(f)):
328 repo.ui.debug(_("removing %s\n") % f)
328 repo.ui.debug(_("removing %s\n") % f)
329 os.unlink(repo.wjoin(f))
329 os.unlink(repo.wjoin(f))
330 elif m == "g": # get
330 elif m == "g": # get
331 flags = a[2]
331 flags = a[2]
332 repo.ui.note(_("getting %s\n") % f)
332 repo.ui.note(_("getting %s\n") % f)
333 t = mctx.filectx(f).data()
333 t = mctx.filectx(f).data()
334 repo.wwrite(f, t, flags)
334 repo.wwrite(f, t, flags)
335 updated += 1
335 updated += 1
336 elif m == "d": # directory rename
336 elif m == "d": # directory rename
337 f2, fd, flags = a[2:]
337 f2, fd, flags = a[2:]
338 if f:
338 if f:
339 repo.ui.note(_("moving %s to %s\n") % (f, fd))
339 repo.ui.note(_("moving %s to %s\n") % (f, fd))
340 t = wctx.filectx(f).data()
340 t = wctx.filectx(f).data()
341 repo.wwrite(fd, t, flags)
341 repo.wwrite(fd, t, flags)
342 util.unlink(repo.wjoin(f))
342 util.unlink(repo.wjoin(f))
343 if f2:
343 if f2:
344 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
344 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
345 t = mctx.filectx(f2).data()
345 t = mctx.filectx(f2).data()
346 repo.wwrite(fd, t, flags)
346 repo.wwrite(fd, t, flags)
347 updated += 1
347 updated += 1
348 elif m == "dr": # divergent renames
348 elif m == "dr": # divergent renames
349 fl = a[2]
349 fl = a[2]
350 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
350 repo.ui.warn(_("warning: detected divergent renames of %s to:\n") % f)
351 for nf in fl:
351 for nf in fl:
352 repo.ui.warn(" %s\n" % nf)
352 repo.ui.warn(" %s\n" % nf)
353 elif m == "e": # exec
353 elif m == "e": # exec
354 flags = a[2]
354 flags = a[2]
355 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
355 util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags)
356
356
357 return updated, merged, removed, unresolved
357 return updated, merged, removed, unresolved
358
358
def recordupdates(repo, action, branchmerge):
    """Record merge actions to the dirstate.

    repo        - the local repository (only repo.dirstate is touched)
    action      - the applied merge action list
    branchmerge - True when this was a branch merge rather than a
                  linear update; decides between merge-style and
                  checkout-style dirstate records
    """

    for a in action:
        f, m = a[:2]
        if m == "r": # remove
            if branchmerge:
                repo.dirstate.remove(f)
            else:
                repo.dirstate.forget(f)
        elif m == "f": # forget
            repo.dirstate.forget(f)
        elif m in "ge": # get or exec change
            if branchmerge:
                repo.dirstate.normaldirty(f)
            else:
                repo.dirstate.normal(f)
        elif m == "m": # merge
            f2, fd, flag, move = a[2:]
            if branchmerge:
                # We've done a branch merge, mark this file as merged
                # so that we properly record the merger later
                repo.dirstate.merge(fd)
                if f != f2: # copy/rename
                    if move:
                        repo.dirstate.remove(f)
                    if f != fd:
                        repo.dirstate.copy(f, fd)
                    else:
                        repo.dirstate.copy(f2, fd)
            else:
                # We've update-merged a locally modified file, so
                # we set the dirstate to emulate a normal checkout
                # of that file some time in the past. Thus our
                # merge will appear as a normal local file
                # modification.
                repo.dirstate.normallookup(fd)
                if move:
                    repo.dirstate.forget(f)
        elif m == "d": # directory rename
            f2, fd, flag = a[2:]
            if not f2 and f not in repo.dirstate:
                # untracked file moved
                continue
            if branchmerge:
                repo.dirstate.add(fd)
                if f:
                    repo.dirstate.remove(f)
                    repo.dirstate.copy(f, fd)
                if f2:
                    repo.dirstate.copy(f2, fd)
            else:
                repo.dirstate.normal(fd)
                if f:
                    repo.dirstate.forget(f)
414
414
def update(repo, node, branchmerge, force, partial):
    """
    Perform a merge between the working directory and the given node

    node        = node to update to, or None for the tip of the current
                  branch
    branchmerge = whether to merge between branches
    force       = whether to force branch merging or file overwriting
    partial     = a function to filter file lists (dirstate not updated)

    Returns the (updated, merged, removed, unresolved) stats tuple from
    applyupdates.  Raises util.Abort on uncommitted changes/merges and
    on disallowed branch crossings.
    """

    wlock = repo.wlock()
    try:
        wc = repo[None]
        if node is None:
            # tip of current branch
            try:
                node = repo.branchtags()[wc.branch()]
            except KeyError:
                if wc.branch() == "default": # no default branch!
                    node = repo.lookup("tip") # update to tip
                else:
                    raise util.Abort(_("branch %s not found") % wc.branch())
        overwrite = force and not branchmerge
        pl = wc.parents()
        p1, p2 = pl[0], repo[node]
        pa = p1.ancestor(p2)
        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
        fastforward = False

        ### check phase
        if not overwrite and len(pl) > 1:
            raise util.Abort(_("outstanding uncommitted merges"))
        if branchmerge:
            if pa == p2:
                raise util.Abort(_("can't merge with ancestor"))
            elif pa == p1:
                # merging a descendant: only allowed as a fast-forward
                # across named branches
                if p1.branch() != p2.branch():
                    fastforward = True
                else:
                    raise util.Abort(_("nothing to merge (use 'hg update'"
                                       " or check 'hg heads')"))
            if not force and (wc.files() or wc.deleted()):
                raise util.Abort(_("outstanding uncommitted changes"))
        elif not overwrite:
            if pa == p1 or pa == p2: # linear
                pass # all good
            elif p1.branch() == p2.branch():
                if wc.files() or wc.deleted():
                    raise util.Abort(_("crosses branches (use 'hg merge' or "
                                       "'hg update -C' to discard changes)"))
                raise util.Abort(_("crosses branches (use 'hg merge' "
                                   "or 'hg update -C')"))
            elif wc.files() or wc.deleted():
                raise util.Abort(_("crosses named branches (use "
                                   "'hg update -C' to discard changes)"))
            else:
                # Allow jumping branches if there are no changes
                overwrite = True

        ### calculate phase
        action = []
        if not force:
            _checkunknown(wc, p2)
        if not util.checkcase(repo.path):
            # case-insensitive filesystem: refuse colliding filenames
            _checkcollision(p2)
        action += _forgetremoved(wc, p2, branchmerge)
        action += manifestmerge(repo, wc, p2, pa, overwrite, partial)

        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)

        stats = applyupdates(repo, action, wc, p2)

        if not partial:
            recordupdates(repo, action, branchmerge)
            repo.dirstate.setparents(fp1, fp2)
            if not branchmerge and not fastforward:
                repo.dirstate.setbranch(p2.branch())
            repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])

        return stats
    finally:
        del wlock
@@ -1,1330 +1,1330 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 import cStringIO, email.Parser, os, popen2, re, errno
12 import cStringIO, email.Parser, os, popen2, re, errno
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
class PatchError(Exception):
    """Base class for errors raised while parsing or applying a patch."""
    pass

class NoHunks(PatchError):
    """Raised when a patch contains no hunks the internal patcher can apply."""
    pass
20
20
21 # helper functions
21 # helper functions
22
22
def copyfile(src, dst, basedir=None):
    """Copy src to dst, resolving both paths relative to basedir.

    basedir defaults to the current working directory.  Missing parent
    directories of dst are created.  Raises util.Abort if dst already
    exists (refuses to overwrite).
    """
    if not basedir:
        basedir = os.getcwd()

    abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
    if os.path.exists(absdst):
        raise util.Abort(_("cannot create %s: destination already exists") %
                         dst)

    targetdir = os.path.dirname(absdst)
    if not os.path.isdir(targetdir):
        os.makedirs(targetdir)

    util.copyfile(abssrc, absdst)
37
37
38 # public functions
38 # public functions
39
39
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return tuple (filename, message, user, date, branch, node, p1, p2).
    Any item in the returned tuple can be None. If filename is None,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
                        'retrieving revision [0-9]+(\.[0-9]+)*$|' +
                        '(---|\*\*\*)[ \t])', re.MULTILINE)

    fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, 'w')
    try:
        msg = email.Parser.Parser().parse(fileobj)

        subject = msg['Subject']
        user = msg['From']
        gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
        # should try to parse msg['Date']
        date = None
        nodeid = None
        branch = None
        parents = []

        if subject:
            # strip a leading "[PATCH n/m]" style tag from the subject
            if subject.startswith('[PATCH'):
                pend = subject.find(']')
                if pend >= 0:
                    subject = subject[pend+1:].lstrip()
            subject = subject.replace('\n\t', ' ')
            ui.debug('Subject: %s\n' % subject)
        if user:
            ui.debug('From: %s\n' % user)
        diffs_seen = 0
        ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
        message = ''
        for part in msg.walk():
            content_type = part.get_content_type()
            ui.debug('Content-Type: %s\n' % content_type)
            if content_type not in ok_types:
                continue
            payload = part.get_payload(decode=True)
            m = diffre.search(payload)
            if m:
                hgpatch = False
                ignoretext = False

                ui.debug(_('found patch at byte %d\n') % m.start(0))
                diffs_seen += 1
                cfp = cStringIO.StringIO()
                # everything before the diff is the commit message,
                # possibly containing "# HG changeset patch" metadata
                for line in payload[:m.start(0)].splitlines():
                    if line.startswith('# HG changeset patch'):
                        ui.debug(_('patch generated by hg export\n'))
                        hgpatch = True
                        # drop earlier commit message content
                        cfp.seek(0)
                        cfp.truncate()
                        subject = None
                    elif hgpatch:
                        if line.startswith('# User '):
                            user = line[7:]
                            ui.debug('From: %s\n' % user)
                        elif line.startswith("# Date "):
                            date = line[7:]
                        elif line.startswith("# Branch "):
                            branch = line[9:]
                        elif line.startswith("# Node ID "):
                            nodeid = line[10:]
                        elif line.startswith("# Parent "):
                            parents.append(line[10:])
                    elif line == '---' and gitsendmail:
                        # git-send-email separates message from diffstat
                        # with a bare "---"; ignore everything after it
                        ignoretext = True
                    if not line.startswith('# ') and not ignoretext:
                        cfp.write(line)
                        cfp.write('\n')
                message = cfp.getvalue()
                if tmpfp:
                    tmpfp.write(payload)
                    if not payload.endswith('\n'):
                        tmpfp.write('\n')
            elif not diffs_seen and message and content_type == 'text/plain':
                message += '\n' + payload
    except:
        # clean up the temporary file on any failure, then re-raise
        tmpfp.close()
        os.unlink(tmpname)
        raise

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    tmpfp.close()
    if not diffs_seen:
        os.unlink(tmpname)
        return None, message, user, date, branch, None, None, None
    p1 = parents and parents.pop(0) or None
    p2 = parents and parents.pop(0) or None
    return tmpname, message, user, date, branch, nodeid, p1, p2
141
141
GP_PATCH  = 1 << 0 # we have to run patch
GP_FILTER = 1 << 1 # there's some copy/rename operation
GP_BINARY = 1 << 2 # there's a binary patch

def readgitpatch(fp, firstline=None):
    """extract git-style metadata about patches from <patchname>

    fp        - iterable of patch lines
    firstline - optional already-consumed first line to process before fp

    Returns (dopatch, gitpatches) where dopatch is a bitmask of the GP_*
    flags above and gitpatches is a list of gitpatch metadata objects,
    one per "diff --git" section.
    """
    class gitpatch:
        "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
        def __init__(self, path):
            self.path = path
            self.oldpath = None
            self.mode = None
            self.op = 'MODIFY'
            self.lineno = 0
            self.binary = False

    def reader(fp, firstline):
        # re-inject the caller's lookahead line, then stream the rest
        if firstline is not None:
            yield firstline
        for line in fp:
            yield line

    # Filter patch for git information
    gitre = re.compile('diff --git a/(.*) b/(.*)')
    gp = None
    gitpatches = []
    # Can have a git patch with only metadata, causing patch to complain
    dopatch = 0

    lineno = 0
    for line in reader(fp, firstline):
        lineno += 1
        if line.startswith('diff --git'):
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                src, dst = m.group(1, 2)
                gp = gitpatch(dst)
                gp.lineno = lineno
        elif gp:
            if line.startswith('--- '):
                # start of a regular hunk ends the metadata section
                if gp.op in ('COPY', 'RENAME'):
                    dopatch |= GP_FILTER
                gitpatches.append(gp)
                gp = None
                dopatch |= GP_PATCH
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:].rstrip()
            elif line.startswith('rename to '):
                gp.path = line[10:].rstrip()
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:].rstrip()
            elif line.startswith('copy to '):
                gp.path = line[8:].rstrip()
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('new mode '):
                gp.mode = int(line.rstrip()[-6:], 8)
            elif line.startswith('GIT binary patch'):
                dopatch |= GP_BINARY
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    if not gitpatches:
        # plain (non-git) patch: just run patch over it
        dopatch = GP_PATCH

    return (dopatch, gitpatches)
217
217
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 """apply <patchname> to the working directory.
219 """apply <patchname> to the working directory.
220 returns whether patch was applied with fuzz factor."""
220 returns whether patch was applied with fuzz factor."""
221 patcher = ui.config('ui', 'patch')
221 patcher = ui.config('ui', 'patch')
222 args = []
222 args = []
223 try:
223 try:
224 if patcher:
224 if patcher:
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 files)
226 files)
227 else:
227 else:
228 try:
228 try:
229 return internalpatch(patchname, ui, strip, cwd, files)
229 return internalpatch(patchname, ui, strip, cwd, files)
230 except NoHunks:
230 except NoHunks:
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 ui.debug('no valid hunks found; trying with %r instead\n' %
232 ui.debug(_('no valid hunks found; trying with %r instead\n') %
233 patcher)
233 patcher)
234 if util.needbinarypatch():
234 if util.needbinarypatch():
235 args.append('--binary')
235 args.append('--binary')
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 files)
237 files)
238 except PatchError, err:
238 except PatchError, err:
239 s = str(err)
239 s = str(err)
240 if s:
240 if s:
241 raise util.Abort(s)
241 raise util.Abort(s)
242 else:
242 else:
243 raise util.Abort(_('patch failed to apply'))
243 raise util.Abort(_('patch failed to apply'))
244
244
def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
    """use <patcher> to apply <patchname> to the working directory.

    Runs the external command via a shell pipeline, scans its output for
    'patching file', 'with fuzz', 'saving rejects to file' and 'FAILED'
    markers, records touched files into the *files* dict, and raises
    PatchError on a nonzero exit status.

    returns whether patch was applied with fuzz factor.
    """

    fuzz = False
    # pf / printed_file track the file named by the most recent
    # 'patching file' output line. Initialize them up front so fuzz or
    # failure lines emitted before any 'patching file' line cannot
    # raise NameError.
    pf = None
    printed_file = False
    if cwd:
        args.append('-d %s' % util.shellquote(cwd))
    fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
                                         util.shellquote(patchname)))

    for line in fp:
        line = line.rstrip()
        ui.note(line + '\n')
        if line.startswith('patching file '):
            pf = util.parse_patch_output(line)
            printed_file = False
            files.setdefault(pf, (None, None))
        elif line.find('with fuzz') >= 0:
            fuzz = True
            if not printed_file and pf:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
        elif line.find('saving rejects to file') >= 0:
            ui.warn(line + '\n')
        elif line.find('FAILED') >= 0:
            if not printed_file and pf:
                ui.warn(pf + '\n')
                printed_file = True
            ui.warn(line + '\n')
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         util.explain_exit(code)[0])
    return fuzz
280
280
def internalpatch(patchobj, ui, strip, cwd, files=None):
    """use builtin patch to apply <patchobj> to the working directory.

    patchobj may be a file name or an already-open file-like object
    (the TypeError fallback). When we open the file ourselves we also
    close it, so repeated calls do not leak file descriptors. *files*
    defaults to a fresh dict per call rather than a shared mutable
    default.

    returns whether patch was applied with fuzz factor.
    raises PatchError when applydiff reports failure (negative result).
    """
    if files is None:
        files = {}
    try:
        fp = file(patchobj, 'rb')
        closefp = True
    except TypeError:
        # patchobj is already a file-like object
        fp = patchobj
        closefp = False
    if cwd:
        curdir = os.getcwd()
        os.chdir(cwd)
    try:
        ret = applydiff(ui, fp, files, strip=strip)
    finally:
        if cwd:
            os.chdir(curdir)
        if closefp:
            fp.close()
    if ret < 0:
        raise PatchError
    return ret > 0
299
299
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
# Raw strings: the patterns contain regex escapes (\d, \*, \+) that must
# reach the re engine untouched; non-raw '\d' is an invalid string escape.
unidesc = re.compile(r'@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
contextdesc = re.compile(r'(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303
303
class patchfile:
    """In-memory representation of one file being patched.

    Loads the target file's lines (when it exists), applies hunks to the
    line list via apply(), and writes the result back with
    write()/close(). Hunks that fail to apply are collected in self.rej
    and saved to '<fname>.rej' in unified form by write_rej().
    """
    def __init__(self, ui, fname, missing=False):
        self.fname = fname
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = missing
        if not missing:
            try:
                fp = file(fname, 'rb')
                self.lines = fp.readlines()
                self.exists = True
            except IOError:
                # unreadable/absent: treat as a new, empty target
                pass
        else:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)

        if not self.exists:
            # ensure the directory exists for a file we are about to create
            dirname = os.path.dirname(fname)
            if dirname and not os.path.isdir(dirname):
                os.makedirs(dirname)

        self.hash = {}          # line content -> list of line indices
        self.dirty = 0          # set when self.lines needs writing back
        self.offset = 0         # cumulative line-count drift from hunks
        self.rej = []           # hunks that failed to apply
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0          # hunks attempted via apply()

    def printfile(self, warn):
        """Announce 'patching file X' at most once.

        Emitted via ui.warn when warn is true, otherwise via ui.note;
        only marked as printed when warn or verbose, so a later warning
        can still force visible output.
        """
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)


    def findlines(self, l, linenum):
        """Return candidate line indices whose content equals l.

        Candidates come from self.hash (built by hashlines); when there
        is more than one, they are sorted by distance from linenum.
        """
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum
        def sorter(a, b):
            vala = abs(a - linenum)
            valb = abs(b - linenum)
            return cmp(vala, valb)

        try:
            cand = self.hash[l]
        except:
            # content not present anywhere in the file
            return []

        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(sorter)
        return cand

    def hashlines(self):
        """Build self.hash: line content -> list of indices in self.lines."""
        self.hash = {}
        for x in xrange(len(self.lines)):
            s = self.lines[x]
            self.hash.setdefault(s, []).append(x)

    def write_rej(self):
        """Save failed hunks (self.rej) to '<fname>.rej'."""
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.

        if not self.rej:
            return

        fname = self.fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (len(self.rej), self.hunks, fname))
        try: os.unlink(fname)
        except:
            pass
        fp = file(fname, 'wb')
        base = os.path.basename(self.fname)
        fp.write("--- %s\n+++ %s\n" % (base, base))
        for x in self.rej:
            for l in x.hunk:
                fp.write(l)
                if l[-1] != '\n':
                    fp.write("\n\ No newline at end of file\n")

    def write(self, dest=None):
        """Write the modified line buffer to dest (default self.fname).

        A multiply-linked destination is unlinked first so other hard
        links keep the old content; its mode is restored on the new file.
        No-op unless self.dirty.
        """
        if self.dirty:
            if not dest:
                dest = self.fname
            st = None
            try:
                st = os.lstat(dest)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise
            if st and st.st_nlink > 1:
                # break the hard link rather than rewrite shared content
                os.unlink(dest)
            fp = file(dest, 'wb')
            if st and st.st_nlink > 1:
                os.chmod(dest, st.st_mode)
            fp.writelines(self.lines)
            fp.close()

    def close(self):
        """Flush the patched content and any collected rejects."""
        self.write()
        self.write_rej()

    def apply(self, h, reverse):
        """Apply hunk h (reversed when requested) to the line buffer.

        Returns 0 on an exact apply, the fuzz amount (0-2) on a fuzzy
        apply, or -1 after recording the hunk in self.rej.
        """
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
                            h.lenb))

        self.hunks += 1
        if reverse:
            h.reverse()

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and h.createfile():
            self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        if isinstance(h, binhunk):
            # binary hunks replace or delete the whole file
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[:] = h.new()
                self.offset += len(h.new())
            self.dirty = 1
            return 0

        # fast case first, no offsets, no fuzz
        old = h.old()
        # patch starts counting at 1 unless we are adding the file
        if h.starta == 0:
            start = 0
        else:
            start = h.starta + self.offset - 1
        orig_start = start
        # testhunk returning 0 means the old lines match at 'start'
        # (as used throughout this method)
        if diffhelpers.testhunk(old, self.lines, start) == 0:
            if h.rmfile():
                os.unlink(self.fname)
            else:
                self.lines[start : start + h.lena] = h.new()
                self.offset += h.lenb - h.lena
                self.dirty = 1
            return 0

        # ok, we couldn't match the hunk.  Lets look for offsets and fuzz it
        self.hashlines()
        if h.hunk[-1][0] != ' ':
            # if the hunk tried to put something at the bottom of the file
            # override the start line and use eof here
            search_start = len(self.lines)
        else:
            search_start = orig_start

        for fuzzlen in xrange(3):
            for toponly in [ True, False ]:
                old = h.old(fuzzlen, toponly)

                cand = self.findlines(old[0][1:], search_start)
                for l in cand:
                    if diffhelpers.testhunk(old, self.lines, l) == 0:
                        newlines = h.new(fuzzlen, toponly)
                        self.lines[l : l + len(old)] = newlines
                        self.offset += len(newlines) - len(old)
                        self.dirty = 1
                        if fuzzlen:
                            fuzzstr = "with fuzz %d " % fuzzlen
                            f = self.ui.warn
                            self.printfile(True)
                        else:
                            fuzzstr = ""
                            f = self.ui.note
                        offset = l - orig_start - fuzzlen
                        if offset == 1:
                            linestr = "line"
                        else:
                            linestr = "lines"
                        f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
                          (h.number, l+1, fuzzstr, offset, linestr))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(h)
        return -1
502
502
class hunk:
    """One hunk of a text patch.

    Parsed from either unified ('@@') or context ('***'/'---') format
    into self.a (old lines, with '-'/' ' markers) and self.b (new lines,
    unmarked). self.hunk keeps the raw hunk text, starting with the
    descriptor line.
    """
    def __init__(self, desc, num, lr, context, create=False, remove=False):
        # desc: the hunk descriptor line; num: hunk ordinal for messages;
        # lr: linereader positioned just after desc; context: True for
        # context-diff format; create/remove: file add/delete flags.
        self.number = num
        self.desc = desc
        self.hunk = [ desc ]
        self.a = []
        self.b = []
        if context:
            self.read_context_hunk(lr)
        else:
            self.read_unified_hunk(lr)
        self.create = create
        self.remove = remove and not create

    def read_unified_hunk(self, lr):
        """Parse a unified-format hunk body following self.desc."""
        m = unidesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
        # a missing ',len' part means length 1
        if self.lena == None:
            self.lena = 1
        else:
            self.lena = int(self.lena)
        if self.lenb == None:
            self.lenb = 1
        else:
            self.lenb = int(self.lenb)
        self.starta = int(self.starta)
        self.startb = int(self.startb)
        diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
        # if we hit eof before finishing out the hunk, the last line will
        # be zero length.  Lets try to fix it up.
        while len(self.hunk[-1]) == 0:
            del self.hunk[-1]
            del self.a[-1]
            del self.b[-1]
            self.lena -= 1
            self.lenb -= 1

    def read_context_hunk(self, lr):
        """Parse a context-format hunk and convert it to unified form.

        Reads the '*** start,end ****' old section and the
        '--- start,end ----' new section, merging the new lines into
        self.hunk in unified order; finally rewrites self.desc as a
        unified '@@' descriptor.
        """
        self.desc = lr.readline()
        m = contextdesc.match(self.desc)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.starta, foo2, aend, foo3 = m.groups()
        self.starta = int(self.starta)
        if aend == None:
            aend = self.starta
        self.lena = int(aend) - self.starta
        if self.starta:
            self.lena += 1
        for x in xrange(self.lena):
            l = lr.readline()
            if l.startswith('---'):
                # start of the new-side section; push back for below
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('- ') or l.startswith('! '):
                u = '-' + s
            elif l.startswith('  '):
                u = ' ' + s
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.a.append(u)
            self.hunk.append(u)

        l = lr.readline()
        if l.startswith('\ '):
            # '\ No newline at end of file': trim the trailing newline
            s = self.a[-1][:-1]
            self.a[-1] = s
            self.hunk[-1] = s
            l = lr.readline()
        m = contextdesc.match(l)
        if not m:
            raise PatchError(_("bad hunk #%d") % self.number)
        foo, self.startb, foo2, bend, foo3 = m.groups()
        self.startb = int(self.startb)
        if bend == None:
            bend = self.startb
        self.lenb = int(bend) - self.startb
        if self.startb:
            self.lenb += 1
        hunki = 1
        for x in xrange(self.lenb):
            l = lr.readline()
            if l.startswith('\ '):
                s = self.b[-1][:-1]
                self.b[-1] = s
                self.hunk[hunki-1] = s
                continue
            if not l:
                lr.push(l)
                break
            s = l[2:]
            if l.startswith('+ ') or l.startswith('! '):
                u = '+' + s
            elif l.startswith('  '):
                u = ' ' + s
            elif len(self.b) == 0:
                # this can happen when the hunk does not add any lines
                lr.push(l)
                break
            else:
                raise PatchError(_("bad hunk #%d old text line %d") %
                                 (self.number, x))
            self.b.append(s)
            # interleave the new-side line into self.hunk at the proper
            # unified position, skipping over '-' lines
            while True:
                if hunki >= len(self.hunk):
                    h = ""
                else:
                    h = self.hunk[hunki]
                hunki += 1
                if h == u:
                    break
                elif h.startswith('-'):
                    continue
                else:
                    self.hunk.insert(hunki-1, u)
                    break

        if not self.a:
            # this happens when lines were only added to the hunk
            for x in self.hunk:
                if x.startswith('-') or x.startswith(' '):
                    self.a.append(x)
        if not self.b:
            # this happens when lines were only deleted from the hunk
            for x in self.hunk:
                if x.startswith('+') or x.startswith(' '):
                    self.b.append(x[1:])
        # @@ -start,len +start,len @@
        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
                                               self.startb, self.lenb)
        self.hunk[0] = self.desc

    def reverse(self):
        """Invert the hunk in place so applying it undoes the patch."""
        self.create, self.remove = self.remove, self.create
        origlena = self.lena
        origstarta = self.starta
        self.lena = self.lenb
        self.starta = self.startb
        self.lenb = origlena
        self.startb = origstarta
        self.a = []
        self.b = []
        # self.hunk[0] is the @@ description
        for x in xrange(1, len(self.hunk)):
            o = self.hunk[x]
            if o.startswith('-'):
                n = '+' + o[1:]
                self.b.append(o[1:])
            elif o.startswith('+'):
                n = '-' + o[1:]
                self.a.append(n)
            else:
                n = o
                self.b.append(o[1:])
                self.a.append(o)
            # NOTE(review): assigning o back is a no-op; presumably n was
            # intended here -- verify against hunk-reversal behavior
            self.hunk[x] = o

    def fix_newline(self):
        """Delegate '\\ No newline at end of file' fixup to diffhelpers."""
        diffhelpers.fix_newline(self.hunk, self.a, self.b)

    def complete(self):
        """True when the parsed line counts match the declared lengths."""
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def createfile(self):
        """True when this hunk creates its file (empty old side + flag)."""
        return self.starta == 0 and self.lena == 0 and self.create

    def rmfile(self):
        """True when this hunk deletes its file (empty new side + flag)."""
        return self.startb == 0 and self.lenb == 0 and self.remove

    def fuzzit(self, l, fuzz, toponly):
        """Return l with up to *fuzz* context lines dropped at the edges."""
        # this removes context lines from the top and bottom of list 'l'.  It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(l)-1)
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen-1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x+1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                for x in xrange(hlen-1):
                    if self.hunk[hlen-bot-1][0] == ' ':
                        bot += 1
                    else:
                        break

            # top and bot now count context in the hunk
            # adjust them if either one is short
            context = max(top, bot, 3)
            if bot < context:
                bot = max(0, fuzz - (context - bot))
            else:
                bot = min(fuzz, bot)
            if top < context:
                top = max(0, fuzz - (context - top))
            else:
                top = min(fuzz, top)

            return l[top:len(l)-bot]
        return l

    def old(self, fuzz=0, toponly=False):
        """Return the (possibly fuzzed) old-side lines."""
        return self.fuzzit(self.a, fuzz, toponly)

    def newctrl(self):
        """Return raw hunk lines that survive the patch (' ' and '+')."""
        res = []
        for x in self.hunk:
            c = x[0]
            if c == ' ' or c == '+':
                res.append(x)
        return res

    def new(self, fuzz=0, toponly=False):
        """Return the (possibly fuzzed) new-side lines."""
        return self.fuzzit(self.b, fuzz, toponly)
726
726
class binhunk:
    """A binary patch file. Only understands literals so far.

    Parses a git 'literal N' binary hunk: base85-decoded,
    zlib-compressed file content. Presents the same
    createfile/rmfile/complete/new interface as a text hunk so
    patchfile.apply can treat both uniformly.
    """
    def __init__(self, gitpatch):
        # gitpatch: record with an 'op' attribute (ADD/RENAME/COPY/DELETE/...)
        self.gitpatch = gitpatch
        self.text = None
        self.hunk = ['GIT binary patch\n']

    def createfile(self):
        """True when the git operation creates the target file."""
        return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')

    def rmfile(self):
        """True when the git operation deletes the target file."""
        return self.gitpatch.op == 'DELETE'

    def complete(self):
        """True once extract() has produced the decoded content."""
        return self.text is not None

    def new(self):
        """Return the replacement file content as a one-element list."""
        return [self.text]

    def extract(self, fp):
        """Read and decode the binary hunk body from fp into self.text.

        Skips ahead to the 'literal <size>' header, base85-decodes each
        data line (whose first character encodes its decoded length),
        zlib-decompresses the result and verifies the declared size.
        raises PatchError on a missing header or a size mismatch.
        """
        line = fp.readline()
        self.hunk.append(line)
        while line and not line.startswith('literal '):
            line = fp.readline()
            self.hunk.append(line)
        if not line:
            raise PatchError(_('could not extract binary patch'))
        size = int(line[8:].rstrip())
        dec = []
        line = fp.readline()
        self.hunk.append(line)
        while len(line) > 1:
            # first char encodes the decoded length: A-Z -> 1-26,
            # a-z -> 27-52
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            dec.append(base85.b85decode(line[1:-1])[:l])
            line = fp.readline()
            self.hunk.append(line)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # bug fix: '%' must receive both values as a tuple; the old
            # '% len(text), size' formatted with one value and raised
            # TypeError instead of the intended PatchError
            raise PatchError(_('binary patch is %d bytes, not %d') %
                             (len(text), size))
        self.text = text
772
772
def parsefilename(str):
    """Extract the file name from a '--- '/'+++ ' patch header line.

    Drops the 4-character marker prefix and trailing newline characters,
    then truncates at the first tab or, failing that, the first space,
    so trailing timestamp text is discarded.
    """
    # --- filename \t|space stuff
    trimmed = str[4:].rstrip('\r\n')
    for sep in ('\t', ' '):
        cut = trimmed.find(sep)
        if cut >= 0:
            return trimmed[:cut]
    return trimmed
782
782
def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
    """Choose which file a hunk should be applied to.

    afile_orig/bfile_orig: the '---' and '+++' names from the patch
    header. strip: leading path components to remove. reverse: swap the
    hunk's create/remove sense when testing for file creation.

    Returns (fname, missing) where fname is the path to patch and
    missing is True when neither side exists and the hunk does not
    create the file.
    raises PatchError when stripping fails or both sides are /dev/null.
    """
    def pathstrip(path, count=1):
        # remove 'count' leading path components; returns
        # (stripped-prefix, remainder). count == 0 keeps the whole path.
        pathlen = len(path)
        i = 0
        if count == 0:
            return '', path.rstrip()
        while count > 0:
            i = path.find('/', i)
            if i == -1:
                raise PatchError(_("unable to strip away %d dirs from %s") %
                                 (count, path))
            i += 1
            # consume '//' in the path
            while i < pathlen - 1 and path[i] == '/':
                i += 1
            count -= 1
        return path[:i].lstrip(), path[i:].rstrip()

    nulla = afile_orig == "/dev/null"
    nullb = bfile_orig == "/dev/null"
    abase, afile = pathstrip(afile_orig, strip)
    gooda = not nulla and os.path.exists(afile)
    bbase, bfile = pathstrip(bfile_orig, strip)
    if afile == bfile:
        goodb = gooda
    else:
        goodb = not nullb and os.path.exists(bfile)
    createfunc = hunk.createfile
    if reverse:
        createfunc = hunk.rmfile
    missing = not goodb and not gooda and not createfunc()
    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if gooda and goodb:
            fname = isbackup and afile or bfile
        elif gooda:
            fname = afile

    if not fname:
        # neither side exists on disk; fall back to the header names
        if not nullb:
            fname = isbackup and afile or bfile
        elif not nulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    return fname, missing
834
834
class linereader:
    """Wrap a file object so lines can be pushed back onto the input.

    Pushed-back lines are handed out by readline() in the order they
    were pushed, before any further reading from the underlying stream.
    """
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        """Queue *line* to be returned by an upcoming readline() call."""
        self.buf.append(line)

    def readline(self):
        """Return the oldest pushed-back line, else read from the stream."""
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()
850
850
851 def iterhunks(ui, fp, sourcefile=None):
851 def iterhunks(ui, fp, sourcefile=None):
852 """Read a patch and yield the following events:
852 """Read a patch and yield the following events:
853 - ("file", afile, bfile, firsthunk): select a new target file.
853 - ("file", afile, bfile, firsthunk): select a new target file.
854 - ("hunk", hunk): a new hunk is ready to be applied, follows a
854 - ("hunk", hunk): a new hunk is ready to be applied, follows a
855 "file" event.
855 "file" event.
856 - ("git", gitchanges): current diff is in git format, gitchanges
856 - ("git", gitchanges): current diff is in git format, gitchanges
857 maps filenames to gitpatch records. Unique event.
857 maps filenames to gitpatch records. Unique event.
858 """
858 """
859
859
860 def scangitpatch(fp, firstline):
860 def scangitpatch(fp, firstline):
861 '''git patches can modify a file, then copy that file to
861 '''git patches can modify a file, then copy that file to
862 a new file, but expect the source to be the unmodified form.
862 a new file, but expect the source to be the unmodified form.
863 So we scan the patch looking for that case so we can do
863 So we scan the patch looking for that case so we can do
864 the copies ahead of time.'''
864 the copies ahead of time.'''
865
865
866 pos = 0
866 pos = 0
867 try:
867 try:
868 pos = fp.tell()
868 pos = fp.tell()
869 except IOError:
869 except IOError:
870 fp = cStringIO.StringIO(fp.read())
870 fp = cStringIO.StringIO(fp.read())
871
871
872 (dopatch, gitpatches) = readgitpatch(fp, firstline)
872 (dopatch, gitpatches) = readgitpatch(fp, firstline)
873 fp.seek(pos)
873 fp.seek(pos)
874
874
875 return fp, dopatch, gitpatches
875 return fp, dopatch, gitpatches
876
876
877 changed = {}
877 changed = {}
878 current_hunk = None
878 current_hunk = None
879 afile = ""
879 afile = ""
880 bfile = ""
880 bfile = ""
881 state = None
881 state = None
882 hunknum = 0
882 hunknum = 0
883 emitfile = False
883 emitfile = False
884
884
885 git = False
885 git = False
886 gitre = re.compile('diff --git (a/.*) (b/.*)')
886 gitre = re.compile('diff --git (a/.*) (b/.*)')
887
887
888 # our states
888 # our states
889 BFILE = 1
889 BFILE = 1
890 context = None
890 context = None
891 lr = linereader(fp)
891 lr = linereader(fp)
892 dopatch = True
892 dopatch = True
893 # gitworkdone is True if a git operation (copy, rename, ...) was
893 # gitworkdone is True if a git operation (copy, rename, ...) was
894 # performed already for the current file. Useful when the file
894 # performed already for the current file. Useful when the file
895 # section may have no hunk.
895 # section may have no hunk.
896 gitworkdone = False
896 gitworkdone = False
897
897
898 while True:
898 while True:
899 newfile = False
899 newfile = False
900 x = lr.readline()
900 x = lr.readline()
901 if not x:
901 if not x:
902 break
902 break
903 if current_hunk:
903 if current_hunk:
904 if x.startswith('\ '):
904 if x.startswith('\ '):
905 current_hunk.fix_newline()
905 current_hunk.fix_newline()
906 yield 'hunk', current_hunk
906 yield 'hunk', current_hunk
907 current_hunk = None
907 current_hunk = None
908 gitworkdone = False
908 gitworkdone = False
909 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
909 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
910 ((context or context == None) and x.startswith('***************')))):
910 ((context or context == None) and x.startswith('***************')))):
911 try:
911 try:
912 if context == None and x.startswith('***************'):
912 if context == None and x.startswith('***************'):
913 context = True
913 context = True
914 gpatch = changed.get(bfile[2:], (None, None))[1]
914 gpatch = changed.get(bfile[2:], (None, None))[1]
915 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
915 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
916 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
916 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
917 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
917 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
918 except PatchError, err:
918 except PatchError, err:
919 ui.debug(err)
919 ui.debug(err)
920 current_hunk = None
920 current_hunk = None
921 continue
921 continue
922 hunknum += 1
922 hunknum += 1
923 if emitfile:
923 if emitfile:
924 emitfile = False
924 emitfile = False
925 yield 'file', (afile, bfile, current_hunk)
925 yield 'file', (afile, bfile, current_hunk)
926 elif state == BFILE and x.startswith('GIT binary patch'):
926 elif state == BFILE and x.startswith('GIT binary patch'):
927 current_hunk = binhunk(changed[bfile[2:]][1])
927 current_hunk = binhunk(changed[bfile[2:]][1])
928 hunknum += 1
928 hunknum += 1
929 if emitfile:
929 if emitfile:
930 emitfile = False
930 emitfile = False
931 yield 'file', (afile, bfile, current_hunk)
931 yield 'file', (afile, bfile, current_hunk)
932 current_hunk.extract(fp)
932 current_hunk.extract(fp)
933 elif x.startswith('diff --git'):
933 elif x.startswith('diff --git'):
934 # check for git diff, scanning the whole patch file if needed
934 # check for git diff, scanning the whole patch file if needed
935 m = gitre.match(x)
935 m = gitre.match(x)
936 if m:
936 if m:
937 afile, bfile = m.group(1, 2)
937 afile, bfile = m.group(1, 2)
938 if not git:
938 if not git:
939 git = True
939 git = True
940 fp, dopatch, gitpatches = scangitpatch(fp, x)
940 fp, dopatch, gitpatches = scangitpatch(fp, x)
941 yield 'git', gitpatches
941 yield 'git', gitpatches
942 for gp in gitpatches:
942 for gp in gitpatches:
943 changed[gp.path] = (gp.op, gp)
943 changed[gp.path] = (gp.op, gp)
944 # else error?
944 # else error?
945 # copy/rename + modify should modify target, not source
945 # copy/rename + modify should modify target, not source
946 gitop = changed.get(bfile[2:], (None, None))[0]
946 gitop = changed.get(bfile[2:], (None, None))[0]
947 if gitop in ('COPY', 'DELETE', 'RENAME'):
947 if gitop in ('COPY', 'DELETE', 'RENAME'):
948 afile = bfile
948 afile = bfile
949 gitworkdone = True
949 gitworkdone = True
950 newfile = True
950 newfile = True
951 elif x.startswith('---'):
951 elif x.startswith('---'):
952 # check for a unified diff
952 # check for a unified diff
953 l2 = lr.readline()
953 l2 = lr.readline()
954 if not l2.startswith('+++'):
954 if not l2.startswith('+++'):
955 lr.push(l2)
955 lr.push(l2)
956 continue
956 continue
957 newfile = True
957 newfile = True
958 context = False
958 context = False
959 afile = parsefilename(x)
959 afile = parsefilename(x)
960 bfile = parsefilename(l2)
960 bfile = parsefilename(l2)
961 elif x.startswith('***'):
961 elif x.startswith('***'):
962 # check for a context diff
962 # check for a context diff
963 l2 = lr.readline()
963 l2 = lr.readline()
964 if not l2.startswith('---'):
964 if not l2.startswith('---'):
965 lr.push(l2)
965 lr.push(l2)
966 continue
966 continue
967 l3 = lr.readline()
967 l3 = lr.readline()
968 lr.push(l3)
968 lr.push(l3)
969 if not l3.startswith("***************"):
969 if not l3.startswith("***************"):
970 lr.push(l2)
970 lr.push(l2)
971 continue
971 continue
972 newfile = True
972 newfile = True
973 context = True
973 context = True
974 afile = parsefilename(x)
974 afile = parsefilename(x)
975 bfile = parsefilename(l2)
975 bfile = parsefilename(l2)
976
976
977 if newfile:
977 if newfile:
978 emitfile = True
978 emitfile = True
979 state = BFILE
979 state = BFILE
980 hunknum = 0
980 hunknum = 0
981 if current_hunk:
981 if current_hunk:
982 if current_hunk.complete():
982 if current_hunk.complete():
983 yield 'hunk', current_hunk
983 yield 'hunk', current_hunk
984 else:
984 else:
985 raise PatchError(_("malformed patch %s %s") % (afile,
985 raise PatchError(_("malformed patch %s %s") % (afile,
986 current_hunk.desc))
986 current_hunk.desc))
987
987
988 if hunknum == 0 and dopatch and not gitworkdone:
988 if hunknum == 0 and dopatch and not gitworkdone:
989 raise NoHunks
989 raise NoHunks
990
990
991 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
991 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
992 rejmerge=None, updatedir=None):
992 rejmerge=None, updatedir=None):
993 """reads a patch from fp and tries to apply it. The dict 'changed' is
993 """reads a patch from fp and tries to apply it. The dict 'changed' is
994 filled in with all of the filenames changed by the patch. Returns 0
994 filled in with all of the filenames changed by the patch. Returns 0
995 for a clean patch, -1 if any rejects were found and 1 if there was
995 for a clean patch, -1 if any rejects were found and 1 if there was
996 any fuzz."""
996 any fuzz."""
997
997
998 rejects = 0
998 rejects = 0
999 err = 0
999 err = 0
1000 current_file = None
1000 current_file = None
1001 gitpatches = None
1001 gitpatches = None
1002
1002
1003 def closefile():
1003 def closefile():
1004 if not current_file:
1004 if not current_file:
1005 return 0
1005 return 0
1006 current_file.close()
1006 current_file.close()
1007 if rejmerge:
1007 if rejmerge:
1008 rejmerge(current_file)
1008 rejmerge(current_file)
1009 return len(current_file.rej)
1009 return len(current_file.rej)
1010
1010
1011 for state, values in iterhunks(ui, fp, sourcefile):
1011 for state, values in iterhunks(ui, fp, sourcefile):
1012 if state == 'hunk':
1012 if state == 'hunk':
1013 if not current_file:
1013 if not current_file:
1014 continue
1014 continue
1015 current_hunk = values
1015 current_hunk = values
1016 ret = current_file.apply(current_hunk, reverse)
1016 ret = current_file.apply(current_hunk, reverse)
1017 if ret >= 0:
1017 if ret >= 0:
1018 changed.setdefault(current_file.fname, (None, None))
1018 changed.setdefault(current_file.fname, (None, None))
1019 if ret > 0:
1019 if ret > 0:
1020 err = 1
1020 err = 1
1021 elif state == 'file':
1021 elif state == 'file':
1022 rejects += closefile()
1022 rejects += closefile()
1023 afile, bfile, first_hunk = values
1023 afile, bfile, first_hunk = values
1024 try:
1024 try:
1025 if sourcefile:
1025 if sourcefile:
1026 current_file = patchfile(ui, sourcefile)
1026 current_file = patchfile(ui, sourcefile)
1027 else:
1027 else:
1028 current_file, missing = selectfile(afile, bfile, first_hunk,
1028 current_file, missing = selectfile(afile, bfile, first_hunk,
1029 strip, reverse)
1029 strip, reverse)
1030 current_file = patchfile(ui, current_file, missing)
1030 current_file = patchfile(ui, current_file, missing)
1031 except PatchError, err:
1031 except PatchError, err:
1032 ui.warn(str(err) + '\n')
1032 ui.warn(str(err) + '\n')
1033 current_file, current_hunk = None, None
1033 current_file, current_hunk = None, None
1034 rejects += 1
1034 rejects += 1
1035 continue
1035 continue
1036 elif state == 'git':
1036 elif state == 'git':
1037 gitpatches = values
1037 gitpatches = values
1038 cwd = os.getcwd()
1038 cwd = os.getcwd()
1039 for gp in gitpatches:
1039 for gp in gitpatches:
1040 if gp.op in ('COPY', 'RENAME'):
1040 if gp.op in ('COPY', 'RENAME'):
1041 src, dst = [util.canonpath(cwd, cwd, x)
1041 src, dst = [util.canonpath(cwd, cwd, x)
1042 for x in [gp.oldpath, gp.path]]
1042 for x in [gp.oldpath, gp.path]]
1043 copyfile(src, dst)
1043 copyfile(src, dst)
1044 changed[gp.path] = (gp.op, gp)
1044 changed[gp.path] = (gp.op, gp)
1045 else:
1045 else:
1046 raise util.Abort(_('unsupported parser state: %s') % state)
1046 raise util.Abort(_('unsupported parser state: %s') % state)
1047
1047
1048 rejects += closefile()
1048 rejects += closefile()
1049
1049
1050 if updatedir and gitpatches:
1050 if updatedir and gitpatches:
1051 updatedir(gitpatches)
1051 updatedir(gitpatches)
1052 if rejects:
1052 if rejects:
1053 return -1
1053 return -1
1054 return err
1054 return err
1055
1055
def diffopts(ui, opts=None, untrusted=False):
    """Build an mdiff.diffopts from command options and [diff] config.

    Each flag is taken from opts when set there, otherwise from the
    'diff' section of the configuration.  'opts' defaults to an empty
    mapping (a None sentinel replaces the old mutable-default opts={},
    which was shared across calls).
    """
    if opts is None:
        opts = {}
    def get(key, name=None, getter=ui.configbool):
        # command-line option wins; fall back to the config file
        return (opts.get(key) or
                getter('diff', name or key, None, untrusted=untrusted))
    return mdiff.diffopts(
        text=opts.get('text'),
        git=get('git'),
        nodates=get('nodates'),
        showfunc=get('show_function', 'showfunc'),
        ignorews=get('ignore_all_space', 'ignorews'),
        ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
        context=get('unified', getter=ui.config))
1069
1069
1070 def updatedir(ui, repo, patches):
1070 def updatedir(ui, repo, patches):
1071 '''Update dirstate after patch application according to metadata'''
1071 '''Update dirstate after patch application according to metadata'''
1072 if not patches:
1072 if not patches:
1073 return
1073 return
1074 copies = []
1074 copies = []
1075 removes = {}
1075 removes = {}
1076 cfiles = patches.keys()
1076 cfiles = patches.keys()
1077 cwd = repo.getcwd()
1077 cwd = repo.getcwd()
1078 if cwd:
1078 if cwd:
1079 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1079 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1080 for f in patches:
1080 for f in patches:
1081 ctype, gp = patches[f]
1081 ctype, gp = patches[f]
1082 if ctype == 'RENAME':
1082 if ctype == 'RENAME':
1083 copies.append((gp.oldpath, gp.path))
1083 copies.append((gp.oldpath, gp.path))
1084 removes[gp.oldpath] = 1
1084 removes[gp.oldpath] = 1
1085 elif ctype == 'COPY':
1085 elif ctype == 'COPY':
1086 copies.append((gp.oldpath, gp.path))
1086 copies.append((gp.oldpath, gp.path))
1087 elif ctype == 'DELETE':
1087 elif ctype == 'DELETE':
1088 removes[gp.path] = 1
1088 removes[gp.path] = 1
1089 for src, dst in copies:
1089 for src, dst in copies:
1090 repo.copy(src, dst)
1090 repo.copy(src, dst)
1091 removes = removes.keys()
1091 removes = removes.keys()
1092 if removes:
1092 if removes:
1093 repo.remove(util.sort(removes), True)
1093 repo.remove(util.sort(removes), True)
1094 for f in patches:
1094 for f in patches:
1095 ctype, gp = patches[f]
1095 ctype, gp = patches[f]
1096 if gp and gp.mode:
1096 if gp and gp.mode:
1097 flags = ''
1097 flags = ''
1098 if gp.mode & 0100:
1098 if gp.mode & 0100:
1099 flags = 'x'
1099 flags = 'x'
1100 elif gp.mode & 020000:
1100 elif gp.mode & 020000:
1101 flags = 'l'
1101 flags = 'l'
1102 dst = os.path.join(repo.root, gp.path)
1102 dst = os.path.join(repo.root, gp.path)
1103 # patch won't create empty files
1103 # patch won't create empty files
1104 if ctype == 'ADD' and not os.path.exists(dst):
1104 if ctype == 'ADD' and not os.path.exists(dst):
1105 repo.wwrite(gp.path, '', flags)
1105 repo.wwrite(gp.path, '', flags)
1106 else:
1106 else:
1107 util.set_flags(dst, 'l' in flags, 'x' in flags)
1107 util.set_flags(dst, 'l' in flags, 'x' in flags)
1108 cmdutil.addremove(repo, cfiles)
1108 cmdutil.addremove(repo, cfiles)
1109 files = patches.keys()
1109 files = patches.keys()
1110 files.extend([r for r in removes if r not in files])
1110 files.extend([r for r in removes if r not in files])
1111 return util.sort(files)
1111 return util.sort(files)
1112
1112
def b85diff(to, tn):
    '''Return a git-style base85-encoded binary diff between the two
    file contents, or "" when they hash identically.'''

    def gitindex(text):
        # git blob id: sha1 over "blob <len>\0" followed by the data
        if not text:
            return '0' * 40
        s = util.sha1('blob %d\0' % len(text))
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git length prefix: 'A'-'Z' for 1..26 bytes, 'a'-'z' for 27..52
        n = len(line)
        if n <= 26:
            prefix = chr(ord('A') + n - 1)
        else:
            prefix = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (prefix, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # yield fixed-size slices of the compressed payload
        for i in xrange(0, len(text), csize):
            yield text[i:i + csize]

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        return ""

    # TODO: deltas
    ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
           (tohash, tnhash, len(tn))]
    for piece in chunk(zlib.compress(tn)):
        ret.append(fmtline(piece))
    ret.append('\n')
    return ''.join(ret)
1150
1150
def diff(repo, node1=None, node2=None, match=None,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if not match:
        match = cmdutil.matchall(repo)

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # open each filelog only once, however many revisions of a file
    # we end up reading
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    def addmodehdr(header, omode, nmode):
        # git extended header lines for a mode change
        if omode != nmode:
            header.append('old mode %s\n' % omode)
            header.append('new mode %s\n' % nmode)

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = repo[node1]
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, match=match)
    modified, added, removed = changes[:3]

    if not modified and not added and not removed:
        return

    ctx2 = repo[node2]

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # invert the copy map so every copy target points at its source
        copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
        for k, v in copy.items():
            copy[v] = k

    gone = {}
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    for f in util.sort(modified + added + removed):
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy:
                    a = copy[f]
                    omode = gitmode[man1.flags(a)]
                    addmodehdr(header, omode, mode)
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    # diff against the copy source, not the new name
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                # have we already reported a copy above?
                if f in copy and copy[f] in added and copy[copy[f]] == f:
                    dodiff = False
                else:
                    header.append('deleted file mode %s\n' %
                                  gitmode[man1.flags(f)])
            else:
                omode = gitmode[man1.flags(f)]
                nmode = gitmode[ctx2.flags(f)]
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # NOTE(review): git-style diffs carry no revision
            # annotations on the ---/+++ lines
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
1267
1267
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # write one changeset: header, description, then the diff
        # against its (possibly switched) first parent
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            # no explicit destination: expand the filename template
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1308
1308
def diffstat(patchlines):
    """Pipe patchlines through the external 'diffstat' program and
    return its output with the summary line moved to the top, or None
    when diffstat is unavailable or fails."""
    if not util.find_exe('diffstat'):
        return
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patchlines:
            p.tochild.write(line + "\n")
        p.tochild.close()
        if p.wait():
            # bugfix: close the mkstemp descriptor on the failure
            # path instead of leaking it
            os.close(fd)
            return
        fp = os.fdopen(fd, 'r')
        try:
            stat = [l.lstrip() for l in fp]
        finally:
            fp.close()
        # diffstat prints the summary last; show it first instead
        last = stat.pop()
        stat.insert(0, last)
        return ''.join(stat)
    finally:
        # best-effort cleanup of the temporary file; only swallow
        # filesystem errors, not arbitrary exceptions
        try:
            os.unlink(name)
        except OSError:
            pass
@@ -1,135 +1,136 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Matt Mackall
4 # Copyright 2007 Matt Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import changegroup, os
9 import changegroup, os
10 from node import nullrev, short
10 from node import nullrev, short
11 from i18n import _
11
12
def _bundle(repo, bases, heads, node, suffix, extranodes=None):
    """Write a backup bundle of the revisions spanned by bases/heads.

    The bundle is stored under .hg/strip-backup, named after the short
    hash of 'node' plus the given suffix.  Returns the result of
    changegroup.writebundle (the bundle filename).
    """
    cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    name = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
    repo.ui.warn(_("saving bundle to %s\n") % name)
    return changegroup.writebundle(cg, name, "HG10BZ")
21
22
def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip"""
    seen = {}
    # every file touched by any revision at or after striprev
    for rev in xrange(striprev, len(repo)):
        for name in repo[rev].files():
            seen[name] = 1

    names = seen.keys()
    names.sort()
    return names
35
36
def _collectextranodes(repo, files, link):
    """return the nodes that have to be saved before the strip"""

    def collectone(revlog):
        # revisions past the truncation point whose linkrev falls
        # below 'link' would be lost by the truncation; collect them
        # (paired with their changelog node) so they can be restored
        extra = []
        startrev = count = len(revlog)
        # find the truncation point of the revlog
        for i in xrange(0, count):
            node = revlog.node(i)
            lrev = revlog.linkrev(node)
            if lrev >= link:
                startrev = i + 1
                break

        # see if any revision after that point has a linkrev less than
        # link (we have to manually save these guys)
        for i in xrange(startrev, count):
            node = revlog.node(i)
            lrev = revlog.linkrev(node)
            if lrev < link:
                extra.append((node, cl.node(lrev)))

        return extra

    extranodes = {}
    cl = repo.changelog
    extra = collectone(repo.manifest)
    if extra:
        # the manifest entry is keyed by 1 rather than a filename
        extranodes[1] = extra
    for fname in files:
        f = repo.file(fname)
        extra = collectone(f)
        if extra:
            extranodes[fname] = extra

    return extranodes
71
72
72 def strip(ui, repo, node, backup="all"):
73 def strip(ui, repo, node, backup="all"):
73 cl = repo.changelog
74 cl = repo.changelog
74 # TODO delete the undo files, and handle undo of merge sets
75 # TODO delete the undo files, and handle undo of merge sets
75 striprev = cl.rev(node)
76 striprev = cl.rev(node)
76
77
77 # Some revisions with rev > striprev may not be descendants of striprev.
78 # Some revisions with rev > striprev may not be descendants of striprev.
78 # We have to find these revisions and put them in a bundle, so that
79 # We have to find these revisions and put them in a bundle, so that
79 # we can restore them after the truncations.
80 # we can restore them after the truncations.
80 # To create the bundle we use repo.changegroupsubset which requires
81 # To create the bundle we use repo.changegroupsubset which requires
81 # the list of heads and bases of the set of interesting revisions.
82 # the list of heads and bases of the set of interesting revisions.
82 # (head = revision in the set that has no descendant in the set;
83 # (head = revision in the set that has no descendant in the set;
83 # base = revision in the set that has no ancestor in the set)
84 # base = revision in the set that has no ancestor in the set)
84 tostrip = {striprev: 1}
85 tostrip = {striprev: 1}
85 saveheads = {}
86 saveheads = {}
86 savebases = []
87 savebases = []
87 for r in xrange(striprev + 1, len(cl)):
88 for r in xrange(striprev + 1, len(cl)):
88 parents = cl.parentrevs(r)
89 parents = cl.parentrevs(r)
89 if parents[0] in tostrip or parents[1] in tostrip:
90 if parents[0] in tostrip or parents[1] in tostrip:
90 # r is a descendant of striprev
91 # r is a descendant of striprev
91 tostrip[r] = 1
92 tostrip[r] = 1
92 # if this is a merge and one of the parents does not descend
93 # if this is a merge and one of the parents does not descend
93 # from striprev, mark that parent as a savehead.
94 # from striprev, mark that parent as a savehead.
94 if parents[1] != nullrev:
95 if parents[1] != nullrev:
95 for p in parents:
96 for p in parents:
96 if p not in tostrip and p > striprev:
97 if p not in tostrip and p > striprev:
97 saveheads[p] = 1
98 saveheads[p] = 1
98 else:
99 else:
99 # if no parents of this revision will be stripped, mark it as
100 # if no parents of this revision will be stripped, mark it as
100 # a savebase
101 # a savebase
101 if parents[0] < striprev and parents[1] < striprev:
102 if parents[0] < striprev and parents[1] < striprev:
102 savebases.append(cl.node(r))
103 savebases.append(cl.node(r))
103
104
104 for p in parents:
105 for p in parents:
105 if p in saveheads:
106 if p in saveheads:
106 del saveheads[p]
107 del saveheads[p]
107 saveheads[r] = 1
108 saveheads[r] = 1
108
109
109 saveheads = [cl.node(r) for r in saveheads]
110 saveheads = [cl.node(r) for r in saveheads]
110 files = _collectfiles(repo, striprev)
111 files = _collectfiles(repo, striprev)
111
112
112 extranodes = _collectextranodes(repo, files, striprev)
113 extranodes = _collectextranodes(repo, files, striprev)
113
114
114 # create a changegroup for all the branches we need to keep
115 # create a changegroup for all the branches we need to keep
115 if backup == "all":
116 if backup == "all":
116 _bundle(repo, [node], cl.heads(), node, 'backup')
117 _bundle(repo, [node], cl.heads(), node, 'backup')
117 if saveheads or extranodes:
118 if saveheads or extranodes:
118 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
119 chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
119 extranodes)
120 extranodes)
120
121
121 cl.strip(striprev)
122 cl.strip(striprev)
122 repo.manifest.strip(striprev)
123 repo.manifest.strip(striprev)
123 for name in files:
124 for name in files:
124 f = repo.file(name)
125 f = repo.file(name)
125 f.strip(striprev)
126 f.strip(striprev)
126
127
127 if saveheads or extranodes:
128 if saveheads or extranodes:
128 ui.status("adding branch\n")
129 ui.status(_("adding branch\n"))
129 f = open(chgrpfile, "rb")
130 f = open(chgrpfile, "rb")
130 gen = changegroup.readbundle(f, chgrpfile)
131 gen = changegroup.readbundle(f, chgrpfile)
131 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
132 repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
132 f.close()
133 f.close()
133 if backup != "strip":
134 if backup != "strip":
134 os.unlink(chgrpfile)
135 os.unlink(chgrpfile)
135
136
@@ -1,247 +1,247 b''
1 # sshrepo.py - ssh repository proxy class for mercurial
1 # sshrepo.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import bin, hex
8 from node import bin, hex
9 from i18n import _
9 from i18n import _
10 import repo, os, re, util
10 import repo, os, re, util
11
11
12 class remotelock(object):
12 class remotelock(object):
13 def __init__(self, repo):
13 def __init__(self, repo):
14 self.repo = repo
14 self.repo = repo
15 def release(self):
15 def release(self):
16 self.repo.unlock()
16 self.repo.unlock()
17 self.repo = None
17 self.repo = None
18 def __del__(self):
18 def __del__(self):
19 if self.repo:
19 if self.repo:
20 self.release()
20 self.release()
21
21
22 class sshrepository(repo.repository):
22 class sshrepository(repo.repository):
23 def __init__(self, ui, path, create=0):
23 def __init__(self, ui, path, create=0):
24 self._url = path
24 self._url = path
25 self.ui = ui
25 self.ui = ui
26
26
27 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
27 m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
28 if not m:
28 if not m:
29 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
29 self.raise_(repo.RepoError(_("couldn't parse location %s") % path))
30
30
31 self.user = m.group(2)
31 self.user = m.group(2)
32 self.host = m.group(3)
32 self.host = m.group(3)
33 self.port = m.group(5)
33 self.port = m.group(5)
34 self.path = m.group(7) or "."
34 self.path = m.group(7) or "."
35
35
36 sshcmd = self.ui.config("ui", "ssh", "ssh")
36 sshcmd = self.ui.config("ui", "ssh", "ssh")
37 remotecmd = self.ui.config("ui", "remotecmd", "hg")
37 remotecmd = self.ui.config("ui", "remotecmd", "hg")
38
38
39 args = util.sshargs(sshcmd, self.host, self.user, self.port)
39 args = util.sshargs(sshcmd, self.host, self.user, self.port)
40
40
41 if create:
41 if create:
42 cmd = '%s %s "%s init %s"'
42 cmd = '%s %s "%s init %s"'
43 cmd = cmd % (sshcmd, args, remotecmd, self.path)
43 cmd = cmd % (sshcmd, args, remotecmd, self.path)
44
44
45 ui.note('running %s\n' % cmd)
45 ui.note(_('running %s\n') % cmd)
46 res = util.system(cmd)
46 res = util.system(cmd)
47 if res != 0:
47 if res != 0:
48 self.raise_(repo.RepoError(_("could not create remote repo")))
48 self.raise_(repo.RepoError(_("could not create remote repo")))
49
49
50 self.validate_repo(ui, sshcmd, args, remotecmd)
50 self.validate_repo(ui, sshcmd, args, remotecmd)
51
51
52 def url(self):
52 def url(self):
53 return self._url
53 return self._url
54
54
55 def validate_repo(self, ui, sshcmd, args, remotecmd):
55 def validate_repo(self, ui, sshcmd, args, remotecmd):
56 # cleanup up previous run
56 # cleanup up previous run
57 self.cleanup()
57 self.cleanup()
58
58
59 cmd = '%s %s "%s -R %s serve --stdio"'
59 cmd = '%s %s "%s -R %s serve --stdio"'
60 cmd = cmd % (sshcmd, args, remotecmd, self.path)
60 cmd = cmd % (sshcmd, args, remotecmd, self.path)
61
61
62 cmd = util.quotecommand(cmd)
62 cmd = util.quotecommand(cmd)
63 ui.note('running %s\n' % cmd)
63 ui.note(_('running %s\n') % cmd)
64 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
64 self.pipeo, self.pipei, self.pipee = os.popen3(cmd, 'b')
65
65
66 # skip any noise generated by remote shell
66 # skip any noise generated by remote shell
67 self.do_cmd("hello")
67 self.do_cmd("hello")
68 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
68 r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
69 lines = ["", "dummy"]
69 lines = ["", "dummy"]
70 max_noise = 500
70 max_noise = 500
71 while lines[-1] and max_noise:
71 while lines[-1] and max_noise:
72 l = r.readline()
72 l = r.readline()
73 self.readerr()
73 self.readerr()
74 if lines[-1] == "1\n" and l == "\n":
74 if lines[-1] == "1\n" and l == "\n":
75 break
75 break
76 if l:
76 if l:
77 ui.debug(_("remote: "), l)
77 ui.debug(_("remote: "), l)
78 lines.append(l)
78 lines.append(l)
79 max_noise -= 1
79 max_noise -= 1
80 else:
80 else:
81 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
81 self.raise_(repo.RepoError(_("no suitable response from remote hg")))
82
82
83 self.capabilities = util.set()
83 self.capabilities = util.set()
84 lines.reverse()
84 lines.reverse()
85 for l in lines:
85 for l in lines:
86 if l.startswith("capabilities:"):
86 if l.startswith("capabilities:"):
87 self.capabilities.update(l[:-1].split(":")[1].split())
87 self.capabilities.update(l[:-1].split(":")[1].split())
88 break
88 break
89
89
90 def readerr(self):
90 def readerr(self):
91 while 1:
91 while 1:
92 size = util.fstat(self.pipee).st_size
92 size = util.fstat(self.pipee).st_size
93 if size == 0: break
93 if size == 0: break
94 l = self.pipee.readline()
94 l = self.pipee.readline()
95 if not l: break
95 if not l: break
96 self.ui.status(_("remote: "), l)
96 self.ui.status(_("remote: "), l)
97
97
98 def raise_(self, exception):
98 def raise_(self, exception):
99 self.cleanup()
99 self.cleanup()
100 raise exception
100 raise exception
101
101
102 def cleanup(self):
102 def cleanup(self):
103 try:
103 try:
104 self.pipeo.close()
104 self.pipeo.close()
105 self.pipei.close()
105 self.pipei.close()
106 # read the error descriptor until EOF
106 # read the error descriptor until EOF
107 for l in self.pipee:
107 for l in self.pipee:
108 self.ui.status(_("remote: "), l)
108 self.ui.status(_("remote: "), l)
109 self.pipee.close()
109 self.pipee.close()
110 except:
110 except:
111 pass
111 pass
112
112
113 __del__ = cleanup
113 __del__ = cleanup
114
114
115 def do_cmd(self, cmd, **args):
115 def do_cmd(self, cmd, **args):
116 self.ui.debug(_("sending %s command\n") % cmd)
116 self.ui.debug(_("sending %s command\n") % cmd)
117 self.pipeo.write("%s\n" % cmd)
117 self.pipeo.write("%s\n" % cmd)
118 for k, v in args.items():
118 for k, v in args.items():
119 self.pipeo.write("%s %d\n" % (k, len(v)))
119 self.pipeo.write("%s %d\n" % (k, len(v)))
120 self.pipeo.write(v)
120 self.pipeo.write(v)
121 self.pipeo.flush()
121 self.pipeo.flush()
122
122
123 return self.pipei
123 return self.pipei
124
124
125 def call(self, cmd, **args):
125 def call(self, cmd, **args):
126 self.do_cmd(cmd, **args)
126 self.do_cmd(cmd, **args)
127 return self._recv()
127 return self._recv()
128
128
129 def _recv(self):
129 def _recv(self):
130 l = self.pipei.readline()
130 l = self.pipei.readline()
131 self.readerr()
131 self.readerr()
132 try:
132 try:
133 l = int(l)
133 l = int(l)
134 except:
134 except:
135 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
135 self.raise_(util.UnexpectedOutput(_("unexpected response:"), l))
136 return self.pipei.read(l)
136 return self.pipei.read(l)
137
137
138 def _send(self, data, flush=False):
138 def _send(self, data, flush=False):
139 self.pipeo.write("%d\n" % len(data))
139 self.pipeo.write("%d\n" % len(data))
140 if data:
140 if data:
141 self.pipeo.write(data)
141 self.pipeo.write(data)
142 if flush:
142 if flush:
143 self.pipeo.flush()
143 self.pipeo.flush()
144 self.readerr()
144 self.readerr()
145
145
146 def lock(self):
146 def lock(self):
147 self.call("lock")
147 self.call("lock")
148 return remotelock(self)
148 return remotelock(self)
149
149
150 def unlock(self):
150 def unlock(self):
151 self.call("unlock")
151 self.call("unlock")
152
152
153 def lookup(self, key):
153 def lookup(self, key):
154 self.requirecap('lookup', _('look up remote revision'))
154 self.requirecap('lookup', _('look up remote revision'))
155 d = self.call("lookup", key=key)
155 d = self.call("lookup", key=key)
156 success, data = d[:-1].split(" ", 1)
156 success, data = d[:-1].split(" ", 1)
157 if int(success):
157 if int(success):
158 return bin(data)
158 return bin(data)
159 else:
159 else:
160 self.raise_(repo.RepoError(data))
160 self.raise_(repo.RepoError(data))
161
161
162 def heads(self):
162 def heads(self):
163 d = self.call("heads")
163 d = self.call("heads")
164 try:
164 try:
165 return map(bin, d[:-1].split(" "))
165 return map(bin, d[:-1].split(" "))
166 except:
166 except:
167 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
167 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
168
168
169 def branches(self, nodes):
169 def branches(self, nodes):
170 n = " ".join(map(hex, nodes))
170 n = " ".join(map(hex, nodes))
171 d = self.call("branches", nodes=n)
171 d = self.call("branches", nodes=n)
172 try:
172 try:
173 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
173 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
174 return br
174 return br
175 except:
175 except:
176 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
176 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
177
177
178 def between(self, pairs):
178 def between(self, pairs):
179 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
179 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
180 d = self.call("between", pairs=n)
180 d = self.call("between", pairs=n)
181 try:
181 try:
182 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
182 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
183 return p
183 return p
184 except:
184 except:
185 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
185 self.raise_(util.UnexpectedOutput(_("unexpected response:"), d))
186
186
187 def changegroup(self, nodes, kind):
187 def changegroup(self, nodes, kind):
188 n = " ".join(map(hex, nodes))
188 n = " ".join(map(hex, nodes))
189 return self.do_cmd("changegroup", roots=n)
189 return self.do_cmd("changegroup", roots=n)
190
190
191 def changegroupsubset(self, bases, heads, kind):
191 def changegroupsubset(self, bases, heads, kind):
192 self.requirecap('changegroupsubset', _('look up remote changes'))
192 self.requirecap('changegroupsubset', _('look up remote changes'))
193 bases = " ".join(map(hex, bases))
193 bases = " ".join(map(hex, bases))
194 heads = " ".join(map(hex, heads))
194 heads = " ".join(map(hex, heads))
195 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
195 return self.do_cmd("changegroupsubset", bases=bases, heads=heads)
196
196
197 def unbundle(self, cg, heads, source):
197 def unbundle(self, cg, heads, source):
198 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
198 d = self.call("unbundle", heads=' '.join(map(hex, heads)))
199 if d:
199 if d:
200 # remote may send "unsynced changes"
200 # remote may send "unsynced changes"
201 self.raise_(repo.RepoError(_("push refused: %s") % d))
201 self.raise_(repo.RepoError(_("push refused: %s") % d))
202
202
203 while 1:
203 while 1:
204 d = cg.read(4096)
204 d = cg.read(4096)
205 if not d:
205 if not d:
206 break
206 break
207 self._send(d)
207 self._send(d)
208
208
209 self._send("", flush=True)
209 self._send("", flush=True)
210
210
211 r = self._recv()
211 r = self._recv()
212 if r:
212 if r:
213 # remote may send "unsynced changes"
213 # remote may send "unsynced changes"
214 self.raise_(repo.RepoError(_("push failed: %s") % r))
214 self.raise_(repo.RepoError(_("push failed: %s") % r))
215
215
216 r = self._recv()
216 r = self._recv()
217 try:
217 try:
218 return int(r)
218 return int(r)
219 except:
219 except:
220 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
220 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
221
221
222 def addchangegroup(self, cg, source, url):
222 def addchangegroup(self, cg, source, url):
223 d = self.call("addchangegroup")
223 d = self.call("addchangegroup")
224 if d:
224 if d:
225 self.raise_(repo.RepoError(_("push refused: %s") % d))
225 self.raise_(repo.RepoError(_("push refused: %s") % d))
226 while 1:
226 while 1:
227 d = cg.read(4096)
227 d = cg.read(4096)
228 if not d:
228 if not d:
229 break
229 break
230 self.pipeo.write(d)
230 self.pipeo.write(d)
231 self.readerr()
231 self.readerr()
232
232
233 self.pipeo.flush()
233 self.pipeo.flush()
234
234
235 self.readerr()
235 self.readerr()
236 r = self._recv()
236 r = self._recv()
237 if not r:
237 if not r:
238 return 1
238 return 1
239 try:
239 try:
240 return int(r)
240 return int(r)
241 except:
241 except:
242 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
242 self.raise_(util.UnexpectedOutput(_("unexpected response:"), r))
243
243
244 def stream_out(self):
244 def stream_out(self):
245 return self.do_cmd('stream_out')
245 return self.do_cmd('stream_out')
246
246
247 instance = sshrepository
247 instance = sshrepository
@@ -1,64 +1,65 b''
1 # streamclone.py - streaming clone server support for mercurial
1 # streamclone.py - streaming clone server support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import util, lock
8 import util, lock
9 from i18n import _
9
10
10 class StreamException(Exception):
11 class StreamException(Exception):
11 def __init__(self, code):
12 def __init__(self, code):
12 Exception.__init__(self)
13 Exception.__init__(self)
13 self.code = code
14 self.code = code
14 def __str__(self):
15 def __str__(self):
15 return '%i\n' % self.code
16 return '%i\n' % self.code
16
17
17 # if server supports streaming clone, it advertises "stream"
18 # if server supports streaming clone, it advertises "stream"
18 # capability with value that is version+flags of repo it is serving.
19 # capability with value that is version+flags of repo it is serving.
19 # client only streams if it can read that repo format.
20 # client only streams if it can read that repo format.
20
21
21 # stream file format is simple.
22 # stream file format is simple.
22 #
23 #
23 # server writes out line that says how many files, how many total
24 # server writes out line that says how many files, how many total
24 # bytes. separator is ascii space, byte counts are strings.
25 # bytes. separator is ascii space, byte counts are strings.
25 #
26 #
26 # then for each file:
27 # then for each file:
27 #
28 #
28 # server writes out line that says file name, how many bytes in
29 # server writes out line that says file name, how many bytes in
29 # file. separator is ascii nul, byte count is string.
30 # file. separator is ascii nul, byte count is string.
30 #
31 #
31 # server writes out raw file data.
32 # server writes out raw file data.
32
33
33 def stream_out(repo, untrusted=False):
34 def stream_out(repo, untrusted=False):
34 '''stream out all metadata files in repository.
35 '''stream out all metadata files in repository.
35 writes to file-like object, must support write() and optional flush().'''
36 writes to file-like object, must support write() and optional flush().'''
36
37
37 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
38 if not repo.ui.configbool('server', 'uncompressed', untrusted=untrusted):
38 raise StreamException(1)
39 raise StreamException(1)
39
40
40 entries = []
41 entries = []
41 total_bytes = 0
42 total_bytes = 0
42 try:
43 try:
43 l = None
44 l = None
44 try:
45 try:
45 repo.ui.debug('scanning\n')
46 repo.ui.debug(_('scanning\n'))
46 # get consistent snapshot of repo, lock during scan
47 # get consistent snapshot of repo, lock during scan
47 l = repo.lock()
48 l = repo.lock()
48 for name, ename, size in repo.store.walk():
49 for name, ename, size in repo.store.walk():
49 entries.append((name, size))
50 entries.append((name, size))
50 total_bytes += size
51 total_bytes += size
51 finally:
52 finally:
52 del l
53 del l
53 except (lock.LockHeld, lock.LockUnavailable), inst:
54 except (lock.LockHeld, lock.LockUnavailable), inst:
54 raise StreamException(2)
55 raise StreamException(2)
55
56
56 yield '0\n'
57 yield '0\n'
57 repo.ui.debug('%d files, %d bytes to transfer\n' %
58 repo.ui.debug(_('%d files, %d bytes to transfer\n') %
58 (len(entries), total_bytes))
59 (len(entries), total_bytes))
59 yield '%d %d\n' % (len(entries), total_bytes)
60 yield '%d %d\n' % (len(entries), total_bytes)
60 for name, size in entries:
61 for name, size in entries:
61 repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
62 repo.ui.debug(_('sending %s (%d bytes)\n') % (name, size))
62 yield '%s\0%d\n' % (name, size)
63 yield '%s\0%d\n' % (name, size)
63 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
64 for chunk in util.filechunkiter(repo.sopener(name), limit=size):
64 yield chunk
65 yield chunk
General Comments 0
You need to be logged in to leave comments. Login now