dirstate: add __contains__ and make __getitem__ more useful...
Matt Mackall
r4906:30847b8a default
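The hunks below update call sites to the new dirstate API named in the commit message; the visible changes are in the convert backend's putfile() and the GnuPG extension's sign(). The core of the change at those call sites, shown here as a minimal before/after sketch paraphrased from the hunks (the dirstate implementation itself is not part of this excerpt):

    # before: ask for the one-letter state and compare against '?' (untracked)
    if repo.dirstate.state(f) == '?':
        repo.dirstate.add(f)

    # after: dirstate supports "in", so tracking becomes a plain membership test
    if f not in repo.dirstate:
        repo.dirstate.add(f)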
@@ -1,97 +1,97 @@
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 import os, time
3 import os, time
4 from mercurial import hg
4 from mercurial import hg
5
5
6 from common import NoRepo, converter_sink
6 from common import NoRepo, converter_sink
7
7
8 class convert_mercurial(converter_sink):
8 class convert_mercurial(converter_sink):
9 def __init__(self, ui, path):
9 def __init__(self, ui, path):
10 self.path = path
10 self.path = path
11 self.ui = ui
11 self.ui = ui
12 try:
12 try:
13 self.repo = hg.repository(self.ui, path)
13 self.repo = hg.repository(self.ui, path)
14 except:
14 except:
15 raise NoRepo("could not open hg repo %s" % path)
15 raise NoRepo("could not open hg repo %s" % path)
16
16
17 def mapfile(self):
17 def mapfile(self):
18 return os.path.join(self.path, ".hg", "shamap")
18 return os.path.join(self.path, ".hg", "shamap")
19
19
20 def authorfile(self):
20 def authorfile(self):
21 return os.path.join(self.path, ".hg", "authormap")
21 return os.path.join(self.path, ".hg", "authormap")
22
22
23 def getheads(self):
23 def getheads(self):
24 h = self.repo.changelog.heads()
24 h = self.repo.changelog.heads()
25 return [ hg.hex(x) for x in h ]
25 return [ hg.hex(x) for x in h ]
26
26
27 def putfile(self, f, e, data):
27 def putfile(self, f, e, data):
28 self.repo.wwrite(f, data, e)
28 self.repo.wwrite(f, data, e)
29 if self.repo.dirstate.state(f) == '?':
29 if f not in self.repo.dirstate:
30 self.repo.dirstate.add(f)
30 self.repo.dirstate.add(f)
31
31
32 def copyfile(self, source, dest):
32 def copyfile(self, source, dest):
33 self.repo.copy(source, dest)
33 self.repo.copy(source, dest)
34
34
35 def delfile(self, f):
35 def delfile(self, f):
36 try:
36 try:
37 os.unlink(self.repo.wjoin(f))
37 os.unlink(self.repo.wjoin(f))
38 #self.repo.remove([f])
38 #self.repo.remove([f])
39 except:
39 except:
40 pass
40 pass
41
41
42 def putcommit(self, files, parents, commit):
42 def putcommit(self, files, parents, commit):
43 seen = {}
43 seen = {}
44 pl = []
44 pl = []
45 for p in parents:
45 for p in parents:
46 if p not in seen:
46 if p not in seen:
47 pl.append(p)
47 pl.append(p)
48 seen[p] = 1
48 seen[p] = 1
49 parents = pl
49 parents = pl
50
50
51 if len(parents) < 2: parents.append("0" * 40)
51 if len(parents) < 2: parents.append("0" * 40)
52 if len(parents) < 2: parents.append("0" * 40)
52 if len(parents) < 2: parents.append("0" * 40)
53 p2 = parents.pop(0)
53 p2 = parents.pop(0)
54
54
55 text = commit.desc
55 text = commit.desc
56 extra = {}
56 extra = {}
57 if commit.branch:
57 if commit.branch:
58 extra['branch'] = commit.branch
58 extra['branch'] = commit.branch
59 if commit.rev:
59 if commit.rev:
60 extra['convert_revision'] = commit.rev
60 extra['convert_revision'] = commit.rev
61
61
62 while parents:
62 while parents:
63 p1 = p2
63 p1 = p2
64 p2 = parents.pop(0)
64 p2 = parents.pop(0)
65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
66 hg.bin(p1), hg.bin(p2), extra=extra)
66 hg.bin(p1), hg.bin(p2), extra=extra)
67 text = "(octopus merge fixup)\n"
67 text = "(octopus merge fixup)\n"
68 p2 = hg.hex(self.repo.changelog.tip())
68 p2 = hg.hex(self.repo.changelog.tip())
69
69
70 return p2
70 return p2
71
71
72 def puttags(self, tags):
72 def puttags(self, tags):
73 try:
73 try:
74 old = self.repo.wfile(".hgtags").read()
74 old = self.repo.wfile(".hgtags").read()
75 oldlines = old.splitlines(1)
75 oldlines = old.splitlines(1)
76 oldlines.sort()
76 oldlines.sort()
77 except:
77 except:
78 oldlines = []
78 oldlines = []
79
79
80 k = tags.keys()
80 k = tags.keys()
81 k.sort()
81 k.sort()
82 newlines = []
82 newlines = []
83 for tag in k:
83 for tag in k:
84 newlines.append("%s %s\n" % (tags[tag], tag))
84 newlines.append("%s %s\n" % (tags[tag], tag))
85
85
86 newlines.sort()
86 newlines.sort()
87
87
88 if newlines != oldlines:
88 if newlines != oldlines:
89 self.ui.status("updating tags\n")
89 self.ui.status("updating tags\n")
90 f = self.repo.wfile(".hgtags", "w")
90 f = self.repo.wfile(".hgtags", "w")
91 f.write("".join(newlines))
91 f.write("".join(newlines))
92 f.close()
92 f.close()
93 if not oldlines: self.repo.add([".hgtags"])
93 if not oldlines: self.repo.add([".hgtags"])
94 date = "%s 0" % int(time.mktime(time.gmtime()))
94 date = "%s 0" % int(time.mktime(time.gmtime()))
95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
96 date, self.repo.changelog.tip(), hg.nullid)
96 date, self.repo.changelog.tip(), hg.nullid)
97 return hg.hex(self.repo.changelog.tip())
97 return hg.hex(self.repo.changelog.tip())
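The dirstate hunk itself is not part of this excerpt, so the following is only a sketch of what __contains__ and a "more useful" __getitem__ plausibly look like, inferred from the call sites above and the commit message; names such as _map are assumptions, not the verbatim implementation:

    # hypothetical sketch -- not the actual dirstate.py diff
    def __contains__(self, key):
        # a tracked file (normal/added/removed/merged) is present in the map;
        # unknown files are absent, so "f not in dirstate" means untracked
        return key in self._map

    def __getitem__(self, key):
        # return the one-letter state, falling back to '?' for unknown files
        # instead of raising KeyError
        return self._map.get(key, ("?",))[0]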
@@ -1,280 +1,280 @@
1 # GnuPG signing extension for Mercurial
1 # GnuPG signing extension for Mercurial
2 #
2 #
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util
9 from mercurial import util
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 class gpg:
13 class gpg:
14 def __init__(self, path, key=None):
14 def __init__(self, path, key=None):
15 self.path = path
15 self.path = path
16 self.key = (key and " --local-user \"%s\"" % key) or ""
16 self.key = (key and " --local-user \"%s\"" % key) or ""
17
17
18 def sign(self, data):
18 def sign(self, data):
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 return util.filter(data, gpgcmd)
20 return util.filter(data, gpgcmd)
21
21
22 def verify(self, data, sig):
22 def verify(self, data, sig):
23 """ returns of the good and bad signatures"""
23 """ returns of the good and bad signatures"""
24 sigfile = datafile = None
24 sigfile = datafile = None
25 try:
25 try:
26 # create temporary files
26 # create temporary files
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 fp = os.fdopen(fd, 'wb')
28 fp = os.fdopen(fd, 'wb')
29 fp.write(sig)
29 fp.write(sig)
30 fp.close()
30 fp.close()
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(data)
33 fp.write(data)
34 fp.close()
34 fp.close()
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 ret = util.filter("", gpgcmd)
37 ret = util.filter("", gpgcmd)
38 finally:
38 finally:
39 for f in (sigfile, datafile):
39 for f in (sigfile, datafile):
40 try:
40 try:
41 if f: os.unlink(f)
41 if f: os.unlink(f)
42 except: pass
42 except: pass
43 keys = []
43 keys = []
44 key, fingerprint = None, None
44 key, fingerprint = None, None
45 err = ""
45 err = ""
46 for l in ret.splitlines():
46 for l in ret.splitlines():
47 # see DETAILS in the gnupg documentation
47 # see DETAILS in the gnupg documentation
48 # filter the logger output
48 # filter the logger output
49 if not l.startswith("[GNUPG:]"):
49 if not l.startswith("[GNUPG:]"):
50 continue
50 continue
51 l = l[9:]
51 l = l[9:]
52 if l.startswith("ERRSIG"):
52 if l.startswith("ERRSIG"):
53 err = _("error while verifying signature")
53 err = _("error while verifying signature")
54 break
54 break
55 elif l.startswith("VALIDSIG"):
55 elif l.startswith("VALIDSIG"):
56 # fingerprint of the primary key
56 # fingerprint of the primary key
57 fingerprint = l.split()[10]
57 fingerprint = l.split()[10]
58 elif (l.startswith("GOODSIG") or
58 elif (l.startswith("GOODSIG") or
59 l.startswith("EXPSIG") or
59 l.startswith("EXPSIG") or
60 l.startswith("EXPKEYSIG") or
60 l.startswith("EXPKEYSIG") or
61 l.startswith("BADSIG")):
61 l.startswith("BADSIG")):
62 if key is not None:
62 if key is not None:
63 keys.append(key + [fingerprint])
63 keys.append(key + [fingerprint])
64 key = l.split(" ", 2)
64 key = l.split(" ", 2)
65 fingerprint = None
65 fingerprint = None
66 if err:
66 if err:
67 return err, []
67 return err, []
68 if key is not None:
68 if key is not None:
69 keys.append(key + [fingerprint])
69 keys.append(key + [fingerprint])
70 return err, keys
70 return err, keys
71
71
72 def newgpg(ui, **opts):
72 def newgpg(ui, **opts):
73 """create a new gpg instance"""
73 """create a new gpg instance"""
74 gpgpath = ui.config("gpg", "cmd", "gpg")
74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 gpgkey = opts.get('key')
75 gpgkey = opts.get('key')
76 if not gpgkey:
76 if not gpgkey:
77 gpgkey = ui.config("gpg", "key", None)
77 gpgkey = ui.config("gpg", "key", None)
78 return gpg(gpgpath, gpgkey)
78 return gpg(gpgpath, gpgkey)
79
79
80 def sigwalk(repo):
80 def sigwalk(repo):
81 """
81 """
82 walk over every signature, yielding pairs
82 walk over every signature, yielding pairs
83 ((node, version, sig), (filename, linenumber))
83 ((node, version, sig), (filename, linenumber))
84 """
84 """
85 def parsefile(fileiter, context):
85 def parsefile(fileiter, context):
86 ln = 1
86 ln = 1
87 for l in fileiter:
87 for l in fileiter:
88 if not l:
88 if not l:
89 continue
89 continue
90 yield (l.split(" ", 2), (context, ln))
90 yield (l.split(" ", 2), (context, ln))
91 ln +=1
91 ln +=1
92
92
93 fl = repo.file(".hgsigs")
93 fl = repo.file(".hgsigs")
94 h = fl.heads()
94 h = fl.heads()
95 h.reverse()
95 h.reverse()
96 # read the heads
96 # read the heads
97 for r in h:
97 for r in h:
98 fn = ".hgsigs|%s" % hgnode.short(r)
98 fn = ".hgsigs|%s" % hgnode.short(r)
99 for item in parsefile(fl.read(r).splitlines(), fn):
99 for item in parsefile(fl.read(r).splitlines(), fn):
100 yield item
100 yield item
101 try:
101 try:
102 # read local signatures
102 # read local signatures
103 fn = "localsigs"
103 fn = "localsigs"
104 for item in parsefile(repo.opener(fn), fn):
104 for item in parsefile(repo.opener(fn), fn):
105 yield item
105 yield item
106 except IOError:
106 except IOError:
107 pass
107 pass
108
108
109 def getkeys(ui, repo, mygpg, sigdata, context):
109 def getkeys(ui, repo, mygpg, sigdata, context):
110 """get the keys who signed a data"""
110 """get the keys who signed a data"""
111 fn, ln = context
111 fn, ln = context
112 node, version, sig = sigdata
112 node, version, sig = sigdata
113 prefix = "%s:%d" % (fn, ln)
113 prefix = "%s:%d" % (fn, ln)
114 node = hgnode.bin(node)
114 node = hgnode.bin(node)
115
115
116 data = node2txt(repo, node, version)
116 data = node2txt(repo, node, version)
117 sig = binascii.a2b_base64(sig)
117 sig = binascii.a2b_base64(sig)
118 err, keys = mygpg.verify(data, sig)
118 err, keys = mygpg.verify(data, sig)
119 if err:
119 if err:
120 ui.warn("%s:%d %s\n" % (fn, ln , err))
120 ui.warn("%s:%d %s\n" % (fn, ln , err))
121 return None
121 return None
122
122
123 validkeys = []
123 validkeys = []
124 # warn for expired key and/or sigs
124 # warn for expired key and/or sigs
125 for key in keys:
125 for key in keys:
126 if key[0] == "BADSIG":
126 if key[0] == "BADSIG":
127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
127 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
128 continue
128 continue
129 if key[0] == "EXPSIG":
129 if key[0] == "EXPSIG":
130 ui.write(_("%s Note: Signature has expired"
130 ui.write(_("%s Note: Signature has expired"
131 " (signed by: \"%s\")\n") % (prefix, key[2]))
131 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 elif key[0] == "EXPKEYSIG":
132 elif key[0] == "EXPKEYSIG":
133 ui.write(_("%s Note: This key has expired"
133 ui.write(_("%s Note: This key has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 validkeys.append((key[1], key[2], key[3]))
135 validkeys.append((key[1], key[2], key[3]))
136 return validkeys
136 return validkeys
137
137
138 def sigs(ui, repo):
138 def sigs(ui, repo):
139 """list signed changesets"""
139 """list signed changesets"""
140 mygpg = newgpg(ui)
140 mygpg = newgpg(ui)
141 revs = {}
141 revs = {}
142
142
143 for data, context in sigwalk(repo):
143 for data, context in sigwalk(repo):
144 node, version, sig = data
144 node, version, sig = data
145 fn, ln = context
145 fn, ln = context
146 try:
146 try:
147 n = repo.lookup(node)
147 n = repo.lookup(node)
148 except KeyError:
148 except KeyError:
149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
149 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
150 continue
150 continue
151 r = repo.changelog.rev(n)
151 r = repo.changelog.rev(n)
152 keys = getkeys(ui, repo, mygpg, data, context)
152 keys = getkeys(ui, repo, mygpg, data, context)
153 if not keys:
153 if not keys:
154 continue
154 continue
155 revs.setdefault(r, [])
155 revs.setdefault(r, [])
156 revs[r].extend(keys)
156 revs[r].extend(keys)
157 nodes = list(revs)
157 nodes = list(revs)
158 nodes.reverse()
158 nodes.reverse()
159 for rev in nodes:
159 for rev in nodes:
160 for k in revs[rev]:
160 for k in revs[rev]:
161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
161 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
162 ui.write("%-30s %s\n" % (keystr(ui, k), r))
163
163
164 def check(ui, repo, rev):
164 def check(ui, repo, rev):
165 """verify all the signatures there may be for a particular revision"""
165 """verify all the signatures there may be for a particular revision"""
166 mygpg = newgpg(ui)
166 mygpg = newgpg(ui)
167 rev = repo.lookup(rev)
167 rev = repo.lookup(rev)
168 hexrev = hgnode.hex(rev)
168 hexrev = hgnode.hex(rev)
169 keys = []
169 keys = []
170
170
171 for data, context in sigwalk(repo):
171 for data, context in sigwalk(repo):
172 node, version, sig = data
172 node, version, sig = data
173 if node == hexrev:
173 if node == hexrev:
174 k = getkeys(ui, repo, mygpg, data, context)
174 k = getkeys(ui, repo, mygpg, data, context)
175 if k:
175 if k:
176 keys.extend(k)
176 keys.extend(k)
177
177
178 if not keys:
178 if not keys:
179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
179 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
180 return
180 return
181
181
182 # print summary
182 # print summary
183 ui.write("%s is signed by:\n" % hgnode.short(rev))
183 ui.write("%s is signed by:\n" % hgnode.short(rev))
184 for key in keys:
184 for key in keys:
185 ui.write(" %s\n" % keystr(ui, key))
185 ui.write(" %s\n" % keystr(ui, key))
186
186
187 def keystr(ui, key):
187 def keystr(ui, key):
188 """associate a string to a key (username, comment)"""
188 """associate a string to a key (username, comment)"""
189 keyid, user, fingerprint = key
189 keyid, user, fingerprint = key
190 comment = ui.config("gpg", fingerprint, None)
190 comment = ui.config("gpg", fingerprint, None)
191 if comment:
191 if comment:
192 return "%s (%s)" % (user, comment)
192 return "%s (%s)" % (user, comment)
193 else:
193 else:
194 return user
194 return user
195
195
196 def sign(ui, repo, *revs, **opts):
196 def sign(ui, repo, *revs, **opts):
197 """add a signature for the current or given revision
197 """add a signature for the current or given revision
198
198
199 If no revision is given, the parent of the working directory is used,
199 If no revision is given, the parent of the working directory is used,
200 or tip if no revision is checked out.
200 or tip if no revision is checked out.
201 """
201 """
202
202
203 mygpg = newgpg(ui, **opts)
203 mygpg = newgpg(ui, **opts)
204 sigver = "0"
204 sigver = "0"
205 sigmessage = ""
205 sigmessage = ""
206 if revs:
206 if revs:
207 nodes = [repo.lookup(n) for n in revs]
207 nodes = [repo.lookup(n) for n in revs]
208 else:
208 else:
209 nodes = [node for node in repo.dirstate.parents()
209 nodes = [node for node in repo.dirstate.parents()
210 if node != hgnode.nullid]
210 if node != hgnode.nullid]
211 if len(nodes) > 1:
211 if len(nodes) > 1:
212 raise util.Abort(_('uncommitted merge - please provide a '
212 raise util.Abort(_('uncommitted merge - please provide a '
213 'specific revision'))
213 'specific revision'))
214 if not nodes:
214 if not nodes:
215 nodes = [repo.changelog.tip()]
215 nodes = [repo.changelog.tip()]
216
216
217 for n in nodes:
217 for n in nodes:
218 hexnode = hgnode.hex(n)
218 hexnode = hgnode.hex(n)
219 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
219 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
220 hgnode.short(n)))
220 hgnode.short(n)))
221 # build data
221 # build data
222 data = node2txt(repo, n, sigver)
222 data = node2txt(repo, n, sigver)
223 sig = mygpg.sign(data)
223 sig = mygpg.sign(data)
224 if not sig:
224 if not sig:
225 raise util.Abort(_("Error while signing"))
225 raise util.Abort(_("Error while signing"))
226 sig = binascii.b2a_base64(sig)
226 sig = binascii.b2a_base64(sig)
227 sig = sig.replace("\n", "")
227 sig = sig.replace("\n", "")
228 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
228 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
229
229
230 # write it
230 # write it
231 if opts['local']:
231 if opts['local']:
232 repo.opener("localsigs", "ab").write(sigmessage)
232 repo.opener("localsigs", "ab").write(sigmessage)
233 return
233 return
234
234
235 for x in repo.status()[:5]:
235 for x in repo.status()[:5]:
236 if ".hgsigs" in x and not opts["force"]:
236 if ".hgsigs" in x and not opts["force"]:
237 raise util.Abort(_("working copy of .hgsigs is changed "
237 raise util.Abort(_("working copy of .hgsigs is changed "
238 "(please commit .hgsigs manually "
238 "(please commit .hgsigs manually "
239 "or use --force)"))
239 "or use --force)"))
240
240
241 repo.wfile(".hgsigs", "ab").write(sigmessage)
241 repo.wfile(".hgsigs", "ab").write(sigmessage)
242
242
243 if repo.dirstate.state(".hgsigs") == '?':
243 if '.hgsigs' not in repo.dirstate:
244 repo.add([".hgsigs"])
244 repo.add([".hgsigs"])
245
245
246 if opts["no_commit"]:
246 if opts["no_commit"]:
247 return
247 return
248
248
249 message = opts['message']
249 message = opts['message']
250 if not message:
250 if not message:
251 message = "\n".join([_("Added signature for changeset %s")
251 message = "\n".join([_("Added signature for changeset %s")
252 % hgnode.hex(n)
252 % hgnode.hex(n)
253 for n in nodes])
253 for n in nodes])
254 try:
254 try:
255 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
255 repo.commit([".hgsigs"], message, opts['user'], opts['date'])
256 except ValueError, inst:
256 except ValueError, inst:
257 raise util.Abort(str(inst))
257 raise util.Abort(str(inst))
258
258
259 def node2txt(repo, node, ver):
259 def node2txt(repo, node, ver):
260 """map a manifest into some text"""
260 """map a manifest into some text"""
261 if ver == "0":
261 if ver == "0":
262 return "%s\n" % hgnode.hex(node)
262 return "%s\n" % hgnode.hex(node)
263 else:
263 else:
264 raise util.Abort(_("unknown signature version"))
264 raise util.Abort(_("unknown signature version"))
265
265
266 cmdtable = {
266 cmdtable = {
267 "sign":
267 "sign":
268 (sign,
268 (sign,
269 [('l', 'local', None, _('make the signature local')),
269 [('l', 'local', None, _('make the signature local')),
270 ('f', 'force', None, _('sign even if the sigfile is modified')),
270 ('f', 'force', None, _('sign even if the sigfile is modified')),
271 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
271 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
272 ('m', 'message', '', _('commit message')),
272 ('m', 'message', '', _('commit message')),
273 ('d', 'date', '', _('date code')),
273 ('d', 'date', '', _('date code')),
274 ('u', 'user', '', _('user')),
274 ('u', 'user', '', _('user')),
275 ('k', 'key', '', _('the key id to sign with'))],
275 ('k', 'key', '', _('the key id to sign with'))],
276 _('hg sign [OPTION]... [REVISION]...')),
276 _('hg sign [OPTION]... [REVISION]...')),
277 "sigcheck": (check, [], _('hg sigcheck REVISION')),
277 "sigcheck": (check, [], _('hg sigcheck REVISION')),
278 "sigs": (sigs, [], _('hg sigs')),
278 "sigs": (sigs, [], _('hg sigs')),
279 }
279 }
280
280
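Before the long mq hunk, one small illustration of the .hgsigs record format the GnuPG extension above reads and writes: sign() appends lines of the form "<hexnode> <version> <base64 signature>" (see sigmessage), and sigwalk()/getkeys() split them back apart. A hedged example with made-up values:

    # illustrative only: one .hgsigs line, parsed the same way parsefile() does
    record = "a3c0ffee0123456789abcdef0123456789abcdef 0 c2lnbmF0dXJlLWJ5dGVz"
    node, version, sig = record.split(" ", 2)   # same split(" ", 2) as parsefile()
    # getkeys() then applies hgnode.bin(node) and binascii.a2b_base64(sig)
    # before handing the data and signature to gpg for verification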
@@ -1,2235 +1,2235 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 from mercurial import repair
34 from mercurial import repair
35 import os, sys, re, errno
35 import os, sys, re, errno
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 # Patch names look like unix file names.
39 # Patch names look like unix file names.
40 # They must be joinable with queue directory and result in the patch path.
40 # They must be joinable with queue directory and result in the patch path.
41 normname = util.normpath
41 normname = util.normpath
42
42
43 class statusentry:
43 class statusentry:
44 def __init__(self, rev, name=None):
44 def __init__(self, rev, name=None):
45 if not name:
45 if not name:
46 fields = rev.split(':', 1)
46 fields = rev.split(':', 1)
47 if len(fields) == 2:
47 if len(fields) == 2:
48 self.rev, self.name = fields
48 self.rev, self.name = fields
49 else:
49 else:
50 self.rev, self.name = None, None
50 self.rev, self.name = None, None
51 else:
51 else:
52 self.rev, self.name = rev, name
52 self.rev, self.name = rev, name
53
53
54 def __str__(self):
54 def __str__(self):
55 return self.rev + ':' + self.name
55 return self.rev + ':' + self.name
56
56
57 class queue:
57 class queue:
58 def __init__(self, ui, path, patchdir=None):
58 def __init__(self, ui, path, patchdir=None):
59 self.basepath = path
59 self.basepath = path
60 self.path = patchdir or os.path.join(path, "patches")
60 self.path = patchdir or os.path.join(path, "patches")
61 self.opener = util.opener(self.path)
61 self.opener = util.opener(self.path)
62 self.ui = ui
62 self.ui = ui
63 self.applied = []
63 self.applied = []
64 self.full_series = []
64 self.full_series = []
65 self.applied_dirty = 0
65 self.applied_dirty = 0
66 self.series_dirty = 0
66 self.series_dirty = 0
67 self.series_path = "series"
67 self.series_path = "series"
68 self.status_path = "status"
68 self.status_path = "status"
69 self.guards_path = "guards"
69 self.guards_path = "guards"
70 self.active_guards = None
70 self.active_guards = None
71 self.guards_dirty = False
71 self.guards_dirty = False
72 self._diffopts = None
72 self._diffopts = None
73
73
74 if os.path.exists(self.join(self.series_path)):
74 if os.path.exists(self.join(self.series_path)):
75 self.full_series = self.opener(self.series_path).read().splitlines()
75 self.full_series = self.opener(self.series_path).read().splitlines()
76 self.parse_series()
76 self.parse_series()
77
77
78 if os.path.exists(self.join(self.status_path)):
78 if os.path.exists(self.join(self.status_path)):
79 lines = self.opener(self.status_path).read().splitlines()
79 lines = self.opener(self.status_path).read().splitlines()
80 self.applied = [statusentry(l) for l in lines]
80 self.applied = [statusentry(l) for l in lines]
81
81
82 def diffopts(self):
82 def diffopts(self):
83 if self._diffopts is None:
83 if self._diffopts is None:
84 self._diffopts = patch.diffopts(self.ui)
84 self._diffopts = patch.diffopts(self.ui)
85 return self._diffopts
85 return self._diffopts
86
86
87 def join(self, *p):
87 def join(self, *p):
88 return os.path.join(self.path, *p)
88 return os.path.join(self.path, *p)
89
89
90 def find_series(self, patch):
90 def find_series(self, patch):
91 pre = re.compile("(\s*)([^#]+)")
91 pre = re.compile("(\s*)([^#]+)")
92 index = 0
92 index = 0
93 for l in self.full_series:
93 for l in self.full_series:
94 m = pre.match(l)
94 m = pre.match(l)
95 if m:
95 if m:
96 s = m.group(2)
96 s = m.group(2)
97 s = s.rstrip()
97 s = s.rstrip()
98 if s == patch:
98 if s == patch:
99 return index
99 return index
100 index += 1
100 index += 1
101 return None
101 return None
102
102
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104
104
105 def parse_series(self):
105 def parse_series(self):
106 self.series = []
106 self.series = []
107 self.series_guards = []
107 self.series_guards = []
108 for l in self.full_series:
108 for l in self.full_series:
109 h = l.find('#')
109 h = l.find('#')
110 if h == -1:
110 if h == -1:
111 patch = l
111 patch = l
112 comment = ''
112 comment = ''
113 elif h == 0:
113 elif h == 0:
114 continue
114 continue
115 else:
115 else:
116 patch = l[:h]
116 patch = l[:h]
117 comment = l[h:]
117 comment = l[h:]
118 patch = patch.strip()
118 patch = patch.strip()
119 if patch:
119 if patch:
120 if patch in self.series:
120 if patch in self.series:
121 raise util.Abort(_('%s appears more than once in %s') %
121 raise util.Abort(_('%s appears more than once in %s') %
122 (patch, self.join(self.series_path)))
122 (patch, self.join(self.series_path)))
123 self.series.append(patch)
123 self.series.append(patch)
124 self.series_guards.append(self.guard_re.findall(comment))
124 self.series_guards.append(self.guard_re.findall(comment))
125
125
126 def check_guard(self, guard):
126 def check_guard(self, guard):
127 bad_chars = '# \t\r\n\f'
127 bad_chars = '# \t\r\n\f'
128 first = guard[0]
128 first = guard[0]
129 for c in '-+':
129 for c in '-+':
130 if first == c:
130 if first == c:
131 return (_('guard %r starts with invalid character: %r') %
131 return (_('guard %r starts with invalid character: %r') %
132 (guard, c))
132 (guard, c))
133 for c in bad_chars:
133 for c in bad_chars:
134 if c in guard:
134 if c in guard:
135 return _('invalid character in guard %r: %r') % (guard, c)
135 return _('invalid character in guard %r: %r') % (guard, c)
136
136
137 def set_active(self, guards):
137 def set_active(self, guards):
138 for guard in guards:
138 for guard in guards:
139 bad = self.check_guard(guard)
139 bad = self.check_guard(guard)
140 if bad:
140 if bad:
141 raise util.Abort(bad)
141 raise util.Abort(bad)
142 guards = dict.fromkeys(guards).keys()
142 guards = dict.fromkeys(guards).keys()
143 guards.sort()
143 guards.sort()
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 self.active_guards = guards
145 self.active_guards = guards
146 self.guards_dirty = True
146 self.guards_dirty = True
147
147
148 def active(self):
148 def active(self):
149 if self.active_guards is None:
149 if self.active_guards is None:
150 self.active_guards = []
150 self.active_guards = []
151 try:
151 try:
152 guards = self.opener(self.guards_path).read().split()
152 guards = self.opener(self.guards_path).read().split()
153 except IOError, err:
153 except IOError, err:
154 if err.errno != errno.ENOENT: raise
154 if err.errno != errno.ENOENT: raise
155 guards = []
155 guards = []
156 for i, guard in enumerate(guards):
156 for i, guard in enumerate(guards):
157 bad = self.check_guard(guard)
157 bad = self.check_guard(guard)
158 if bad:
158 if bad:
159 self.ui.warn('%s:%d: %s\n' %
159 self.ui.warn('%s:%d: %s\n' %
160 (self.join(self.guards_path), i + 1, bad))
160 (self.join(self.guards_path), i + 1, bad))
161 else:
161 else:
162 self.active_guards.append(guard)
162 self.active_guards.append(guard)
163 return self.active_guards
163 return self.active_guards
164
164
165 def set_guards(self, idx, guards):
165 def set_guards(self, idx, guards):
166 for g in guards:
166 for g in guards:
167 if len(g) < 2:
167 if len(g) < 2:
168 raise util.Abort(_('guard %r too short') % g)
168 raise util.Abort(_('guard %r too short') % g)
169 if g[0] not in '-+':
169 if g[0] not in '-+':
170 raise util.Abort(_('guard %r starts with invalid char') % g)
170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 bad = self.check_guard(g[1:])
171 bad = self.check_guard(g[1:])
172 if bad:
172 if bad:
173 raise util.Abort(bad)
173 raise util.Abort(bad)
174 drop = self.guard_re.sub('', self.full_series[idx])
174 drop = self.guard_re.sub('', self.full_series[idx])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 self.parse_series()
176 self.parse_series()
177 self.series_dirty = True
177 self.series_dirty = True
178
178
179 def pushable(self, idx):
179 def pushable(self, idx):
180 if isinstance(idx, str):
180 if isinstance(idx, str):
181 idx = self.series.index(idx)
181 idx = self.series.index(idx)
182 patchguards = self.series_guards[idx]
182 patchguards = self.series_guards[idx]
183 if not patchguards:
183 if not patchguards:
184 return True, None
184 return True, None
185 default = False
185 default = False
186 guards = self.active()
186 guards = self.active()
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 if exactneg:
188 if exactneg:
189 return False, exactneg[0]
189 return False, exactneg[0]
190 pos = [g for g in patchguards if g[0] == '+']
190 pos = [g for g in patchguards if g[0] == '+']
191 exactpos = [g for g in pos if g[1:] in guards]
191 exactpos = [g for g in pos if g[1:] in guards]
192 if pos:
192 if pos:
193 if exactpos:
193 if exactpos:
194 return True, exactpos[0]
194 return True, exactpos[0]
195 return False, pos
195 return False, pos
196 return True, ''
196 return True, ''
197
197
198 def explain_pushable(self, idx, all_patches=False):
198 def explain_pushable(self, idx, all_patches=False):
199 write = all_patches and self.ui.write or self.ui.warn
199 write = all_patches and self.ui.write or self.ui.warn
200 if all_patches or self.ui.verbose:
200 if all_patches or self.ui.verbose:
201 if isinstance(idx, str):
201 if isinstance(idx, str):
202 idx = self.series.index(idx)
202 idx = self.series.index(idx)
203 pushable, why = self.pushable(idx)
203 pushable, why = self.pushable(idx)
204 if all_patches and pushable:
204 if all_patches and pushable:
205 if why is None:
205 if why is None:
206 write(_('allowing %s - no guards in effect\n') %
206 write(_('allowing %s - no guards in effect\n') %
207 self.series[idx])
207 self.series[idx])
208 else:
208 else:
209 if not why:
209 if not why:
210 write(_('allowing %s - no matching negative guards\n') %
210 write(_('allowing %s - no matching negative guards\n') %
211 self.series[idx])
211 self.series[idx])
212 else:
212 else:
213 write(_('allowing %s - guarded by %r\n') %
213 write(_('allowing %s - guarded by %r\n') %
214 (self.series[idx], why))
214 (self.series[idx], why))
215 if not pushable:
215 if not pushable:
216 if why:
216 if why:
217 write(_('skipping %s - guarded by %r\n') %
217 write(_('skipping %s - guarded by %r\n') %
218 (self.series[idx], why))
218 (self.series[idx], why))
219 else:
219 else:
220 write(_('skipping %s - no matching guards\n') %
220 write(_('skipping %s - no matching guards\n') %
221 self.series[idx])
221 self.series[idx])
222
222
223 def save_dirty(self):
223 def save_dirty(self):
224 def write_list(items, path):
224 def write_list(items, path):
225 fp = self.opener(path, 'w')
225 fp = self.opener(path, 'w')
226 for i in items:
226 for i in items:
227 print >> fp, i
227 print >> fp, i
228 fp.close()
228 fp.close()
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232
232
233 def readheaders(self, patch):
233 def readheaders(self, patch):
234 def eatdiff(lines):
234 def eatdiff(lines):
235 while lines:
235 while lines:
236 l = lines[-1]
236 l = lines[-1]
237 if (l.startswith("diff -") or
237 if (l.startswith("diff -") or
238 l.startswith("Index:") or
238 l.startswith("Index:") or
239 l.startswith("===========")):
239 l.startswith("===========")):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243 def eatempty(lines):
243 def eatempty(lines):
244 while lines:
244 while lines:
245 l = lines[-1]
245 l = lines[-1]
246 if re.match('\s*$', l):
246 if re.match('\s*$', l):
247 del lines[-1]
247 del lines[-1]
248 else:
248 else:
249 break
249 break
250
250
251 pf = self.join(patch)
251 pf = self.join(patch)
252 message = []
252 message = []
253 comments = []
253 comments = []
254 user = None
254 user = None
255 date = None
255 date = None
256 format = None
256 format = None
257 subject = None
257 subject = None
258 diffstart = 0
258 diffstart = 0
259
259
260 for line in file(pf):
260 for line in file(pf):
261 line = line.rstrip()
261 line = line.rstrip()
262 if line.startswith('diff --git'):
262 if line.startswith('diff --git'):
263 diffstart = 2
263 diffstart = 2
264 break
264 break
265 if diffstart:
265 if diffstart:
266 if line.startswith('+++ '):
266 if line.startswith('+++ '):
267 diffstart = 2
267 diffstart = 2
268 break
268 break
269 if line.startswith("--- "):
269 if line.startswith("--- "):
270 diffstart = 1
270 diffstart = 1
271 continue
271 continue
272 elif format == "hgpatch":
272 elif format == "hgpatch":
273 # parse values when importing the result of an hg export
273 # parse values when importing the result of an hg export
274 if line.startswith("# User "):
274 if line.startswith("# User "):
275 user = line[7:]
275 user = line[7:]
276 elif line.startswith("# Date "):
276 elif line.startswith("# Date "):
277 date = line[7:]
277 date = line[7:]
278 elif not line.startswith("# ") and line:
278 elif not line.startswith("# ") and line:
279 message.append(line)
279 message.append(line)
280 format = None
280 format = None
281 elif line == '# HG changeset patch':
281 elif line == '# HG changeset patch':
282 format = "hgpatch"
282 format = "hgpatch"
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 line.startswith("subject: "))):
284 line.startswith("subject: "))):
285 subject = line[9:]
285 subject = line[9:]
286 format = "tag"
286 format = "tag"
287 elif (format != "tagdone" and (line.startswith("From: ") or
287 elif (format != "tagdone" and (line.startswith("From: ") or
288 line.startswith("from: "))):
288 line.startswith("from: "))):
289 user = line[6:]
289 user = line[6:]
290 format = "tag"
290 format = "tag"
291 elif format == "tag" and line == "":
291 elif format == "tag" and line == "":
292 # when looking for tags (subject: from: etc) they
292 # when looking for tags (subject: from: etc) they
293 # end once you find a blank line in the source
293 # end once you find a blank line in the source
294 format = "tagdone"
294 format = "tagdone"
295 elif message or line:
295 elif message or line:
296 message.append(line)
296 message.append(line)
297 comments.append(line)
297 comments.append(line)
298
298
299 eatdiff(message)
299 eatdiff(message)
300 eatdiff(comments)
300 eatdiff(comments)
301 eatempty(message)
301 eatempty(message)
302 eatempty(comments)
302 eatempty(comments)
303
303
304 # make sure message isn't empty
304 # make sure message isn't empty
305 if format and format.startswith("tag") and subject:
305 if format and format.startswith("tag") and subject:
306 message.insert(0, "")
306 message.insert(0, "")
307 message.insert(0, subject)
307 message.insert(0, subject)
308 return (message, comments, user, date, diffstart > 1)
308 return (message, comments, user, date, diffstart > 1)
309
309
310 def removeundo(self, repo):
310 def removeundo(self, repo):
311 undo = repo.sjoin('undo')
311 undo = repo.sjoin('undo')
312 if not os.path.exists(undo):
312 if not os.path.exists(undo):
313 return
313 return
314 try:
314 try:
315 os.unlink(undo)
315 os.unlink(undo)
316 except OSError, inst:
316 except OSError, inst:
317 self.ui.warn('error removing undo: %s\n' % str(inst))
317 self.ui.warn('error removing undo: %s\n' % str(inst))
318
318
319 def printdiff(self, repo, node1, node2=None, files=None,
319 def printdiff(self, repo, node1, node2=None, files=None,
320 fp=None, changes=None, opts={}):
320 fp=None, changes=None, opts={}):
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322
322
323 patch.diff(repo, node1, node2, fns, match=matchfn,
323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 fp=fp, changes=changes, opts=self.diffopts())
324 fp=fp, changes=changes, opts=self.diffopts())
325
325
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 # first try just applying the patch
327 # first try just applying the patch
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 strict=True, merge=rev, wlock=wlock)
329 strict=True, merge=rev, wlock=wlock)
330
330
331 if err == 0:
331 if err == 0:
332 return (err, n)
332 return (err, n)
333
333
334 if n is None:
334 if n is None:
335 raise util.Abort(_("apply failed for patch %s") % patch)
335 raise util.Abort(_("apply failed for patch %s") % patch)
336
336
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338
338
339 # apply failed, strip away that rev and merge.
339 # apply failed, strip away that rev and merge.
340 hg.clean(repo, head, wlock=wlock)
340 hg.clean(repo, head, wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342
342
343 ctx = repo.changectx(rev)
343 ctx = repo.changectx(rev)
344 ret = hg.merge(repo, rev, wlock=wlock)
344 ret = hg.merge(repo, rev, wlock=wlock)
345 if ret:
345 if ret:
346 raise util.Abort(_("update returned %d") % ret)
346 raise util.Abort(_("update returned %d") % ret)
347 n = repo.commit(None, ctx.description(), ctx.user(),
347 n = repo.commit(None, ctx.description(), ctx.user(),
348 force=1, wlock=wlock)
348 force=1, wlock=wlock)
349 if n == None:
349 if n == None:
350 raise util.Abort(_("repo commit failed"))
350 raise util.Abort(_("repo commit failed"))
351 try:
351 try:
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 except:
353 except:
354 raise util.Abort(_("unable to read %s") % patch)
354 raise util.Abort(_("unable to read %s") % patch)
355
355
356 patchf = self.opener(patch, "w")
356 patchf = self.opener(patch, "w")
357 if comments:
357 if comments:
358 comments = "\n".join(comments) + '\n\n'
358 comments = "\n".join(comments) + '\n\n'
359 patchf.write(comments)
359 patchf.write(comments)
360 self.printdiff(repo, head, n, fp=patchf)
360 self.printdiff(repo, head, n, fp=patchf)
361 patchf.close()
361 patchf.close()
362 self.removeundo(repo)
362 self.removeundo(repo)
363 return (0, n)
363 return (0, n)
364
364
365 def qparents(self, repo, rev=None):
365 def qparents(self, repo, rev=None):
366 if rev is None:
366 if rev is None:
367 (p1, p2) = repo.dirstate.parents()
367 (p1, p2) = repo.dirstate.parents()
368 if p2 == revlog.nullid:
368 if p2 == revlog.nullid:
369 return p1
369 return p1
370 if len(self.applied) == 0:
370 if len(self.applied) == 0:
371 return None
371 return None
372 return revlog.bin(self.applied[-1].rev)
372 return revlog.bin(self.applied[-1].rev)
373 pp = repo.changelog.parents(rev)
373 pp = repo.changelog.parents(rev)
374 if pp[1] != revlog.nullid:
374 if pp[1] != revlog.nullid:
375 arevs = [ x.rev for x in self.applied ]
375 arevs = [ x.rev for x in self.applied ]
376 p0 = revlog.hex(pp[0])
376 p0 = revlog.hex(pp[0])
377 p1 = revlog.hex(pp[1])
377 p1 = revlog.hex(pp[1])
378 if p0 in arevs:
378 if p0 in arevs:
379 return pp[0]
379 return pp[0]
380 if p1 in arevs:
380 if p1 in arevs:
381 return pp[1]
381 return pp[1]
382 return pp[0]
382 return pp[0]
383
383
384 def mergepatch(self, repo, mergeq, series, wlock):
384 def mergepatch(self, repo, mergeq, series, wlock):
385 if len(self.applied) == 0:
385 if len(self.applied) == 0:
386 # each of the patches merged in will have two parents. This
386 # each of the patches merged in will have two parents. This
387 # can confuse the qrefresh, qdiff, and strip code because it
387 # can confuse the qrefresh, qdiff, and strip code because it
388 # needs to know which parent is actually in the patch queue.
388 # needs to know which parent is actually in the patch queue.
389 # so, we insert a merge marker with only one parent. This way
389 # so, we insert a merge marker with only one parent. This way
390 # the first patch in the queue is never a merge patch
390 # the first patch in the queue is never a merge patch
391 #
391 #
392 pname = ".hg.patches.merge.marker"
392 pname = ".hg.patches.merge.marker"
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 wlock=wlock)
394 wlock=wlock)
395 self.removeundo(repo)
395 self.removeundo(repo)
396 self.applied.append(statusentry(revlog.hex(n), pname))
396 self.applied.append(statusentry(revlog.hex(n), pname))
397 self.applied_dirty = 1
397 self.applied_dirty = 1
398
398
399 head = self.qparents(repo)
399 head = self.qparents(repo)
400
400
401 for patch in series:
401 for patch in series:
402 patch = mergeq.lookup(patch, strict=True)
402 patch = mergeq.lookup(patch, strict=True)
403 if not patch:
403 if not patch:
404 self.ui.warn("patch %s does not exist\n" % patch)
404 self.ui.warn("patch %s does not exist\n" % patch)
405 return (1, None)
405 return (1, None)
406 pushable, reason = self.pushable(patch)
406 pushable, reason = self.pushable(patch)
407 if not pushable:
407 if not pushable:
408 self.explain_pushable(patch, all_patches=True)
408 self.explain_pushable(patch, all_patches=True)
409 continue
409 continue
410 info = mergeq.isapplied(patch)
410 info = mergeq.isapplied(patch)
411 if not info:
411 if not info:
412 self.ui.warn("patch %s is not applied\n" % patch)
412 self.ui.warn("patch %s is not applied\n" % patch)
413 return (1, None)
413 return (1, None)
414 rev = revlog.bin(info[1])
414 rev = revlog.bin(info[1])
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 if head:
416 if head:
417 self.applied.append(statusentry(revlog.hex(head), patch))
417 self.applied.append(statusentry(revlog.hex(head), patch))
418 self.applied_dirty = 1
418 self.applied_dirty = 1
419 if err:
419 if err:
420 return (err, head)
420 return (err, head)
421 self.save_dirty()
421 self.save_dirty()
422 return (0, head)
422 return (0, head)
423
423
424 def patch(self, repo, patchfile):
424 def patch(self, repo, patchfile):
425 '''Apply patchfile to the working directory.
425 '''Apply patchfile to the working directory.
426 patchfile: file name of patch'''
426 patchfile: file name of patch'''
427 files = {}
427 files = {}
428 try:
428 try:
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 files=files)
430 files=files)
431 except Exception, inst:
431 except Exception, inst:
432 self.ui.note(str(inst) + '\n')
432 self.ui.note(str(inst) + '\n')
433 if not self.ui.verbose:
433 if not self.ui.verbose:
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 return (False, files, False)
435 return (False, files, False)
436
436
437 return (True, files, fuzz)
437 return (True, files, fuzz)
438
438
439 def apply(self, repo, series, list=False, update_status=True,
439 def apply(self, repo, series, list=False, update_status=True,
440 strict=False, patchdir=None, merge=None, wlock=None,
440 strict=False, patchdir=None, merge=None, wlock=None,
441 all_files={}):
441 all_files={}):
442 if not wlock:
442 if not wlock:
443 wlock = repo.wlock()
443 wlock = repo.wlock()
444 lock = repo.lock()
444 lock = repo.lock()
445 tr = repo.transaction()
445 tr = repo.transaction()
446 try:
446 try:
447 ret = self._apply(tr, repo, series, list, update_status,
447 ret = self._apply(tr, repo, series, list, update_status,
448 strict, patchdir, merge, wlock,
448 strict, patchdir, merge, wlock,
449 lock=lock, all_files=all_files)
449 lock=lock, all_files=all_files)
450 tr.close()
450 tr.close()
451 self.save_dirty()
451 self.save_dirty()
452 return ret
452 return ret
453 except:
453 except:
454 try:
454 try:
455 tr.abort()
455 tr.abort()
456 finally:
456 finally:
457 repo.invalidate()
457 repo.invalidate()
458 repo.dirstate.invalidate()
458 repo.dirstate.invalidate()
459 raise
459 raise
460
460
461 def _apply(self, tr, repo, series, list=False, update_status=True,
461 def _apply(self, tr, repo, series, list=False, update_status=True,
462 strict=False, patchdir=None, merge=None, wlock=None,
462 strict=False, patchdir=None, merge=None, wlock=None,
463 lock=None, all_files={}):
463 lock=None, all_files={}):
464 # TODO unify with commands.py
464 # TODO unify with commands.py
465 if not patchdir:
465 if not patchdir:
466 patchdir = self.path
466 patchdir = self.path
467 err = 0
467 err = 0
468 n = None
468 n = None
469 for patchname in series:
469 for patchname in series:
470 pushable, reason = self.pushable(patchname)
470 pushable, reason = self.pushable(patchname)
471 if not pushable:
471 if not pushable:
472 self.explain_pushable(patchname, all_patches=True)
472 self.explain_pushable(patchname, all_patches=True)
473 continue
473 continue
474 self.ui.warn("applying %s\n" % patchname)
474 self.ui.warn("applying %s\n" % patchname)
475 pf = os.path.join(patchdir, patchname)
475 pf = os.path.join(patchdir, patchname)
476
476
477 try:
477 try:
478 message, comments, user, date, patchfound = self.readheaders(patchname)
478 message, comments, user, date, patchfound = self.readheaders(patchname)
479 except:
479 except:
480 self.ui.warn("Unable to read %s\n" % patchname)
480 self.ui.warn("Unable to read %s\n" % patchname)
481 err = 1
481 err = 1
482 break
482 break
483
483
484 if not message:
484 if not message:
485 message = "imported patch %s\n" % patchname
485 message = "imported patch %s\n" % patchname
486 else:
486 else:
487 if list:
487 if list:
488 message.append("\nimported patch %s" % patchname)
488 message.append("\nimported patch %s" % patchname)
489 message = '\n'.join(message)
489 message = '\n'.join(message)
490
490
491 (patcherr, files, fuzz) = self.patch(repo, pf)
491 (patcherr, files, fuzz) = self.patch(repo, pf)
492 all_files.update(files)
492 all_files.update(files)
493 patcherr = not patcherr
493 patcherr = not patcherr
494
494
495 if merge and files:
495 if merge and files:
496 # Mark as removed/merged and update dirstate parent info
496 # Mark as removed/merged and update dirstate parent info
497 removed = []
497 removed = []
498 merged = []
498 merged = []
499 for f in files:
499 for f in files:
500 if os.path.exists(repo.wjoin(f)):
500 if os.path.exists(repo.wjoin(f)):
501 merged.append(f)
501 merged.append(f)
502 else:
502 else:
503 removed.append(f)
503 removed.append(f)
504 for f in removed:
504 for f in removed:
505 repo.dirstate.remove(f)
505 repo.dirstate.remove(f)
506 for f in merged:
506 for f in merged:
507 repo.dirstate.merge(f)
507 repo.dirstate.merge(f)
508 p1, p2 = repo.dirstate.parents()
508 p1, p2 = repo.dirstate.parents()
509 repo.dirstate.setparents(p1, merge)
509 repo.dirstate.setparents(p1, merge)
510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
511 n = repo.commit(files, message, user, date, force=1, lock=lock,
511 n = repo.commit(files, message, user, date, force=1, lock=lock,
512 wlock=wlock)
512 wlock=wlock)
513
513
514 if n == None:
514 if n == None:
515 raise util.Abort(_("repo commit failed"))
515 raise util.Abort(_("repo commit failed"))
516
516
517 if update_status:
517 if update_status:
518 self.applied.append(statusentry(revlog.hex(n), patchname))
518 self.applied.append(statusentry(revlog.hex(n), patchname))
519
519
520 if patcherr:
520 if patcherr:
521 if not patchfound:
521 if not patchfound:
522 self.ui.warn("patch %s is empty\n" % patchname)
522 self.ui.warn("patch %s is empty\n" % patchname)
523 err = 0
523 err = 0
524 else:
524 else:
525 self.ui.warn("patch failed, rejects left in working dir\n")
525 self.ui.warn("patch failed, rejects left in working dir\n")
526 err = 1
526 err = 1
527 break
527 break
528
528
529 if fuzz and strict:
529 if fuzz and strict:
530 self.ui.warn("fuzz found when applying patch, stopping\n")
530 self.ui.warn("fuzz found when applying patch, stopping\n")
531 err = 1
531 err = 1
532 break
532 break
533 self.removeundo(repo)
533 self.removeundo(repo)
534 return (err, n)
534 return (err, n)
535
535
536 def delete(self, repo, patches, opts):
536 def delete(self, repo, patches, opts):
537 if not patches and not opts.get('rev'):
537 if not patches and not opts.get('rev'):
538 raise util.Abort(_('qdelete requires at least one revision or '
538 raise util.Abort(_('qdelete requires at least one revision or '
539 'patch name'))
539 'patch name'))
540
540
541 realpatches = []
541 realpatches = []
542 for patch in patches:
542 for patch in patches:
543 patch = self.lookup(patch, strict=True)
543 patch = self.lookup(patch, strict=True)
544 info = self.isapplied(patch)
544 info = self.isapplied(patch)
545 if info:
545 if info:
546 raise util.Abort(_("cannot delete applied patch %s") % patch)
546 raise util.Abort(_("cannot delete applied patch %s") % patch)
547 if patch not in self.series:
547 if patch not in self.series:
548 raise util.Abort(_("patch %s not in series file") % patch)
548 raise util.Abort(_("patch %s not in series file") % patch)
549 realpatches.append(patch)
549 realpatches.append(patch)
550
550
551 appliedbase = 0
551 appliedbase = 0
552 if opts.get('rev'):
552 if opts.get('rev'):
553 if not self.applied:
553 if not self.applied:
554 raise util.Abort(_('no patches applied'))
554 raise util.Abort(_('no patches applied'))
555 revs = cmdutil.revrange(repo, opts['rev'])
555 revs = cmdutil.revrange(repo, opts['rev'])
556 if len(revs) > 1 and revs[0] > revs[1]:
556 if len(revs) > 1 and revs[0] > revs[1]:
557 revs.reverse()
557 revs.reverse()
558 for rev in revs:
558 for rev in revs:
559 if appliedbase >= len(self.applied):
559 if appliedbase >= len(self.applied):
560 raise util.Abort(_("revision %d is not managed") % rev)
560 raise util.Abort(_("revision %d is not managed") % rev)
561
561
562 base = revlog.bin(self.applied[appliedbase].rev)
562 base = revlog.bin(self.applied[appliedbase].rev)
563 node = repo.changelog.node(rev)
563 node = repo.changelog.node(rev)
564 if node != base:
564 if node != base:
565 raise util.Abort(_("cannot delete revision %d above "
565 raise util.Abort(_("cannot delete revision %d above "
566 "applied patches") % rev)
566 "applied patches") % rev)
567 realpatches.append(self.applied[appliedbase].name)
567 realpatches.append(self.applied[appliedbase].name)
568 appliedbase += 1
568 appliedbase += 1
569
569
570 if not opts.get('keep'):
570 if not opts.get('keep'):
571 r = self.qrepo()
571 r = self.qrepo()
572 if r:
572 if r:
573 r.remove(realpatches, True)
573 r.remove(realpatches, True)
574 else:
574 else:
575 for p in realpatches:
575 for p in realpatches:
576 os.unlink(self.join(p))
576 os.unlink(self.join(p))
577
577
578 if appliedbase:
578 if appliedbase:
579 del self.applied[:appliedbase]
579 del self.applied[:appliedbase]
580 self.applied_dirty = 1
580 self.applied_dirty = 1
581 indices = [self.find_series(p) for p in realpatches]
581 indices = [self.find_series(p) for p in realpatches]
582 indices.sort()
582 indices.sort()
583 for i in indices[-1::-1]:
583 for i in indices[-1::-1]:
584 del self.full_series[i]
584 del self.full_series[i]
585 self.parse_series()
585 self.parse_series()
586 self.series_dirty = 1
586 self.series_dirty = 1
587
587
588 def check_toppatch(self, repo):
588 def check_toppatch(self, repo):
589 if len(self.applied) > 0:
589 if len(self.applied) > 0:
590 top = revlog.bin(self.applied[-1].rev)
590 top = revlog.bin(self.applied[-1].rev)
591 pp = repo.dirstate.parents()
591 pp = repo.dirstate.parents()
592 if top not in pp:
592 if top not in pp:
593 raise util.Abort(_("queue top not at same revision as working directory"))
593 raise util.Abort(_("queue top not at same revision as working directory"))
594 return top
594 return top
595 return None
595 return None
596 def check_localchanges(self, repo, force=False, refresh=True):
596 def check_localchanges(self, repo, force=False, refresh=True):
597 m, a, r, d = repo.status()[:4]
597 m, a, r, d = repo.status()[:4]
598 if m or a or r or d:
598 if m or a or r or d:
599 if not force:
599 if not force:
600 if refresh:
600 if refresh:
601 raise util.Abort(_("local changes found, refresh first"))
601 raise util.Abort(_("local changes found, refresh first"))
602 else:
602 else:
603 raise util.Abort(_("local changes found"))
603 raise util.Abort(_("local changes found"))
604 return m, a, r, d
604 return m, a, r, d
605
605
606 def new(self, repo, patch, *pats, **opts):
606 def new(self, repo, patch, *pats, **opts):
607 msg = opts.get('msg')
607 msg = opts.get('msg')
608 force = opts.get('force')
608 force = opts.get('force')
609 if os.path.exists(self.join(patch)):
609 if os.path.exists(self.join(patch)):
610 raise util.Abort(_('patch "%s" already exists') % patch)
610 raise util.Abort(_('patch "%s" already exists') % patch)
611 if opts.get('include') or opts.get('exclude') or pats:
611 if opts.get('include') or opts.get('exclude') or pats:
612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
613 m, a, r, d = repo.status(files=fns, match=match)[:4]
613 m, a, r, d = repo.status(files=fns, match=match)[:4]
614 else:
614 else:
615 m, a, r, d = self.check_localchanges(repo, force)
615 m, a, r, d = self.check_localchanges(repo, force)
616 commitfiles = m + a + r
616 commitfiles = m + a + r
617 self.check_toppatch(repo)
617 self.check_toppatch(repo)
618 wlock = repo.wlock()
618 wlock = repo.wlock()
619 insert = self.full_series_end()
619 insert = self.full_series_end()
620 if msg:
620 if msg:
621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
622 else:
622 else:
623 n = repo.commit(commitfiles,
623 n = repo.commit(commitfiles,
624 "[mq]: %s" % patch, force=True, wlock=wlock)
624 "[mq]: %s" % patch, force=True, wlock=wlock)
625 if n == None:
625 if n == None:
626 raise util.Abort(_("repo commit failed"))
626 raise util.Abort(_("repo commit failed"))
627 self.full_series[insert:insert] = [patch]
627 self.full_series[insert:insert] = [patch]
628 self.applied.append(statusentry(revlog.hex(n), patch))
628 self.applied.append(statusentry(revlog.hex(n), patch))
629 self.parse_series()
629 self.parse_series()
630 self.series_dirty = 1
630 self.series_dirty = 1
631 self.applied_dirty = 1
631 self.applied_dirty = 1
632 p = self.opener(patch, "w")
632 p = self.opener(patch, "w")
633 if msg:
633 if msg:
634 msg = msg + "\n"
634 msg = msg + "\n"
635 p.write(msg)
635 p.write(msg)
636 p.close()
636 p.close()
637 wlock = None
637 wlock = None
638 r = self.qrepo()
638 r = self.qrepo()
639 if r: r.add([patch])
639 if r: r.add([patch])
640 if commitfiles:
640 if commitfiles:
641 self.refresh(repo, short=True)
641 self.refresh(repo, short=True)
642 self.removeundo(repo)
642 self.removeundo(repo)
643
643
644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
645 if not wlock:
645 if not wlock:
646 wlock = repo.wlock()
646 wlock = repo.wlock()
647 lock = repo.lock()
647 lock = repo.lock()
648
648
649 if update:
649 if update:
650 self.check_localchanges(repo, refresh=False)
650 self.check_localchanges(repo, refresh=False)
651 urev = self.qparents(repo, rev)
651 urev = self.qparents(repo, rev)
652 hg.clean(repo, urev, wlock=wlock)
652 hg.clean(repo, urev, wlock=wlock)
653 repo.dirstate.write()
653 repo.dirstate.write()
654
654
655 self.removeundo(repo)
655 self.removeundo(repo)
656 repair.strip(self.ui, repo, rev, backup)
656 repair.strip(self.ui, repo, rev, backup)
657
657
658 def isapplied(self, patch):
658 def isapplied(self, patch):
659 """returns (index, rev, patch)"""
659 """returns (index, rev, patch)"""
660 for i in xrange(len(self.applied)):
660 for i in xrange(len(self.applied)):
661 a = self.applied[i]
661 a = self.applied[i]
662 if a.name == patch:
662 if a.name == patch:
663 return (i, a.rev, a.name)
663 return (i, a.rev, a.name)
664 return None
664 return None
665
665
666 # if the exact patch name does not exist, we try a few
666 # if the exact patch name does not exist, we try a few
667 # variations. If strict is passed, we try only #1
667 # variations. If strict is passed, we try only #1
668 #
668 #
669 # 1) a number to indicate an offset in the series file
669 # 1) a number to indicate an offset in the series file
670 # 2) a unique substring of the patch name was given
670 # 2) a unique substring of the patch name was given
671 # 3) patchname[-+]num to indicate an offset in the series file
671 # 3) patchname[-+]num to indicate an offset in the series file
672 def lookup(self, patch, strict=False):
672 def lookup(self, patch, strict=False):
673 patch = patch and str(patch)
673 patch = patch and str(patch)
674
674
675 def partial_name(s):
675 def partial_name(s):
676 if s in self.series:
676 if s in self.series:
677 return s
677 return s
678 matches = [x for x in self.series if s in x]
678 matches = [x for x in self.series if s in x]
679 if len(matches) > 1:
679 if len(matches) > 1:
680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
681 for m in matches:
681 for m in matches:
682 self.ui.warn(' %s\n' % m)
682 self.ui.warn(' %s\n' % m)
683 return None
683 return None
684 if matches:
684 if matches:
685 return matches[0]
685 return matches[0]
686 if len(self.series) > 0 and len(self.applied) > 0:
686 if len(self.series) > 0 and len(self.applied) > 0:
687 if s == 'qtip':
687 if s == 'qtip':
688 return self.series[self.series_end(True)-1]
688 return self.series[self.series_end(True)-1]
689 if s == 'qbase':
689 if s == 'qbase':
690 return self.series[0]
690 return self.series[0]
691 return None
691 return None
692 if patch == None:
692 if patch == None:
693 return None
693 return None
694
694
695 # we don't want to return a partial match until we make
695 # we don't want to return a partial match until we make
696 # sure the file name passed in does not exist (checked below)
696 # sure the file name passed in does not exist (checked below)
697 res = partial_name(patch)
697 res = partial_name(patch)
698 if res and res == patch:
698 if res and res == patch:
699 return res
699 return res
700
700
701 if not os.path.isfile(self.join(patch)):
701 if not os.path.isfile(self.join(patch)):
702 try:
702 try:
703 sno = int(patch)
703 sno = int(patch)
704 except(ValueError, OverflowError):
704 except(ValueError, OverflowError):
705 pass
705 pass
706 else:
706 else:
707 if sno < len(self.series):
707 if sno < len(self.series):
708 return self.series[sno]
708 return self.series[sno]
709 if not strict:
709 if not strict:
710 # return any partial match made above
710 # return any partial match made above
711 if res:
711 if res:
712 return res
712 return res
713 minus = patch.rfind('-')
713 minus = patch.rfind('-')
714 if minus >= 0:
714 if minus >= 0:
715 res = partial_name(patch[:minus])
715 res = partial_name(patch[:minus])
716 if res:
716 if res:
717 i = self.series.index(res)
717 i = self.series.index(res)
718 try:
718 try:
719 off = int(patch[minus+1:] or 1)
719 off = int(patch[minus+1:] or 1)
720 except(ValueError, OverflowError):
720 except(ValueError, OverflowError):
721 pass
721 pass
722 else:
722 else:
723 if i - off >= 0:
723 if i - off >= 0:
724 return self.series[i - off]
724 return self.series[i - off]
725 plus = patch.rfind('+')
725 plus = patch.rfind('+')
726 if plus >= 0:
726 if plus >= 0:
727 res = partial_name(patch[:plus])
727 res = partial_name(patch[:plus])
728 if res:
728 if res:
729 i = self.series.index(res)
729 i = self.series.index(res)
730 try:
730 try:
731 off = int(patch[plus+1:] or 1)
731 off = int(patch[plus+1:] or 1)
732 except(ValueError, OverflowError):
732 except(ValueError, OverflowError):
733 pass
733 pass
734 else:
734 else:
735 if i + off < len(self.series):
735 if i + off < len(self.series):
736 return self.series[i + off]
736 return self.series[i + off]
737 raise util.Abort(_("patch %s not in series") % patch)
737 raise util.Abort(_("patch %s not in series") % patch)
738
738
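# Illustrative sketch, not part of mq: the name resolution lookup() performs
# above, reduced to plain lists. An exact name or a unique substring matches
# directly; "name-N" / "name+N" step backwards or forwards in the series.
# The sample patch names are hypothetical.
def resolve(series, name):
    def partial(s):
        if s in series:
            return s
        matches = [x for x in series if s in x]
        return matches[0] if len(matches) == 1 else None
    exact = partial(name)
    if exact:
        return exact
    for sep, sign in (('-', -1), ('+', 1)):
        idx = name.rfind(sep)
        if idx >= 0:
            base = partial(name[:idx])
            if base:
                off = int(name[idx + 1:] or 1)
                target = series.index(base) + sign * off
                if 0 <= target < len(series):
                    return series[target]
    return None

series = ['fix-io.patch', 'fix-ui.patch', 'feature.patch']
print(resolve(series, 'feature'))           # unique substring -> 'feature.patch'
print(resolve(series, 'feature.patch-2'))   # two entries earlier -> 'fix-io.patch'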
739 def push(self, repo, patch=None, force=False, list=False,
739 def push(self, repo, patch=None, force=False, list=False,
740 mergeq=None, wlock=None):
740 mergeq=None, wlock=None):
741 if not wlock:
741 if not wlock:
742 wlock = repo.wlock()
742 wlock = repo.wlock()
743 patch = self.lookup(patch)
743 patch = self.lookup(patch)
744 # Suppose our series file is: A B C and the current 'top' patch is B.
744 # Suppose our series file is: A B C and the current 'top' patch is B.
745 # qpush C should be performed (moving forward)
745 # qpush C should be performed (moving forward)
746 # qpush B is a NOP (no change)
746 # qpush B is a NOP (no change)
747 # qpush A is an error (can't go backwards with qpush)
747 # qpush A is an error (can't go backwards with qpush)
748 if patch:
748 if patch:
749 info = self.isapplied(patch)
749 info = self.isapplied(patch)
750 if info:
750 if info:
751 if info[0] < len(self.applied) - 1:
751 if info[0] < len(self.applied) - 1:
752 raise util.Abort(_("cannot push to a previous patch: %s") %
752 raise util.Abort(_("cannot push to a previous patch: %s") %
753 patch)
753 patch)
754 if info[0] < len(self.series) - 1:
754 if info[0] < len(self.series) - 1:
755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
756 else:
756 else:
757 self.ui.warn(_('all patches are currently applied\n'))
757 self.ui.warn(_('all patches are currently applied\n'))
758 return
758 return
759
759
760 # Following the above example, starting at 'top' of B:
760 # Following the above example, starting at 'top' of B:
761 # qpush should be performed (pushes C), but a subsequent qpush without
761 # qpush should be performed (pushes C), but a subsequent qpush without
762 # an argument is an error (nothing to apply). This allows a loop
762 # an argument is an error (nothing to apply). This allows a loop
763 # of "...while hg qpush..." to work as it detects an error when done
763 # of "...while hg qpush..." to work as it detects an error when done
764 if self.series_end() == len(self.series):
764 if self.series_end() == len(self.series):
765 self.ui.warn(_('patch series already fully applied\n'))
765 self.ui.warn(_('patch series already fully applied\n'))
766 return 1
766 return 1
767 if not force:
767 if not force:
768 self.check_localchanges(repo)
768 self.check_localchanges(repo)
769
769
770 self.applied_dirty = 1
770 self.applied_dirty = 1
771 start = self.series_end()
771 start = self.series_end()
772 if start > 0:
772 if start > 0:
773 self.check_toppatch(repo)
773 self.check_toppatch(repo)
774 if not patch:
774 if not patch:
775 patch = self.series[start]
775 patch = self.series[start]
776 end = start + 1
776 end = start + 1
777 else:
777 else:
778 end = self.series.index(patch, start) + 1
778 end = self.series.index(patch, start) + 1
779 s = self.series[start:end]
779 s = self.series[start:end]
780 all_files = {}
780 all_files = {}
781 try:
781 try:
782 if mergeq:
782 if mergeq:
783 ret = self.mergepatch(repo, mergeq, s, wlock)
783 ret = self.mergepatch(repo, mergeq, s, wlock)
784 else:
784 else:
785 ret = self.apply(repo, s, list, wlock=wlock,
785 ret = self.apply(repo, s, list, wlock=wlock,
786 all_files=all_files)
786 all_files=all_files)
787 except:
787 except:
788 self.ui.warn(_('cleaning up working directory...'))
788 self.ui.warn(_('cleaning up working directory...'))
789 node = repo.dirstate.parents()[0]
789 node = repo.dirstate.parents()[0]
790 hg.revert(repo, node, None, wlock)
790 hg.revert(repo, node, None, wlock)
791 unknown = repo.status(wlock=wlock)[4]
791 unknown = repo.status(wlock=wlock)[4]
792 # only remove unknown files that we know we touched or
792 # only remove unknown files that we know we touched or
793 # created while patching
793 # created while patching
794 for f in unknown:
794 for f in unknown:
795 if f in all_files:
795 if f in all_files:
796 util.unlink(repo.wjoin(f))
796 util.unlink(repo.wjoin(f))
797 self.ui.warn(_('done\n'))
797 self.ui.warn(_('done\n'))
798 raise
798 raise
799 top = self.applied[-1].name
799 top = self.applied[-1].name
800 if ret[0]:
800 if ret[0]:
801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
802 top)
802 top)
803 else:
803 else:
804 self.ui.write("Now at: %s\n" % top)
804 self.ui.write("Now at: %s\n" % top)
805 return ret[0]
805 return ret[0]
806
806
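# Illustrative sketch, not part of mq: the target checks push() makes above,
# restated on plain lists for the "series A B C, top is B" example from the
# comment. The names are hypothetical.
def push_target_state(series, applied, target):
    if target in applied:
        if applied.index(target) < len(applied) - 1:
            return 'error: cannot push to a previous patch'
        return 'no-op: %s is already at the top' % target
    if len(applied) == len(series):
        return 'error: patch series already fully applied'
    return 'push up to %s' % target

series = ['A', 'B', 'C']
print(push_target_state(series, ['A', 'B'], 'C'))   # push up to C
print(push_target_state(series, ['A', 'B'], 'B'))   # no-op, B already at the top
print(push_target_state(series, ['A', 'B'], 'A'))   # error, cannot go backwards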
807 def pop(self, repo, patch=None, force=False, update=True, all=False,
807 def pop(self, repo, patch=None, force=False, update=True, all=False,
808 wlock=None):
808 wlock=None):
809 def getfile(f, rev):
809 def getfile(f, rev):
810 t = repo.file(f).read(rev)
810 t = repo.file(f).read(rev)
811 repo.wfile(f, "w").write(t)
811 repo.wfile(f, "w").write(t)
812
812
813 if not wlock:
813 if not wlock:
814 wlock = repo.wlock()
814 wlock = repo.wlock()
815 if patch:
815 if patch:
816 # index, rev, patch
816 # index, rev, patch
817 info = self.isapplied(patch)
817 info = self.isapplied(patch)
818 if not info:
818 if not info:
819 patch = self.lookup(patch)
819 patch = self.lookup(patch)
820 info = self.isapplied(patch)
820 info = self.isapplied(patch)
821 if not info:
821 if not info:
822 raise util.Abort(_("patch %s is not applied") % patch)
822 raise util.Abort(_("patch %s is not applied") % patch)
823
823
824 if len(self.applied) == 0:
824 if len(self.applied) == 0:
825 # Allow qpop -a to work repeatedly,
825 # Allow qpop -a to work repeatedly,
826 # but not qpop without an argument
826 # but not qpop without an argument
827 self.ui.warn(_("no patches applied\n"))
827 self.ui.warn(_("no patches applied\n"))
828 return not all
828 return not all
829
829
830 if not update:
830 if not update:
831 parents = repo.dirstate.parents()
831 parents = repo.dirstate.parents()
832 rr = [ revlog.bin(x.rev) for x in self.applied ]
832 rr = [ revlog.bin(x.rev) for x in self.applied ]
833 for p in parents:
833 for p in parents:
834 if p in rr:
834 if p in rr:
835 self.ui.warn("qpop: forcing dirstate update\n")
835 self.ui.warn("qpop: forcing dirstate update\n")
836 update = True
836 update = True
837
837
838 if not force and update:
838 if not force and update:
839 self.check_localchanges(repo)
839 self.check_localchanges(repo)
840
840
841 self.applied_dirty = 1
841 self.applied_dirty = 1
842 end = len(self.applied)
842 end = len(self.applied)
843 if not patch:
843 if not patch:
844 if all:
844 if all:
845 popi = 0
845 popi = 0
846 else:
846 else:
847 popi = len(self.applied) - 1
847 popi = len(self.applied) - 1
848 else:
848 else:
849 popi = info[0] + 1
849 popi = info[0] + 1
850 if popi >= end:
850 if popi >= end:
851 self.ui.warn("qpop: %s is already at the top\n" % patch)
851 self.ui.warn("qpop: %s is already at the top\n" % patch)
852 return
852 return
853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
854
854
855 start = info[0]
855 start = info[0]
856 rev = revlog.bin(info[1])
856 rev = revlog.bin(info[1])
857
857
858 # we know there are no local changes, so we can make a simplified
858 # we know there are no local changes, so we can make a simplified
859 # form of hg.update.
859 # form of hg.update.
860 if update:
860 if update:
861 top = self.check_toppatch(repo)
861 top = self.check_toppatch(repo)
862 qp = self.qparents(repo, rev)
862 qp = self.qparents(repo, rev)
863 changes = repo.changelog.read(qp)
863 changes = repo.changelog.read(qp)
864 mmap = repo.manifest.read(changes[0])
864 mmap = repo.manifest.read(changes[0])
865 m, a, r, d, u = repo.status(qp, top)[:5]
865 m, a, r, d, u = repo.status(qp, top)[:5]
866 if d:
866 if d:
867 raise util.Abort("deletions found between repo revs")
867 raise util.Abort("deletions found between repo revs")
868 for f in m:
868 for f in m:
869 getfile(f, mmap[f])
869 getfile(f, mmap[f])
870 for f in r:
870 for f in r:
871 getfile(f, mmap[f])
871 getfile(f, mmap[f])
872 util.set_exec(repo.wjoin(f), mmap.execf(f))
872 util.set_exec(repo.wjoin(f), mmap.execf(f))
873 for f in m + r:
873 for f in m + r:
874 repo.dirstate.normal(f)
874 repo.dirstate.normal(f)
875 for f in a:
875 for f in a:
876 try:
876 try:
877 os.unlink(repo.wjoin(f))
877 os.unlink(repo.wjoin(f))
878 except OSError, e:
878 except OSError, e:
879 if e.errno != errno.ENOENT:
879 if e.errno != errno.ENOENT:
880 raise
880 raise
881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
882 except: pass
882 except: pass
883 repo.dirstate.forget(f)
883 repo.dirstate.forget(f)
884 repo.dirstate.setparents(qp, revlog.nullid)
884 repo.dirstate.setparents(qp, revlog.nullid)
885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
886 del self.applied[start:end]
886 del self.applied[start:end]
887 if len(self.applied):
887 if len(self.applied):
888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
889 else:
889 else:
890 self.ui.write("Patch queue now empty\n")
890 self.ui.write("Patch queue now empty\n")
891
891
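# Illustrative sketch, not part of mq: how pop() chooses which applied
# patches to remove. popi is the first entry to pop; applied[popi:] go away.
# Patch names are hypothetical.
def pop_range(applied, patch=None, pop_all=False):
    end = len(applied)
    if patch is None:
        popi = 0 if pop_all else len(applied) - 1
    else:
        popi = applied.index(patch) + 1
    if popi >= end:
        return []                       # already at the top, nothing to do
    return applied[popi:end]            # these entries get popped

applied = ['A', 'B', 'C']
print(pop_range(applied))                     # ['C']            plain qpop
print(pop_range(applied, patch='A'))          # ['B', 'C']       qpop A keeps A applied
print(pop_range(applied, pop_all=True))       # ['A', 'B', 'C']  qpop -a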
892 def diff(self, repo, pats, opts):
892 def diff(self, repo, pats, opts):
893 top = self.check_toppatch(repo)
893 top = self.check_toppatch(repo)
894 if not top:
894 if not top:
895 self.ui.write("No patches applied\n")
895 self.ui.write("No patches applied\n")
896 return
896 return
897 qp = self.qparents(repo, top)
897 qp = self.qparents(repo, top)
898 if opts.get('git'):
898 if opts.get('git'):
899 self.diffopts().git = True
899 self.diffopts().git = True
900 self.printdiff(repo, qp, files=pats, opts=opts)
900 self.printdiff(repo, qp, files=pats, opts=opts)
901
901
902 def refresh(self, repo, pats=None, **opts):
902 def refresh(self, repo, pats=None, **opts):
903 if len(self.applied) == 0:
903 if len(self.applied) == 0:
904 self.ui.write("No patches applied\n")
904 self.ui.write("No patches applied\n")
905 return 1
905 return 1
906 wlock = repo.wlock()
906 wlock = repo.wlock()
907 self.check_toppatch(repo)
907 self.check_toppatch(repo)
908 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
908 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
909 top = revlog.bin(top)
909 top = revlog.bin(top)
910 cparents = repo.changelog.parents(top)
910 cparents = repo.changelog.parents(top)
911 patchparent = self.qparents(repo, top)
911 patchparent = self.qparents(repo, top)
912 message, comments, user, date, patchfound = self.readheaders(patchfn)
912 message, comments, user, date, patchfound = self.readheaders(patchfn)
913
913
914 patchf = self.opener(patchfn, 'r+')
914 patchf = self.opener(patchfn, 'r+')
915
915
916 # if the patch was a git patch, refresh it as a git patch
916 # if the patch was a git patch, refresh it as a git patch
917 for line in patchf:
917 for line in patchf:
918 if line.startswith('diff --git'):
918 if line.startswith('diff --git'):
919 self.diffopts().git = True
919 self.diffopts().git = True
920 break
920 break
921 patchf.seek(0)
921 patchf.seek(0)
922 patchf.truncate()
922 patchf.truncate()
923
923
924 msg = opts.get('msg', '').rstrip()
924 msg = opts.get('msg', '').rstrip()
925 if msg:
925 if msg:
926 if comments:
926 if comments:
927 # Remove existing message.
927 # Remove existing message.
928 ci = 0
928 ci = 0
929 subj = None
929 subj = None
930 for mi in xrange(len(message)):
930 for mi in xrange(len(message)):
931 if comments[ci].lower().startswith('subject: '):
931 if comments[ci].lower().startswith('subject: '):
932 subj = comments[ci][9:]
932 subj = comments[ci][9:]
933 while message[mi] != comments[ci] and message[mi] != subj:
933 while message[mi] != comments[ci] and message[mi] != subj:
934 ci += 1
934 ci += 1
935 del comments[ci]
935 del comments[ci]
936 comments.append(msg)
936 comments.append(msg)
937 if comments:
937 if comments:
938 comments = "\n".join(comments) + '\n\n'
938 comments = "\n".join(comments) + '\n\n'
939 patchf.write(comments)
939 patchf.write(comments)
940
940
941 if opts.get('git'):
941 if opts.get('git'):
942 self.diffopts().git = True
942 self.diffopts().git = True
943 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
943 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
944 tip = repo.changelog.tip()
944 tip = repo.changelog.tip()
945 if top == tip:
945 if top == tip:
946 # if the top of our patch queue is also the tip, there is an
946 # if the top of our patch queue is also the tip, there is an
947 # optimization here. We update the dirstate in place and strip
947 # optimization here. We update the dirstate in place and strip
948 # off the tip commit. Then just commit the current directory
948 # off the tip commit. Then just commit the current directory
949 # tree. We can also send repo.commit the list of files
949 # tree. We can also send repo.commit the list of files
950 # changed to speed up the diff
950 # changed to speed up the diff
951 #
951 #
952 # in short mode, we only diff the files included in the
952 # in short mode, we only diff the files included in the
953 # patch already
953 # patch already
954 #
954 #
955 # this should really read:
955 # this should really read:
956 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
956 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
957 # but we do it backwards to take advantage of manifest/chlog
957 # but we do it backwards to take advantage of manifest/chlog
958 # caching against the next repo.status call
958 # caching against the next repo.status call
959 #
959 #
960 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
960 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
961 changes = repo.changelog.read(tip)
961 changes = repo.changelog.read(tip)
962 man = repo.manifest.read(changes[0])
962 man = repo.manifest.read(changes[0])
963 aaa = aa[:]
963 aaa = aa[:]
964 if opts.get('short'):
964 if opts.get('short'):
965 filelist = mm + aa + dd
965 filelist = mm + aa + dd
966 match = dict.fromkeys(filelist).__contains__
966 match = dict.fromkeys(filelist).__contains__
967 else:
967 else:
968 filelist = None
968 filelist = None
969 match = util.always
969 match = util.always
970 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
970 m, a, r, d, u = repo.status(files=filelist, match=match)[:5]
971
971
972 # we might end up with files that were added between tip and
972 # we might end up with files that were added between tip and
973 # the dirstate parent, but then changed in the local dirstate.
973 # the dirstate parent, but then changed in the local dirstate.
974 # in this case, we want them to only show up in the added section
974 # in this case, we want them to only show up in the added section
975 for x in m:
975 for x in m:
976 if x not in aa:
976 if x not in aa:
977 mm.append(x)
977 mm.append(x)
978 # we might end up with files added by the local dirstate that
978 # we might end up with files added by the local dirstate that
979 # were deleted by the patch. In this case, they should only
979 # were deleted by the patch. In this case, they should only
980 # show up in the changed section.
980 # show up in the changed section.
981 for x in a:
981 for x in a:
982 if x in dd:
982 if x in dd:
983 del dd[dd.index(x)]
983 del dd[dd.index(x)]
984 mm.append(x)
984 mm.append(x)
985 else:
985 else:
986 aa.append(x)
986 aa.append(x)
987 # make sure any files deleted in the local dirstate
987 # make sure any files deleted in the local dirstate
988 # are not in the add or change column of the patch
988 # are not in the add or change column of the patch
989 forget = []
989 forget = []
990 for x in d + r:
990 for x in d + r:
991 if x in aa:
991 if x in aa:
992 del aa[aa.index(x)]
992 del aa[aa.index(x)]
993 forget.append(x)
993 forget.append(x)
994 continue
994 continue
995 elif x in mm:
995 elif x in mm:
996 del mm[mm.index(x)]
996 del mm[mm.index(x)]
997 dd.append(x)
997 dd.append(x)
998
998
999 m = util.unique(mm)
999 m = util.unique(mm)
1000 r = util.unique(dd)
1000 r = util.unique(dd)
1001 a = util.unique(aa)
1001 a = util.unique(aa)
1002 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1002 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1003 filelist = util.unique(c[0] + c[1] + c[2])
1003 filelist = util.unique(c[0] + c[1] + c[2])
1004 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1004 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1005 fp=patchf, changes=c, opts=self.diffopts())
1005 fp=patchf, changes=c, opts=self.diffopts())
1006 patchf.close()
1006 patchf.close()
1007
1007
1008 repo.dirstate.setparents(*cparents)
1008 repo.dirstate.setparents(*cparents)
1009 copies = {}
1009 copies = {}
1010 for dst in a:
1010 for dst in a:
1011 src = repo.dirstate.copied(dst)
1011 src = repo.dirstate.copied(dst)
1012 if src is None:
1012 if src is None:
1013 continue
1013 continue
1014 copies.setdefault(src, []).append(dst)
1014 copies.setdefault(src, []).append(dst)
1015 repo.dirstate.add(dst)
1015 repo.dirstate.add(dst)
1016 # remember the copies between patchparent and tip
1016 # remember the copies between patchparent and tip
1017 # this may be slow, so don't do it if we're not tracking copies
1017 # this may be slow, so don't do it if we're not tracking copies
1018 if self.diffopts().git:
1018 if self.diffopts().git:
1019 for dst in aaa:
1019 for dst in aaa:
1020 f = repo.file(dst)
1020 f = repo.file(dst)
1021 src = f.renamed(man[dst])
1021 src = f.renamed(man[dst])
1022 if src:
1022 if src:
1023 copies[src[0]] = copies.get(dst, [])
1023 copies[src[0]] = copies.get(dst, [])
1024 if dst in a:
1024 if dst in a:
1025 copies[src[0]].append(dst)
1025 copies[src[0]].append(dst)
1026 # we can't copy a file created by the patch itself
1026 # we can't copy a file created by the patch itself
1027 if dst in copies:
1027 if dst in copies:
1028 del copies[dst]
1028 del copies[dst]
1029 for src, dsts in copies.iteritems():
1029 for src, dsts in copies.iteritems():
1030 for dst in dsts:
1030 for dst in dsts:
1031 repo.dirstate.copy(src, dst)
1031 repo.dirstate.copy(src, dst)
1032 for f in r:
1032 for f in r:
1033 repo.dirstate.remove(f)
1033 repo.dirstate.remove(f)
1034 # if the patch excludes a modified file, mark that file with mtime=0
1034 # if the patch excludes a modified file, mark that file with mtime=0
1035 # so status can see it.
1035 # so status can see it.
1036 mm = []
1036 mm = []
1037 for i in xrange(len(m)-1, -1, -1):
1037 for i in xrange(len(m)-1, -1, -1):
1038 if not matchfn(m[i]):
1038 if not matchfn(m[i]):
1039 mm.append(m[i])
1039 mm.append(m[i])
1040 del m[i]
1040 del m[i]
1041 for f in m:
1041 for f in m:
1042 repo.dirstate.normal(f)
1042 repo.dirstate.normal(f)
1043 for f in mm:
1043 for f in mm:
1044 repo.dirstate.normaldirty(f)
1044 repo.dirstate.normaldirty(f)
1045 for f in forget:
1045 for f in forget:
1046 repo.dirstate.forget(f)
1046 repo.dirstate.forget(f)
1047
1047
1048 if not msg:
1048 if not msg:
1049 if not message:
1049 if not message:
1050 message = "[mq]: %s\n" % patchfn
1050 message = "[mq]: %s\n" % patchfn
1051 else:
1051 else:
1052 message = "\n".join(message)
1052 message = "\n".join(message)
1053 else:
1053 else:
1054 message = msg
1054 message = msg
1055
1055
1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1056 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1057 n = repo.commit(filelist, message, changes[1], match=matchfn,
1057 n = repo.commit(filelist, message, changes[1], match=matchfn,
1058 force=1, wlock=wlock)
1058 force=1, wlock=wlock)
1059 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1059 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1060 self.applied_dirty = 1
1060 self.applied_dirty = 1
1061 self.removeundo(repo)
1061 self.removeundo(repo)
1062 else:
1062 else:
1063 self.printdiff(repo, patchparent, fp=patchf)
1063 self.printdiff(repo, patchparent, fp=patchf)
1064 patchf.close()
1064 patchf.close()
1065 added = repo.status()[1]
1065 added = repo.status()[1]
1066 for a in added:
1066 for a in added:
1067 f = repo.wjoin(a)
1067 f = repo.wjoin(a)
1068 try:
1068 try:
1069 os.unlink(f)
1069 os.unlink(f)
1070 except OSError, e:
1070 except OSError, e:
1071 if e.errno != errno.ENOENT:
1071 if e.errno != errno.ENOENT:
1072 raise
1072 raise
1073 try: os.removedirs(os.path.dirname(f))
1073 try: os.removedirs(os.path.dirname(f))
1074 except: pass
1074 except: pass
1075 # forget the file copies in the dirstate
1075 # forget the file copies in the dirstate
1076 # push should re-add the files later on
1076 # push should re-add the files later on
1077 repo.dirstate.forget(a)
1077 repo.dirstate.forget(a)
1078 self.pop(repo, force=True, wlock=wlock)
1078 self.pop(repo, force=True, wlock=wlock)
1079 self.push(repo, force=True, wlock=wlock)
1079 self.push(repo, force=True, wlock=wlock)
1080
1080
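# Illustrative sketch, not part of mq: the copy bookkeeping refresh() does
# above -- invert per-file (destination -> source) records into a
# source -> [destinations] map, then replay each pair. File names are made up.
copied = {'b.txt': 'a.txt', 'c.txt': 'a.txt'}   # dst -> src, as dirstate.copied() reports
copies = {}
for dst, src in copied.items():
    copies.setdefault(src, []).append(dst)
for src, dsts in copies.items():
    for dst in dsts:
        print('copy %s -> %s' % (src, dst))     # stands in for repo.dirstate.copy(src, dst)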
1081 def init(self, repo, create=False):
1081 def init(self, repo, create=False):
1082 if not create and os.path.isdir(self.path):
1082 if not create and os.path.isdir(self.path):
1083 raise util.Abort(_("patch queue directory already exists"))
1083 raise util.Abort(_("patch queue directory already exists"))
1084 try:
1084 try:
1085 os.mkdir(self.path)
1085 os.mkdir(self.path)
1086 except OSError, inst:
1086 except OSError, inst:
1087 if inst.errno != errno.EEXIST or not create:
1087 if inst.errno != errno.EEXIST or not create:
1088 raise
1088 raise
1089 if create:
1089 if create:
1090 return self.qrepo(create=True)
1090 return self.qrepo(create=True)
1091
1091
1092 def unapplied(self, repo, patch=None):
1092 def unapplied(self, repo, patch=None):
1093 if patch and patch not in self.series:
1093 if patch and patch not in self.series:
1094 raise util.Abort(_("patch %s is not in series file") % patch)
1094 raise util.Abort(_("patch %s is not in series file") % patch)
1095 if not patch:
1095 if not patch:
1096 start = self.series_end()
1096 start = self.series_end()
1097 else:
1097 else:
1098 start = self.series.index(patch) + 1
1098 start = self.series.index(patch) + 1
1099 unapplied = []
1099 unapplied = []
1100 for i in xrange(start, len(self.series)):
1100 for i in xrange(start, len(self.series)):
1101 pushable, reason = self.pushable(i)
1101 pushable, reason = self.pushable(i)
1102 if pushable:
1102 if pushable:
1103 unapplied.append((i, self.series[i]))
1103 unapplied.append((i, self.series[i]))
1104 self.explain_pushable(i)
1104 self.explain_pushable(i)
1105 return unapplied
1105 return unapplied
1106
1106
1107 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1107 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1108 summary=False):
1108 summary=False):
1109 def displayname(patchname):
1109 def displayname(patchname):
1110 if summary:
1110 if summary:
1111 msg = self.readheaders(patchname)[0]
1111 msg = self.readheaders(patchname)[0]
1112 msg = msg and ': ' + msg[0] or ': '
1112 msg = msg and ': ' + msg[0] or ': '
1113 else:
1113 else:
1114 msg = ''
1114 msg = ''
1115 return '%s%s' % (patchname, msg)
1115 return '%s%s' % (patchname, msg)
1116
1116
1117 applied = dict.fromkeys([p.name for p in self.applied])
1117 applied = dict.fromkeys([p.name for p in self.applied])
1118 if length is None:
1118 if length is None:
1119 length = len(self.series) - start
1119 length = len(self.series) - start
1120 if not missing:
1120 if not missing:
1121 for i in xrange(start, start+length):
1121 for i in xrange(start, start+length):
1122 patch = self.series[i]
1122 patch = self.series[i]
1123 if patch in applied:
1123 if patch in applied:
1124 stat = 'A'
1124 stat = 'A'
1125 elif self.pushable(i)[0]:
1125 elif self.pushable(i)[0]:
1126 stat = 'U'
1126 stat = 'U'
1127 else:
1127 else:
1128 stat = 'G'
1128 stat = 'G'
1129 pfx = ''
1129 pfx = ''
1130 if self.ui.verbose:
1130 if self.ui.verbose:
1131 pfx = '%d %s ' % (i, stat)
1131 pfx = '%d %s ' % (i, stat)
1132 elif status and status != stat:
1132 elif status and status != stat:
1133 continue
1133 continue
1134 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1134 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1135 else:
1135 else:
1136 msng_list = []
1136 msng_list = []
1137 for root, dirs, files in os.walk(self.path):
1137 for root, dirs, files in os.walk(self.path):
1138 d = root[len(self.path) + 1:]
1138 d = root[len(self.path) + 1:]
1139 for f in files:
1139 for f in files:
1140 fl = os.path.join(d, f)
1140 fl = os.path.join(d, f)
1141 if (fl not in self.series and
1141 if (fl not in self.series and
1142 fl not in (self.status_path, self.series_path,
1142 fl not in (self.status_path, self.series_path,
1143 self.guards_path)
1143 self.guards_path)
1144 and not fl.startswith('.')):
1144 and not fl.startswith('.')):
1145 msng_list.append(fl)
1145 msng_list.append(fl)
1146 msng_list.sort()
1146 msng_list.sort()
1147 for x in msng_list:
1147 for x in msng_list:
1148 pfx = self.ui.verbose and ('D ') or ''
1148 pfx = self.ui.verbose and ('D ') or ''
1149 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1149 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1150
1150
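# Illustrative sketch, not part of mq: the status column qseries prints in
# verbose mode -- 'A' applied, 'U' unapplied but pushable, 'G' guarded
# (not pushable); files missing from the series get 'D'. The data is made up.
series = ['A.patch', 'B.patch', 'C.patch']
applied = {'A.patch'}
guarded = {'C.patch'}
for i, patch in enumerate(series):
    if patch in applied:
        stat = 'A'
    elif patch not in guarded:
        stat = 'U'
    else:
        stat = 'G'
    print('%d %s %s' % (i, stat, patch))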
1151 def issaveline(self, l):
1151 def issaveline(self, l):
1152 if l.name == '.hg.patches.save.line':
1152 if l.name == '.hg.patches.save.line':
1153 return True
1153 return True
1154
1154
1155 def qrepo(self, create=False):
1155 def qrepo(self, create=False):
1156 if create or os.path.isdir(self.join(".hg")):
1156 if create or os.path.isdir(self.join(".hg")):
1157 return hg.repository(self.ui, path=self.path, create=create)
1157 return hg.repository(self.ui, path=self.path, create=create)
1158
1158
1159 def restore(self, repo, rev, delete=None, qupdate=None):
1159 def restore(self, repo, rev, delete=None, qupdate=None):
1160 c = repo.changelog.read(rev)
1160 c = repo.changelog.read(rev)
1161 desc = c[4].strip()
1161 desc = c[4].strip()
1162 lines = desc.splitlines()
1162 lines = desc.splitlines()
1163 i = 0
1163 i = 0
1164 datastart = None
1164 datastart = None
1165 series = []
1165 series = []
1166 applied = []
1166 applied = []
1167 qpp = None
1167 qpp = None
1168 for i in xrange(0, len(lines)):
1168 for i in xrange(0, len(lines)):
1169 if lines[i] == 'Patch Data:':
1169 if lines[i] == 'Patch Data:':
1170 datastart = i + 1
1170 datastart = i + 1
1171 elif lines[i].startswith('Dirstate:'):
1171 elif lines[i].startswith('Dirstate:'):
1172 l = lines[i].rstrip()
1172 l = lines[i].rstrip()
1173 l = l[10:].split(' ')
1173 l = l[10:].split(' ')
1174 qpp = [ hg.bin(x) for x in l ]
1174 qpp = [ hg.bin(x) for x in l ]
1175 elif datastart != None:
1175 elif datastart != None:
1176 l = lines[i].rstrip()
1176 l = lines[i].rstrip()
1177 se = statusentry(l)
1177 se = statusentry(l)
1178 file_ = se.name
1178 file_ = se.name
1179 if se.rev:
1179 if se.rev:
1180 applied.append(se)
1180 applied.append(se)
1181 else:
1181 else:
1182 series.append(file_)
1182 series.append(file_)
1183 if datastart == None:
1183 if datastart == None:
1184 self.ui.warn("No saved patch data found\n")
1184 self.ui.warn("No saved patch data found\n")
1185 return 1
1185 return 1
1186 self.ui.warn("restoring status: %s\n" % lines[0])
1186 self.ui.warn("restoring status: %s\n" % lines[0])
1187 self.full_series = series
1187 self.full_series = series
1188 self.applied = applied
1188 self.applied = applied
1189 self.parse_series()
1189 self.parse_series()
1190 self.series_dirty = 1
1190 self.series_dirty = 1
1191 self.applied_dirty = 1
1191 self.applied_dirty = 1
1192 heads = repo.changelog.heads()
1192 heads = repo.changelog.heads()
1193 if delete:
1193 if delete:
1194 if rev not in heads:
1194 if rev not in heads:
1195 self.ui.warn("save entry has children, leaving it alone\n")
1195 self.ui.warn("save entry has children, leaving it alone\n")
1196 else:
1196 else:
1197 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1197 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1198 pp = repo.dirstate.parents()
1198 pp = repo.dirstate.parents()
1199 if rev in pp:
1199 if rev in pp:
1200 update = True
1200 update = True
1201 else:
1201 else:
1202 update = False
1202 update = False
1203 self.strip(repo, rev, update=update, backup='strip')
1203 self.strip(repo, rev, update=update, backup='strip')
1204 if qpp:
1204 if qpp:
1205 self.ui.warn("saved queue repository parents: %s %s\n" %
1205 self.ui.warn("saved queue repository parents: %s %s\n" %
1206 (hg.short(qpp[0]), hg.short(qpp[1])))
1206 (hg.short(qpp[0]), hg.short(qpp[1])))
1207 if qupdate:
1207 if qupdate:
1208 print "queue directory updating"
1208 print "queue directory updating"
1209 r = self.qrepo()
1209 r = self.qrepo()
1210 if not r:
1210 if not r:
1211 self.ui.warn("Unable to load queue repository\n")
1211 self.ui.warn("Unable to load queue repository\n")
1212 return 1
1212 return 1
1213 hg.clean(r, qpp[0])
1213 hg.clean(r, qpp[0])
1214
1214
1215 def save(self, repo, msg=None):
1215 def save(self, repo, msg=None):
1216 if len(self.applied) == 0:
1216 if len(self.applied) == 0:
1217 self.ui.warn("save: no patches applied, exiting\n")
1217 self.ui.warn("save: no patches applied, exiting\n")
1218 return 1
1218 return 1
1219 if self.issaveline(self.applied[-1]):
1219 if self.issaveline(self.applied[-1]):
1220 self.ui.warn("status is already saved\n")
1220 self.ui.warn("status is already saved\n")
1221 return 1
1221 return 1
1222
1222
1223 ar = [ ':' + x for x in self.full_series ]
1223 ar = [ ':' + x for x in self.full_series ]
1224 if not msg:
1224 if not msg:
1225 msg = "hg patches saved state"
1225 msg = "hg patches saved state"
1226 else:
1226 else:
1227 msg = "hg patches: " + msg.rstrip('\r\n')
1227 msg = "hg patches: " + msg.rstrip('\r\n')
1228 r = self.qrepo()
1228 r = self.qrepo()
1229 if r:
1229 if r:
1230 pp = r.dirstate.parents()
1230 pp = r.dirstate.parents()
1231 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1231 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1232 msg += "\n\nPatch Data:\n"
1232 msg += "\n\nPatch Data:\n"
1233 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1233 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1234 "\n".join(ar) + '\n' or "")
1234 "\n".join(ar) + '\n' or "")
1235 n = repo.commit(None, text, user=None, force=1)
1235 n = repo.commit(None, text, user=None, force=1)
1236 if not n:
1236 if not n:
1237 self.ui.warn("repo commit failed\n")
1237 self.ui.warn("repo commit failed\n")
1238 return 1
1238 return 1
1239 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1239 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1240 self.applied_dirty = 1
1240 self.applied_dirty = 1
1241 self.removeundo(repo)
1241 self.removeundo(repo)
1242
1242
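# Illustrative sketch, not part of mq: the text layout save() writes into the
# save changeset and restore() parses back. Judging from the surrounding
# code, applied patches appear as "rev:name" lines and unapplied series
# entries as ":name"; the hashes below are invented.
desc = """hg patches saved state
Dirstate: 1111111111111111111111111111111111111111 0000000000000000000000000000000000000000

Patch Data:
2222222222222222222222222222222222222222:first.patch
:second.patch
"""
applied, series = [], []
datastart = None
for i, line in enumerate(desc.splitlines()):
    if line == 'Patch Data:':
        datastart = i + 1
    elif line.startswith('Dirstate:'):
        qpp = line[10:].split(' ')          # queue repository parent hashes
    elif datastart is not None:
        rev, name = line.split(':', 1)
        (applied if rev else series).append(name)
print(applied)   # ['first.patch']
print(series)    # ['second.patch']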
1243 def full_series_end(self):
1243 def full_series_end(self):
1244 if len(self.applied) > 0:
1244 if len(self.applied) > 0:
1245 p = self.applied[-1].name
1245 p = self.applied[-1].name
1246 end = self.find_series(p)
1246 end = self.find_series(p)
1247 if end == None:
1247 if end == None:
1248 return len(self.full_series)
1248 return len(self.full_series)
1249 return end + 1
1249 return end + 1
1250 return 0
1250 return 0
1251
1251
1252 def series_end(self, all_patches=False):
1252 def series_end(self, all_patches=False):
1253 """If all_patches is False, return the index of the next pushable patch
1253 """If all_patches is False, return the index of the next pushable patch
1254 in the series, or the series length. If all_patches is True, return the
1254 in the series, or the series length. If all_patches is True, return the
1255 index of the first patch past the last applied one.
1255 index of the first patch past the last applied one.
1256 """
1256 """
1257 end = 0
1257 end = 0
1258 def next(start):
1258 def next(start):
1259 if all_patches:
1259 if all_patches:
1260 return start
1260 return start
1261 i = start
1261 i = start
1262 while i < len(self.series):
1262 while i < len(self.series):
1263 p, reason = self.pushable(i)
1263 p, reason = self.pushable(i)
1264 if p:
1264 if p:
1265 break
1265 break
1266 self.explain_pushable(i)
1266 self.explain_pushable(i)
1267 i += 1
1267 i += 1
1268 return i
1268 return i
1269 if len(self.applied) > 0:
1269 if len(self.applied) > 0:
1270 p = self.applied[-1].name
1270 p = self.applied[-1].name
1271 try:
1271 try:
1272 end = self.series.index(p)
1272 end = self.series.index(p)
1273 except ValueError:
1273 except ValueError:
1274 return 0
1274 return 0
1275 return next(end + 1)
1275 return next(end + 1)
1276 return next(end)
1276 return next(end)
1277
1277
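# Illustrative sketch, not part of mq: series_end() on toy data. With
# all_patches=True it is simply the index just past the last applied patch;
# otherwise guarded (unpushable) patches after that point are skipped.
def toy_series_end(series, applied, guarded=(), all_patches=False):
    start = series.index(applied[-1]) + 1 if applied else 0
    if all_patches:
        return start
    while start < len(series) and series[start] in guarded:
        start += 1
    return start

series = ['A', 'B', 'C', 'D']
print(toy_series_end(series, ['A', 'B'], all_patches=True))   # 2
print(toy_series_end(series, ['A', 'B'], guarded={'C'}))      # 3, C is skipped
print(toy_series_end(series, []))                             # 0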
1278 def appliedname(self, index):
1278 def appliedname(self, index):
1279 pname = self.applied[index].name
1279 pname = self.applied[index].name
1280 if not self.ui.verbose:
1280 if not self.ui.verbose:
1281 p = pname
1281 p = pname
1282 else:
1282 else:
1283 p = str(self.series.index(pname)) + " " + pname
1283 p = str(self.series.index(pname)) + " " + pname
1284 return p
1284 return p
1285
1285
1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1287 force=None, git=False):
1287 force=None, git=False):
1288 def checkseries(patchname):
1288 def checkseries(patchname):
1289 if patchname in self.series:
1289 if patchname in self.series:
1290 raise util.Abort(_('patch %s is already in the series file')
1290 raise util.Abort(_('patch %s is already in the series file')
1291 % patchname)
1291 % patchname)
1292 def checkfile(patchname):
1292 def checkfile(patchname):
1293 if not force and os.path.exists(self.join(patchname)):
1293 if not force and os.path.exists(self.join(patchname)):
1294 raise util.Abort(_('patch "%s" already exists')
1294 raise util.Abort(_('patch "%s" already exists')
1295 % patchname)
1295 % patchname)
1296
1296
1297 if rev:
1297 if rev:
1298 if files:
1298 if files:
1299 raise util.Abort(_('option "-r" not valid when importing '
1299 raise util.Abort(_('option "-r" not valid when importing '
1300 'files'))
1300 'files'))
1301 rev = cmdutil.revrange(repo, rev)
1301 rev = cmdutil.revrange(repo, rev)
1302 rev.sort(lambda x, y: cmp(y, x))
1302 rev.sort(lambda x, y: cmp(y, x))
1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1305 'patches'))
1305 'patches'))
1306 i = 0
1306 i = 0
1307 added = []
1307 added = []
1308 if rev:
1308 if rev:
1309 # If mq patches are applied, we can only import revisions
1309 # If mq patches are applied, we can only import revisions
1310 # that form a linear path to qbase.
1310 # that form a linear path to qbase.
1311 # Otherwise, they should form a linear path to a head.
1311 # Otherwise, they should form a linear path to a head.
1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1313 if len(heads) > 1:
1313 if len(heads) > 1:
1314 raise util.Abort(_('revision %d is the root of more than one '
1314 raise util.Abort(_('revision %d is the root of more than one '
1315 'branch') % rev[-1])
1315 'branch') % rev[-1])
1316 if self.applied:
1316 if self.applied:
1317 base = revlog.hex(repo.changelog.node(rev[0]))
1317 base = revlog.hex(repo.changelog.node(rev[0]))
1318 if base in [n.rev for n in self.applied]:
1318 if base in [n.rev for n in self.applied]:
1319 raise util.Abort(_('revision %d is already managed')
1319 raise util.Abort(_('revision %d is already managed')
1320 % rev[0])
1320 % rev[0])
1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1322 raise util.Abort(_('revision %d is not the parent of '
1322 raise util.Abort(_('revision %d is not the parent of '
1323 'the queue') % rev[0])
1323 'the queue') % rev[0])
1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1325 lastparent = repo.changelog.parentrevs(base)[0]
1325 lastparent = repo.changelog.parentrevs(base)[0]
1326 else:
1326 else:
1327 if heads != [repo.changelog.node(rev[0])]:
1327 if heads != [repo.changelog.node(rev[0])]:
1328 raise util.Abort(_('revision %d has unmanaged children')
1328 raise util.Abort(_('revision %d has unmanaged children')
1329 % rev[0])
1329 % rev[0])
1330 lastparent = None
1330 lastparent = None
1331
1331
1332 if git:
1332 if git:
1333 self.diffopts().git = True
1333 self.diffopts().git = True
1334
1334
1335 for r in rev:
1335 for r in rev:
1336 p1, p2 = repo.changelog.parentrevs(r)
1336 p1, p2 = repo.changelog.parentrevs(r)
1337 n = repo.changelog.node(r)
1337 n = repo.changelog.node(r)
1338 if p2 != revlog.nullrev:
1338 if p2 != revlog.nullrev:
1339 raise util.Abort(_('cannot import merge revision %d') % r)
1339 raise util.Abort(_('cannot import merge revision %d') % r)
1340 if lastparent and lastparent != r:
1340 if lastparent and lastparent != r:
1341 raise util.Abort(_('revision %d is not the parent of %d')
1341 raise util.Abort(_('revision %d is not the parent of %d')
1342 % (r, lastparent))
1342 % (r, lastparent))
1343 lastparent = p1
1343 lastparent = p1
1344
1344
1345 if not patchname:
1345 if not patchname:
1346 patchname = normname('%d.diff' % r)
1346 patchname = normname('%d.diff' % r)
1347 checkseries(patchname)
1347 checkseries(patchname)
1348 checkfile(patchname)
1348 checkfile(patchname)
1349 self.full_series.insert(0, patchname)
1349 self.full_series.insert(0, patchname)
1350
1350
1351 patchf = self.opener(patchname, "w")
1351 patchf = self.opener(patchname, "w")
1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1353 patchf.close()
1353 patchf.close()
1354
1354
1355 se = statusentry(revlog.hex(n), patchname)
1355 se = statusentry(revlog.hex(n), patchname)
1356 self.applied.insert(0, se)
1356 self.applied.insert(0, se)
1357
1357
1358 added.append(patchname)
1358 added.append(patchname)
1359 patchname = None
1359 patchname = None
1360 self.parse_series()
1360 self.parse_series()
1361 self.applied_dirty = 1
1361 self.applied_dirty = 1
1362
1362
1363 for filename in files:
1363 for filename in files:
1364 if existing:
1364 if existing:
1365 if filename == '-':
1365 if filename == '-':
1366 raise util.Abort(_('-e is incompatible with import from -'))
1366 raise util.Abort(_('-e is incompatible with import from -'))
1367 if not patchname:
1367 if not patchname:
1368 patchname = normname(filename)
1368 patchname = normname(filename)
1369 if not os.path.isfile(self.join(patchname)):
1369 if not os.path.isfile(self.join(patchname)):
1370 raise util.Abort(_("patch %s does not exist") % patchname)
1370 raise util.Abort(_("patch %s does not exist") % patchname)
1371 else:
1371 else:
1372 try:
1372 try:
1373 if filename == '-':
1373 if filename == '-':
1374 if not patchname:
1374 if not patchname:
1375 raise util.Abort(_('need --name to import a patch from -'))
1375 raise util.Abort(_('need --name to import a patch from -'))
1376 text = sys.stdin.read()
1376 text = sys.stdin.read()
1377 else:
1377 else:
1378 text = file(filename).read()
1378 text = file(filename).read()
1379 except IOError:
1379 except IOError:
1380 raise util.Abort(_("unable to read %s") % patchname)
1380 raise util.Abort(_("unable to read %s") % patchname)
1381 if not patchname:
1381 if not patchname:
1382 patchname = normname(os.path.basename(filename))
1382 patchname = normname(os.path.basename(filename))
1383 checkfile(patchname)
1383 checkfile(patchname)
1384 patchf = self.opener(patchname, "w")
1384 patchf = self.opener(patchname, "w")
1385 patchf.write(text)
1385 patchf.write(text)
1386 checkseries(patchname)
1386 checkseries(patchname)
1387 index = self.full_series_end() + i
1387 index = self.full_series_end() + i
1388 self.full_series[index:index] = [patchname]
1388 self.full_series[index:index] = [patchname]
1389 self.parse_series()
1389 self.parse_series()
1390 self.ui.warn("adding %s to series file\n" % patchname)
1390 self.ui.warn("adding %s to series file\n" % patchname)
1391 i += 1
1391 i += 1
1392 added.append(patchname)
1392 added.append(patchname)
1393 patchname = None
1393 patchname = None
1394 self.series_dirty = 1
1394 self.series_dirty = 1
1395 qrepo = self.qrepo()
1395 qrepo = self.qrepo()
1396 if qrepo:
1396 if qrepo:
1397 qrepo.add(added)
1397 qrepo.add(added)
1398
1398
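# Illustrative sketch, not part of mq: the linearity check qimport --rev
# performs above -- each imported revision must be the first parent of the
# previously imported (higher) one, and merge revisions are rejected.
# The revision numbers and parents table are invented.
NULLREV = -1
parents = {5: (4, NULLREV), 4: (3, NULLREV), 3: (2, NULLREV)}   # rev -> (p1, p2)

def check_linear(revs):                     # revs sorted highest first
    lastparent = None
    for r in revs:
        p1, p2 = parents[r]
        if p2 != NULLREV:
            raise ValueError('cannot import merge revision %d' % r)
        if lastparent is not None and lastparent != r:
            raise ValueError('revision %d is not the parent of %d' % (r, lastparent))
        lastparent = p1
    return True

print(check_linear([5, 4, 3]))   # True: 3 <- 4 <- 5 is a linear chain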
1399 def delete(ui, repo, *patches, **opts):
1399 def delete(ui, repo, *patches, **opts):
1400 """remove patches from queue
1400 """remove patches from queue
1401
1401
1402 The patches must not be applied, unless they are arguments to
1402 The patches must not be applied, unless they are arguments to
1403 the --rev parameter. At least one patch or revision is required.
1403 the --rev parameter. At least one patch or revision is required.
1404
1404
1405 With --rev, mq will stop managing the named revisions (converting
1405 With --rev, mq will stop managing the named revisions (converting
1406 them to regular mercurial changesets). The patches must be applied
1406 them to regular mercurial changesets). The patches must be applied
1407 and at the base of the stack. This option is useful when the patches
1407 and at the base of the stack. This option is useful when the patches
1408 have been applied upstream.
1408 have been applied upstream.
1409
1409
1410 With --keep, the patch files are preserved in the patch directory."""
1410 With --keep, the patch files are preserved in the patch directory."""
1411 q = repo.mq
1411 q = repo.mq
1412 q.delete(repo, patches, opts)
1412 q.delete(repo, patches, opts)
1413 q.save_dirty()
1413 q.save_dirty()
1414 return 0
1414 return 0
1415
1415
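# Illustrative sketch, not part of mq: the --rev handling shown at the top of
# this hunk -- revisions can only leave mq control from the bottom of the
# applied stack upwards. The toy data treats revisions and patch names as
# the same thing, which the real code resolves through the changelog.
def deletable(applied, revs):
    realpatches = []
    for i, rev in enumerate(revs):
        if i >= len(applied) or rev != applied[i]:
            raise ValueError('cannot delete revision %s above applied patches' % rev)
        realpatches.append(applied[i])
    return realpatches

print(deletable(['A', 'B', 'C'], ['A', 'B']))   # ok: A and B become regular changesets
# deletable(['A', 'B', 'C'], ['B']) would raise: B is not at the base of the stack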
1416 def applied(ui, repo, patch=None, **opts):
1416 def applied(ui, repo, patch=None, **opts):
1417 """print the patches already applied"""
1417 """print the patches already applied"""
1418 q = repo.mq
1418 q = repo.mq
1419 if patch:
1419 if patch:
1420 if patch not in q.series:
1420 if patch not in q.series:
1421 raise util.Abort(_("patch %s is not in series file") % patch)
1421 raise util.Abort(_("patch %s is not in series file") % patch)
1422 end = q.series.index(patch) + 1
1422 end = q.series.index(patch) + 1
1423 else:
1423 else:
1424 end = q.series_end(True)
1424 end = q.series_end(True)
1425 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1425 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1426
1426
1427 def unapplied(ui, repo, patch=None, **opts):
1427 def unapplied(ui, repo, patch=None, **opts):
1428 """print the patches not yet applied"""
1428 """print the patches not yet applied"""
1429 q = repo.mq
1429 q = repo.mq
1430 if patch:
1430 if patch:
1431 if patch not in q.series:
1431 if patch not in q.series:
1432 raise util.Abort(_("patch %s is not in series file") % patch)
1432 raise util.Abort(_("patch %s is not in series file") % patch)
1433 start = q.series.index(patch) + 1
1433 start = q.series.index(patch) + 1
1434 else:
1434 else:
1435 start = q.series_end(True)
1435 start = q.series_end(True)
1436 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1436 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1437
1437
1438 def qimport(ui, repo, *filename, **opts):
1438 def qimport(ui, repo, *filename, **opts):
1439 """import a patch
1439 """import a patch
1440
1440
1441 The patch will have the same name as its source file unless you
1441 The patch will have the same name as its source file unless you
1442 give it a new one with --name.
1442 give it a new one with --name.
1443
1443
1444 You can register an existing patch inside the patch directory
1444 You can register an existing patch inside the patch directory
1445 with the --existing flag.
1445 with the --existing flag.
1446
1446
1447 With --force, an existing patch of the same name will be overwritten.
1447 With --force, an existing patch of the same name will be overwritten.
1448
1448
1449 An existing changeset may be placed under mq control with --rev
1449 An existing changeset may be placed under mq control with --rev
1450 (e.g. qimport --rev tip -n patch will place tip under mq control).
1450 (e.g. qimport --rev tip -n patch will place tip under mq control).
1451 With --git, patches imported with --rev will use the git diff
1451 With --git, patches imported with --rev will use the git diff
1452 format.
1452 format.
1453 """
1453 """
1454 q = repo.mq
1454 q = repo.mq
1455 q.qimport(repo, filename, patchname=opts['name'],
1455 q.qimport(repo, filename, patchname=opts['name'],
1456 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1456 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1457 git=opts['git'])
1457 git=opts['git'])
1458 q.save_dirty()
1458 q.save_dirty()
1459 return 0
1459 return 0
1460
1460
1461 def init(ui, repo, **opts):
1461 def init(ui, repo, **opts):
1462 """init a new queue repository
1462 """init a new queue repository
1463
1463
1464 The queue repository is unversioned by default. If -c is
1464 The queue repository is unversioned by default. If -c is
1465 specified, qinit will create a separate nested repository
1465 specified, qinit will create a separate nested repository
1466 for patches (qinit -c may also be run later to convert
1466 for patches (qinit -c may also be run later to convert
1467 an unversioned patch repository into a versioned one).
1467 an unversioned patch repository into a versioned one).
1468 You can use qcommit to commit changes to this queue repository."""
1468 You can use qcommit to commit changes to this queue repository."""
1469 q = repo.mq
1469 q = repo.mq
1470 r = q.init(repo, create=opts['create_repo'])
1470 r = q.init(repo, create=opts['create_repo'])
1471 q.save_dirty()
1471 q.save_dirty()
1472 if r:
1472 if r:
1473 if not os.path.exists(r.wjoin('.hgignore')):
1473 if not os.path.exists(r.wjoin('.hgignore')):
1474 fp = r.wopener('.hgignore', 'w')
1474 fp = r.wopener('.hgignore', 'w')
1475 fp.write('syntax: glob\n')
1475 fp.write('syntax: glob\n')
1476 fp.write('status\n')
1476 fp.write('status\n')
1477 fp.write('guards\n')
1477 fp.write('guards\n')
1478 fp.close()
1478 fp.close()
1479 if not os.path.exists(r.wjoin('series')):
1479 if not os.path.exists(r.wjoin('series')):
1480 r.wopener('series', 'w').close()
1480 r.wopener('series', 'w').close()
1481 r.add(['.hgignore', 'series'])
1481 r.add(['.hgignore', 'series'])
1482 commands.add(ui, r)
1482 commands.add(ui, r)
1483 return 0
1483 return 0
1484
1484
1485 def clone(ui, source, dest=None, **opts):
1485 def clone(ui, source, dest=None, **opts):
1486 '''clone main and patch repository at same time
1486 '''clone main and patch repository at same time
1487
1487
1488 If source is local, destination will have no patches applied. If
1488 If source is local, destination will have no patches applied. If
1489 source is remote, this command cannot check whether patches are
1489 source is remote, this command cannot check whether patches are
1490 applied in source, so it cannot guarantee that patches are not
1490 applied in source, so it cannot guarantee that patches are not
1491 applied in destination. If you clone a remote repository, make
1491 applied in destination. If you clone a remote repository, make
1492 sure it has no patches applied before doing so.
1492 sure it has no patches applied before doing so.
1493
1493
1494 Source patch repository is looked for in <src>/.hg/patches by
1494 Source patch repository is looked for in <src>/.hg/patches by
1495 default. Use -p <url> to change.
1495 default. Use -p <url> to change.
1496
1496
1497 The patch directory must be a nested mercurial repository, as
1497 The patch directory must be a nested mercurial repository, as
1498 would be created by qinit -c.
1498 would be created by qinit -c.
1499 '''
1499 '''
1500 cmdutil.setremoteconfig(ui, opts)
1500 cmdutil.setremoteconfig(ui, opts)
1501 if dest is None:
1501 if dest is None:
1502 dest = hg.defaultdest(source)
1502 dest = hg.defaultdest(source)
1503 sr = hg.repository(ui, ui.expandpath(source))
1503 sr = hg.repository(ui, ui.expandpath(source))
1504 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1504 patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
1505 try:
1505 try:
1506 pr = hg.repository(ui, patchdir)
1506 pr = hg.repository(ui, patchdir)
1507 except hg.RepoError:
1507 except hg.RepoError:
1508 raise util.Abort(_('versioned patch repository not found'
1508 raise util.Abort(_('versioned patch repository not found'
1509 ' (see qinit -c)'))
1509 ' (see qinit -c)'))
1510 qbase, destrev = None, None
1510 qbase, destrev = None, None
1511 if sr.local():
1511 if sr.local():
1512 if sr.mq.applied:
1512 if sr.mq.applied:
1513 qbase = revlog.bin(sr.mq.applied[0].rev)
1513 qbase = revlog.bin(sr.mq.applied[0].rev)
1514 if not hg.islocal(dest):
1514 if not hg.islocal(dest):
1515 heads = dict.fromkeys(sr.heads())
1515 heads = dict.fromkeys(sr.heads())
1516 for h in sr.heads(qbase):
1516 for h in sr.heads(qbase):
1517 del heads[h]
1517 del heads[h]
1518 destrev = heads.keys()
1518 destrev = heads.keys()
1519 destrev.append(sr.changelog.parents(qbase)[0])
1519 destrev.append(sr.changelog.parents(qbase)[0])
1520 ui.note(_('cloning main repo\n'))
1520 ui.note(_('cloning main repo\n'))
1521 sr, dr = hg.clone(ui, sr.url(), dest,
1521 sr, dr = hg.clone(ui, sr.url(), dest,
1522 pull=opts['pull'],
1522 pull=opts['pull'],
1523 rev=destrev,
1523 rev=destrev,
1524 update=False,
1524 update=False,
1525 stream=opts['uncompressed'])
1525 stream=opts['uncompressed'])
1526 ui.note(_('cloning patch repo\n'))
1526 ui.note(_('cloning patch repo\n'))
1527 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1527 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1528 dr.url() + '/.hg/patches',
1528 dr.url() + '/.hg/patches',
1529 pull=opts['pull'],
1529 pull=opts['pull'],
1530 update=not opts['noupdate'],
1530 update=not opts['noupdate'],
1531 stream=opts['uncompressed'])
1531 stream=opts['uncompressed'])
1532 if dr.local():
1532 if dr.local():
1533 if qbase:
1533 if qbase:
1534 ui.note(_('stripping applied patches from destination repo\n'))
1534 ui.note(_('stripping applied patches from destination repo\n'))
1535 dr.mq.strip(dr, qbase, update=False, backup=None)
1535 dr.mq.strip(dr, qbase, update=False, backup=None)
1536 if not opts['noupdate']:
1536 if not opts['noupdate']:
1537 ui.note(_('updating destination repo\n'))
1537 ui.note(_('updating destination repo\n'))
1538 hg.update(dr, dr.changelog.tip())
1538 hg.update(dr, dr.changelog.tip())
1539
1539
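The clone body above excludes applied mq patches from the destination: it takes the source heads that do not descend from qbase (the first applied patch) plus qbase's first parent, clones only those revisions, and strips qbase afterwards when the destination is local. The following is a minimal standalone sketch of that head selection on a toy linear history; the parents/heads data and the ancestors helper are invented for illustration and are not Mercurial APIs.

# Toy history: rev -> parent; revs 3 and 4 are the applied patches.
parents = {0: None, 1: 0, 2: 1, 3: 2, 4: 3}
heads = [4]            # tip of the toy history
qbase = 3              # first applied patch (analogous to q.applied[0])

def ancestors(rev):
    """Yield rev and all of its ancestors in the toy history."""
    while rev is not None:
        yield rev
        rev = parents[rev]

# Drop heads that contain qbase (i.e. heads with patches applied) ...
destrev = [h for h in heads if qbase not in ancestors(h)]
# ... and add qbase's parent so the destination still gets the branch
# up to, but not including, the first applied patch.
destrev.append(parents[qbase])

print(destrev)   # [2]: clone everything reachable from rev 2, patches excluded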
1540 def commit(ui, repo, *pats, **opts):
1540 def commit(ui, repo, *pats, **opts):
1541 """commit changes in the queue repository"""
1541 """commit changes in the queue repository"""
1542 q = repo.mq
1542 q = repo.mq
1543 r = q.qrepo()
1543 r = q.qrepo()
1544 if not r: raise util.Abort('no queue repository')
1544 if not r: raise util.Abort('no queue repository')
1545 commands.commit(r.ui, r, *pats, **opts)
1545 commands.commit(r.ui, r, *pats, **opts)
1546
1546
1547 def series(ui, repo, **opts):
1547 def series(ui, repo, **opts):
1548 """print the entire series file"""
1548 """print the entire series file"""
1549 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1549 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1550 return 0
1550 return 0
1551
1551
1552 def top(ui, repo, **opts):
1552 def top(ui, repo, **opts):
1553 """print the name of the current patch"""
1553 """print the name of the current patch"""
1554 q = repo.mq
1554 q = repo.mq
1555 t = q.applied and q.series_end(True) or 0
1555 t = q.applied and q.series_end(True) or 0
1556 if t:
1556 if t:
1557 return q.qseries(repo, start=t-1, length=1, status='A',
1557 return q.qseries(repo, start=t-1, length=1, status='A',
1558 summary=opts.get('summary'))
1558 summary=opts.get('summary'))
1559 else:
1559 else:
1560 ui.write("No patches applied\n")
1560 ui.write("No patches applied\n")
1561 return 1
1561 return 1
1562
1562
1563 def next(ui, repo, **opts):
1563 def next(ui, repo, **opts):
1564 """print the name of the next patch"""
1564 """print the name of the next patch"""
1565 q = repo.mq
1565 q = repo.mq
1566 end = q.series_end()
1566 end = q.series_end()
1567 if end == len(q.series):
1567 if end == len(q.series):
1568 ui.write("All patches applied\n")
1568 ui.write("All patches applied\n")
1569 return 1
1569 return 1
1570 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1570 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1571
1571
1572 def prev(ui, repo, **opts):
1572 def prev(ui, repo, **opts):
1573 """print the name of the previous patch"""
1573 """print the name of the previous patch"""
1574 q = repo.mq
1574 q = repo.mq
1575 l = len(q.applied)
1575 l = len(q.applied)
1576 if l == 1:
1576 if l == 1:
1577 ui.write("Only one patch applied\n")
1577 ui.write("Only one patch applied\n")
1578 return 1
1578 return 1
1579 if not l:
1579 if not l:
1580 ui.write("No patches applied\n")
1580 ui.write("No patches applied\n")
1581 return 1
1581 return 1
1582 return q.qseries(repo, start=l-2, length=1, status='A',
1582 return q.qseries(repo, start=l-2, length=1, status='A',
1583 summary=opts.get('summary'))
1583 summary=opts.get('summary'))
1584
1584
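qtop, qnext and qprev above are thin wrappers around the same series bookkeeping: the top is the last applied patch, the next is the first unapplied series entry, and the previous is the patch just below the top. A standalone sketch of that indexing, with plain lists standing in for q.series and q.applied (the real series_end also honours guards, which is ignored here):

series = ["a.patch", "b.patch", "c.patch", "d.patch"]
applied = ["a.patch", "b.patch"]           # stand-in for q.applied names
end = len(applied)                          # stand-in for q.series_end()

top = applied[-1] if applied else None                    # qtop
nxt = series[end] if end < len(series) else None          # qnext
prv = applied[-2] if len(applied) > 1 else None           # qprev

print(top, nxt, prv)   # b.patch c.patch a.patch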
1585 def new(ui, repo, patch, *args, **opts):
1585 def new(ui, repo, patch, *args, **opts):
1586 """create a new patch
1586 """create a new patch
1587
1587
1588 qnew creates a new patch on top of the currently-applied patch
1588 qnew creates a new patch on top of the currently-applied patch
1589 (if any). It will refuse to run if there are any outstanding
1589 (if any). It will refuse to run if there are any outstanding
1590 changes unless -f is specified, in which case the patch will
1590 changes unless -f is specified, in which case the patch will
1591 be initialised with them. You may also use -I, -X, and/or a list of
1591 be initialised with them. You may also use -I, -X, and/or a list of
1592 files after the patch name to add only changes to matching files
1592 files after the patch name to add only changes to matching files
1593 to the new patch, leaving the rest as uncommitted modifications.
1593 to the new patch, leaving the rest as uncommitted modifications.
1594
1594
1595 -e, -m or -l set the patch header as well as the commit message.
1595 -e, -m or -l set the patch header as well as the commit message.
1596 If none is specified, the patch header is empty and the
1596 If none is specified, the patch header is empty and the
1597 commit message is '[mq]: PATCH'"""
1597 commit message is '[mq]: PATCH'"""
1598 q = repo.mq
1598 q = repo.mq
1599 message = cmdutil.logmessage(opts)
1599 message = cmdutil.logmessage(opts)
1600 if opts['edit']:
1600 if opts['edit']:
1601 message = ui.edit(message, ui.username())
1601 message = ui.edit(message, ui.username())
1602 opts['msg'] = message
1602 opts['msg'] = message
1603 q.new(repo, patch, *args, **opts)
1603 q.new(repo, patch, *args, **opts)
1604 q.save_dirty()
1604 q.save_dirty()
1605 return 0
1605 return 0
1606
1606
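qnew builds its commit message the same way qrefresh does: cmdutil.logmessage collects -m or -l, -e optionally routes the text through an editor, and the '[mq]: PATCH' default named in the docstring is used when nothing was given. A simplified standalone sketch of that precedence; build_message and its editor argument are invented stand-ins, not Mercurial functions:

def build_message(patch, msg="", logfile=None, edit=False, editor=None):
    """Sketch of the message precedence: -m, then -l, then the default header."""
    message = msg or (open(logfile).read() if logfile else "")
    if edit and editor:
        message = editor(message)           # stand-in for ui.edit(...)
    return message or "[mq]: %s" % patch    # default named in the docstring above

print(build_message("fix-typo.patch"))                   # [mq]: fix-typo.patch
print(build_message("fix-typo.patch", msg="fix typo"))   # fix typo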
1607 def refresh(ui, repo, *pats, **opts):
1607 def refresh(ui, repo, *pats, **opts):
1608 """update the current patch
1608 """update the current patch
1609
1609
1610 If any file patterns are provided, the refreshed patch will contain only
1610 If any file patterns are provided, the refreshed patch will contain only
1611 the modifications that match those patterns; the remaining modifications
1611 the modifications that match those patterns; the remaining modifications
1612 will remain in the working directory.
1612 will remain in the working directory.
1613
1613
1614 hg add/remove/copy/rename work as usual, though you might want to use
1614 hg add/remove/copy/rename work as usual, though you might want to use
1615 git-style patches (--git or [diff] git=1) to track copies and renames.
1615 git-style patches (--git or [diff] git=1) to track copies and renames.
1616 """
1616 """
1617 q = repo.mq
1617 q = repo.mq
1618 message = cmdutil.logmessage(opts)
1618 message = cmdutil.logmessage(opts)
1619 if opts['edit']:
1619 if opts['edit']:
1620 if message:
1620 if message:
1621 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1621 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1622 patch = q.applied[-1].name
1622 patch = q.applied[-1].name
1623 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1623 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1624 message = ui.edit('\n'.join(message), user or ui.username())
1624 message = ui.edit('\n'.join(message), user or ui.username())
1625 ret = q.refresh(repo, pats, msg=message, **opts)
1625 ret = q.refresh(repo, pats, msg=message, **opts)
1626 q.save_dirty()
1626 q.save_dirty()
1627 return ret
1627 return ret
1628
1628
1629 def diff(ui, repo, *pats, **opts):
1629 def diff(ui, repo, *pats, **opts):
1630 """diff of the current patch"""
1630 """diff of the current patch"""
1631 repo.mq.diff(repo, pats, opts)
1631 repo.mq.diff(repo, pats, opts)
1632 return 0
1632 return 0
1633
1633
1634 def fold(ui, repo, *files, **opts):
1634 def fold(ui, repo, *files, **opts):
1635 """fold the named patches into the current patch
1635 """fold the named patches into the current patch
1636
1636
1637 Patches must not yet be applied. Each patch will be successively
1637 Patches must not yet be applied. Each patch will be successively
1638 applied to the current patch in the order given. If all the
1638 applied to the current patch in the order given. If all the
1639 patches apply successfully, the current patch will be refreshed
1639 patches apply successfully, the current patch will be refreshed
1640 with the new cumulative patch, and the folded patches will
1640 with the new cumulative patch, and the folded patches will
1641 be deleted. With -k/--keep, the folded patch files will not
1641 be deleted. With -k/--keep, the folded patch files will not
1642 be removed afterwards.
1642 be removed afterwards.
1643
1643
1644 The header for each folded patch will be concatenated with
1644 The header for each folded patch will be concatenated with
1645 the current patch header, separated by a line of '* * *'."""
1645 the current patch header, separated by a line of '* * *'."""
1646
1646
1647 q = repo.mq
1647 q = repo.mq
1648
1648
1649 if not files:
1649 if not files:
1650 raise util.Abort(_('qfold requires at least one patch name'))
1650 raise util.Abort(_('qfold requires at least one patch name'))
1651 if not q.check_toppatch(repo):
1651 if not q.check_toppatch(repo):
1652 raise util.Abort(_('No patches applied'))
1652 raise util.Abort(_('No patches applied'))
1653
1653
1654 message = cmdutil.logmessage(opts)
1654 message = cmdutil.logmessage(opts)
1655 if opts['edit']:
1655 if opts['edit']:
1656 if message:
1656 if message:
1657 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1657 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1658
1658
1659 parent = q.lookup('qtip')
1659 parent = q.lookup('qtip')
1660 patches = []
1660 patches = []
1661 messages = []
1661 messages = []
1662 for f in files:
1662 for f in files:
1663 p = q.lookup(f)
1663 p = q.lookup(f)
1664 if p in patches or p == parent:
1664 if p in patches or p == parent:
1665 ui.warn(_('Skipping already folded patch %s\n') % p)
1665 ui.warn(_('Skipping already folded patch %s\n') % p)
1666 if q.isapplied(p):
1666 if q.isapplied(p):
1667 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1667 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1668 patches.append(p)
1668 patches.append(p)
1669
1669
1670 for p in patches:
1670 for p in patches:
1671 if not message:
1671 if not message:
1672 messages.append(q.readheaders(p)[0])
1672 messages.append(q.readheaders(p)[0])
1673 pf = q.join(p)
1673 pf = q.join(p)
1674 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1674 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1675 if not patchsuccess:
1675 if not patchsuccess:
1676 raise util.Abort(_('Error folding patch %s') % p)
1676 raise util.Abort(_('Error folding patch %s') % p)
1677 patch.updatedir(ui, repo, files)
1677 patch.updatedir(ui, repo, files)
1678
1678
1679 if not message:
1679 if not message:
1680 message, comments, user = q.readheaders(parent)[0:3]
1680 message, comments, user = q.readheaders(parent)[0:3]
1681 for msg in messages:
1681 for msg in messages:
1682 message.append('* * *')
1682 message.append('* * *')
1683 message.extend(msg)
1683 message.extend(msg)
1684 message = '\n'.join(message)
1684 message = '\n'.join(message)
1685
1685
1686 if opts['edit']:
1686 if opts['edit']:
1687 message = ui.edit(message, user or ui.username())
1687 message = ui.edit(message, user or ui.username())
1688
1688
1689 q.refresh(repo, msg=message)
1689 q.refresh(repo, msg=message)
1690 q.delete(repo, patches, opts)
1690 q.delete(repo, patches, opts)
1691 q.save_dirty()
1691 q.save_dirty()
1692
1692
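The fold loop above concatenates the header of each folded patch onto the current patch's header, inserting a '* * *' separator line before each one. A minimal standalone sketch of that concatenation; the header lists are invented, where the real code gets them from q.readheaders:

current = ["widget: fix crash on empty input"]
folded = [
    ["widget: add regression test"],
    ["widget: tidy error message", "", "Spotted while testing the crash fix."],
]

message = list(current)
for msg in folded:
    message.append('* * *')
    message.extend(msg)
message = '\n'.join(message)

print(message)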
1693 def goto(ui, repo, patch, **opts):
1693 def goto(ui, repo, patch, **opts):
1694 '''push or pop patches until named patch is at top of stack'''
1694 '''push or pop patches until named patch is at top of stack'''
1695 q = repo.mq
1695 q = repo.mq
1696 patch = q.lookup(patch)
1696 patch = q.lookup(patch)
1697 if q.isapplied(patch):
1697 if q.isapplied(patch):
1698 ret = q.pop(repo, patch, force=opts['force'])
1698 ret = q.pop(repo, patch, force=opts['force'])
1699 else:
1699 else:
1700 ret = q.push(repo, patch, force=opts['force'])
1700 ret = q.push(repo, patch, force=opts['force'])
1701 q.save_dirty()
1701 q.save_dirty()
1702 return ret
1702 return ret
1703
1703
1704 def guard(ui, repo, *args, **opts):
1704 def guard(ui, repo, *args, **opts):
1705 '''set or print guards for a patch
1705 '''set or print guards for a patch
1706
1706
1707 Guards control whether a patch can be pushed. A patch with no
1707 Guards control whether a patch can be pushed. A patch with no
1708 guards is always pushed. A patch with a positive guard ("+foo") is
1708 guards is always pushed. A patch with a positive guard ("+foo") is
1709 pushed only if the qselect command has activated it. A patch with
1709 pushed only if the qselect command has activated it. A patch with
1710 a negative guard ("-foo") is never pushed if the qselect command
1710 a negative guard ("-foo") is never pushed if the qselect command
1711 has activated it.
1711 has activated it.
1712
1712
1713 With no arguments, print the currently active guards.
1713 With no arguments, print the currently active guards.
1714 With arguments, set guards for the named patch.
1714 With arguments, set guards for the named patch.
1715
1715
1716 To set a negative guard "-foo" on topmost patch ("--" is needed so
1716 To set a negative guard "-foo" on topmost patch ("--" is needed so
1717 hg will not interpret "-foo" as an option):
1717 hg will not interpret "-foo" as an option):
1718 hg qguard -- -foo
1718 hg qguard -- -foo
1719
1719
1720 To set guards on another patch:
1720 To set guards on another patch:
1721 hg qguard other.patch +2.6.17 -stable
1721 hg qguard other.patch +2.6.17 -stable
1722 '''
1722 '''
1723 def status(idx):
1723 def status(idx):
1724 guards = q.series_guards[idx] or ['unguarded']
1724 guards = q.series_guards[idx] or ['unguarded']
1725 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1725 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1726 q = repo.mq
1726 q = repo.mq
1727 patch = None
1727 patch = None
1728 args = list(args)
1728 args = list(args)
1729 if opts['list']:
1729 if opts['list']:
1730 if args or opts['none']:
1730 if args or opts['none']:
1731 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1731 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1732 for i in xrange(len(q.series)):
1732 for i in xrange(len(q.series)):
1733 status(i)
1733 status(i)
1734 return
1734 return
1735 if not args or args[0][0:1] in '-+':
1735 if not args or args[0][0:1] in '-+':
1736 if not q.applied:
1736 if not q.applied:
1737 raise util.Abort(_('no patches applied'))
1737 raise util.Abort(_('no patches applied'))
1738 patch = q.applied[-1].name
1738 patch = q.applied[-1].name
1739 if patch is None and args[0][0:1] not in '-+':
1739 if patch is None and args[0][0:1] not in '-+':
1740 patch = args.pop(0)
1740 patch = args.pop(0)
1741 if patch is None:
1741 if patch is None:
1742 raise util.Abort(_('no patch to work with'))
1742 raise util.Abort(_('no patch to work with'))
1743 if args or opts['none']:
1743 if args or opts['none']:
1744 idx = q.find_series(patch)
1744 idx = q.find_series(patch)
1745 if idx is None:
1745 if idx is None:
1746 raise util.Abort(_('no patch named %s') % patch)
1746 raise util.Abort(_('no patch named %s') % patch)
1747 q.set_guards(idx, args)
1747 q.set_guards(idx, args)
1748 q.save_dirty()
1748 q.save_dirty()
1749 else:
1749 else:
1750 status(q.series.index(q.lookup(patch)))
1750 status(q.series.index(q.lookup(patch)))
1751
1751
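status() above prints one series entry per line as 'name: guards', falling back to 'unguarded' when the entry has no guards, and the docstring describes the '+'/'-' prefix convention. The sketch below reproduces that output for an invented series and guard table:

series = ["fix-a.patch", "fix-b.patch", "experimental.patch"]
series_guards = [[], ["+stable"], ["-stable", "+2.6.17"]]

def status(idx):
    guards = series_guards[idx] or ['unguarded']
    print('%s: %s' % (series[idx], ' '.join(guards)))

for i in range(len(series)):
    status(i)
# fix-a.patch: unguarded
# fix-b.patch: +stable
# experimental.patch: -stable +2.6.17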
1752 def header(ui, repo, patch=None):
1752 def header(ui, repo, patch=None):
1753 """Print the header of the topmost or specified patch"""
1753 """Print the header of the topmost or specified patch"""
1754 q = repo.mq
1754 q = repo.mq
1755
1755
1756 if patch:
1756 if patch:
1757 patch = q.lookup(patch)
1757 patch = q.lookup(patch)
1758 else:
1758 else:
1759 if not q.applied:
1759 if not q.applied:
1760 ui.write('No patches applied\n')
1760 ui.write('No patches applied\n')
1761 return 1
1761 return 1
1762 patch = q.lookup('qtip')
1762 patch = q.lookup('qtip')
1763 message = repo.mq.readheaders(patch)[0]
1763 message = repo.mq.readheaders(patch)[0]
1764
1764
1765 ui.write('\n'.join(message) + '\n')
1765 ui.write('\n'.join(message) + '\n')
1766
1766
1767 def lastsavename(path):
1767 def lastsavename(path):
1768 (directory, base) = os.path.split(path)
1768 (directory, base) = os.path.split(path)
1769 names = os.listdir(directory)
1769 names = os.listdir(directory)
1770 namere = re.compile("%s.([0-9]+)" % base)
1770 namere = re.compile("%s.([0-9]+)" % base)
1771 maxindex = None
1771 maxindex = None
1772 maxname = None
1772 maxname = None
1773 for f in names:
1773 for f in names:
1774 m = namere.match(f)
1774 m = namere.match(f)
1775 if m:
1775 if m:
1776 index = int(m.group(1))
1776 index = int(m.group(1))
1777 if maxindex is None or index > maxindex:
1777 if maxindex is None or index > maxindex:
1778 maxindex = index
1778 maxindex = index
1779 maxname = f
1779 maxname = f
1780 if maxname:
1780 if maxname:
1781 return (os.path.join(directory, maxname), maxindex)
1781 return (os.path.join(directory, maxname), maxindex)
1782 return (None, None)
1782 return (None, None)
1783
1783
1784 def savename(path):
1784 def savename(path):
1785 (last, index) = lastsavename(path)
1785 (last, index) = lastsavename(path)
1786 if last is None:
1786 if last is None:
1787 index = 0
1787 index = 0
1788 newpath = path + ".%d" % (index + 1)
1788 newpath = path + ".%d" % (index + 1)
1789 return newpath
1789 return newpath
1790
1790
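lastsavename and savename implement a simple numbering scheme: among siblings named <base>.<N>, find the largest N, and hand out <base>.<N+1> for the next qsave. A standalone restatement of that rule against an invented file listing (no filesystem access; the regex here is escaped and anchored, which the original does not bother to do):

import re

def last_and_next(base, names):
    """Return (name with the largest numeric suffix, that index, next name)."""
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    best = (None, None)
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if best[1] is None or index > best[1]:
                best = (f, index)
    nextname = "%s.%d" % (base, (best[1] or 0) + 1)
    return best[0], best[1], nextname

print(last_and_next("patches", ["patches.1", "patches.7", "series"]))
# ('patches.7', 7, 'patches.8')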
1791 def push(ui, repo, patch=None, **opts):
1791 def push(ui, repo, patch=None, **opts):
1792 """push the next patch onto the stack"""
1792 """push the next patch onto the stack"""
1793 q = repo.mq
1793 q = repo.mq
1794 mergeq = None
1794 mergeq = None
1795
1795
1796 if opts['all']:
1796 if opts['all']:
1797 if not q.series:
1797 if not q.series:
1798 ui.warn(_('no patches in series\n'))
1798 ui.warn(_('no patches in series\n'))
1799 return 0
1799 return 0
1800 patch = q.series[-1]
1800 patch = q.series[-1]
1801 if opts['merge']:
1801 if opts['merge']:
1802 if opts['name']:
1802 if opts['name']:
1803 newpath = opts['name']
1803 newpath = opts['name']
1804 else:
1804 else:
1805 newpath, i = lastsavename(q.path)
1805 newpath, i = lastsavename(q.path)
1806 if not newpath:
1806 if not newpath:
1807 ui.warn("no saved queues found, please use -n\n")
1807 ui.warn("no saved queues found, please use -n\n")
1808 return 1
1808 return 1
1809 mergeq = queue(ui, repo.join(""), newpath)
1809 mergeq = queue(ui, repo.join(""), newpath)
1810 ui.warn("merging with queue at: %s\n" % mergeq.path)
1810 ui.warn("merging with queue at: %s\n" % mergeq.path)
1811 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1811 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1812 mergeq=mergeq)
1812 mergeq=mergeq)
1813 return ret
1813 return ret
1814
1814
1815 def pop(ui, repo, patch=None, **opts):
1815 def pop(ui, repo, patch=None, **opts):
1816 """pop the current patch off the stack"""
1816 """pop the current patch off the stack"""
1817 localupdate = True
1817 localupdate = True
1818 if opts['name']:
1818 if opts['name']:
1819 q = queue(ui, repo.join(""), repo.join(opts['name']))
1819 q = queue(ui, repo.join(""), repo.join(opts['name']))
1820 ui.warn('using patch queue: %s\n' % q.path)
1820 ui.warn('using patch queue: %s\n' % q.path)
1821 localupdate = False
1821 localupdate = False
1822 else:
1822 else:
1823 q = repo.mq
1823 q = repo.mq
1824 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1824 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1825 all=opts['all'])
1825 all=opts['all'])
1826 q.save_dirty()
1826 q.save_dirty()
1827 return ret
1827 return ret
1828
1828
1829 def rename(ui, repo, patch, name=None, **opts):
1829 def rename(ui, repo, patch, name=None, **opts):
1830 """rename a patch
1830 """rename a patch
1831
1831
1832 With one argument, renames the current patch to PATCH1.
1832 With one argument, renames the current patch to PATCH1.
1833 With two arguments, renames PATCH1 to PATCH2."""
1833 With two arguments, renames PATCH1 to PATCH2."""
1834
1834
1835 q = repo.mq
1835 q = repo.mq
1836
1836
1837 if not name:
1837 if not name:
1838 name = patch
1838 name = patch
1839 patch = None
1839 patch = None
1840
1840
1841 if patch:
1841 if patch:
1842 patch = q.lookup(patch)
1842 patch = q.lookup(patch)
1843 else:
1843 else:
1844 if not q.applied:
1844 if not q.applied:
1845 ui.write(_('No patches applied\n'))
1845 ui.write(_('No patches applied\n'))
1846 return
1846 return
1847 patch = q.lookup('qtip')
1847 patch = q.lookup('qtip')
1848 absdest = q.join(name)
1848 absdest = q.join(name)
1849 if os.path.isdir(absdest):
1849 if os.path.isdir(absdest):
1850 name = normname(os.path.join(name, os.path.basename(patch)))
1850 name = normname(os.path.join(name, os.path.basename(patch)))
1851 absdest = q.join(name)
1851 absdest = q.join(name)
1852 if os.path.exists(absdest):
1852 if os.path.exists(absdest):
1853 raise util.Abort(_('%s already exists') % absdest)
1853 raise util.Abort(_('%s already exists') % absdest)
1854
1854
1855 if name in q.series:
1855 if name in q.series:
1856 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1856 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1857
1857
1858 if ui.verbose:
1858 if ui.verbose:
1859 ui.write('Renaming %s to %s\n' % (patch, name))
1859 ui.write('Renaming %s to %s\n' % (patch, name))
1860 i = q.find_series(patch)
1860 i = q.find_series(patch)
1861 guards = q.guard_re.findall(q.full_series[i])
1861 guards = q.guard_re.findall(q.full_series[i])
1862 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1862 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1863 q.parse_series()
1863 q.parse_series()
1864 q.series_dirty = 1
1864 q.series_dirty = 1
1865
1865
1866 info = q.isapplied(patch)
1866 info = q.isapplied(patch)
1867 if info:
1867 if info:
1868 q.applied[info[0]] = statusentry(info[1], name)
1868 q.applied[info[0]] = statusentry(info[1], name)
1869 q.applied_dirty = 1
1869 q.applied_dirty = 1
1870
1870
1871 util.rename(q.join(patch), absdest)
1871 util.rename(q.join(patch), absdest)
1872 r = q.qrepo()
1872 r = q.qrepo()
1873 if r:
1873 if r:
1874 wlock = r.wlock()
1874 wlock = r.wlock()
1875 if r.dirstate.state(name) == 'r':
1875 if r.dirstate[name] == 'r':
1876 r.undelete([name], wlock)
1876 r.undelete([name], wlock)
1877 r.copy(patch, name, wlock)
1877 r.copy(patch, name, wlock)
1878 r.remove([patch], False, wlock)
1878 r.remove([patch], False, wlock)
1879
1879
1880 q.save_dirty()
1880 q.save_dirty()
1881
1881
1882 def restore(ui, repo, rev, **opts):
1882 def restore(ui, repo, rev, **opts):
1883 """restore the queue state saved by a rev"""
1883 """restore the queue state saved by a rev"""
1884 rev = repo.lookup(rev)
1884 rev = repo.lookup(rev)
1885 q = repo.mq
1885 q = repo.mq
1886 q.restore(repo, rev, delete=opts['delete'],
1886 q.restore(repo, rev, delete=opts['delete'],
1887 qupdate=opts['update'])
1887 qupdate=opts['update'])
1888 q.save_dirty()
1888 q.save_dirty()
1889 return 0
1889 return 0
1890
1890
1891 def save(ui, repo, **opts):
1891 def save(ui, repo, **opts):
1892 """save current queue state"""
1892 """save current queue state"""
1893 q = repo.mq
1893 q = repo.mq
1894 message = cmdutil.logmessage(opts)
1894 message = cmdutil.logmessage(opts)
1895 ret = q.save(repo, msg=message)
1895 ret = q.save(repo, msg=message)
1896 if ret:
1896 if ret:
1897 return ret
1897 return ret
1898 q.save_dirty()
1898 q.save_dirty()
1899 if opts['copy']:
1899 if opts['copy']:
1900 path = q.path
1900 path = q.path
1901 if opts['name']:
1901 if opts['name']:
1902 newpath = os.path.join(q.basepath, opts['name'])
1902 newpath = os.path.join(q.basepath, opts['name'])
1903 if os.path.exists(newpath):
1903 if os.path.exists(newpath):
1904 if not os.path.isdir(newpath):
1904 if not os.path.isdir(newpath):
1905 raise util.Abort(_('destination %s exists and is not '
1905 raise util.Abort(_('destination %s exists and is not '
1906 'a directory') % newpath)
1906 'a directory') % newpath)
1907 if not opts['force']:
1907 if not opts['force']:
1908 raise util.Abort(_('destination %s exists, '
1908 raise util.Abort(_('destination %s exists, '
1909 'use -f to force') % newpath)
1909 'use -f to force') % newpath)
1910 else:
1910 else:
1911 newpath = savename(path)
1911 newpath = savename(path)
1912 ui.warn("copy %s to %s\n" % (path, newpath))
1912 ui.warn("copy %s to %s\n" % (path, newpath))
1913 util.copyfiles(path, newpath)
1913 util.copyfiles(path, newpath)
1914 if opts['empty']:
1914 if opts['empty']:
1915 try:
1915 try:
1916 os.unlink(q.join(q.status_path))
1916 os.unlink(q.join(q.status_path))
1917 except:
1917 except:
1918 pass
1918 pass
1919 return 0
1919 return 0
1920
1920
1921 def strip(ui, repo, rev, **opts):
1921 def strip(ui, repo, rev, **opts):
1922 """strip a revision and all later revs on the same branch"""
1922 """strip a revision and all later revs on the same branch"""
1923 rev = repo.lookup(rev)
1923 rev = repo.lookup(rev)
1924 backup = 'all'
1924 backup = 'all'
1925 if opts['backup']:
1925 if opts['backup']:
1926 backup = 'strip'
1926 backup = 'strip'
1927 elif opts['nobackup']:
1927 elif opts['nobackup']:
1928 backup = 'none'
1928 backup = 'none'
1929 update = repo.dirstate.parents()[0] != revlog.nullid
1929 update = repo.dirstate.parents()[0] != revlog.nullid
1930 repo.mq.strip(repo, rev, backup=backup, update=update)
1930 repo.mq.strip(repo, rev, backup=backup, update=update)
1931 return 0
1931 return 0
1932
1932
1933 def select(ui, repo, *args, **opts):
1933 def select(ui, repo, *args, **opts):
1934 '''set or print guarded patches to push
1934 '''set or print guarded patches to push
1935
1935
1936 Use the qguard command to set or print guards on patch, then use
1936 Use the qguard command to set or print guards on patch, then use
1937 qselect to tell mq which guards to use. A patch will be pushed if it
1937 qselect to tell mq which guards to use. A patch will be pushed if it
1938 has no guards or any positive guards match the currently selected guard,
1938 has no guards or any positive guards match the currently selected guard,
1939 but will not be pushed if any negative guards match the current guard.
1939 but will not be pushed if any negative guards match the current guard.
1940 For example:
1940 For example:
1941
1941
1942 qguard foo.patch -stable (negative guard)
1942 qguard foo.patch -stable (negative guard)
1943 qguard bar.patch +stable (positive guard)
1943 qguard bar.patch +stable (positive guard)
1944 qselect stable
1944 qselect stable
1945
1945
1946 This activates the "stable" guard. mq will skip foo.patch (because
1946 This activates the "stable" guard. mq will skip foo.patch (because
1947 it has a negative match) but push bar.patch (because it
1947 it has a negative match) but push bar.patch (because it
1948 has a positive match).
1948 has a positive match).
1949
1949
1950 With no arguments, prints the currently active guards.
1950 With no arguments, prints the currently active guards.
1951 With one argument, sets the active guard.
1951 With one argument, sets the active guard.
1952
1952
1953 Use -n/--none to deactivate guards (no other arguments needed).
1953 Use -n/--none to deactivate guards (no other arguments needed).
1954 When no guards are active, patches with positive guards are skipped
1954 When no guards are active, patches with positive guards are skipped
1955 and patches with negative guards are pushed.
1955 and patches with negative guards are pushed.
1956
1956
1957 qselect can change the guards on applied patches. It does not pop
1957 qselect can change the guards on applied patches. It does not pop
1958 guarded patches by default. Use --pop to pop back to the last applied
1958 guarded patches by default. Use --pop to pop back to the last applied
1959 patch that is not guarded. Use --reapply (which implies --pop) to push
1959 patch that is not guarded. Use --reapply (which implies --pop) to push
1960 back to the current patch afterwards, but skip guarded patches.
1960 back to the current patch afterwards, but skip guarded patches.
1961
1961
1962 Use -s/--series to print a list of all guards in the series file (no
1962 Use -s/--series to print a list of all guards in the series file (no
1963 other arguments needed). Use -v for more information.'''
1963 other arguments needed). Use -v for more information.'''
1964
1964
1965 q = repo.mq
1965 q = repo.mq
1966 guards = q.active()
1966 guards = q.active()
1967 if args or opts['none']:
1967 if args or opts['none']:
1968 old_unapplied = q.unapplied(repo)
1968 old_unapplied = q.unapplied(repo)
1969 old_guarded = [i for i in xrange(len(q.applied)) if
1969 old_guarded = [i for i in xrange(len(q.applied)) if
1970 not q.pushable(i)[0]]
1970 not q.pushable(i)[0]]
1971 q.set_active(args)
1971 q.set_active(args)
1972 q.save_dirty()
1972 q.save_dirty()
1973 if not args:
1973 if not args:
1974 ui.status(_('guards deactivated\n'))
1974 ui.status(_('guards deactivated\n'))
1975 if not opts['pop'] and not opts['reapply']:
1975 if not opts['pop'] and not opts['reapply']:
1976 unapplied = q.unapplied(repo)
1976 unapplied = q.unapplied(repo)
1977 guarded = [i for i in xrange(len(q.applied))
1977 guarded = [i for i in xrange(len(q.applied))
1978 if not q.pushable(i)[0]]
1978 if not q.pushable(i)[0]]
1979 if len(unapplied) != len(old_unapplied):
1979 if len(unapplied) != len(old_unapplied):
1980 ui.status(_('number of unguarded, unapplied patches has '
1980 ui.status(_('number of unguarded, unapplied patches has '
1981 'changed from %d to %d\n') %
1981 'changed from %d to %d\n') %
1982 (len(old_unapplied), len(unapplied)))
1982 (len(old_unapplied), len(unapplied)))
1983 if len(guarded) != len(old_guarded):
1983 if len(guarded) != len(old_guarded):
1984 ui.status(_('number of guarded, applied patches has changed '
1984 ui.status(_('number of guarded, applied patches has changed '
1985 'from %d to %d\n') %
1985 'from %d to %d\n') %
1986 (len(old_guarded), len(guarded)))
1986 (len(old_guarded), len(guarded)))
1987 elif opts['series']:
1987 elif opts['series']:
1988 guards = {}
1988 guards = {}
1989 noguards = 0
1989 noguards = 0
1990 for gs in q.series_guards:
1990 for gs in q.series_guards:
1991 if not gs:
1991 if not gs:
1992 noguards += 1
1992 noguards += 1
1993 for g in gs:
1993 for g in gs:
1994 guards.setdefault(g, 0)
1994 guards.setdefault(g, 0)
1995 guards[g] += 1
1995 guards[g] += 1
1996 if ui.verbose:
1996 if ui.verbose:
1997 guards['NONE'] = noguards
1997 guards['NONE'] = noguards
1998 guards = guards.items()
1998 guards = guards.items()
1999 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1999 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2000 if guards:
2000 if guards:
2001 ui.note(_('guards in series file:\n'))
2001 ui.note(_('guards in series file:\n'))
2002 for guard, count in guards:
2002 for guard, count in guards:
2003 ui.note('%2d ' % count)
2003 ui.note('%2d ' % count)
2004 ui.write(guard, '\n')
2004 ui.write(guard, '\n')
2005 else:
2005 else:
2006 ui.note(_('no guards in series file\n'))
2006 ui.note(_('no guards in series file\n'))
2007 else:
2007 else:
2008 if guards:
2008 if guards:
2009 ui.note(_('active guards:\n'))
2009 ui.note(_('active guards:\n'))
2010 for g in guards:
2010 for g in guards:
2011 ui.write(g, '\n')
2011 ui.write(g, '\n')
2012 else:
2012 else:
2013 ui.write(_('no active guards\n'))
2013 ui.write(_('no active guards\n'))
2014 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2014 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2015 popped = False
2015 popped = False
2016 if opts['pop'] or opts['reapply']:
2016 if opts['pop'] or opts['reapply']:
2017 for i in xrange(len(q.applied)):
2017 for i in xrange(len(q.applied)):
2018 pushable, reason = q.pushable(i)
2018 pushable, reason = q.pushable(i)
2019 if not pushable:
2019 if not pushable:
2020 ui.status(_('popping guarded patches\n'))
2020 ui.status(_('popping guarded patches\n'))
2021 popped = True
2021 popped = True
2022 if i == 0:
2022 if i == 0:
2023 q.pop(repo, all=True)
2023 q.pop(repo, all=True)
2024 else:
2024 else:
2025 q.pop(repo, i-1)
2025 q.pop(repo, i-1)
2026 break
2026 break
2027 if popped:
2027 if popped:
2028 try:
2028 try:
2029 if reapply:
2029 if reapply:
2030 ui.status(_('reapplying unguarded patches\n'))
2030 ui.status(_('reapplying unguarded patches\n'))
2031 q.push(repo, reapply)
2031 q.push(repo, reapply)
2032 finally:
2032 finally:
2033 q.save_dirty()
2033 q.save_dirty()
2034
2034
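The qselect and qguard docstrings above spell out the rule that q.pushable applies: an unguarded patch is always pushable, a negative guard matching an active guard blocks the push, and a patch with positive guards needs at least one of them active (so with no active guards, positively guarded patches are skipped and negatively guarded ones are pushed). A compact standalone restatement of that rule, with invented guard data; this is a sketch of the documented behaviour, not the actual q.pushable implementation:

def pushable(guards, active):
    """Return True if a patch with these guards may be pushed."""
    if not guards:
        return True
    active = set(active)
    if any(g[1:] in active for g in guards if g.startswith('-')):
        return False
    positives = [g[1:] for g in guards if g.startswith('+')]
    if positives and not active:
        return False
    return not positives or any(p in active for p in positives)

print(pushable([], []))                    # True: unguarded patch
print(pushable(['-stable'], ['stable']))   # False: negative guard is active
print(pushable(['+stable'], ['stable']))   # True: positive guard is active
print(pushable(['+stable'], []))           # False: positive guard, none active
print(pushable(['-stable'], []))           # True: only a negative guard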
2035 def reposetup(ui, repo):
2035 def reposetup(ui, repo):
2036 class mqrepo(repo.__class__):
2036 class mqrepo(repo.__class__):
2037 def abort_if_wdir_patched(self, errmsg, force=False):
2037 def abort_if_wdir_patched(self, errmsg, force=False):
2038 if self.mq.applied and not force:
2038 if self.mq.applied and not force:
2039 parent = revlog.hex(self.dirstate.parents()[0])
2039 parent = revlog.hex(self.dirstate.parents()[0])
2040 if parent in [s.rev for s in self.mq.applied]:
2040 if parent in [s.rev for s in self.mq.applied]:
2041 raise util.Abort(errmsg)
2041 raise util.Abort(errmsg)
2042
2042
2043 def commit(self, *args, **opts):
2043 def commit(self, *args, **opts):
2044 if len(args) >= 6:
2044 if len(args) >= 6:
2045 force = args[5]
2045 force = args[5]
2046 else:
2046 else:
2047 force = opts.get('force')
2047 force = opts.get('force')
2048 self.abort_if_wdir_patched(
2048 self.abort_if_wdir_patched(
2049 _('cannot commit over an applied mq patch'),
2049 _('cannot commit over an applied mq patch'),
2050 force)
2050 force)
2051
2051
2052 return super(mqrepo, self).commit(*args, **opts)
2052 return super(mqrepo, self).commit(*args, **opts)
2053
2053
2054 def push(self, remote, force=False, revs=None):
2054 def push(self, remote, force=False, revs=None):
2055 if self.mq.applied and not force and not revs:
2055 if self.mq.applied and not force and not revs:
2056 raise util.Abort(_('source has mq patches applied'))
2056 raise util.Abort(_('source has mq patches applied'))
2057 return super(mqrepo, self).push(remote, force, revs)
2057 return super(mqrepo, self).push(remote, force, revs)
2058
2058
2059 def tags(self):
2059 def tags(self):
2060 if self.tagscache:
2060 if self.tagscache:
2061 return self.tagscache
2061 return self.tagscache
2062
2062
2063 tagscache = super(mqrepo, self).tags()
2063 tagscache = super(mqrepo, self).tags()
2064
2064
2065 q = self.mq
2065 q = self.mq
2066 if not q.applied:
2066 if not q.applied:
2067 return tagscache
2067 return tagscache
2068
2068
2069 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2069 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2070 mqtags.append((mqtags[-1][0], 'qtip'))
2070 mqtags.append((mqtags[-1][0], 'qtip'))
2071 mqtags.append((mqtags[0][0], 'qbase'))
2071 mqtags.append((mqtags[0][0], 'qbase'))
2072 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2072 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2073 for patch in mqtags:
2073 for patch in mqtags:
2074 if patch[1] in tagscache:
2074 if patch[1] in tagscache:
2075 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2075 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2076 else:
2076 else:
2077 tagscache[patch[1]] = patch[0]
2077 tagscache[patch[1]] = patch[0]
2078
2078
2079 return tagscache
2079 return tagscache
2080
2080
2081 def _branchtags(self):
2081 def _branchtags(self):
2082 q = self.mq
2082 q = self.mq
2083 if not q.applied:
2083 if not q.applied:
2084 return super(mqrepo, self)._branchtags()
2084 return super(mqrepo, self)._branchtags()
2085
2085
2086 self.branchcache = {} # avoid recursion in changectx
2086 self.branchcache = {} # avoid recursion in changectx
2087 cl = self.changelog
2087 cl = self.changelog
2088 partial, last, lrev = self._readbranchcache()
2088 partial, last, lrev = self._readbranchcache()
2089
2089
2090 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2090 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2091 start = lrev + 1
2091 start = lrev + 1
2092 if start < qbase:
2092 if start < qbase:
2093 # update the cache (excluding the patches) and save it
2093 # update the cache (excluding the patches) and save it
2094 self._updatebranchcache(partial, lrev+1, qbase)
2094 self._updatebranchcache(partial, lrev+1, qbase)
2095 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2095 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2096 start = qbase
2096 start = qbase
2097 # if start = qbase, the cache is as updated as it should be.
2097 # if start = qbase, the cache is as updated as it should be.
2098 # if start > qbase, the cache includes (part of) the patches.
2098 # if start > qbase, the cache includes (part of) the patches.
2099 # we might as well use it, but we won't save it.
2099 # we might as well use it, but we won't save it.
2100
2100
2101 # update the cache up to the tip
2101 # update the cache up to the tip
2102 self._updatebranchcache(partial, start, cl.count())
2102 self._updatebranchcache(partial, start, cl.count())
2103
2103
2104 return partial
2104 return partial
2105
2105
2106 if repo.local():
2106 if repo.local():
2107 repo.__class__ = mqrepo
2107 repo.__class__ = mqrepo
2108 repo.mq = queue(ui, repo.join(""))
2108 repo.mq = queue(ui, repo.join(""))
2109
2109
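The tags() override above layers virtual tags over the real ones: one tag per applied patch plus qtip, qbase and qparent, with a pre-existing repository tag always winning. A standalone sketch of the same layering on invented data, using plain strings where the real code uses binary node hashes:

applied = [("n3", "first.patch"), ("n4", "second.patch")]   # (node, name) pairs
changelog_parent = {"n3": "n2"}                             # parent of qbase's node
tags = {"tip": "n4", "second.patch": "n9"}                  # pre-existing repo tags

mqtags = [(node, name) for node, name in applied]
mqtags.append((mqtags[-1][0], 'qtip'))
mqtags.append((mqtags[0][0], 'qbase'))
mqtags.append((changelog_parent[mqtags[0][0]], 'qparent'))

for node, name in mqtags:
    if name in tags:
        print('Tag %s overrides mq patch of the same name' % name)
    else:
        tags[name] = node

print(sorted(tags.items()))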
2110 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2110 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2111
2111
2112 cmdtable = {
2112 cmdtable = {
2113 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2113 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2114 "qclone":
2114 "qclone":
2115 (clone,
2115 (clone,
2116 [('', 'pull', None, _('use pull protocol to copy metadata')),
2116 [('', 'pull', None, _('use pull protocol to copy metadata')),
2117 ('U', 'noupdate', None, _('do not update the new working directories')),
2117 ('U', 'noupdate', None, _('do not update the new working directories')),
2118 ('', 'uncompressed', None,
2118 ('', 'uncompressed', None,
2119 _('use uncompressed transfer (fast over LAN)')),
2119 _('use uncompressed transfer (fast over LAN)')),
2120 ('e', 'ssh', '', _('specify ssh command to use')),
2120 ('e', 'ssh', '', _('specify ssh command to use')),
2121 ('p', 'patches', '', _('location of source patch repo')),
2121 ('p', 'patches', '', _('location of source patch repo')),
2122 ('', 'remotecmd', '',
2122 ('', 'remotecmd', '',
2123 _('specify hg command to run on the remote side'))],
2123 _('specify hg command to run on the remote side'))],
2124 _('hg qclone [OPTION]... SOURCE [DEST]')),
2124 _('hg qclone [OPTION]... SOURCE [DEST]')),
2125 "qcommit|qci":
2125 "qcommit|qci":
2126 (commit,
2126 (commit,
2127 commands.table["^commit|ci"][1],
2127 commands.table["^commit|ci"][1],
2128 _('hg qcommit [OPTION]... [FILE]...')),
2128 _('hg qcommit [OPTION]... [FILE]...')),
2129 "^qdiff":
2129 "^qdiff":
2130 (diff,
2130 (diff,
2131 [('g', 'git', None, _('use git extended diff format')),
2131 [('g', 'git', None, _('use git extended diff format')),
2132 ('I', 'include', [], _('include names matching the given patterns')),
2132 ('I', 'include', [], _('include names matching the given patterns')),
2133 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2133 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2134 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2134 _('hg qdiff [-I] [-X] [-g] [FILE]...')),
2135 "qdelete|qremove|qrm":
2135 "qdelete|qremove|qrm":
2136 (delete,
2136 (delete,
2137 [('k', 'keep', None, _('keep patch file')),
2137 [('k', 'keep', None, _('keep patch file')),
2138 ('r', 'rev', [], _('stop managing a revision'))],
2138 ('r', 'rev', [], _('stop managing a revision'))],
2139 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2139 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2140 'qfold':
2140 'qfold':
2141 (fold,
2141 (fold,
2142 [('e', 'edit', None, _('edit patch header')),
2142 [('e', 'edit', None, _('edit patch header')),
2143 ('k', 'keep', None, _('keep folded patch files')),
2143 ('k', 'keep', None, _('keep folded patch files')),
2144 ] + commands.commitopts,
2144 ] + commands.commitopts,
2145 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2145 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2146 'qgoto':
2146 'qgoto':
2147 (goto,
2147 (goto,
2148 [('f', 'force', None, _('overwrite any local changes'))],
2148 [('f', 'force', None, _('overwrite any local changes'))],
2149 _('hg qgoto [OPTION]... PATCH')),
2149 _('hg qgoto [OPTION]... PATCH')),
2150 'qguard':
2150 'qguard':
2151 (guard,
2151 (guard,
2152 [('l', 'list', None, _('list all patches and guards')),
2152 [('l', 'list', None, _('list all patches and guards')),
2153 ('n', 'none', None, _('drop all guards'))],
2153 ('n', 'none', None, _('drop all guards'))],
2154 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2154 _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
2155 'qheader': (header, [], _('hg qheader [PATCH]')),
2155 'qheader': (header, [], _('hg qheader [PATCH]')),
2156 "^qimport":
2156 "^qimport":
2157 (qimport,
2157 (qimport,
2158 [('e', 'existing', None, 'import file in patch dir'),
2158 [('e', 'existing', None, 'import file in patch dir'),
2159 ('n', 'name', '', 'patch file name'),
2159 ('n', 'name', '', 'patch file name'),
2160 ('f', 'force', None, 'overwrite existing files'),
2160 ('f', 'force', None, 'overwrite existing files'),
2161 ('r', 'rev', [], 'place existing revisions under mq control'),
2161 ('r', 'rev', [], 'place existing revisions under mq control'),
2162 ('g', 'git', None, _('use git extended diff format'))],
2162 ('g', 'git', None, _('use git extended diff format'))],
2163 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2163 _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
2164 "^qinit":
2164 "^qinit":
2165 (init,
2165 (init,
2166 [('c', 'create-repo', None, 'create queue repository')],
2166 [('c', 'create-repo', None, 'create queue repository')],
2167 _('hg qinit [-c]')),
2167 _('hg qinit [-c]')),
2168 "qnew":
2168 "qnew":
2169 (new,
2169 (new,
2170 [('e', 'edit', None, _('edit commit message')),
2170 [('e', 'edit', None, _('edit commit message')),
2171 ('f', 'force', None, _('import uncommitted changes into patch')),
2171 ('f', 'force', None, _('import uncommitted changes into patch')),
2172 ('I', 'include', [], _('include names matching the given patterns')),
2172 ('I', 'include', [], _('include names matching the given patterns')),
2173 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2173 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2174 ] + commands.commitopts,
2174 ] + commands.commitopts,
2175 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2175 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2176 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2176 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2177 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2177 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2178 "^qpop":
2178 "^qpop":
2179 (pop,
2179 (pop,
2180 [('a', 'all', None, _('pop all patches')),
2180 [('a', 'all', None, _('pop all patches')),
2181 ('n', 'name', '', _('queue name to pop')),
2181 ('n', 'name', '', _('queue name to pop')),
2182 ('f', 'force', None, _('forget any local changes'))],
2182 ('f', 'force', None, _('forget any local changes'))],
2183 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2183 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2184 "^qpush":
2184 "^qpush":
2185 (push,
2185 (push,
2186 [('f', 'force', None, _('apply if the patch has rejects')),
2186 [('f', 'force', None, _('apply if the patch has rejects')),
2187 ('l', 'list', None, _('list patch name in commit text')),
2187 ('l', 'list', None, _('list patch name in commit text')),
2188 ('a', 'all', None, _('apply all patches')),
2188 ('a', 'all', None, _('apply all patches')),
2189 ('m', 'merge', None, _('merge from another queue')),
2189 ('m', 'merge', None, _('merge from another queue')),
2190 ('n', 'name', '', _('merge queue name'))],
2190 ('n', 'name', '', _('merge queue name'))],
2191 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2191 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2192 "^qrefresh":
2192 "^qrefresh":
2193 (refresh,
2193 (refresh,
2194 [('e', 'edit', None, _('edit commit message')),
2194 [('e', 'edit', None, _('edit commit message')),
2195 ('g', 'git', None, _('use git extended diff format')),
2195 ('g', 'git', None, _('use git extended diff format')),
2196 ('s', 'short', None, _('refresh only files already in the patch')),
2196 ('s', 'short', None, _('refresh only files already in the patch')),
2197 ('I', 'include', [], _('include names matching the given patterns')),
2197 ('I', 'include', [], _('include names matching the given patterns')),
2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2198 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2199 ] + commands.commitopts,
2199 ] + commands.commitopts,
2200 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2200 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2201 'qrename|qmv':
2201 'qrename|qmv':
2202 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2202 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2203 "qrestore":
2203 "qrestore":
2204 (restore,
2204 (restore,
2205 [('d', 'delete', None, _('delete save entry')),
2205 [('d', 'delete', None, _('delete save entry')),
2206 ('u', 'update', None, _('update queue working dir'))],
2206 ('u', 'update', None, _('update queue working dir'))],
2207 _('hg qrestore [-d] [-u] REV')),
2207 _('hg qrestore [-d] [-u] REV')),
2208 "qsave":
2208 "qsave":
2209 (save,
2209 (save,
2210 [('c', 'copy', None, _('copy patch directory')),
2210 [('c', 'copy', None, _('copy patch directory')),
2211 ('n', 'name', '', _('copy directory name')),
2211 ('n', 'name', '', _('copy directory name')),
2212 ('e', 'empty', None, _('clear queue status file')),
2212 ('e', 'empty', None, _('clear queue status file')),
2213 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2213 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2214 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2214 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2215 "qselect":
2215 "qselect":
2216 (select,
2216 (select,
2217 [('n', 'none', None, _('disable all guards')),
2217 [('n', 'none', None, _('disable all guards')),
2218 ('s', 'series', None, _('list all guards in series file')),
2218 ('s', 'series', None, _('list all guards in series file')),
2219 ('', 'pop', None, _('pop to before first guarded applied patch')),
2219 ('', 'pop', None, _('pop to before first guarded applied patch')),
2220 ('', 'reapply', None, _('pop, then reapply patches'))],
2220 ('', 'reapply', None, _('pop, then reapply patches'))],
2221 _('hg qselect [OPTION]... [GUARD]...')),
2221 _('hg qselect [OPTION]... [GUARD]...')),
2222 "qseries":
2222 "qseries":
2223 (series,
2223 (series,
2224 [('m', 'missing', None, _('print patches not in series')),
2224 [('m', 'missing', None, _('print patches not in series')),
2225 ] + seriesopts,
2225 ] + seriesopts,
2226 _('hg qseries [-ms]')),
2226 _('hg qseries [-ms]')),
2227 "^strip":
2227 "^strip":
2228 (strip,
2228 (strip,
2229 [('f', 'force', None, _('force multi-head removal')),
2229 [('f', 'force', None, _('force multi-head removal')),
2230 ('b', 'backup', None, _('bundle unrelated changesets')),
2230 ('b', 'backup', None, _('bundle unrelated changesets')),
2231 ('n', 'nobackup', None, _('no backups'))],
2231 ('n', 'nobackup', None, _('no backups'))],
2232 _('hg strip [-f] [-b] [-n] REV')),
2232 _('hg strip [-f] [-b] [-n] REV')),
2233 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2233 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2234 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2234 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2235 }
2235 }
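Each cmdtable entry maps a command key (aliases separated by '|', a leading '^' marking it for the short help list) to a (function, options, synopsis) triple, where every option is a (short flag, long name, default, help) tuple. A minimal sketch of the same shape with one invented command, purely to make the structure explicit; 'hello' is not part of mq:

def hello(ui, repo, name="world", **opts):
    """print a greeting (illustrative only, not part of mq)"""
    ui.write("hello, %s\n" % name)

cmdtable_example = {
    # '^' puts the command in the short help list; 'hi' is an alias.
    "^hello|hi": (
        hello,
        [('n', 'name', 'world', 'who to greet')],
        'hg hello [-n NAME]',
    ),
}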
@@ -1,1278 +1,1278 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
10 import os, sys, atexit, signal, pdb, traceback, socket, errno, shlex
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
11 import mdiff, bdiff, util, templater, patch, commands, hg, lock, time
12 import fancyopts, revlog, version, extensions, hook
12 import fancyopts, revlog, version, extensions, hook
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 class UnknownCommand(Exception):
16 class UnknownCommand(Exception):
17 """Exception raised if command is not in the command table."""
17 """Exception raised if command is not in the command table."""
18 class AmbiguousCommand(Exception):
18 class AmbiguousCommand(Exception):
19 """Exception raised if command shortcut matches more than one command."""
19 """Exception raised if command shortcut matches more than one command."""
20 class ParseError(Exception):
20 class ParseError(Exception):
21 """Exception raised on errors in parsing the command line."""
21 """Exception raised on errors in parsing the command line."""
22
22
23 def runcatch(ui, args, argv0=None):
23 def runcatch(ui, args, argv0=None):
24 def catchterm(*args):
24 def catchterm(*args):
25 raise util.SignalInterrupt
25 raise util.SignalInterrupt
26
26
27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
27 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
28 num = getattr(signal, name, None)
28 num = getattr(signal, name, None)
29 if num: signal.signal(num, catchterm)
29 if num: signal.signal(num, catchterm)
30
30
31 try:
31 try:
32 try:
32 try:
33 # enter the debugger before command execution
33 # enter the debugger before command execution
34 if '--debugger' in args:
34 if '--debugger' in args:
35 pdb.set_trace()
35 pdb.set_trace()
36 try:
36 try:
37 return dispatch(ui, args, argv0=argv0)
37 return dispatch(ui, args, argv0=argv0)
38 finally:
38 finally:
39 ui.flush()
39 ui.flush()
40 except:
40 except:
41 # enter the debugger when we hit an exception
41 # enter the debugger when we hit an exception
42 if '--debugger' in args:
42 if '--debugger' in args:
43 pdb.post_mortem(sys.exc_info()[2])
43 pdb.post_mortem(sys.exc_info()[2])
44 ui.print_exc()
44 ui.print_exc()
45 raise
45 raise
46
46
47 except ParseError, inst:
47 except ParseError, inst:
48 if inst.args[0]:
48 if inst.args[0]:
49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
49 ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
50 commands.help_(ui, inst.args[0])
50 commands.help_(ui, inst.args[0])
51 else:
51 else:
52 ui.warn(_("hg: %s\n") % inst.args[1])
52 ui.warn(_("hg: %s\n") % inst.args[1])
53 commands.help_(ui, 'shortlist')
53 commands.help_(ui, 'shortlist')
54 except AmbiguousCommand, inst:
54 except AmbiguousCommand, inst:
55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
55 ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
56 (inst.args[0], " ".join(inst.args[1])))
56 (inst.args[0], " ".join(inst.args[1])))
57 except UnknownCommand, inst:
57 except UnknownCommand, inst:
58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
58 ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
59 commands.help_(ui, 'shortlist')
59 commands.help_(ui, 'shortlist')
60 except hg.RepoError, inst:
60 except hg.RepoError, inst:
61 ui.warn(_("abort: %s!\n") % inst)
61 ui.warn(_("abort: %s!\n") % inst)
62 except lock.LockHeld, inst:
62 except lock.LockHeld, inst:
63 if inst.errno == errno.ETIMEDOUT:
63 if inst.errno == errno.ETIMEDOUT:
64 reason = _('timed out waiting for lock held by %s') % inst.locker
64 reason = _('timed out waiting for lock held by %s') % inst.locker
65 else:
65 else:
66 reason = _('lock held by %s') % inst.locker
66 reason = _('lock held by %s') % inst.locker
67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
67 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
68 except lock.LockUnavailable, inst:
68 except lock.LockUnavailable, inst:
69 ui.warn(_("abort: could not lock %s: %s\n") %
69 ui.warn(_("abort: could not lock %s: %s\n") %
70 (inst.desc or inst.filename, inst.strerror))
70 (inst.desc or inst.filename, inst.strerror))
71 except revlog.RevlogError, inst:
71 except revlog.RevlogError, inst:
72 ui.warn(_("abort: %s!\n") % inst)
72 ui.warn(_("abort: %s!\n") % inst)
73 except util.SignalInterrupt:
73 except util.SignalInterrupt:
74 ui.warn(_("killed!\n"))
74 ui.warn(_("killed!\n"))
75 except KeyboardInterrupt:
75 except KeyboardInterrupt:
76 try:
76 try:
77 ui.warn(_("interrupted!\n"))
77 ui.warn(_("interrupted!\n"))
78 except IOError, inst:
78 except IOError, inst:
79 if inst.errno == errno.EPIPE:
79 if inst.errno == errno.EPIPE:
80 if ui.debugflag:
80 if ui.debugflag:
81 ui.warn(_("\nbroken pipe\n"))
81 ui.warn(_("\nbroken pipe\n"))
82 else:
82 else:
83 raise
83 raise
84 except socket.error, inst:
84 except socket.error, inst:
85 ui.warn(_("abort: %s\n") % inst[1])
85 ui.warn(_("abort: %s\n") % inst[1])
86 except IOError, inst:
86 except IOError, inst:
87 if hasattr(inst, "code"):
87 if hasattr(inst, "code"):
88 ui.warn(_("abort: %s\n") % inst)
88 ui.warn(_("abort: %s\n") % inst)
89 elif hasattr(inst, "reason"):
89 elif hasattr(inst, "reason"):
90 try: # usually it is in the form (errno, strerror)
90 try: # usually it is in the form (errno, strerror)
91 reason = inst.reason.args[1]
91 reason = inst.reason.args[1]
92 except: # it might be anything, for example a string
92 except: # it might be anything, for example a string
93 reason = inst.reason
93 reason = inst.reason
94 ui.warn(_("abort: error: %s\n") % reason)
94 ui.warn(_("abort: error: %s\n") % reason)
95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
95 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
96 if ui.debugflag:
96 if ui.debugflag:
97 ui.warn(_("broken pipe\n"))
97 ui.warn(_("broken pipe\n"))
98 elif getattr(inst, "strerror", None):
98 elif getattr(inst, "strerror", None):
99 if getattr(inst, "filename", None):
99 if getattr(inst, "filename", None):
100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
100 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
101 else:
101 else:
102 ui.warn(_("abort: %s\n") % inst.strerror)
102 ui.warn(_("abort: %s\n") % inst.strerror)
103 else:
103 else:
104 raise
104 raise
105 except OSError, inst:
105 except OSError, inst:
106 if getattr(inst, "filename", None):
106 if getattr(inst, "filename", None):
107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
107 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
108 else:
108 else:
109 ui.warn(_("abort: %s\n") % inst.strerror)
109 ui.warn(_("abort: %s\n") % inst.strerror)
110 except util.UnexpectedOutput, inst:
110 except util.UnexpectedOutput, inst:
111 ui.warn(_("abort: %s") % inst[0])
111 ui.warn(_("abort: %s") % inst[0])
112 if not isinstance(inst[1], basestring):
112 if not isinstance(inst[1], basestring):
113 ui.warn(" %r\n" % (inst[1],))
113 ui.warn(" %r\n" % (inst[1],))
114 elif not inst[1]:
114 elif not inst[1]:
115 ui.warn(_(" empty string\n"))
115 ui.warn(_(" empty string\n"))
116 else:
116 else:
117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
117 ui.warn("\n%r\n" % util.ellipsis(inst[1]))
118 except ImportError, inst:
118 except ImportError, inst:
119 m = str(inst).split()[-1]
119 m = str(inst).split()[-1]
120 ui.warn(_("abort: could not import module %s!\n") % m)
120 ui.warn(_("abort: could not import module %s!\n") % m)
121 if m in "mpatch bdiff".split():
121 if m in "mpatch bdiff".split():
122 ui.warn(_("(did you forget to compile extensions?)\n"))
122 ui.warn(_("(did you forget to compile extensions?)\n"))
123 elif m in "zlib".split():
123 elif m in "zlib".split():
124 ui.warn(_("(is your Python install correct?)\n"))
124 ui.warn(_("(is your Python install correct?)\n"))
125
125
126 except util.Abort, inst:
126 except util.Abort, inst:
127 ui.warn(_("abort: %s\n") % inst)
127 ui.warn(_("abort: %s\n") % inst)
128 except SystemExit, inst:
128 except SystemExit, inst:
129 # Commands shouldn't sys.exit directly, but give a return code.
129 # Commands shouldn't sys.exit directly, but give a return code.
130 # Just in case, catch this and pass the exit code to the caller.
130 # Just in case, catch this and pass the exit code to the caller.
131 return inst.code
131 return inst.code
132 except:
132 except:
133 ui.warn(_("** unknown exception encountered, details follow\n"))
133 ui.warn(_("** unknown exception encountered, details follow\n"))
134 ui.warn(_("** report bug details to "
134 ui.warn(_("** report bug details to "
135 "http://www.selenic.com/mercurial/bts\n"))
135 "http://www.selenic.com/mercurial/bts\n"))
136 ui.warn(_("** or mercurial@selenic.com\n"))
136 ui.warn(_("** or mercurial@selenic.com\n"))
137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
137 ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
138 % version.get_version())
138 % version.get_version())
139 raise
139 raise
140
140
141 return -1
141 return -1
142
142
143 def findpossible(ui, cmd):
143 def findpossible(ui, cmd):
144 """
144 """
145 Return a dict mapping cmd -> (aliases, command table entry)
145 Return a dict mapping cmd -> (aliases, command table entry)
146 for each matching command.
146 for each matching command.
147 Return debug commands (or their aliases) only if no normal command matches.
147 Return debug commands (or their aliases) only if no normal command matches.
148 """
148 """
149 choice = {}
149 choice = {}
150 debugchoice = {}
150 debugchoice = {}
151 for e in commands.table.keys():
151 for e in commands.table.keys():
152 aliases = e.lstrip("^").split("|")
152 aliases = e.lstrip("^").split("|")
153 found = None
153 found = None
154 if cmd in aliases:
154 if cmd in aliases:
155 found = cmd
155 found = cmd
156 elif not ui.config("ui", "strict"):
156 elif not ui.config("ui", "strict"):
157 for a in aliases:
157 for a in aliases:
158 if a.startswith(cmd):
158 if a.startswith(cmd):
159 found = a
159 found = a
160 break
160 break
161 if found is not None:
161 if found is not None:
162 if aliases[0].startswith("debug") or found.startswith("debug"):
162 if aliases[0].startswith("debug") or found.startswith("debug"):
163 debugchoice[found] = (aliases, commands.table[e])
163 debugchoice[found] = (aliases, commands.table[e])
164 else:
164 else:
165 choice[found] = (aliases, commands.table[e])
165 choice[found] = (aliases, commands.table[e])
166
166
167 if not choice and debugchoice:
167 if not choice and debugchoice:
168 choice = debugchoice
168 choice = debugchoice
169
169
170 return choice
170 return choice
171
171
172 def findcmd(ui, cmd):
172 def findcmd(ui, cmd):
173 """Return (aliases, command table entry) for command string."""
173 """Return (aliases, command table entry) for command string."""
174 choice = findpossible(ui, cmd)
174 choice = findpossible(ui, cmd)
175
175
176 if choice.has_key(cmd):
176 if choice.has_key(cmd):
177 return choice[cmd]
177 return choice[cmd]
178
178
179 if len(choice) > 1:
179 if len(choice) > 1:
180 clist = choice.keys()
180 clist = choice.keys()
181 clist.sort()
181 clist.sort()
182 raise AmbiguousCommand(cmd, clist)
182 raise AmbiguousCommand(cmd, clist)
183
183
184 if choice:
184 if choice:
185 return choice.values()[0]
185 return choice.values()[0]
186
186
187 raise UnknownCommand(cmd)
187 raise UnknownCommand(cmd)
188
188
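# Illustrative sketch (hypothetical command table): with entries "^commit|ci" and
# "copy|cp", findcmd(ui, "ci") matches the alias exactly, findcmd(ui, "com") resolves
# by unambiguous prefix to "commit", and findcmd(ui, "c") raises AmbiguousCommand
# because several aliases share that prefix.  Prefix matching is skipped entirely when
# the ui.strict config option is set, and debug commands are only offered when no
# normal command matches.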
189 def findrepo():
189 def findrepo():
190 p = os.getcwd()
190 p = os.getcwd()
191 while not os.path.isdir(os.path.join(p, ".hg")):
191 while not os.path.isdir(os.path.join(p, ".hg")):
192 oldp, p = p, os.path.dirname(p)
192 oldp, p = p, os.path.dirname(p)
193 if p == oldp:
193 if p == oldp:
194 return None
194 return None
195
195
196 return p
196 return p
197
197
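# Illustrative walk (hypothetical paths): starting from /home/alice/project/src,
# findrepo() tests src, then project; if /home/alice/project/.hg exists it returns
# "/home/alice/project".  If the walk reaches the filesystem root without finding a
# .hg directory, os.path.dirname() stops changing the path and None is returned.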
198 def parse(ui, args):
198 def parse(ui, args):
199 options = {}
199 options = {}
200 cmdoptions = {}
200 cmdoptions = {}
201
201
202 try:
202 try:
203 args = fancyopts.fancyopts(args, commands.globalopts, options)
203 args = fancyopts.fancyopts(args, commands.globalopts, options)
204 except fancyopts.getopt.GetoptError, inst:
204 except fancyopts.getopt.GetoptError, inst:
205 raise ParseError(None, inst)
205 raise ParseError(None, inst)
206
206
207 if args:
207 if args:
208 cmd, args = args[0], args[1:]
208 cmd, args = args[0], args[1:]
209 aliases, i = findcmd(ui, cmd)
209 aliases, i = findcmd(ui, cmd)
210 cmd = aliases[0]
210 cmd = aliases[0]
211 defaults = ui.config("defaults", cmd)
211 defaults = ui.config("defaults", cmd)
212 if defaults:
212 if defaults:
213 args = shlex.split(defaults) + args
213 args = shlex.split(defaults) + args
214 c = list(i[1])
214 c = list(i[1])
215 else:
215 else:
216 cmd = None
216 cmd = None
217 c = []
217 c = []
218
218
219 # combine global options into local
219 # combine global options into local
220 for o in commands.globalopts:
220 for o in commands.globalopts:
221 c.append((o[0], o[1], options[o[1]], o[3]))
221 c.append((o[0], o[1], options[o[1]], o[3]))
222
222
223 try:
223 try:
224 args = fancyopts.fancyopts(args, c, cmdoptions)
224 args = fancyopts.fancyopts(args, c, cmdoptions)
225 except fancyopts.getopt.GetoptError, inst:
225 except fancyopts.getopt.GetoptError, inst:
226 raise ParseError(cmd, inst)
226 raise ParseError(cmd, inst)
227
227
228 # separate global options back out
228 # separate global options back out
229 for o in commands.globalopts:
229 for o in commands.globalopts:
230 n = o[1]
230 n = o[1]
231 options[n] = cmdoptions[n]
231 options[n] = cmdoptions[n]
232 del cmdoptions[n]
232 del cmdoptions[n]
233
233
234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
234 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
235
235
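# Sketch of the [defaults] expansion above (hypothetical hgrc contents):
#
#   [defaults]
#   log = -l 10
#
# makes "hg log -v" parse as if "hg log -l 10 -v" had been typed: ui.config("defaults",
# "log") returns "-l 10", which shlex.split() turns into ["-l", "10"] and prepends to
# the remaining arguments before the per-command options are parsed.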
236 def parseconfig(config):
236 def parseconfig(config):
237 """parse the --config options from the command line"""
237 """parse the --config options from the command line"""
238 parsed = []
238 parsed = []
239 for cfg in config:
239 for cfg in config:
240 try:
240 try:
241 name, value = cfg.split('=', 1)
241 name, value = cfg.split('=', 1)
242 section, name = name.split('.', 1)
242 section, name = name.split('.', 1)
243 if not section or not name:
243 if not section or not name:
244 raise IndexError
244 raise IndexError
245 parsed.append((section, name, value))
245 parsed.append((section, name, value))
246 except (IndexError, ValueError):
246 except (IndexError, ValueError):
247 raise util.Abort(_('malformed --config option: %s') % cfg)
247 raise util.Abort(_('malformed --config option: %s') % cfg)
248 return parsed
248 return parsed
249
249
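# Usage sketch (hypothetical values):
#   parseconfig(["ui.username=alice", "diff.git=1"])
#     -> [("ui", "username", "alice"), ("diff", "git", "1")]
# An argument without a section part, e.g. "username=alice", fails the
# name.split(".", 1) unpacking and is reported as a malformed --config option.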
250 def earlygetopt(aliases, args):
250 def earlygetopt(aliases, args):
251 """Return list of values for an option (or aliases).
251 """Return list of values for an option (or aliases).
252
252
253 The values are listed in the order they appear in args.
253 The values are listed in the order they appear in args.
254 The options and values are removed from args.
254 The options and values are removed from args.
255 """
255 """
256 try:
256 try:
257 argcount = args.index("--")
257 argcount = args.index("--")
258 except ValueError:
258 except ValueError:
259 argcount = len(args)
259 argcount = len(args)
260 shortopts = [opt for opt in aliases if len(opt) == 2]
260 shortopts = [opt for opt in aliases if len(opt) == 2]
261 values = []
261 values = []
262 pos = 0
262 pos = 0
263 while pos < argcount:
263 while pos < argcount:
264 if args[pos] in aliases:
264 if args[pos] in aliases:
265 if pos + 1 >= argcount:
265 if pos + 1 >= argcount:
266 # ignore and let getopt report an error if there is no value
266 # ignore and let getopt report an error if there is no value
267 break
267 break
268 del args[pos]
268 del args[pos]
269 values.append(args.pop(pos))
269 values.append(args.pop(pos))
270 argcount -= 2
270 argcount -= 2
271 elif args[pos][:2] in shortopts:
271 elif args[pos][:2] in shortopts:
272 # short option can have no following space, e.g. hg log -Rfoo
272 # short option can have no following space, e.g. hg log -Rfoo
273 values.append(args.pop(pos)[2:])
273 values.append(args.pop(pos)[2:])
274 argcount -= 1
274 argcount -= 1
275 else:
275 else:
276 pos += 1
276 pos += 1
277 return values
277 return values
278
278
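# Worked example (hypothetical argument list):
#   args = ["-R", "foo", "log", "-Rbar"]
#   earlygetopt(["-R", "--repository", "--repo"], args)  ->  ["foo", "bar"]
# and args is left as ["log"]; both the spaced form "-R foo" and the glued short
# form "-Rbar" are consumed, while anything after a bare "--" is never touched.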
279 def dispatch(ui, args, argv0=None):
279 def dispatch(ui, args, argv0=None):
280 # remember how to call 'hg' before changing the working dir
280 # remember how to call 'hg' before changing the working dir
281 util.set_hgexecutable(argv0)
281 util.set_hgexecutable(argv0)
282
282
283 # read --config before doing anything else
283 # read --config before doing anything else
284 # (e.g. to change trust settings for reading .hg/hgrc)
284 # (e.g. to change trust settings for reading .hg/hgrc)
285 config = earlygetopt(['--config'], args)
285 config = earlygetopt(['--config'], args)
286 if config:
286 if config:
287 ui.updateopts(config=parseconfig(config))
287 ui.updateopts(config=parseconfig(config))
288
288
289 # check for cwd
289 # check for cwd
290 cwd = earlygetopt(['--cwd'], args)
290 cwd = earlygetopt(['--cwd'], args)
291 if cwd:
291 if cwd:
292 os.chdir(cwd[-1])
292 os.chdir(cwd[-1])
293
293
294 # read the local repository .hgrc into a local ui object
294 # read the local repository .hgrc into a local ui object
295 path = findrepo() or ""
295 path = findrepo() or ""
296 if not path:
296 if not path:
297 lui = ui
297 lui = ui
298 if path:
298 if path:
299 try:
299 try:
300 lui = commands.ui.ui(parentui=ui)
300 lui = commands.ui.ui(parentui=ui)
301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
301 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
302 except IOError:
302 except IOError:
303 pass
303 pass
304
304
305 # now we can expand paths, even ones in .hg/hgrc
305 # now we can expand paths, even ones in .hg/hgrc
306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
306 rpath = earlygetopt(["-R", "--repository", "--repo"], args)
307 if rpath:
307 if rpath:
308 path = lui.expandpath(rpath[-1])
308 path = lui.expandpath(rpath[-1])
309 lui = commands.ui.ui(parentui=ui)
309 lui = commands.ui.ui(parentui=ui)
310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
310 lui.readconfig(os.path.join(path, ".hg", "hgrc"))
311
311
312 extensions.loadall(lui)
312 extensions.loadall(lui)
313 # check for fallback encoding
313 # check for fallback encoding
314 fallback = lui.config('ui', 'fallbackencoding')
314 fallback = lui.config('ui', 'fallbackencoding')
315 if fallback:
315 if fallback:
316 util._fallbackencoding = fallback
316 util._fallbackencoding = fallback
317
317
318 fullargs = args
318 fullargs = args
319 cmd, func, args, options, cmdoptions = parse(ui, args)
319 cmd, func, args, options, cmdoptions = parse(ui, args)
320
320
321 if options["config"]:
321 if options["config"]:
322 raise util.Abort(_("Option --config may not be abbreviated!"))
322 raise util.Abort(_("Option --config may not be abbreviated!"))
323 if options["cwd"]:
323 if options["cwd"]:
324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
324 raise util.Abort(_("Option --cwd may not be abbreviated!"))
325 if options["repository"]:
325 if options["repository"]:
326 raise util.Abort(_(
326 raise util.Abort(_(
327 "Option -R has to be separated from other options (i.e. not -qR) "
327 "Option -R has to be separated from other options (i.e. not -qR) "
328 "and --repository may only be abbreviated as --repo!"))
328 "and --repository may only be abbreviated as --repo!"))
329
329
330 if options["encoding"]:
330 if options["encoding"]:
331 util._encoding = options["encoding"]
331 util._encoding = options["encoding"]
332 if options["encodingmode"]:
332 if options["encodingmode"]:
333 util._encodingmode = options["encodingmode"]
333 util._encodingmode = options["encodingmode"]
334 if options["time"]:
334 if options["time"]:
335 def get_times():
335 def get_times():
336 t = os.times()
336 t = os.times()
337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
338 t = (t[0], t[1], t[2], t[3], time.clock())
338 t = (t[0], t[1], t[2], t[3], time.clock())
339 return t
339 return t
340 s = get_times()
340 s = get_times()
341 def print_time():
341 def print_time():
342 t = get_times()
342 t = get_times()
343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
343 ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
345 atexit.register(print_time)
345 atexit.register(print_time)
346
346
347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
347 ui.updateopts(options["verbose"], options["debug"], options["quiet"],
348 not options["noninteractive"], options["traceback"])
348 not options["noninteractive"], options["traceback"])
349
349
350 if options['help']:
350 if options['help']:
351 return commands.help_(ui, cmd, options['version'])
351 return commands.help_(ui, cmd, options['version'])
352 elif options['version']:
352 elif options['version']:
353 return commands.version_(ui)
353 return commands.version_(ui)
354 elif not cmd:
354 elif not cmd:
355 return commands.help_(ui, 'shortlist')
355 return commands.help_(ui, 'shortlist')
356
356
357 repo = None
357 repo = None
358 if cmd not in commands.norepo.split():
358 if cmd not in commands.norepo.split():
359 try:
359 try:
360 repo = hg.repository(ui, path=path)
360 repo = hg.repository(ui, path=path)
361 ui = repo.ui
361 ui = repo.ui
362 if not repo.local():
362 if not repo.local():
363 raise util.Abort(_("repository '%s' is not local") % path)
363 raise util.Abort(_("repository '%s' is not local") % path)
364 except hg.RepoError:
364 except hg.RepoError:
365 if cmd not in commands.optionalrepo.split():
365 if cmd not in commands.optionalrepo.split():
366 if not path:
366 if not path:
367 raise hg.RepoError(_("There is no Mercurial repository here"
367 raise hg.RepoError(_("There is no Mercurial repository here"
368 " (.hg not found)"))
368 " (.hg not found)"))
369 raise
369 raise
370 d = lambda: func(ui, repo, *args, **cmdoptions)
370 d = lambda: func(ui, repo, *args, **cmdoptions)
371 else:
371 else:
372 d = lambda: func(ui, *args, **cmdoptions)
372 d = lambda: func(ui, *args, **cmdoptions)
373
373
374 # run pre-hook, and abort if it fails
374 # run pre-hook, and abort if it fails
375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
375 ret = hook.hook(ui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs))
376 if ret:
376 if ret:
377 return ret
377 return ret
378 ret = runcommand(ui, options, cmd, d)
378 ret = runcommand(ui, options, cmd, d)
379 # run post-hook, passing command result
379 # run post-hook, passing command result
380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
380 hook.hook(ui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
381 result = ret)
381 result = ret)
382 return ret
382 return ret
383
383
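# The pre-/post- hooks invoked above come from the [hooks] config section; a sketch
# (hypothetical hgrc) of how a failing pre-hook blocks the command:
#
#   [hooks]
#   pre-push = ./run-tests.sh
#
# If run-tests.sh exits non-zero, hook.hook() returns a true value and dispatch()
# returns it without ever running the push; post-push additionally receives the
# command's result via the "result" keyword argument.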
384 def runcommand(ui, options, cmd, cmdfunc):
384 def runcommand(ui, options, cmd, cmdfunc):
385 def checkargs():
385 def checkargs():
386 try:
386 try:
387 return cmdfunc()
387 return cmdfunc()
388 except TypeError, inst:
388 except TypeError, inst:
389 # was this an argument error?
389 # was this an argument error?
390 tb = traceback.extract_tb(sys.exc_info()[2])
390 tb = traceback.extract_tb(sys.exc_info()[2])
391 if len(tb) != 2: # no
391 if len(tb) != 2: # no
392 raise
392 raise
393 raise ParseError(cmd, _("invalid arguments"))
393 raise ParseError(cmd, _("invalid arguments"))
394
394
395 if options['profile']:
395 if options['profile']:
396 import hotshot, hotshot.stats
396 import hotshot, hotshot.stats
397 prof = hotshot.Profile("hg.prof")
397 prof = hotshot.Profile("hg.prof")
398 try:
398 try:
399 try:
399 try:
400 return prof.runcall(checkargs)
400 return prof.runcall(checkargs)
401 except:
401 except:
402 try:
402 try:
403 ui.warn(_('exception raised - generating '
403 ui.warn(_('exception raised - generating '
404 'profile anyway\n'))
404 'profile anyway\n'))
405 except:
405 except:
406 pass
406 pass
407 raise
407 raise
408 finally:
408 finally:
409 prof.close()
409 prof.close()
410 stats = hotshot.stats.load("hg.prof")
410 stats = hotshot.stats.load("hg.prof")
411 stats.strip_dirs()
411 stats.strip_dirs()
412 stats.sort_stats('time', 'calls')
412 stats.sort_stats('time', 'calls')
413 stats.print_stats(40)
413 stats.print_stats(40)
414 elif options['lsprof']:
414 elif options['lsprof']:
415 try:
415 try:
416 from mercurial import lsprof
416 from mercurial import lsprof
417 except ImportError:
417 except ImportError:
418 raise util.Abort(_(
418 raise util.Abort(_(
419 'lsprof not available - install from '
419 'lsprof not available - install from '
420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
420 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
421 p = lsprof.Profiler()
421 p = lsprof.Profiler()
422 p.enable(subcalls=True)
422 p.enable(subcalls=True)
423 try:
423 try:
424 return checkargs()
424 return checkargs()
425 finally:
425 finally:
426 p.disable()
426 p.disable()
427 stats = lsprof.Stats(p.getstats())
427 stats = lsprof.Stats(p.getstats())
428 stats.sort()
428 stats.sort()
429 stats.pprint(top=10, file=sys.stderr, climit=5)
429 stats.pprint(top=10, file=sys.stderr, climit=5)
430 else:
430 else:
431 return checkargs()
431 return checkargs()
432
432
433 def bail_if_changed(repo):
433 def bail_if_changed(repo):
434 modified, added, removed, deleted = repo.status()[:4]
434 modified, added, removed, deleted = repo.status()[:4]
435 if modified or added or removed or deleted:
435 if modified or added or removed or deleted:
436 raise util.Abort(_("outstanding uncommitted changes"))
436 raise util.Abort(_("outstanding uncommitted changes"))
437
437
438 def logmessage(opts):
438 def logmessage(opts):
439 """ get the log message according to -m and -l option """
439 """ get the log message according to -m and -l option """
440 message = opts['message']
440 message = opts['message']
441 logfile = opts['logfile']
441 logfile = opts['logfile']
442
442
443 if message and logfile:
443 if message and logfile:
444 raise util.Abort(_('options --message and --logfile are mutually '
444 raise util.Abort(_('options --message and --logfile are mutually '
445 'exclusive'))
445 'exclusive'))
446 if not message and logfile:
446 if not message and logfile:
447 try:
447 try:
448 if logfile == '-':
448 if logfile == '-':
449 message = sys.stdin.read()
449 message = sys.stdin.read()
450 else:
450 else:
451 message = open(logfile).read()
451 message = open(logfile).read()
452 except IOError, inst:
452 except IOError, inst:
453 raise util.Abort(_("can't read commit message '%s': %s") %
453 raise util.Abort(_("can't read commit message '%s': %s") %
454 (logfile, inst.strerror))
454 (logfile, inst.strerror))
455 return message
455 return message
456
456
457 def setremoteconfig(ui, opts):
457 def setremoteconfig(ui, opts):
458 "copy remote options to ui tree"
458 "copy remote options to ui tree"
459 if opts.get('ssh'):
459 if opts.get('ssh'):
460 ui.setconfig("ui", "ssh", opts['ssh'])
460 ui.setconfig("ui", "ssh", opts['ssh'])
461 if opts.get('remotecmd'):
461 if opts.get('remotecmd'):
462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
462 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
463
463
464 def parseurl(url, revs):
464 def parseurl(url, revs):
465 '''parse url#branch, returning url, branch + revs'''
465 '''parse url#branch, returning url, branch + revs'''
466
466
467 if '#' not in url:
467 if '#' not in url:
468 return url, (revs or None)
468 return url, (revs or None)
469
469
470 url, rev = url.split('#', 1)
470 url, rev = url.split('#', 1)
471 return url, revs + [rev]
471 return url, revs + [rev]
472
472
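# Usage sketch (hypothetical URL):
#   parseurl("http://example.com/repo#stable", [])  -> ("http://example.com/repo", ["stable"])
#   parseurl("http://example.com/repo", ["tip"])    -> ("http://example.com/repo", ["tip"])
#   parseurl("http://example.com/repo", [])         -> ("http://example.com/repo", None)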
473 def revpair(repo, revs):
473 def revpair(repo, revs):
474 '''return pair of nodes, given list of revisions. second item can
474 '''return pair of nodes, given list of revisions. second item can
475 be None, meaning use working dir.'''
475 be None, meaning use working dir.'''
476
476
477 def revfix(repo, val, defval):
477 def revfix(repo, val, defval):
478 if not val and val != 0 and defval is not None:
478 if not val and val != 0 and defval is not None:
479 val = defval
479 val = defval
480 return repo.lookup(val)
480 return repo.lookup(val)
481
481
482 if not revs:
482 if not revs:
483 return repo.dirstate.parents()[0], None
483 return repo.dirstate.parents()[0], None
484 end = None
484 end = None
485 if len(revs) == 1:
485 if len(revs) == 1:
486 if revrangesep in revs[0]:
486 if revrangesep in revs[0]:
487 start, end = revs[0].split(revrangesep, 1)
487 start, end = revs[0].split(revrangesep, 1)
488 start = revfix(repo, start, 0)
488 start = revfix(repo, start, 0)
489 end = revfix(repo, end, repo.changelog.count() - 1)
489 end = revfix(repo, end, repo.changelog.count() - 1)
490 else:
490 else:
491 start = revfix(repo, revs[0], None)
491 start = revfix(repo, revs[0], None)
492 elif len(revs) == 2:
492 elif len(revs) == 2:
493 if revrangesep in revs[0] or revrangesep in revs[1]:
493 if revrangesep in revs[0] or revrangesep in revs[1]:
494 raise util.Abort(_('too many revisions specified'))
494 raise util.Abort(_('too many revisions specified'))
495 start = revfix(repo, revs[0], None)
495 start = revfix(repo, revs[0], None)
496 end = revfix(repo, revs[1], None)
496 end = revfix(repo, revs[1], None)
497 else:
497 else:
498 raise util.Abort(_('too many revisions specified'))
498 raise util.Abort(_('too many revisions specified'))
499 return start, end
499 return start, end
500
500
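# Usage sketch (assuming ":" as revrangesep, which is defined elsewhere in this module):
#   revpair(repo, [])         -> (first dirstate parent, None)   # None means working dir
#   revpair(repo, ["2:5"])    -> (node of rev 2, node of rev 5)
#   revpair(repo, ["2", "5"]) -> (node of rev 2, node of rev 5)
#   revpair(repo, [":5"])     -> (node of rev 0, node of rev 5)  # empty start defaults to 0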
501 def revrange(repo, revs):
501 def revrange(repo, revs):
502 """Yield revision as strings from a list of revision specifications."""
502 """Yield revision as strings from a list of revision specifications."""
503
503
504 def revfix(repo, val, defval):
504 def revfix(repo, val, defval):
505 if not val and val != 0 and defval is not None:
505 if not val and val != 0 and defval is not None:
506 return defval
506 return defval
507 return repo.changelog.rev(repo.lookup(val))
507 return repo.changelog.rev(repo.lookup(val))
508
508
509 seen, l = {}, []
509 seen, l = {}, []
510 for spec in revs:
510 for spec in revs:
511 if revrangesep in spec:
511 if revrangesep in spec:
512 start, end = spec.split(revrangesep, 1)
512 start, end = spec.split(revrangesep, 1)
513 start = revfix(repo, start, 0)
513 start = revfix(repo, start, 0)
514 end = revfix(repo, end, repo.changelog.count() - 1)
514 end = revfix(repo, end, repo.changelog.count() - 1)
515 step = start > end and -1 or 1
515 step = start > end and -1 or 1
516 for rev in xrange(start, end+step, step):
516 for rev in xrange(start, end+step, step):
517 if rev in seen:
517 if rev in seen:
518 continue
518 continue
519 seen[rev] = 1
519 seen[rev] = 1
520 l.append(rev)
520 l.append(rev)
521 else:
521 else:
522 rev = revfix(repo, spec, None)
522 rev = revfix(repo, spec, None)
523 if rev in seen:
523 if rev in seen:
524 continue
524 continue
525 seen[rev] = 1
525 seen[rev] = 1
526 l.append(rev)
526 l.append(rev)
527
527
528 return l
528 return l
529
529
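# Usage sketch (assuming ":" as revrangesep):
#   revrange(repo, ["2:5", "8"])  ->  [2, 3, 4, 5, 8]
#   revrange(repo, ["5:2"])       ->  [5, 4, 3, 2]     # a reversed range walks backwards
# Duplicate revisions across overlapping specs are filtered through the "seen" dict,
# so each revision number appears at most once in the returned list.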
530 def make_filename(repo, pat, node,
530 def make_filename(repo, pat, node,
531 total=None, seqno=None, revwidth=None, pathname=None):
531 total=None, seqno=None, revwidth=None, pathname=None):
532 node_expander = {
532 node_expander = {
533 'H': lambda: hex(node),
533 'H': lambda: hex(node),
534 'R': lambda: str(repo.changelog.rev(node)),
534 'R': lambda: str(repo.changelog.rev(node)),
535 'h': lambda: short(node),
535 'h': lambda: short(node),
536 }
536 }
537 expander = {
537 expander = {
538 '%': lambda: '%',
538 '%': lambda: '%',
539 'b': lambda: os.path.basename(repo.root),
539 'b': lambda: os.path.basename(repo.root),
540 }
540 }
541
541
542 try:
542 try:
543 if node:
543 if node:
544 expander.update(node_expander)
544 expander.update(node_expander)
545 if node:
545 if node:
546 expander['r'] = (lambda:
546 expander['r'] = (lambda:
547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
547 str(repo.changelog.rev(node)).zfill(revwidth or 0))
548 if total is not None:
548 if total is not None:
549 expander['N'] = lambda: str(total)
549 expander['N'] = lambda: str(total)
550 if seqno is not None:
550 if seqno is not None:
551 expander['n'] = lambda: str(seqno)
551 expander['n'] = lambda: str(seqno)
552 if total is not None and seqno is not None:
552 if total is not None and seqno is not None:
553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
553 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
554 if pathname is not None:
554 if pathname is not None:
555 expander['s'] = lambda: os.path.basename(pathname)
555 expander['s'] = lambda: os.path.basename(pathname)
556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
556 expander['d'] = lambda: os.path.dirname(pathname) or '.'
557 expander['p'] = lambda: pathname
557 expander['p'] = lambda: pathname
558
558
559 newname = []
559 newname = []
560 patlen = len(pat)
560 patlen = len(pat)
561 i = 0
561 i = 0
562 while i < patlen:
562 while i < patlen:
563 c = pat[i]
563 c = pat[i]
564 if c == '%':
564 if c == '%':
565 i += 1
565 i += 1
566 c = pat[i]
566 c = pat[i]
567 c = expander[c]()
567 c = expander[c]()
568 newname.append(c)
568 newname.append(c)
569 i += 1
569 i += 1
570 return ''.join(newname)
570 return ''.join(newname)
571 except KeyError, inst:
571 except KeyError, inst:
572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
572 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
573 inst.args[0])
573 inst.args[0])
574
574
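# Format-spec sketch (hypothetical node, repo directory named "myrepo"):
#   make_filename(repo, "%b-%n-of-%N-%h.patch", node, total=12, seqno=3)
#     -> "myrepo-03-of-12-<12-hex-digit short hash>.patch"
# %n is zero-padded to the width of str(total), %H/%R/%h come from node_expander and
# are only available when a node was passed, and %% produces a literal percent sign.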
575 def make_file(repo, pat, node=None,
575 def make_file(repo, pat, node=None,
576 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
576 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
577 if not pat or pat == '-':
577 if not pat or pat == '-':
578 return 'w' in mode and sys.stdout or sys.stdin
578 return 'w' in mode and sys.stdout or sys.stdin
579 if hasattr(pat, 'write') and 'w' in mode:
579 if hasattr(pat, 'write') and 'w' in mode:
580 return pat
580 return pat
581 if hasattr(pat, 'read') and 'r' in mode:
581 if hasattr(pat, 'read') and 'r' in mode:
582 return pat
582 return pat
583 return open(make_filename(repo, pat, node, total, seqno, revwidth,
583 return open(make_filename(repo, pat, node, total, seqno, revwidth,
584 pathname),
584 pathname),
585 mode)
585 mode)
586
586
587 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
587 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
588 cwd = repo.getcwd()
588 cwd = repo.getcwd()
589 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
589 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
590 opts.get('exclude'), globbed=globbed,
590 opts.get('exclude'), globbed=globbed,
591 default=default)
591 default=default)
592
592
593 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
593 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
594 default=None):
594 default=None):
595 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
595 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
596 default=default)
596 default=default)
597 exact = dict.fromkeys(files)
597 exact = dict.fromkeys(files)
598 cwd = repo.getcwd()
598 cwd = repo.getcwd()
599 for src, fn in repo.walk(node=node, files=files, match=matchfn,
599 for src, fn in repo.walk(node=node, files=files, match=matchfn,
600 badmatch=badmatch):
600 badmatch=badmatch):
601 yield src, fn, repo.pathto(fn, cwd), fn in exact
601 yield src, fn, repo.pathto(fn, cwd), fn in exact
602
602
603 def findrenames(repo, added=None, removed=None, threshold=0.5):
603 def findrenames(repo, added=None, removed=None, threshold=0.5):
604 '''find renamed files -- yields (before, after, score) tuples'''
604 '''find renamed files -- yields (before, after, score) tuples'''
605 if added is None or removed is None:
605 if added is None or removed is None:
606 added, removed = repo.status()[1:3]
606 added, removed = repo.status()[1:3]
607 ctx = repo.changectx()
607 ctx = repo.changectx()
608 for a in added:
608 for a in added:
609 aa = repo.wread(a)
609 aa = repo.wread(a)
610 bestname, bestscore = None, threshold
610 bestname, bestscore = None, threshold
611 for r in removed:
611 for r in removed:
612 rr = ctx.filectx(r).data()
612 rr = ctx.filectx(r).data()
613
613
614 # bdiff.blocks() returns blocks of matching lines
614 # bdiff.blocks() returns blocks of matching lines
615 # count the number of bytes in each
615 # count the number of bytes in each
616 equal = 0
616 equal = 0
617 alines = mdiff.splitnewlines(aa)
617 alines = mdiff.splitnewlines(aa)
618 matches = bdiff.blocks(aa, rr)
618 matches = bdiff.blocks(aa, rr)
619 for x1,x2,y1,y2 in matches:
619 for x1,x2,y1,y2 in matches:
620 for line in alines[x1:x2]:
620 for line in alines[x1:x2]:
621 equal += len(line)
621 equal += len(line)
622
622
623 lengths = len(aa) + len(rr)
623 lengths = len(aa) + len(rr)
624 if lengths:
624 if lengths:
625 myscore = equal*2.0 / lengths
625 myscore = equal*2.0 / lengths
626 if myscore >= bestscore:
626 if myscore >= bestscore:
627 bestname, bestscore = r, myscore
627 bestname, bestscore = r, myscore
628 if bestname:
628 if bestname:
629 yield bestname, a, bestscore
629 yield bestname, a, bestscore
630
630
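# The similarity score above is 2 * matching_bytes / (len(added) + len(removed)).
# Worked example with hypothetical sizes: if 800 bytes of a 1000-byte added file fall
# inside lines that bdiff.blocks() also finds in a 1000-byte removed file, the score
# is 2 * 800 / 2000 = 0.8, which beats the default 0.5 threshold and the pair is
# yielded as a probable rename.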
631 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
631 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
632 similarity=None):
632 similarity=None):
633 if dry_run is None:
633 if dry_run is None:
634 dry_run = opts.get('dry_run')
634 dry_run = opts.get('dry_run')
635 if similarity is None:
635 if similarity is None:
636 similarity = float(opts.get('similarity') or 0)
636 similarity = float(opts.get('similarity') or 0)
637 add, remove = [], []
637 add, remove = [], []
638 mapping = {}
638 mapping = {}
639 for src, abs, rel, exact in walk(repo, pats, opts):
639 for src, abs, rel, exact in walk(repo, pats, opts):
640 target = repo.wjoin(abs)
640 target = repo.wjoin(abs)
641 if src == 'f' and repo.dirstate.state(abs) == '?':
641 if src == 'f' and abs not in repo.dirstate:
642 add.append(abs)
642 add.append(abs)
643 mapping[abs] = rel, exact
643 mapping[abs] = rel, exact
644 if repo.ui.verbose or not exact:
644 if repo.ui.verbose or not exact:
645 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
645 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
646 if repo.dirstate.state(abs) != 'r' and not util.lexists(target):
646 if repo.dirstate[abs] != 'r' and not util.lexists(target):
647 remove.append(abs)
647 remove.append(abs)
648 mapping[abs] = rel, exact
648 mapping[abs] = rel, exact
649 if repo.ui.verbose or not exact:
649 if repo.ui.verbose or not exact:
650 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
650 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
651 if not dry_run:
651 if not dry_run:
652 repo.add(add, wlock=wlock)
652 repo.add(add, wlock=wlock)
653 repo.remove(remove, wlock=wlock)
653 repo.remove(remove, wlock=wlock)
654 if similarity > 0:
654 if similarity > 0:
655 for old, new, score in findrenames(repo, add, remove, similarity):
655 for old, new, score in findrenames(repo, add, remove, similarity):
656 oldrel, oldexact = mapping[old]
656 oldrel, oldexact = mapping[old]
657 newrel, newexact = mapping[new]
657 newrel, newexact = mapping[new]
658 if repo.ui.verbose or not oldexact or not newexact:
658 if repo.ui.verbose or not oldexact or not newexact:
659 repo.ui.status(_('recording removal of %s as rename to %s '
659 repo.ui.status(_('recording removal of %s as rename to %s '
660 '(%d%% similar)\n') %
660 '(%d%% similar)\n') %
661 (oldrel, newrel, score * 100))
661 (oldrel, newrel, score * 100))
662 if not dry_run:
662 if not dry_run:
663 repo.copy(old, new, wlock=wlock)
663 repo.copy(old, new, wlock=wlock)
664
664
665 def service(opts, parentfn=None, initfn=None, runfn=None):
665 def service(opts, parentfn=None, initfn=None, runfn=None):
666 '''Run a command as a service.'''
666 '''Run a command as a service.'''
667
667
668 if opts['daemon'] and not opts['daemon_pipefds']:
668 if opts['daemon'] and not opts['daemon_pipefds']:
669 rfd, wfd = os.pipe()
669 rfd, wfd = os.pipe()
670 args = sys.argv[:]
670 args = sys.argv[:]
671 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
671 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
672 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
672 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
673 args[0], args)
673 args[0], args)
674 os.close(wfd)
674 os.close(wfd)
675 os.read(rfd, 1)
675 os.read(rfd, 1)
676 if parentfn:
676 if parentfn:
677 return parentfn(pid)
677 return parentfn(pid)
678 else:
678 else:
679 os._exit(0)
679 os._exit(0)
680
680
681 if initfn:
681 if initfn:
682 initfn()
682 initfn()
683
683
684 if opts['pid_file']:
684 if opts['pid_file']:
685 fp = open(opts['pid_file'], 'w')
685 fp = open(opts['pid_file'], 'w')
686 fp.write(str(os.getpid()) + '\n')
686 fp.write(str(os.getpid()) + '\n')
687 fp.close()
687 fp.close()
688
688
689 if opts['daemon_pipefds']:
689 if opts['daemon_pipefds']:
690 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
690 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
691 os.close(rfd)
691 os.close(rfd)
692 try:
692 try:
693 os.setsid()
693 os.setsid()
694 except AttributeError:
694 except AttributeError:
695 pass
695 pass
696 os.write(wfd, 'y')
696 os.write(wfd, 'y')
697 os.close(wfd)
697 os.close(wfd)
698 sys.stdout.flush()
698 sys.stdout.flush()
699 sys.stderr.flush()
699 sys.stderr.flush()
700 fd = os.open(util.nulldev, os.O_RDWR)
700 fd = os.open(util.nulldev, os.O_RDWR)
701 if fd != 0: os.dup2(fd, 0)
701 if fd != 0: os.dup2(fd, 0)
702 if fd != 1: os.dup2(fd, 1)
702 if fd != 1: os.dup2(fd, 1)
703 if fd != 2: os.dup2(fd, 2)
703 if fd != 2: os.dup2(fd, 2)
704 if fd not in (0, 1, 2): os.close(fd)
704 if fd not in (0, 1, 2): os.close(fd)
705
705
706 if runfn:
706 if runfn:
707 return runfn()
707 return runfn()
708
708
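# Daemonization handshake, summarized: the first invocation (no --daemon-pipefds)
# re-spawns the same command line with --daemon-pipefds=<rfd>,<wfd> appended and then
# blocks on os.read(rfd, 1).  The spawned copy takes the second branch, calls setsid()
# where available, writes the single byte "y" to wfd to unblock its parent, and points
# stdin/stdout/stderr at the null device before runfn() starts serving.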
709 class changeset_printer(object):
709 class changeset_printer(object):
710 '''show changeset information when templating not requested.'''
710 '''show changeset information when templating not requested.'''
711
711
712 def __init__(self, ui, repo, patch, buffered):
712 def __init__(self, ui, repo, patch, buffered):
713 self.ui = ui
713 self.ui = ui
714 self.repo = repo
714 self.repo = repo
715 self.buffered = buffered
715 self.buffered = buffered
716 self.patch = patch
716 self.patch = patch
717 self.header = {}
717 self.header = {}
718 self.hunk = {}
718 self.hunk = {}
719 self.lastheader = None
719 self.lastheader = None
720
720
721 def flush(self, rev):
721 def flush(self, rev):
722 if rev in self.header:
722 if rev in self.header:
723 h = self.header[rev]
723 h = self.header[rev]
724 if h != self.lastheader:
724 if h != self.lastheader:
725 self.lastheader = h
725 self.lastheader = h
726 self.ui.write(h)
726 self.ui.write(h)
727 del self.header[rev]
727 del self.header[rev]
728 if rev in self.hunk:
728 if rev in self.hunk:
729 self.ui.write(self.hunk[rev])
729 self.ui.write(self.hunk[rev])
730 del self.hunk[rev]
730 del self.hunk[rev]
731 return 1
731 return 1
732 return 0
732 return 0
733
733
734 def show(self, rev=0, changenode=None, copies=(), **props):
734 def show(self, rev=0, changenode=None, copies=(), **props):
735 if self.buffered:
735 if self.buffered:
736 self.ui.pushbuffer()
736 self.ui.pushbuffer()
737 self._show(rev, changenode, copies, props)
737 self._show(rev, changenode, copies, props)
738 self.hunk[rev] = self.ui.popbuffer()
738 self.hunk[rev] = self.ui.popbuffer()
739 else:
739 else:
740 self._show(rev, changenode, copies, props)
740 self._show(rev, changenode, copies, props)
741
741
742 def _show(self, rev, changenode, copies, props):
742 def _show(self, rev, changenode, copies, props):
743 '''show a single changeset or file revision'''
743 '''show a single changeset or file revision'''
744 log = self.repo.changelog
744 log = self.repo.changelog
745 if changenode is None:
745 if changenode is None:
746 changenode = log.node(rev)
746 changenode = log.node(rev)
747 elif not rev:
747 elif not rev:
748 rev = log.rev(changenode)
748 rev = log.rev(changenode)
749
749
750 if self.ui.quiet:
750 if self.ui.quiet:
751 self.ui.write("%d:%s\n" % (rev, short(changenode)))
751 self.ui.write("%d:%s\n" % (rev, short(changenode)))
752 return
752 return
753
753
754 changes = log.read(changenode)
754 changes = log.read(changenode)
755 date = util.datestr(changes[2])
755 date = util.datestr(changes[2])
756 extra = changes[5]
756 extra = changes[5]
757 branch = extra.get("branch")
757 branch = extra.get("branch")
758
758
759 hexfunc = self.ui.debugflag and hex or short
759 hexfunc = self.ui.debugflag and hex or short
760
760
761 parents = [(p, hexfunc(log.node(p)))
761 parents = [(p, hexfunc(log.node(p)))
762 for p in self._meaningful_parentrevs(log, rev)]
762 for p in self._meaningful_parentrevs(log, rev)]
763
763
764 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
764 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
765
765
766 # don't show the default branch name
766 # don't show the default branch name
767 if branch != 'default':
767 if branch != 'default':
768 branch = util.tolocal(branch)
768 branch = util.tolocal(branch)
769 self.ui.write(_("branch: %s\n") % branch)
769 self.ui.write(_("branch: %s\n") % branch)
770 for tag in self.repo.nodetags(changenode):
770 for tag in self.repo.nodetags(changenode):
771 self.ui.write(_("tag: %s\n") % tag)
771 self.ui.write(_("tag: %s\n") % tag)
772 for parent in parents:
772 for parent in parents:
773 self.ui.write(_("parent: %d:%s\n") % parent)
773 self.ui.write(_("parent: %d:%s\n") % parent)
774
774
775 if self.ui.debugflag:
775 if self.ui.debugflag:
776 self.ui.write(_("manifest: %d:%s\n") %
776 self.ui.write(_("manifest: %d:%s\n") %
777 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
777 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
778 self.ui.write(_("user: %s\n") % changes[1])
778 self.ui.write(_("user: %s\n") % changes[1])
779 self.ui.write(_("date: %s\n") % date)
779 self.ui.write(_("date: %s\n") % date)
780
780
781 if self.ui.debugflag:
781 if self.ui.debugflag:
782 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
782 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
783 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
783 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
784 files):
784 files):
785 if value:
785 if value:
786 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
786 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
787 elif changes[3] and self.ui.verbose:
787 elif changes[3] and self.ui.verbose:
788 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
788 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
789 if copies and self.ui.verbose:
789 if copies and self.ui.verbose:
790 copies = ['%s (%s)' % c for c in copies]
790 copies = ['%s (%s)' % c for c in copies]
791 self.ui.write(_("copies: %s\n") % ' '.join(copies))
791 self.ui.write(_("copies: %s\n") % ' '.join(copies))
792
792
793 if extra and self.ui.debugflag:
793 if extra and self.ui.debugflag:
794 extraitems = extra.items()
794 extraitems = extra.items()
795 extraitems.sort()
795 extraitems.sort()
796 for key, value in extraitems:
796 for key, value in extraitems:
797 self.ui.write(_("extra: %s=%s\n")
797 self.ui.write(_("extra: %s=%s\n")
798 % (key, value.encode('string_escape')))
798 % (key, value.encode('string_escape')))
799
799
800 description = changes[4].strip()
800 description = changes[4].strip()
801 if description:
801 if description:
802 if self.ui.verbose:
802 if self.ui.verbose:
803 self.ui.write(_("description:\n"))
803 self.ui.write(_("description:\n"))
804 self.ui.write(description)
804 self.ui.write(description)
805 self.ui.write("\n\n")
805 self.ui.write("\n\n")
806 else:
806 else:
807 self.ui.write(_("summary: %s\n") %
807 self.ui.write(_("summary: %s\n") %
808 description.splitlines()[0])
808 description.splitlines()[0])
809 self.ui.write("\n")
809 self.ui.write("\n")
810
810
811 self.showpatch(changenode)
811 self.showpatch(changenode)
812
812
813 def showpatch(self, node):
813 def showpatch(self, node):
814 if self.patch:
814 if self.patch:
815 prev = self.repo.changelog.parents(node)[0]
815 prev = self.repo.changelog.parents(node)[0]
816 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
816 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
817 opts=patch.diffopts(self.ui))
817 opts=patch.diffopts(self.ui))
818 self.ui.write("\n")
818 self.ui.write("\n")
819
819
820 def _meaningful_parentrevs(self, log, rev):
820 def _meaningful_parentrevs(self, log, rev):
821 """Return list of meaningful (or all if debug) parentrevs for rev.
821 """Return list of meaningful (or all if debug) parentrevs for rev.
822
822
823 For merges (two non-nullrev revisions) both parents are meaningful.
823 For merges (two non-nullrev revisions) both parents are meaningful.
824 Otherwise the first parent revision is considered meaningful if it
824 Otherwise the first parent revision is considered meaningful if it
825 is not the preceding revision.
825 is not the preceding revision.
826 """
826 """
827 parents = log.parentrevs(rev)
827 parents = log.parentrevs(rev)
828 if not self.ui.debugflag and parents[1] == nullrev:
828 if not self.ui.debugflag and parents[1] == nullrev:
829 if parents[0] >= rev - 1:
829 if parents[0] >= rev - 1:
830 parents = []
830 parents = []
831 else:
831 else:
832 parents = [parents[0]]
832 parents = [parents[0]]
833 return parents
833 return parents
834
834
835
835
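# The _meaningful_parentrevs logic above in a nutshell (hypothetical revisions): for an
# ordinary rev 7 whose parent is rev 6 it returns [] (the parent is implied by the
# sequence); for a rev 7 whose parent is rev 3 it returns [3]; for a merge, or whenever
# --debug is in effect, both parents are reported unchanged.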
836 class changeset_templater(changeset_printer):
836 class changeset_templater(changeset_printer):
837 '''format changeset information.'''
837 '''format changeset information.'''
838
838
839 def __init__(self, ui, repo, patch, mapfile, buffered):
839 def __init__(self, ui, repo, patch, mapfile, buffered):
840 changeset_printer.__init__(self, ui, repo, patch, buffered)
840 changeset_printer.__init__(self, ui, repo, patch, buffered)
841 filters = templater.common_filters.copy()
841 filters = templater.common_filters.copy()
842 filters['formatnode'] = (ui.debugflag and (lambda x: x)
842 filters['formatnode'] = (ui.debugflag and (lambda x: x)
843 or (lambda x: x[:12]))
843 or (lambda x: x[:12]))
844 self.t = templater.templater(mapfile, filters,
844 self.t = templater.templater(mapfile, filters,
845 cache={
845 cache={
846 'parent': '{rev}:{node|formatnode} ',
846 'parent': '{rev}:{node|formatnode} ',
847 'manifest': '{rev}:{node|formatnode}',
847 'manifest': '{rev}:{node|formatnode}',
848 'filecopy': '{name} ({source})'})
848 'filecopy': '{name} ({source})'})
849
849
850 def use_template(self, t):
850 def use_template(self, t):
851 '''set template string to use'''
851 '''set template string to use'''
852 self.t.cache['changeset'] = t
852 self.t.cache['changeset'] = t
853
853
854 def _show(self, rev, changenode, copies, props):
854 def _show(self, rev, changenode, copies, props):
855 '''show a single changeset or file revision'''
855 '''show a single changeset or file revision'''
856 log = self.repo.changelog
856 log = self.repo.changelog
857 if changenode is None:
857 if changenode is None:
858 changenode = log.node(rev)
858 changenode = log.node(rev)
859 elif not rev:
859 elif not rev:
860 rev = log.rev(changenode)
860 rev = log.rev(changenode)
861
861
862 changes = log.read(changenode)
862 changes = log.read(changenode)
863
863
864 def showlist(name, values, plural=None, **args):
864 def showlist(name, values, plural=None, **args):
865 '''expand set of values.
865 '''expand set of values.
866 name is name of key in template map.
866 name is name of key in template map.
867 values is list of strings or dicts.
867 values is list of strings or dicts.
868 plural is plural of name, if not simply name + 's'.
868 plural is plural of name, if not simply name + 's'.
869
869
870 expansion works like this, given name 'foo'.
870 expansion works like this, given name 'foo'.
871
871
872 if values is empty, expand 'no_foos'.
872 if values is empty, expand 'no_foos'.
873
873
874 if 'foo' not in template map, return values as a string,
874 if 'foo' not in template map, return values as a string,
875 joined by space.
875 joined by space.
876
876
877 expand 'start_foos'.
877 expand 'start_foos'.
878
878
879 for each value, expand 'foo'. if 'last_foo' in template
879 for each value, expand 'foo'. if 'last_foo' in template
880 map, expand it instead of 'foo' for last key.
880 map, expand it instead of 'foo' for last key.
881
881
882 expand 'end_foos'.
882 expand 'end_foos'.
883 '''
883 '''
884 if plural: names = plural
884 if plural: names = plural
885 else: names = name + 's'
885 else: names = name + 's'
886 if not values:
886 if not values:
887 noname = 'no_' + names
887 noname = 'no_' + names
888 if noname in self.t:
888 if noname in self.t:
889 yield self.t(noname, **args)
889 yield self.t(noname, **args)
890 return
890 return
891 if name not in self.t:
891 if name not in self.t:
892 if isinstance(values[0], str):
892 if isinstance(values[0], str):
893 yield ' '.join(values)
893 yield ' '.join(values)
894 else:
894 else:
895 for v in values:
895 for v in values:
896 yield dict(v, **args)
896 yield dict(v, **args)
897 return
897 return
898 startname = 'start_' + names
898 startname = 'start_' + names
899 if startname in self.t:
899 if startname in self.t:
900 yield self.t(startname, **args)
900 yield self.t(startname, **args)
901 vargs = args.copy()
901 vargs = args.copy()
902 def one(v, tag=name):
902 def one(v, tag=name):
903 try:
903 try:
904 vargs.update(v)
904 vargs.update(v)
905 except (AttributeError, ValueError):
905 except (AttributeError, ValueError):
906 try:
906 try:
907 for a, b in v:
907 for a, b in v:
908 vargs[a] = b
908 vargs[a] = b
909 except ValueError:
909 except ValueError:
910 vargs[name] = v
910 vargs[name] = v
911 return self.t(tag, **vargs)
911 return self.t(tag, **vargs)
912 lastname = 'last_' + name
912 lastname = 'last_' + name
913 if lastname in self.t:
913 if lastname in self.t:
914 last = values.pop()
914 last = values.pop()
915 else:
915 else:
916 last = None
916 last = None
917 for v in values:
917 for v in values:
918 yield one(v)
918 yield one(v)
919 if last is not None:
919 if last is not None:
920 yield one(last, tag=lastname)
920 yield one(last, tag=lastname)
921 endname = 'end_' + names
921 endname = 'end_' + names
922 if endname in self.t:
922 if endname in self.t:
923 yield self.t(endname, **args)
923 yield self.t(endname, **args)
924
924
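# showlist expansion sketch (hypothetical template map): with entries for "file",
# "last_file", "start_files" and "end_files", showlist('file', ['a', 'b', 'c'])
# expands "start_files" once, "file" for 'a' and 'b', "last_file" for 'c', then
# "end_files"; with an empty list only "no_files" is expanded, and if "file" is not
# in the map at all the string values are simply joined with spaces.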
925 def showbranches(**args):
925 def showbranches(**args):
926 branch = changes[5].get("branch")
926 branch = changes[5].get("branch")
927 if branch != 'default':
927 if branch != 'default':
928 branch = util.tolocal(branch)
928 branch = util.tolocal(branch)
929 return showlist('branch', [branch], plural='branches', **args)
929 return showlist('branch', [branch], plural='branches', **args)
930
930
931 def showparents(**args):
931 def showparents(**args):
932 parents = [[('rev', p), ('node', hex(log.node(p)))]
932 parents = [[('rev', p), ('node', hex(log.node(p)))]
933 for p in self._meaningful_parentrevs(log, rev)]
933 for p in self._meaningful_parentrevs(log, rev)]
934 return showlist('parent', parents, **args)
934 return showlist('parent', parents, **args)
935
935
936 def showtags(**args):
936 def showtags(**args):
937 return showlist('tag', self.repo.nodetags(changenode), **args)
937 return showlist('tag', self.repo.nodetags(changenode), **args)
938
938
939 def showextras(**args):
939 def showextras(**args):
940 extras = changes[5].items()
940 extras = changes[5].items()
941 extras.sort()
941 extras.sort()
942 for key, value in extras:
942 for key, value in extras:
943 args = args.copy()
943 args = args.copy()
944 args.update(dict(key=key, value=value))
944 args.update(dict(key=key, value=value))
945 yield self.t('extra', **args)
945 yield self.t('extra', **args)
946
946
947 def showcopies(**args):
947 def showcopies(**args):
948 c = [{'name': x[0], 'source': x[1]} for x in copies]
948 c = [{'name': x[0], 'source': x[1]} for x in copies]
949 return showlist('file_copy', c, plural='file_copies', **args)
949 return showlist('file_copy', c, plural='file_copies', **args)
950
950
951 if self.ui.debugflag:
951 if self.ui.debugflag:
952 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
952 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
953 def showfiles(**args):
953 def showfiles(**args):
954 return showlist('file', files[0], **args)
954 return showlist('file', files[0], **args)
955 def showadds(**args):
955 def showadds(**args):
956 return showlist('file_add', files[1], **args)
956 return showlist('file_add', files[1], **args)
957 def showdels(**args):
957 def showdels(**args):
958 return showlist('file_del', files[2], **args)
958 return showlist('file_del', files[2], **args)
959 def showmanifest(**args):
959 def showmanifest(**args):
960 args = args.copy()
960 args = args.copy()
961 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
961 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
962 node=hex(changes[0])))
962 node=hex(changes[0])))
963 return self.t('manifest', **args)
963 return self.t('manifest', **args)
964 else:
964 else:
965 def showfiles(**args):
965 def showfiles(**args):
966 return showlist('file', changes[3], **args)
966 return showlist('file', changes[3], **args)
967 showadds = ''
967 showadds = ''
968 showdels = ''
968 showdels = ''
969 showmanifest = ''
969 showmanifest = ''
970
970
971 defprops = {
971 defprops = {
972 'author': changes[1],
972 'author': changes[1],
973 'branches': showbranches,
973 'branches': showbranches,
974 'date': changes[2],
974 'date': changes[2],
975 'desc': changes[4].strip(),
975 'desc': changes[4].strip(),
976 'file_adds': showadds,
976 'file_adds': showadds,
977 'file_dels': showdels,
977 'file_dels': showdels,
978 'files': showfiles,
978 'files': showfiles,
979 'file_copies': showcopies,
979 'file_copies': showcopies,
980 'manifest': showmanifest,
980 'manifest': showmanifest,
981 'node': hex(changenode),
981 'node': hex(changenode),
982 'parents': showparents,
982 'parents': showparents,
983 'rev': rev,
983 'rev': rev,
984 'tags': showtags,
984 'tags': showtags,
985 'extras': showextras,
985 'extras': showextras,
986 }
986 }
987 props = props.copy()
987 props = props.copy()
988 props.update(defprops)
988 props.update(defprops)
989
989
990 try:
990 try:
991 if self.ui.debugflag and 'header_debug' in self.t:
991 if self.ui.debugflag and 'header_debug' in self.t:
992 key = 'header_debug'
992 key = 'header_debug'
993 elif self.ui.quiet and 'header_quiet' in self.t:
993 elif self.ui.quiet and 'header_quiet' in self.t:
994 key = 'header_quiet'
994 key = 'header_quiet'
995 elif self.ui.verbose and 'header_verbose' in self.t:
995 elif self.ui.verbose and 'header_verbose' in self.t:
996 key = 'header_verbose'
996 key = 'header_verbose'
997 elif 'header' in self.t:
997 elif 'header' in self.t:
998 key = 'header'
998 key = 'header'
999 else:
999 else:
1000 key = ''
1000 key = ''
1001 if key:
1001 if key:
1002 h = templater.stringify(self.t(key, **props))
1002 h = templater.stringify(self.t(key, **props))
1003 if self.buffered:
1003 if self.buffered:
1004 self.header[rev] = h
1004 self.header[rev] = h
1005 else:
1005 else:
1006 self.ui.write(h)
1006 self.ui.write(h)
1007 if self.ui.debugflag and 'changeset_debug' in self.t:
1007 if self.ui.debugflag and 'changeset_debug' in self.t:
1008 key = 'changeset_debug'
1008 key = 'changeset_debug'
1009 elif self.ui.quiet and 'changeset_quiet' in self.t:
1009 elif self.ui.quiet and 'changeset_quiet' in self.t:
1010 key = 'changeset_quiet'
1010 key = 'changeset_quiet'
1011 elif self.ui.verbose and 'changeset_verbose' in self.t:
1011 elif self.ui.verbose and 'changeset_verbose' in self.t:
1012 key = 'changeset_verbose'
1012 key = 'changeset_verbose'
1013 else:
1013 else:
1014 key = 'changeset'
1014 key = 'changeset'
1015 self.ui.write(templater.stringify(self.t(key, **props)))
1015 self.ui.write(templater.stringify(self.t(key, **props)))
1016 self.showpatch(changenode)
1016 self.showpatch(changenode)
1017 except KeyError, inst:
1017 except KeyError, inst:
1018 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
1018 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
1019 inst.args[0]))
1019 inst.args[0]))
1020 except SyntaxError, inst:
1020 except SyntaxError, inst:
1021 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1021 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
1022
1022
1023 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1023 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
1024 """show one changeset using template or regular display.
1024 """show one changeset using template or regular display.
1025
1025
1026 Display format will be the first non-empty hit of:
1026 Display format will be the first non-empty hit of:
1027 1. option 'template'
1027 1. option 'template'
1028 2. option 'style'
1028 2. option 'style'
1029 3. [ui] setting 'logtemplate'
1029 3. [ui] setting 'logtemplate'
1030 4. [ui] setting 'style'
1030 4. [ui] setting 'style'
1031 If all of these values are either unset or the empty string,
1031 If all of these values are either unset or the empty string,
1032 regular display via changeset_printer() is done.
1032 regular display via changeset_printer() is done.
1033 """
1033 """
1034 # options
1034 # options
1035 patch = False
1035 patch = False
1036 if opts.get('patch'):
1036 if opts.get('patch'):
1037 patch = matchfn or util.always
1037 patch = matchfn or util.always
1038
1038
1039 tmpl = opts.get('template')
1039 tmpl = opts.get('template')
1040 mapfile = None
1040 mapfile = None
1041 if tmpl:
1041 if tmpl:
1042 tmpl = templater.parsestring(tmpl, quoted=False)
1042 tmpl = templater.parsestring(tmpl, quoted=False)
1043 else:
1043 else:
1044 mapfile = opts.get('style')
1044 mapfile = opts.get('style')
1045 # ui settings
1045 # ui settings
1046 if not mapfile:
1046 if not mapfile:
1047 tmpl = ui.config('ui', 'logtemplate')
1047 tmpl = ui.config('ui', 'logtemplate')
1048 if tmpl:
1048 if tmpl:
1049 tmpl = templater.parsestring(tmpl)
1049 tmpl = templater.parsestring(tmpl)
1050 else:
1050 else:
1051 mapfile = ui.config('ui', 'style')
1051 mapfile = ui.config('ui', 'style')
1052
1052
1053 if tmpl or mapfile:
1053 if tmpl or mapfile:
1054 if mapfile:
1054 if mapfile:
1055 if not os.path.split(mapfile)[0]:
1055 if not os.path.split(mapfile)[0]:
1056 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1056 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1057 or templater.templatepath(mapfile))
1057 or templater.templatepath(mapfile))
1058 if mapname: mapfile = mapname
1058 if mapname: mapfile = mapname
1059 try:
1059 try:
1060 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1060 t = changeset_templater(ui, repo, patch, mapfile, buffered)
1061 except SyntaxError, inst:
1061 except SyntaxError, inst:
1062 raise util.Abort(inst.args[0])
1062 raise util.Abort(inst.args[0])
1063 if tmpl: t.use_template(tmpl)
1063 if tmpl: t.use_template(tmpl)
1064 return t
1064 return t
1065 return changeset_printer(ui, repo, patch, buffered)
1065 return changeset_printer(ui, repo, patch, buffered)
1066
1066
1067 def finddate(ui, repo, date):
1067 def finddate(ui, repo, date):
1068 """Find the tipmost changeset that matches the given date spec"""
1068 """Find the tipmost changeset that matches the given date spec"""
1069 df = util.matchdate(date + " to " + date)
1069 df = util.matchdate(date + " to " + date)
1070 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1070 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1071 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1071 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
1072 results = {}
1072 results = {}
1073 for st, rev, fns in changeiter:
1073 for st, rev, fns in changeiter:
1074 if st == 'add':
1074 if st == 'add':
1075 d = get(rev)[2]
1075 d = get(rev)[2]
1076 if df(d[0]):
1076 if df(d[0]):
1077 results[rev] = d
1077 results[rev] = d
1078 elif st == 'iter':
1078 elif st == 'iter':
1079 if rev in results:
1079 if rev in results:
1080 ui.status("Found revision %s from %s\n" %
1080 ui.status("Found revision %s from %s\n" %
1081 (rev, util.datestr(results[rev])))
1081 (rev, util.datestr(results[rev])))
1082 return str(rev)
1082 return str(rev)
1083
1083
1084 raise util.Abort(_("revision matching date not found"))
1084 raise util.Abort(_("revision matching date not found"))
1085
1085
1086 def walkchangerevs(ui, repo, pats, change, opts):
1086 def walkchangerevs(ui, repo, pats, change, opts):
1087 '''Iterate over files and the revs they changed in.
1087 '''Iterate over files and the revs they changed in.
1088
1088
1089 Callers most commonly need to iterate backwards over the history
1089 Callers most commonly need to iterate backwards over the history
1090 they are interested in. Doing so has awful (quadratic-looking)
1090 they are interested in. Doing so has awful (quadratic-looking)
1091 performance, so we use iterators in a "windowed" way.
1091 performance, so we use iterators in a "windowed" way.
1092
1092
1093 We walk a window of revisions in the desired order. Within the
1093 We walk a window of revisions in the desired order. Within the
1094 window, we first walk forwards to gather data, then in the desired
1094 window, we first walk forwards to gather data, then in the desired
1095 order (usually backwards) to display it.
1095 order (usually backwards) to display it.
1096
1096
1097 This function returns an (iterator, matchfn) tuple. The iterator
1097 This function returns an (iterator, matchfn) tuple. The iterator
1098 yields 3-tuples. They will be of one of the following forms:
1098 yields 3-tuples. They will be of one of the following forms:
1099
1099
1100 "window", incrementing, lastrev: stepping through a window,
1100 "window", incrementing, lastrev: stepping through a window,
1101 positive if walking forwards through revs, last rev in the
1101 positive if walking forwards through revs, last rev in the
1102 sequence iterated over - use to reset state for the current window
1102 sequence iterated over - use to reset state for the current window
1103
1103
1104 "add", rev, fns: out-of-order traversal of the given file names
1104 "add", rev, fns: out-of-order traversal of the given file names
1105 fns, which changed during revision rev - use to gather data for
1105 fns, which changed during revision rev - use to gather data for
1106 possible display
1106 possible display
1107
1107
1108 "iter", rev, None: in-order traversal of the revs earlier iterated
1108 "iter", rev, None: in-order traversal of the revs earlier iterated
1109 over with "add" - use to display data'''
1109 over with "add" - use to display data'''
1110
1110
1111 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1111 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1112 if start < end:
1112 if start < end:
1113 while start < end:
1113 while start < end:
1114 yield start, min(windowsize, end-start)
1114 yield start, min(windowsize, end-start)
1115 start += windowsize
1115 start += windowsize
1116 if windowsize < sizelimit:
1116 if windowsize < sizelimit:
1117 windowsize *= 2
1117 windowsize *= 2
1118 else:
1118 else:
1119 while start > end:
1119 while start > end:
1120 yield start, min(windowsize, start-end-1)
1120 yield start, min(windowsize, start-end-1)
1121 start -= windowsize
1121 start -= windowsize
1122 if windowsize < sizelimit:
1122 if windowsize < sizelimit:
1123 windowsize *= 2
1123 windowsize *= 2
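    # Worked illustration (not in the original): walking forwards,
    # increasing_windows(0, 100) yields the (start, size) pairs
    # (0, 8), (8, 16), (24, 32), (56, 44) -- each window is twice the size
    # of the previous one, capped at sizelimit and at the end of the range.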
1124
1124
1125 files, matchfn, anypats = matchpats(repo, pats, opts)
1125 files, matchfn, anypats = matchpats(repo, pats, opts)
1126 follow = opts.get('follow') or opts.get('follow_first')
1126 follow = opts.get('follow') or opts.get('follow_first')
1127
1127
1128 if repo.changelog.count() == 0:
1128 if repo.changelog.count() == 0:
1129 return [], matchfn
1129 return [], matchfn
1130
1130
1131 if follow:
1131 if follow:
1132 defrange = '%s:0' % repo.changectx().rev()
1132 defrange = '%s:0' % repo.changectx().rev()
1133 else:
1133 else:
1134 defrange = 'tip:0'
1134 defrange = 'tip:0'
1135 revs = revrange(repo, opts['rev'] or [defrange])
1135 revs = revrange(repo, opts['rev'] or [defrange])
1136 wanted = {}
1136 wanted = {}
1137 slowpath = anypats or opts.get('removed')
1137 slowpath = anypats or opts.get('removed')
1138 fncache = {}
1138 fncache = {}
1139
1139
1140 if not slowpath and not files:
1140 if not slowpath and not files:
1141 # No files, no patterns. Display all revs.
1141 # No files, no patterns. Display all revs.
1142 wanted = dict.fromkeys(revs)
1142 wanted = dict.fromkeys(revs)
1143 copies = []
1143 copies = []
1144 if not slowpath:
1144 if not slowpath:
1145 # Only files, no patterns. Check the history of each file.
1145 # Only files, no patterns. Check the history of each file.
1146 def filerevgen(filelog, node):
1146 def filerevgen(filelog, node):
1147 cl_count = repo.changelog.count()
1147 cl_count = repo.changelog.count()
1148 if node is None:
1148 if node is None:
1149 last = filelog.count() - 1
1149 last = filelog.count() - 1
1150 else:
1150 else:
1151 last = filelog.rev(node)
1151 last = filelog.rev(node)
1152 for i, window in increasing_windows(last, nullrev):
1152 for i, window in increasing_windows(last, nullrev):
1153 revs = []
1153 revs = []
1154 for j in xrange(i - window, i + 1):
1154 for j in xrange(i - window, i + 1):
1155 n = filelog.node(j)
1155 n = filelog.node(j)
1156 revs.append((filelog.linkrev(n),
1156 revs.append((filelog.linkrev(n),
1157 follow and filelog.renamed(n)))
1157 follow and filelog.renamed(n)))
1158 revs.reverse()
1158 revs.reverse()
1159 for rev in revs:
1159 for rev in revs:
1160 # only yield rev for which we have the changelog, it can
1160 # only yield rev for which we have the changelog, it can
1161 # happen while doing "hg log" during a pull or commit
1161 # happen while doing "hg log" during a pull or commit
1162 if rev[0] < cl_count:
1162 if rev[0] < cl_count:
1163 yield rev
1163 yield rev
1164 def iterfiles():
1164 def iterfiles():
1165 for filename in files:
1165 for filename in files:
1166 yield filename, None
1166 yield filename, None
1167 for filename_node in copies:
1167 for filename_node in copies:
1168 yield filename_node
1168 yield filename_node
1169 minrev, maxrev = min(revs), max(revs)
1169 minrev, maxrev = min(revs), max(revs)
1170 for file_, node in iterfiles():
1170 for file_, node in iterfiles():
1171 filelog = repo.file(file_)
1171 filelog = repo.file(file_)
1172 # A zero count may be a directory or deleted file, so
1172 # A zero count may be a directory or deleted file, so
1173 # try to find matching entries on the slow path.
1173 # try to find matching entries on the slow path.
1174 if filelog.count() == 0:
1174 if filelog.count() == 0:
1175 slowpath = True
1175 slowpath = True
1176 break
1176 break
1177 for rev, copied in filerevgen(filelog, node):
1177 for rev, copied in filerevgen(filelog, node):
1178 if rev <= maxrev:
1178 if rev <= maxrev:
1179 if rev < minrev:
1179 if rev < minrev:
1180 break
1180 break
1181 fncache.setdefault(rev, [])
1181 fncache.setdefault(rev, [])
1182 fncache[rev].append(file_)
1182 fncache[rev].append(file_)
1183 wanted[rev] = 1
1183 wanted[rev] = 1
1184 if follow and copied:
1184 if follow and copied:
1185 copies.append(copied)
1185 copies.append(copied)
1186 if slowpath:
1186 if slowpath:
1187 if follow:
1187 if follow:
1188 raise util.Abort(_('can only follow copies/renames for explicit '
1188 raise util.Abort(_('can only follow copies/renames for explicit '
1189 'file names'))
1189 'file names'))
1190
1190
1191 # The slow path checks files modified in every changeset.
1191 # The slow path checks files modified in every changeset.
1192 def changerevgen():
1192 def changerevgen():
1193 for i, window in increasing_windows(repo.changelog.count()-1,
1193 for i, window in increasing_windows(repo.changelog.count()-1,
1194 nullrev):
1194 nullrev):
1195 for j in xrange(i - window, i + 1):
1195 for j in xrange(i - window, i + 1):
1196 yield j, change(j)[3]
1196 yield j, change(j)[3]
1197
1197
1198 for rev, changefiles in changerevgen():
1198 for rev, changefiles in changerevgen():
1199 matches = filter(matchfn, changefiles)
1199 matches = filter(matchfn, changefiles)
1200 if matches:
1200 if matches:
1201 fncache[rev] = matches
1201 fncache[rev] = matches
1202 wanted[rev] = 1
1202 wanted[rev] = 1
1203
1203
1204 class followfilter:
1204 class followfilter:
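        # (descriptive note, not part of the original code: match() reports
        #  whether rev lies on the follow path -- descendants of startrev
        #  when scanning forwards, ancestors when scanning backwards,
        #  restricted to first parents if onlyfirst is set)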
1205 def __init__(self, onlyfirst=False):
1205 def __init__(self, onlyfirst=False):
1206 self.startrev = nullrev
1206 self.startrev = nullrev
1207 self.roots = []
1207 self.roots = []
1208 self.onlyfirst = onlyfirst
1208 self.onlyfirst = onlyfirst
1209
1209
1210 def match(self, rev):
1210 def match(self, rev):
1211 def realparents(rev):
1211 def realparents(rev):
1212 if self.onlyfirst:
1212 if self.onlyfirst:
1213 return repo.changelog.parentrevs(rev)[0:1]
1213 return repo.changelog.parentrevs(rev)[0:1]
1214 else:
1214 else:
1215 return filter(lambda x: x != nullrev,
1215 return filter(lambda x: x != nullrev,
1216 repo.changelog.parentrevs(rev))
1216 repo.changelog.parentrevs(rev))
1217
1217
1218 if self.startrev == nullrev:
1218 if self.startrev == nullrev:
1219 self.startrev = rev
1219 self.startrev = rev
1220 return True
1220 return True
1221
1221
1222 if rev > self.startrev:
1222 if rev > self.startrev:
1223 # forward: all descendants
1223 # forward: all descendants
1224 if not self.roots:
1224 if not self.roots:
1225 self.roots.append(self.startrev)
1225 self.roots.append(self.startrev)
1226 for parent in realparents(rev):
1226 for parent in realparents(rev):
1227 if parent in self.roots:
1227 if parent in self.roots:
1228 self.roots.append(rev)
1228 self.roots.append(rev)
1229 return True
1229 return True
1230 else:
1230 else:
1231 # backwards: all parents
1231 # backwards: all parents
1232 if not self.roots:
1232 if not self.roots:
1233 self.roots.extend(realparents(self.startrev))
1233 self.roots.extend(realparents(self.startrev))
1234 if rev in self.roots:
1234 if rev in self.roots:
1235 self.roots.remove(rev)
1235 self.roots.remove(rev)
1236 self.roots.extend(realparents(rev))
1236 self.roots.extend(realparents(rev))
1237 return True
1237 return True
1238
1238
1239 return False
1239 return False
1240
1240
1241 # it might be worthwhile to do this in the iterator if the rev range
1241 # it might be worthwhile to do this in the iterator if the rev range
1242 # is descending and the prune args are all within that range
1242 # is descending and the prune args are all within that range
1243 for rev in opts.get('prune', ()):
1243 for rev in opts.get('prune', ()):
1244 rev = repo.changelog.rev(repo.lookup(rev))
1244 rev = repo.changelog.rev(repo.lookup(rev))
1245 ff = followfilter()
1245 ff = followfilter()
1246 stop = min(revs[0], revs[-1])
1246 stop = min(revs[0], revs[-1])
1247 for x in xrange(rev, stop-1, -1):
1247 for x in xrange(rev, stop-1, -1):
1248 if ff.match(x) and x in wanted:
1248 if ff.match(x) and x in wanted:
1249 del wanted[x]
1249 del wanted[x]
1250
1250
1251 def iterate():
1251 def iterate():
1252 if follow and not files:
1252 if follow and not files:
1253 ff = followfilter(onlyfirst=opts.get('follow_first'))
1253 ff = followfilter(onlyfirst=opts.get('follow_first'))
1254 def want(rev):
1254 def want(rev):
1255 if ff.match(rev) and rev in wanted:
1255 if ff.match(rev) and rev in wanted:
1256 return True
1256 return True
1257 return False
1257 return False
1258 else:
1258 else:
1259 def want(rev):
1259 def want(rev):
1260 return rev in wanted
1260 return rev in wanted
1261
1261
1262 for i, window in increasing_windows(0, len(revs)):
1262 for i, window in increasing_windows(0, len(revs)):
1263 yield 'window', revs[0] < revs[-1], revs[-1]
1263 yield 'window', revs[0] < revs[-1], revs[-1]
1264 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1264 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1265 srevs = list(nrevs)
1265 srevs = list(nrevs)
1266 srevs.sort()
1266 srevs.sort()
1267 for rev in srevs:
1267 for rev in srevs:
1268 fns = fncache.get(rev)
1268 fns = fncache.get(rev)
1269 if not fns:
1269 if not fns:
1270 def fns_generator():
1270 def fns_generator():
1271 for f in change(rev)[3]:
1271 for f in change(rev)[3]:
1272 if matchfn(f):
1272 if matchfn(f):
1273 yield f
1273 yield f
1274 fns = fns_generator()
1274 fns = fns_generator()
1275 yield 'add', rev, fns
1275 yield 'add', rev, fns
1276 for rev in nrevs:
1276 for rev in nrevs:
1277 yield 'iter', rev, None
1277 yield 'iter', rev, None
1278 return iterate(), matchfn
1278 return iterate(), matchfn
@@ -1,3163 +1,3164 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, urllib, shlex, stat
11 import bisect, os, re, sys, urllib, shlex, stat
12 import ui, hg, util, revlog, bundlerepo, extensions
12 import ui, hg, util, revlog, bundlerepo, extensions
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import errno, version, socket
14 import errno, version, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 # Commands start here, listed alphabetically
17 # Commands start here, listed alphabetically
18
18
19 def add(ui, repo, *pats, **opts):
19 def add(ui, repo, *pats, **opts):
20 """add the specified files on the next commit
20 """add the specified files on the next commit
21
21
22 Schedule files to be version controlled and added to the repository.
22 Schedule files to be version controlled and added to the repository.
23
23
24 The files will be added to the repository at the next commit. To
24 The files will be added to the repository at the next commit. To
25 undo an add before that, see hg revert.
25 undo an add before that, see hg revert.
26
26
27 If no names are given, add all files in the repository.
27 If no names are given, add all files in the repository.
28 """
28 """
29
29
30 names = []
30 names = []
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
31 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
32 if exact:
32 if exact:
33 if ui.verbose:
33 if ui.verbose:
34 ui.status(_('adding %s\n') % rel)
34 ui.status(_('adding %s\n') % rel)
35 names.append(abs)
35 names.append(abs)
36 elif repo.dirstate.state(abs) == '?':
36 elif abs not in repo.dirstate:
37 ui.status(_('adding %s\n') % rel)
37 ui.status(_('adding %s\n') % rel)
38 names.append(abs)
38 names.append(abs)
39 if not opts.get('dry_run'):
39 if not opts.get('dry_run'):
40 repo.add(names)
40 repo.add(names)
41
41
42 def addremove(ui, repo, *pats, **opts):
42 def addremove(ui, repo, *pats, **opts):
43 """add all new files, delete all missing files
43 """add all new files, delete all missing files
44
44
45 Add all new files and remove all missing files from the repository.
45 Add all new files and remove all missing files from the repository.
46
46
47 New files are ignored if they match any of the patterns in .hgignore. As
47 New files are ignored if they match any of the patterns in .hgignore. As
48 with add, these changes take effect at the next commit.
48 with add, these changes take effect at the next commit.
49
49
50 Use the -s option to detect renamed files. With a parameter > 0,
50 Use the -s option to detect renamed files. With a parameter > 0,
51 this compares every removed file with every added file and records
51 this compares every removed file with every added file and records
52 those similar enough as renames. This option takes a percentage
52 those similar enough as renames. This option takes a percentage
53 between 0 (disabled) and 100 (files must be identical) as its
53 between 0 (disabled) and 100 (files must be identical) as its
54 parameter. Detecting renamed files this way can be expensive.
54 parameter. Detecting renamed files this way can be expensive.
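    For example (an illustrative invocation, not part of the original text),
    "hg addremove -s 75" records a new file as a rename of a removed file
    when the two are at least 75% similar.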
55 """
55 """
56 sim = float(opts.get('similarity') or 0)
56 sim = float(opts.get('similarity') or 0)
57 if sim < 0 or sim > 100:
57 if sim < 0 or sim > 100:
58 raise util.Abort(_('similarity must be between 0 and 100'))
58 raise util.Abort(_('similarity must be between 0 and 100'))
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
59 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
60
60
61 def annotate(ui, repo, *pats, **opts):
61 def annotate(ui, repo, *pats, **opts):
62 """show changeset information per file line
62 """show changeset information per file line
63
63
64 List changes in files, showing the revision id responsible for each line.
64 List changes in files, showing the revision id responsible for each line.
65
65
66 This command is useful to discover who did a change or when a change took
66 This command is useful to discover who did a change or when a change took
67 place.
67 place.
68
68
69 Without the -a option, annotate will avoid processing files it
69 Without the -a option, annotate will avoid processing files it
70 detects as binary. With -a, annotate will generate an annotation
70 detects as binary. With -a, annotate will generate an annotation
71 anyway, probably with undesirable results.
71 anyway, probably with undesirable results.
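    For example (an illustrative invocation, not part of the original text),
    "hg annotate -n -l somefile" prefixes each line with the revision number
    and the line number at which it was introduced (-l requires -n or -c).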
72 """
72 """
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
73 getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))
74
74
75 if not pats:
75 if not pats:
76 raise util.Abort(_('at least one file name or pattern required'))
76 raise util.Abort(_('at least one file name or pattern required'))
77
77
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
78 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
79 ('number', lambda x: str(x[0].rev())),
79 ('number', lambda x: str(x[0].rev())),
80 ('changeset', lambda x: short(x[0].node())),
80 ('changeset', lambda x: short(x[0].node())),
81 ('date', getdate),
81 ('date', getdate),
82 ('follow', lambda x: x[0].path()),
82 ('follow', lambda x: x[0].path()),
83 ]
83 ]
84
84
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
85 if (not opts['user'] and not opts['changeset'] and not opts['date']
86 and not opts['follow']):
86 and not opts['follow']):
87 opts['number'] = 1
87 opts['number'] = 1
88
88
89 linenumber = opts.get('line_number') is not None
89 linenumber = opts.get('line_number') is not None
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
90 if (linenumber and (not opts['changeset']) and (not opts['number'])):
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
91 raise util.Abort(_('at least one of -n/-c is required for -l'))
92
92
93 funcmap = [func for op, func in opmap if opts.get(op)]
93 funcmap = [func for op, func in opmap if opts.get(op)]
94 if linenumber:
94 if linenumber:
95 lastfunc = funcmap[-1]
95 lastfunc = funcmap[-1]
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
96 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
97
97
98 ctx = repo.changectx(opts['rev'])
98 ctx = repo.changectx(opts['rev'])
99
99
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
100 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
101 node=ctx.node()):
101 node=ctx.node()):
102 fctx = ctx.filectx(abs)
102 fctx = ctx.filectx(abs)
103 if not opts['text'] and util.binary(fctx.data()):
103 if not opts['text'] and util.binary(fctx.data()):
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
104 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
105 continue
105 continue
106
106
107 lines = fctx.annotate(follow=opts.get('follow'),
107 lines = fctx.annotate(follow=opts.get('follow'),
108 linenumber=linenumber)
108 linenumber=linenumber)
109 pieces = []
109 pieces = []
110
110
111 for f in funcmap:
111 for f in funcmap:
112 l = [f(n) for n, dummy in lines]
112 l = [f(n) for n, dummy in lines]
113 if l:
113 if l:
114 m = max(map(len, l))
114 m = max(map(len, l))
115 pieces.append(["%*s" % (m, x) for x in l])
115 pieces.append(["%*s" % (m, x) for x in l])
116
116
117 if pieces:
117 if pieces:
118 for p, l in zip(zip(*pieces), lines):
118 for p, l in zip(zip(*pieces), lines):
119 ui.write("%s: %s" % (" ".join(p), l[1]))
119 ui.write("%s: %s" % (" ".join(p), l[1]))
120
120
121 def archive(ui, repo, dest, **opts):
121 def archive(ui, repo, dest, **opts):
122 '''create unversioned archive of a repository revision
122 '''create unversioned archive of a repository revision
123
123
124 By default, the revision used is the parent of the working
124 By default, the revision used is the parent of the working
125 directory; use "-r" to specify a different revision.
125 directory; use "-r" to specify a different revision.
126
126
127 To specify the type of archive to create, use "-t". Valid
127 To specify the type of archive to create, use "-t". Valid
128 types are:
128 types are:
129
129
130 "files" (default): a directory full of files
130 "files" (default): a directory full of files
131 "tar": tar archive, uncompressed
131 "tar": tar archive, uncompressed
132 "tbz2": tar archive, compressed using bzip2
132 "tbz2": tar archive, compressed using bzip2
133 "tgz": tar archive, compressed using gzip
133 "tgz": tar archive, compressed using gzip
134 "uzip": zip archive, uncompressed
134 "uzip": zip archive, uncompressed
135 "zip": zip archive, compressed using deflate
135 "zip": zip archive, compressed using deflate
136
136
137 The exact name of the destination archive or directory is given
137 The exact name of the destination archive or directory is given
138 using a format string; see "hg help export" for details.
138 using a format string; see "hg help export" for details.
139
139
140 Each member added to an archive file has a directory prefix
140 Each member added to an archive file has a directory prefix
141 prepended. Use "-p" to specify a format string for the prefix.
141 prepended. Use "-p" to specify a format string for the prefix.
142 The default is the basename of the archive, with suffixes removed.
142 The default is the basename of the archive, with suffixes removed.
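    For example (an illustrative command, not part of the original text),
    "hg archive -t tgz -r 1000 -p project-%h project-1000.tar.gz" archives
    revision 1000 as a gzipped tarball, prefixing each member with
    "project-" followed by the short hash of that revision.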
143 '''
143 '''
144
144
145 ctx = repo.changectx(opts['rev'])
145 ctx = repo.changectx(opts['rev'])
146 if not ctx:
146 if not ctx:
147 raise util.Abort(_('repository has no revisions'))
147 raise util.Abort(_('repository has no revisions'))
148 node = ctx.node()
148 node = ctx.node()
149 dest = cmdutil.make_filename(repo, dest, node)
149 dest = cmdutil.make_filename(repo, dest, node)
150 if os.path.realpath(dest) == repo.root:
150 if os.path.realpath(dest) == repo.root:
151 raise util.Abort(_('repository root cannot be destination'))
151 raise util.Abort(_('repository root cannot be destination'))
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
152 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
153 kind = opts.get('type') or 'files'
153 kind = opts.get('type') or 'files'
154 prefix = opts['prefix']
154 prefix = opts['prefix']
155 if dest == '-':
155 if dest == '-':
156 if kind == 'files':
156 if kind == 'files':
157 raise util.Abort(_('cannot archive plain files to stdout'))
157 raise util.Abort(_('cannot archive plain files to stdout'))
158 dest = sys.stdout
158 dest = sys.stdout
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
159 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
160 prefix = cmdutil.make_filename(repo, prefix, node)
160 prefix = cmdutil.make_filename(repo, prefix, node)
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
161 archival.archive(repo, dest, node, kind, not opts['no_decode'],
162 matchfn, prefix)
162 matchfn, prefix)
163
163
164 def backout(ui, repo, node=None, rev=None, **opts):
164 def backout(ui, repo, node=None, rev=None, **opts):
165 '''reverse effect of earlier changeset
165 '''reverse effect of earlier changeset
166
166
167 Commit the backed out changes as a new changeset. The new
167 Commit the backed out changes as a new changeset. The new
168 changeset is a child of the backed out changeset.
168 changeset is a child of the backed out changeset.
169
169
170 If you back out a changeset other than the tip, a new head is
170 If you back out a changeset other than the tip, a new head is
171 created. This head is the parent of the working directory. If
171 created. This head is the parent of the working directory. If
172 you back out an old changeset, your working directory will appear
172 you back out an old changeset, your working directory will appear
173 old after the backout. You should merge the backout changeset
173 old after the backout. You should merge the backout changeset
174 with another head.
174 with another head.
175
175
176 The --merge option remembers the parent of the working directory
176 The --merge option remembers the parent of the working directory
177 before starting the backout, then merges the new head with that
177 before starting the backout, then merges the new head with that
178 changeset afterwards. This saves you from doing the merge by
178 changeset afterwards. This saves you from doing the merge by
179 hand. The result of this merge is not committed, as for a normal
179 hand. The result of this merge is not committed, as for a normal
180 merge.'''
180 merge.'''
181 if rev and node:
181 if rev and node:
182 raise util.Abort(_("please specify just one revision"))
182 raise util.Abort(_("please specify just one revision"))
183
183
184 if not rev:
184 if not rev:
185 rev = node
185 rev = node
186
186
187 if not rev:
187 if not rev:
188 raise util.Abort(_("please specify a revision to backout"))
188 raise util.Abort(_("please specify a revision to backout"))
189
189
190 cmdutil.bail_if_changed(repo)
190 cmdutil.bail_if_changed(repo)
191 op1, op2 = repo.dirstate.parents()
191 op1, op2 = repo.dirstate.parents()
192 if op2 != nullid:
192 if op2 != nullid:
193 raise util.Abort(_('outstanding uncommitted merge'))
193 raise util.Abort(_('outstanding uncommitted merge'))
194 node = repo.lookup(rev)
194 node = repo.lookup(rev)
195 p1, p2 = repo.changelog.parents(node)
195 p1, p2 = repo.changelog.parents(node)
196 if p1 == nullid:
196 if p1 == nullid:
197 raise util.Abort(_('cannot back out a change with no parents'))
197 raise util.Abort(_('cannot back out a change with no parents'))
198 if p2 != nullid:
198 if p2 != nullid:
199 if not opts['parent']:
199 if not opts['parent']:
200 raise util.Abort(_('cannot back out a merge changeset without '
200 raise util.Abort(_('cannot back out a merge changeset without '
201 '--parent'))
201 '--parent'))
202 p = repo.lookup(opts['parent'])
202 p = repo.lookup(opts['parent'])
203 if p not in (p1, p2):
203 if p not in (p1, p2):
204 raise util.Abort(_('%s is not a parent of %s') %
204 raise util.Abort(_('%s is not a parent of %s') %
205 (short(p), short(node)))
205 (short(p), short(node)))
206 parent = p
206 parent = p
207 else:
207 else:
208 if opts['parent']:
208 if opts['parent']:
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
209 raise util.Abort(_('cannot use --parent on non-merge changeset'))
210 parent = p1
210 parent = p1
211 hg.clean(repo, node, show_stats=False)
211 hg.clean(repo, node, show_stats=False)
212 revert_opts = opts.copy()
212 revert_opts = opts.copy()
213 revert_opts['date'] = None
213 revert_opts['date'] = None
214 revert_opts['all'] = True
214 revert_opts['all'] = True
215 revert_opts['rev'] = hex(parent)
215 revert_opts['rev'] = hex(parent)
216 revert(ui, repo, **revert_opts)
216 revert(ui, repo, **revert_opts)
217 commit_opts = opts.copy()
217 commit_opts = opts.copy()
218 commit_opts['addremove'] = False
218 commit_opts['addremove'] = False
219 if not commit_opts['message'] and not commit_opts['logfile']:
219 if not commit_opts['message'] and not commit_opts['logfile']:
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
220 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
221 commit_opts['force_editor'] = True
221 commit_opts['force_editor'] = True
222 commit(ui, repo, **commit_opts)
222 commit(ui, repo, **commit_opts)
223 def nice(node):
223 def nice(node):
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
224 return '%d:%s' % (repo.changelog.rev(node), short(node))
225 ui.status(_('changeset %s backs out changeset %s\n') %
225 ui.status(_('changeset %s backs out changeset %s\n') %
226 (nice(repo.changelog.tip()), nice(node)))
226 (nice(repo.changelog.tip()), nice(node)))
227 if op1 != node:
227 if op1 != node:
228 if opts['merge']:
228 if opts['merge']:
229 ui.status(_('merging with changeset %s\n') % nice(op1))
229 ui.status(_('merging with changeset %s\n') % nice(op1))
230 hg.merge(repo, hex(op1))
230 hg.merge(repo, hex(op1))
231 else:
231 else:
232 ui.status(_('the backout changeset is a new head - '
232 ui.status(_('the backout changeset is a new head - '
233 'do not forget to merge\n'))
233 'do not forget to merge\n'))
234 ui.status(_('(use "backout --merge" '
234 ui.status(_('(use "backout --merge" '
235 'if you want to auto-merge)\n'))
235 'if you want to auto-merge)\n'))
236
236
237 def branch(ui, repo, label=None, **opts):
237 def branch(ui, repo, label=None, **opts):
238 """set or show the current branch name
238 """set or show the current branch name
239
239
240 With no argument, show the current branch name. With one argument,
240 With no argument, show the current branch name. With one argument,
241 set the working directory branch name (the branch does not exist in
241 set the working directory branch name (the branch does not exist in
242 the repository until the next commit).
242 the repository until the next commit).
243
243
244 Unless --force is specified, branch will not let you set a
244 Unless --force is specified, branch will not let you set a
245 branch name that shadows an existing branch.
245 branch name that shadows an existing branch.
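    For example (an illustrative invocation, not part of the original text),
    "hg branch stable" marks the working directory as being on branch
    "stable", while "hg branch" with no argument prints the current name.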
246 """
246 """
247
247
248 if label:
248 if label:
249 if not opts.get('force') and label in repo.branchtags():
249 if not opts.get('force') and label in repo.branchtags():
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
250 if label not in [p.branch() for p in repo.workingctx().parents()]:
251 raise util.Abort(_('a branch of the same name already exists'
251 raise util.Abort(_('a branch of the same name already exists'
252 ' (use --force to override)'))
252 ' (use --force to override)'))
253 repo.dirstate.setbranch(util.fromlocal(label))
253 repo.dirstate.setbranch(util.fromlocal(label))
254 ui.status(_('marked working directory as branch %s\n') % label)
254 ui.status(_('marked working directory as branch %s\n') % label)
255 else:
255 else:
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
256 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
257
257
258 def branches(ui, repo, active=False):
258 def branches(ui, repo, active=False):
259 """list repository named branches
259 """list repository named branches
260
260
261 List the repository's named branches, indicating which ones are
261 List the repository's named branches, indicating which ones are
262 inactive. If active is specified, only show active branches.
262 inactive. If active is specified, only show active branches.
263
263
264 A branch is considered active if it contains unmerged heads.
264 A branch is considered active if it contains unmerged heads.
265 """
265 """
266 b = repo.branchtags()
266 b = repo.branchtags()
267 heads = dict.fromkeys(repo.heads(), 1)
267 heads = dict.fromkeys(repo.heads(), 1)
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
268 l = [((n in heads), repo.changelog.rev(n), n, t) for t, n in b.items()]
269 l.sort()
269 l.sort()
270 l.reverse()
270 l.reverse()
271 for ishead, r, n, t in l:
271 for ishead, r, n, t in l:
272 if active and not ishead:
272 if active and not ishead:
273 # If we're only displaying active branches, abort the loop on
273 # If we're only displaying active branches, abort the loop on
274 # encountering the first inactive head
274 # encountering the first inactive head
275 break
275 break
276 else:
276 else:
277 hexfunc = ui.debugflag and hex or short
277 hexfunc = ui.debugflag and hex or short
278 if ui.quiet:
278 if ui.quiet:
279 ui.write("%s\n" % t)
279 ui.write("%s\n" % t)
280 else:
280 else:
281 spaces = " " * (30 - util.locallen(t))
281 spaces = " " * (30 - util.locallen(t))
282 # The code only gets here if inactive branches are being
282 # The code only gets here if inactive branches are being
283 # displayed or the branch is active.
283 # displayed or the branch is active.
284 isinactive = ((not ishead) and " (inactive)") or ''
284 isinactive = ((not ishead) and " (inactive)") or ''
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
285 ui.write("%s%s %s:%s%s\n" % (t, spaces, r, hexfunc(n), isinactive))
286
286
287 def bundle(ui, repo, fname, dest=None, **opts):
287 def bundle(ui, repo, fname, dest=None, **opts):
288 """create a changegroup file
288 """create a changegroup file
289
289
290 Generate a compressed changegroup file collecting changesets not
290 Generate a compressed changegroup file collecting changesets not
291 found in the other repository.
291 found in the other repository.
292
292
293 If no destination repository is specified the destination is assumed
293 If no destination repository is specified the destination is assumed
294 to have all the nodes specified by one or more --base parameters.
294 to have all the nodes specified by one or more --base parameters.
295
295
296 The bundle file can then be transferred using conventional means and
296 The bundle file can then be transferred using conventional means and
297 applied to another repository with the unbundle or pull command.
297 applied to another repository with the unbundle or pull command.
298 This is useful when direct push and pull are not available or when
298 This is useful when direct push and pull are not available or when
299 exporting an entire repository is undesirable.
299 exporting an entire repository is undesirable.
300
300
301 Applying bundles preserves all changeset contents including
301 Applying bundles preserves all changeset contents including
302 permissions, copy/rename information, and revision history.
302 permissions, copy/rename information, and revision history.
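    For example (an illustrative invocation, not part of the original text),
    "hg bundle changes.hg ../other-repo" writes the changesets missing from
    ../other-repo into the file changes.hg.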
303 """
303 """
304 revs = opts.get('rev') or None
304 revs = opts.get('rev') or None
305 if revs:
305 if revs:
306 revs = [repo.lookup(rev) for rev in revs]
306 revs = [repo.lookup(rev) for rev in revs]
307 base = opts.get('base')
307 base = opts.get('base')
308 if base:
308 if base:
309 if dest:
309 if dest:
310 raise util.Abort(_("--base is incompatible with specifiying "
310 raise util.Abort(_("--base is incompatible with specifiying "
311 "a destination"))
311 "a destination"))
312 base = [repo.lookup(rev) for rev in base]
312 base = [repo.lookup(rev) for rev in base]
313 # create the right base
313 # create the right base
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
314 # XXX: nodesbetween / changegroup* should be "fixed" instead
315 o = []
315 o = []
316 has = {nullid: None}
316 has = {nullid: None}
317 for n in base:
317 for n in base:
318 has.update(repo.changelog.reachable(n))
318 has.update(repo.changelog.reachable(n))
319 if revs:
319 if revs:
320 visit = list(revs)
320 visit = list(revs)
321 else:
321 else:
322 visit = repo.changelog.heads()
322 visit = repo.changelog.heads()
323 seen = {}
323 seen = {}
324 while visit:
324 while visit:
325 n = visit.pop(0)
325 n = visit.pop(0)
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
326 parents = [p for p in repo.changelog.parents(n) if p not in has]
327 if len(parents) == 0:
327 if len(parents) == 0:
328 o.insert(0, n)
328 o.insert(0, n)
329 else:
329 else:
330 for p in parents:
330 for p in parents:
331 if p not in seen:
331 if p not in seen:
332 seen[p] = 1
332 seen[p] = 1
333 visit.append(p)
333 visit.append(p)
334 else:
334 else:
335 cmdutil.setremoteconfig(ui, opts)
335 cmdutil.setremoteconfig(ui, opts)
336 dest, revs = cmdutil.parseurl(
336 dest, revs = cmdutil.parseurl(
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
337 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
338 other = hg.repository(ui, dest)
338 other = hg.repository(ui, dest)
339 o = repo.findoutgoing(other, force=opts['force'])
339 o = repo.findoutgoing(other, force=opts['force'])
340
340
341 if revs:
341 if revs:
342 cg = repo.changegroupsubset(o, revs, 'bundle')
342 cg = repo.changegroupsubset(o, revs, 'bundle')
343 else:
343 else:
344 cg = repo.changegroup(o, 'bundle')
344 cg = repo.changegroup(o, 'bundle')
345 changegroup.writebundle(cg, fname, "HG10BZ")
345 changegroup.writebundle(cg, fname, "HG10BZ")
346
346
347 def cat(ui, repo, file1, *pats, **opts):
347 def cat(ui, repo, file1, *pats, **opts):
348 """output the current or given revision of files
348 """output the current or given revision of files
349
349
350 Print the specified files as they were at the given revision.
350 Print the specified files as they were at the given revision.
351 If no revision is given, the parent of the working directory is used,
351 If no revision is given, the parent of the working directory is used,
352 or tip if no revision is checked out.
352 or tip if no revision is checked out.
353
353
354 Output may be to a file, in which case the name of the file is
354 Output may be to a file, in which case the name of the file is
355 given using a format string. The formatting rules are the same as
355 given using a format string. The formatting rules are the same as
356 for the export command, with the following additions:
356 for the export command, with the following additions:
357
357
358 %s basename of file being printed
358 %s basename of file being printed
359 %d dirname of file being printed, or '.' if in repo root
359 %d dirname of file being printed, or '.' if in repo root
360 %p root-relative path name of file being printed
360 %p root-relative path name of file being printed
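    For example (an illustrative command, not part of the original text,
    assuming the 'output' option is spelled --output on the command line),
    "hg cat -r 2 --output '%d/%s.r2' somefile" writes revision 2 of somefile
    to a file named from its directory and basename.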
361 """
361 """
362 ctx = repo.changectx(opts['rev'])
362 ctx = repo.changectx(opts['rev'])
363 err = 1
363 err = 1
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
364 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
365 ctx.node()):
365 ctx.node()):
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
366 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
367 fp.write(ctx.filectx(abs).data())
367 fp.write(ctx.filectx(abs).data())
368 err = 0
368 err = 0
369 return err
369 return err
370
370
371 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
372 """make a copy of an existing repository
372 """make a copy of an existing repository
373
373
374 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
375
375
376 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
377 basename of the source.
377 basename of the source.
378
378
379 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
380 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
381
381
382 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
383 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
384 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
386 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
387 avoid hardlinking.
387 avoid hardlinking.
388
388
389 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
390 hardlinks with
390 hardlinks with
391
391
392 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
393
393
394 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
395 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
396 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
397 (Emacs and most Linux kernel tools do so).
397 (Emacs and most Linux kernel tools do so).
398
398
399 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
400 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
401 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
402
402
403 See pull for valid source format details.
403 See pull for valid source format details.
404
404
405 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
406 .hg/hgrc and working directory will be created on the remote side.
406 .hg/hgrc and working directory will be created on the remote side.
407 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
408 about ssh:// URLs.
408 about ssh:// URLs.
409 """
409 """
410 cmdutil.setremoteconfig(ui, opts)
410 cmdutil.setremoteconfig(ui, opts)
411 hg.clone(ui, source, dest,
411 hg.clone(ui, source, dest,
412 pull=opts['pull'],
412 pull=opts['pull'],
413 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
414 rev=opts['rev'],
414 rev=opts['rev'],
415 update=not opts['noupdate'])
415 update=not opts['noupdate'])
416
416
417 def commit(ui, repo, *pats, **opts):
417 def commit(ui, repo, *pats, **opts):
418 """commit the specified files or all outstanding changes
418 """commit the specified files or all outstanding changes
419
419
420 Commit changes to the given files into the repository.
420 Commit changes to the given files into the repository.
421
421
422 If a list of files is omitted, all changes reported by "hg status"
422 If a list of files is omitted, all changes reported by "hg status"
423 will be committed.
423 will be committed.
424
424
425 If no commit message is specified, the editor configured in your hgrc
425 If no commit message is specified, the editor configured in your hgrc
426 or in the EDITOR environment variable is started to enter a message.
426 or in the EDITOR environment variable is started to enter a message.
427 """
427 """
428 message = cmdutil.logmessage(opts)
428 message = cmdutil.logmessage(opts)
429
429
430 if opts['addremove']:
430 if opts['addremove']:
431 cmdutil.addremove(repo, pats, opts)
431 cmdutil.addremove(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
433 if pats:
433 if pats:
434 status = repo.status(files=fns, match=match)
434 status = repo.status(files=fns, match=match)
435 modified, added, removed, deleted, unknown = status[:5]
435 modified, added, removed, deleted, unknown = status[:5]
436 files = modified + added + removed
436 files = modified + added + removed
437 slist = None
437 slist = None
438 for f in fns:
438 for f in fns:
439 if f == '.':
439 if f == '.':
440 continue
440 continue
441 if f not in files:
441 if f not in files:
442 rf = repo.wjoin(f)
442 rf = repo.wjoin(f)
443 try:
443 try:
444 mode = os.lstat(rf)[stat.ST_MODE]
444 mode = os.lstat(rf)[stat.ST_MODE]
445 except OSError:
445 except OSError:
446 raise util.Abort(_("file %s not found!") % rf)
446 raise util.Abort(_("file %s not found!") % rf)
447 if stat.S_ISDIR(mode):
447 if stat.S_ISDIR(mode):
448 name = f + '/'
448 name = f + '/'
449 if slist is None:
449 if slist is None:
450 slist = list(files)
450 slist = list(files)
451 slist.sort()
451 slist.sort()
452 i = bisect.bisect(slist, name)
452 i = bisect.bisect(slist, name)
453 if i >= len(slist) or not slist[i].startswith(name):
453 if i >= len(slist) or not slist[i].startswith(name):
454 raise util.Abort(_("no match under directory %s!")
454 raise util.Abort(_("no match under directory %s!")
455 % rf)
455 % rf)
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
456 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
457 raise util.Abort(_("can't commit %s: "
457 raise util.Abort(_("can't commit %s: "
458 "unsupported file type!") % rf)
458 "unsupported file type!") % rf)
459 elif repo.dirstate.state(f) == '?':
459 elif f not in repo.dirstate:
460 raise util.Abort(_("file %s not tracked!") % rf)
460 raise util.Abort(_("file %s not tracked!") % rf)
461 else:
461 else:
462 files = []
462 files = []
463 try:
463 try:
464 repo.commit(files, message, opts['user'], opts['date'], match,
464 repo.commit(files, message, opts['user'], opts['date'], match,
465 force_editor=opts.get('force_editor'))
465 force_editor=opts.get('force_editor'))
466 except ValueError, inst:
466 except ValueError, inst:
467 raise util.Abort(str(inst))
467 raise util.Abort(str(inst))
468
468
469 def docopy(ui, repo, pats, opts, wlock):
469 def docopy(ui, repo, pats, opts, wlock):
470 # called with the repo lock held
470 # called with the repo lock held
471 #
471 #
472 # hgsep => pathname that uses "/" to separate directories
472 # hgsep => pathname that uses "/" to separate directories
473 # ossep => pathname that uses os.sep to separate directories
473 # ossep => pathname that uses os.sep to separate directories
474 cwd = repo.getcwd()
474 cwd = repo.getcwd()
475 errors = 0
475 errors = 0
476 copied = []
476 copied = []
477 targets = {}
477 targets = {}
478
478
479 # abs: hgsep
479 # abs: hgsep
480 # rel: ossep
480 # rel: ossep
481 # return: hgsep
481 # return: hgsep
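    # (illustration, not in the original: on Windows an hgsep path such as
    #  "foo/bar.txt" corresponds to the ossep form "foo\\bar.txt"; on POSIX
    #  systems the two forms are identical)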
482 def okaytocopy(abs, rel, exact):
482 def okaytocopy(abs, rel, exact):
483 reasons = {'?': _('is not managed'),
483 reasons = {'?': _('is not managed'),
484 'r': _('has been marked for remove')}
484 'r': _('has been marked for remove')}
485 state = repo.dirstate.state(abs)
485 state = repo.dirstate[abs]
486 reason = reasons.get(state)
486 reason = reasons.get(state)
487 if reason:
487 if reason:
488 if exact:
488 if exact:
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 else:
490 else:
491 if state == 'a':
491 if state == 'a':
492 origsrc = repo.dirstate.copied(abs)
492 origsrc = repo.dirstate.copied(abs)
493 if origsrc is not None:
493 if origsrc is not None:
494 return origsrc
494 return origsrc
495 return abs
495 return abs
496
496
497 # origsrc: hgsep
497 # origsrc: hgsep
498 # abssrc: hgsep
498 # abssrc: hgsep
499 # relsrc: ossep
499 # relsrc: ossep
500 # otarget: ossep
500 # otarget: ossep
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 abstarget = util.canonpath(repo.root, cwd, otarget)
502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 reltarget = repo.pathto(abstarget, cwd)
503 reltarget = repo.pathto(abstarget, cwd)
504 prevsrc = targets.get(abstarget)
504 prevsrc = targets.get(abstarget)
505 src = repo.wjoin(abssrc)
505 src = repo.wjoin(abssrc)
506 target = repo.wjoin(abstarget)
506 target = repo.wjoin(abstarget)
507 if prevsrc is not None:
507 if prevsrc is not None:
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 (reltarget, repo.pathto(abssrc, cwd),
509 (reltarget, repo.pathto(abssrc, cwd),
510 repo.pathto(prevsrc, cwd)))
510 repo.pathto(prevsrc, cwd)))
511 return
511 return
512 if (not opts['after'] and os.path.exists(target) or
512 if (not opts['after'] and os.path.exists(target) or
513 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
513 opts['after'] and repo.dirstate[abstarget] in 'mn'):
514 if not opts['force']:
514 if not opts['force']:
515 ui.warn(_('%s: not overwriting - file exists\n') %
515 ui.warn(_('%s: not overwriting - file exists\n') %
516 reltarget)
516 reltarget)
517 return
517 return
518 if not opts['after'] and not opts.get('dry_run'):
518 if not opts['after'] and not opts.get('dry_run'):
519 os.unlink(target)
519 os.unlink(target)
520 if opts['after']:
520 if opts['after']:
521 if not os.path.exists(target):
521 if not os.path.exists(target):
522 return
522 return
523 else:
523 else:
524 targetdir = os.path.dirname(target) or '.'
524 targetdir = os.path.dirname(target) or '.'
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 os.makedirs(targetdir)
526 os.makedirs(targetdir)
527 try:
527 try:
528 restore = repo.dirstate.state(abstarget) == 'r'
528 restore = repo.dirstate[abstarget] == 'r'
529 if restore and not opts.get('dry_run'):
529 if restore and not opts.get('dry_run'):
530 repo.undelete([abstarget], wlock)
530 repo.undelete([abstarget], wlock)
531 try:
531 try:
532 if not opts.get('dry_run'):
532 if not opts.get('dry_run'):
533 util.copyfile(src, target)
533 util.copyfile(src, target)
534 restore = False
534 restore = False
535 finally:
535 finally:
536 if restore:
536 if restore:
537 repo.remove([abstarget], wlock=wlock)
537 repo.remove([abstarget], wlock=wlock)
538 except IOError, inst:
538 except IOError, inst:
539 if inst.errno == errno.ENOENT:
539 if inst.errno == errno.ENOENT:
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 else:
541 else:
542 ui.warn(_('%s: cannot copy - %s\n') %
542 ui.warn(_('%s: cannot copy - %s\n') %
543 (relsrc, inst.strerror))
543 (relsrc, inst.strerror))
544 errors += 1
544 errors += 1
545 return
545 return
546 if ui.verbose or not exact:
546 if ui.verbose or not exact:
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 targets[abstarget] = abssrc
548 targets[abstarget] = abssrc
549 if abstarget != origsrc:
549 if abstarget != origsrc:
550 if repo.dirstate.state(origsrc) == 'a':
550 if repo.dirstate[origsrc] == 'a':
551 if not ui.quiet:
551 if not ui.quiet:
552 ui.warn(_("%s has not been committed yet, so no copy "
552 ui.warn(_("%s has not been committed yet, so no copy "
553 "data will be stored for %s.\n")
553 "data will be stored for %s.\n")
554 % (repo.pathto(origsrc, cwd), reltarget))
554 % (repo.pathto(origsrc, cwd), reltarget))
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 repo.add([abstarget], wlock)
556 repo.add([abstarget], wlock)
557 elif not opts.get('dry_run'):
557 elif not opts.get('dry_run'):
558 repo.copy(origsrc, abstarget, wlock)
558 repo.copy(origsrc, abstarget, wlock)
559 copied.append((abssrc, relsrc, exact))
559 copied.append((abssrc, relsrc, exact))
560
560
561 # pat: ossep
561 # pat: ossep
562 # dest ossep
562 # dest ossep
563 # srcs: list of (hgsep, hgsep, ossep, bool)
563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 # return: function that takes hgsep and returns ossep
564 # return: function that takes hgsep and returns ossep
565 def targetpathfn(pat, dest, srcs):
565 def targetpathfn(pat, dest, srcs):
566 if os.path.isdir(pat):
566 if os.path.isdir(pat):
567 abspfx = util.canonpath(repo.root, cwd, pat)
567 abspfx = util.canonpath(repo.root, cwd, pat)
568 abspfx = util.localpath(abspfx)
568 abspfx = util.localpath(abspfx)
569 if destdirexists:
569 if destdirexists:
570 striplen = len(os.path.split(abspfx)[0])
570 striplen = len(os.path.split(abspfx)[0])
571 else:
571 else:
572 striplen = len(abspfx)
572 striplen = len(abspfx)
573 if striplen:
573 if striplen:
574 striplen += len(os.sep)
574 striplen += len(os.sep)
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 elif destdirexists:
576 elif destdirexists:
577 res = lambda p: os.path.join(dest,
577 res = lambda p: os.path.join(dest,
578 os.path.basename(util.localpath(p)))
578 os.path.basename(util.localpath(p)))
579 else:
579 else:
580 res = lambda p: dest
580 res = lambda p: dest
581 return res
581 return res
582
582
583 # pat: ossep
583 # pat: ossep
584 # dest ossep
584 # dest ossep
585 # srcs: list of (hgsep, hgsep, ossep, bool)
585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 # return: function that takes hgsep and returns ossep
586 # return: function that takes hgsep and returns ossep
587 def targetpathafterfn(pat, dest, srcs):
587 def targetpathafterfn(pat, dest, srcs):
588 if util.patkind(pat, None)[0]:
588 if util.patkind(pat, None)[0]:
589 # a mercurial pattern
589 # a mercurial pattern
590 res = lambda p: os.path.join(dest,
590 res = lambda p: os.path.join(dest,
591 os.path.basename(util.localpath(p)))
591 os.path.basename(util.localpath(p)))
592 else:
592 else:
593 abspfx = util.canonpath(repo.root, cwd, pat)
593 abspfx = util.canonpath(repo.root, cwd, pat)
594 if len(abspfx) < len(srcs[0][0]):
594 if len(abspfx) < len(srcs[0][0]):
595 # A directory. Either the target path contains the last
595 # A directory. Either the target path contains the last
596 # component of the source path or it does not.
596 # component of the source path or it does not.
597 def evalpath(striplen):
597 def evalpath(striplen):
598 score = 0
598 score = 0
599 for s in srcs:
599 for s in srcs:
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 if os.path.exists(t):
601 if os.path.exists(t):
602 score += 1
602 score += 1
603 return score
603 return score
604
604
605 abspfx = util.localpath(abspfx)
605 abspfx = util.localpath(abspfx)
606 striplen = len(abspfx)
606 striplen = len(abspfx)
607 if striplen:
607 if striplen:
608 striplen += len(os.sep)
608 striplen += len(os.sep)
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 score = evalpath(striplen)
610 score = evalpath(striplen)
611 striplen1 = len(os.path.split(abspfx)[0])
611 striplen1 = len(os.path.split(abspfx)[0])
612 if striplen1:
612 if striplen1:
613 striplen1 += len(os.sep)
613 striplen1 += len(os.sep)
614 if evalpath(striplen1) > score:
614 if evalpath(striplen1) > score:
615 striplen = striplen1
615 striplen = striplen1
616 res = lambda p: os.path.join(dest,
616 res = lambda p: os.path.join(dest,
617 util.localpath(p)[striplen:])
617 util.localpath(p)[striplen:])
618 else:
618 else:
619 # a file
619 # a file
620 if destdirexists:
620 if destdirexists:
621 res = lambda p: os.path.join(dest,
621 res = lambda p: os.path.join(dest,
622 os.path.basename(util.localpath(p)))
622 os.path.basename(util.localpath(p)))
623 else:
623 else:
624 res = lambda p: dest
624 res = lambda p: dest
625 return res
625 return res
626
626
627
627
628 pats = util.expand_glob(pats)
628 pats = util.expand_glob(pats)
629 if not pats:
629 if not pats:
630 raise util.Abort(_('no source or destination specified'))
630 raise util.Abort(_('no source or destination specified'))
631 if len(pats) == 1:
631 if len(pats) == 1:
632 raise util.Abort(_('no destination specified'))
632 raise util.Abort(_('no destination specified'))
633 dest = pats.pop()
633 dest = pats.pop()
634 destdirexists = os.path.isdir(dest)
634 destdirexists = os.path.isdir(dest)
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 raise util.Abort(_('with multiple sources, destination must be an '
636 raise util.Abort(_('with multiple sources, destination must be an '
637 'existing directory'))
637 'existing directory'))
638 if opts['after']:
638 if opts['after']:
639 tfn = targetpathafterfn
639 tfn = targetpathafterfn
640 else:
640 else:
641 tfn = targetpathfn
641 tfn = targetpathfn
642 copylist = []
642 copylist = []
643 for pat in pats:
643 for pat in pats:
644 srcs = []
644 srcs = []
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 globbed=True):
646 globbed=True):
647 origsrc = okaytocopy(abssrc, relsrc, exact)
647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 if origsrc:
648 if origsrc:
649 srcs.append((origsrc, abssrc, relsrc, exact))
649 srcs.append((origsrc, abssrc, relsrc, exact))
650 if not srcs:
650 if not srcs:
651 continue
651 continue
652 copylist.append((tfn(pat, dest, srcs), srcs))
652 copylist.append((tfn(pat, dest, srcs), srcs))
653 if not copylist:
653 if not copylist:
654 raise util.Abort(_('no files to copy'))
654 raise util.Abort(_('no files to copy'))
655
655
656 for targetpath, srcs in copylist:
656 for targetpath, srcs in copylist:
657 for origsrc, abssrc, relsrc, exact in srcs:
657 for origsrc, abssrc, relsrc, exact in srcs:
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659
659
660 if errors:
660 if errors:
661 ui.warn(_('(consider using --after)\n'))
661 ui.warn(_('(consider using --after)\n'))
662 return errors, copied
662 return errors, copied
663
663
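Editor's aside, not part of the changeset: the targetpathafterfn helper above decides how much of the matched source prefix to strip by scoring both candidates against what already exists under dest. A minimal standalone sketch of that scoring idea follows; the names choose_striplen and sources are illustrative and do not appear in the real code.

    import os

    def choose_striplen(dest, abspfx, sources):
        # Score a candidate strip length by counting how many of the
        # resulting targets already exist under dest; with --after the
        # files are expected to have been copied there already.
        def evalpath(striplen):
            return sum(os.path.exists(os.path.join(dest, s[striplen:]))
                       for s in sources)

        striplen = len(abspfx)
        if striplen:
            striplen += len(os.sep)
        if os.path.isdir(os.path.join(dest, os.path.basename(abspfx))):
            # Alternative candidate: keep the last component of the prefix.
            shorter = len(os.path.dirname(abspfx))
            if shorter:
                shorter += len(os.sep)
            if evalpath(shorter) > evalpath(striplen):
                striplen = shorter
        return striplen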
664 def copy(ui, repo, *pats, **opts):
664 def copy(ui, repo, *pats, **opts):
665 """mark files as copied for the next commit
665 """mark files as copied for the next commit
666
666
667 Mark dest as having copies of source files. If dest is a
667 Mark dest as having copies of source files. If dest is a
668 directory, copies are put in that directory. If dest is a file,
668 directory, copies are put in that directory. If dest is a file,
669 there can only be one source.
669 there can only be one source.
670
670
671 By default, this command copies the contents of files as they
671 By default, this command copies the contents of files as they
672 stand in the working directory. If invoked with --after, the
672 stand in the working directory. If invoked with --after, the
673 operation is recorded, but no copying is performed.
673 operation is recorded, but no copying is performed.
674
674
675 This command takes effect in the next commit. To undo a copy
675 This command takes effect in the next commit. To undo a copy
676 before that, see hg revert.
676 before that, see hg revert.
677 """
677 """
678 wlock = repo.wlock(0)
678 wlock = repo.wlock(0)
679 errs, copied = docopy(ui, repo, pats, opts, wlock)
679 errs, copied = docopy(ui, repo, pats, opts, wlock)
680 return errs
680 return errs
681
681
682 def debugancestor(ui, index, rev1, rev2):
682 def debugancestor(ui, index, rev1, rev2):
683 """find the ancestor revision of two revisions in a given index"""
683 """find the ancestor revision of two revisions in a given index"""
684 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
684 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
685 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
685 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
686 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
686 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
687
687
688 def debugcomplete(ui, cmd='', **opts):
688 def debugcomplete(ui, cmd='', **opts):
689 """returns the completion list associated with the given command"""
689 """returns the completion list associated with the given command"""
690
690
691 if opts['options']:
691 if opts['options']:
692 options = []
692 options = []
693 otables = [globalopts]
693 otables = [globalopts]
694 if cmd:
694 if cmd:
695 aliases, entry = cmdutil.findcmd(ui, cmd)
695 aliases, entry = cmdutil.findcmd(ui, cmd)
696 otables.append(entry[1])
696 otables.append(entry[1])
697 for t in otables:
697 for t in otables:
698 for o in t:
698 for o in t:
699 if o[0]:
699 if o[0]:
700 options.append('-%s' % o[0])
700 options.append('-%s' % o[0])
701 options.append('--%s' % o[1])
701 options.append('--%s' % o[1])
702 ui.write("%s\n" % "\n".join(options))
702 ui.write("%s\n" % "\n".join(options))
703 return
703 return
704
704
705 clist = cmdutil.findpossible(ui, cmd).keys()
705 clist = cmdutil.findpossible(ui, cmd).keys()
706 clist.sort()
706 clist.sort()
707 ui.write("%s\n" % "\n".join(clist))
707 ui.write("%s\n" % "\n".join(clist))
708
708
709 def debugrebuildstate(ui, repo, rev=""):
709 def debugrebuildstate(ui, repo, rev=""):
710 """rebuild the dirstate as it would look like for the given revision"""
710 """rebuild the dirstate as it would look like for the given revision"""
711 if rev == "":
711 if rev == "":
712 rev = repo.changelog.tip()
712 rev = repo.changelog.tip()
713 ctx = repo.changectx(rev)
713 ctx = repo.changectx(rev)
714 files = ctx.manifest()
714 files = ctx.manifest()
715 wlock = repo.wlock()
715 wlock = repo.wlock()
716 repo.dirstate.rebuild(rev, files)
716 repo.dirstate.rebuild(rev, files)
717
717
718 def debugcheckstate(ui, repo):
718 def debugcheckstate(ui, repo):
719 """validate the correctness of the current dirstate"""
719 """validate the correctness of the current dirstate"""
720 parent1, parent2 = repo.dirstate.parents()
720 parent1, parent2 = repo.dirstate.parents()
721 dc = repo.dirstate
722 m1 = repo.changectx(parent1).manifest()
721 m1 = repo.changectx(parent1).manifest()
723 m2 = repo.changectx(parent2).manifest()
722 m2 = repo.changectx(parent2).manifest()
724 errors = 0
723 errors = 0
725 for f in dc:
724 for f in repo.dirstate:
726 state = repo.dirstate.state(f)
725 state = repo.dirstate[f]
727 if state in "nr" and f not in m1:
726 if state in "nr" and f not in m1:
728 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
727 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
729 errors += 1
728 errors += 1
730 if state in "a" and f in m1:
729 if state in "a" and f in m1:
731 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
730 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
732 errors += 1
731 errors += 1
733 if state in "m" and f not in m1 and f not in m2:
732 if state in "m" and f not in m1 and f not in m2:
734 ui.warn(_("%s in state %s, but not in either manifest\n") %
733 ui.warn(_("%s in state %s, but not in either manifest\n") %
735 (f, state))
734 (f, state))
736 errors += 1
735 errors += 1
737 for f in m1:
736 for f in m1:
738 state = repo.dirstate.state(f)
737 state = repo.dirstate[f]
739 if state not in "nrm":
738 if state not in "nrm":
740 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
739 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
741 errors += 1
740 errors += 1
742 if errors:
741 if errors:
743 error = _(".hg/dirstate inconsistent with current parent's manifest")
742 error = _(".hg/dirstate inconsistent with current parent's manifest")
744 raise util.Abort(error)
743 raise util.Abort(error)
745
744
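Editor's aside, not part of the changeset: the debugcheckstate hunks above swap the dirstate.state() accessor for plain mapping syntax. A minimal sketch of the new calling convention, assuming the current directory is a working copy; only the lookup and iteration forms visible in the hunks are relied on here.

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '.')   # assumes cwd is a repository
    for f in repo.dirstate:                 # iteration walks tracked files
        state = repo.dirstate[f]            # single-letter code, as tested above
        if state in "nrm":                  # 'n', 'r', 'm' ('a' for added files)
            pass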
746 def showconfig(ui, repo, *values, **opts):
745 def showconfig(ui, repo, *values, **opts):
747 """show combined config settings from all hgrc files
746 """show combined config settings from all hgrc files
748
747
749 With no args, print names and values of all config items.
748 With no args, print names and values of all config items.
750
749
751 With one arg of the form section.name, print just the value of
750 With one arg of the form section.name, print just the value of
752 that config item.
751 that config item.
753
752
754 With multiple args, print names and values of all config items
753 With multiple args, print names and values of all config items
755 with matching section names."""
754 with matching section names."""
756
755
757 untrusted = bool(opts.get('untrusted'))
756 untrusted = bool(opts.get('untrusted'))
758 if values:
757 if values:
759 if len([v for v in values if '.' in v]) > 1:
758 if len([v for v in values if '.' in v]) > 1:
760 raise util.Abort(_('only one config item permitted'))
759 raise util.Abort(_('only one config item permitted'))
761 for section, name, value in ui.walkconfig(untrusted=untrusted):
760 for section, name, value in ui.walkconfig(untrusted=untrusted):
762 sectname = section + '.' + name
761 sectname = section + '.' + name
763 if values:
762 if values:
764 for v in values:
763 for v in values:
765 if v == section:
764 if v == section:
766 ui.write('%s=%s\n' % (sectname, value))
765 ui.write('%s=%s\n' % (sectname, value))
767 elif v == sectname:
766 elif v == sectname:
768 ui.write(value, '\n')
767 ui.write(value, '\n')
769 else:
768 else:
770 ui.write('%s=%s\n' % (sectname, value))
769 ui.write('%s=%s\n' % (sectname, value))
771
770
772 def debugsetparents(ui, repo, rev1, rev2=None):
771 def debugsetparents(ui, repo, rev1, rev2=None):
773 """manually set the parents of the current working directory
772 """manually set the parents of the current working directory
774
773
775 This is useful for writing repository conversion tools, but should
774 This is useful for writing repository conversion tools, but should
776 be used with care.
775 be used with care.
777 """
776 """
778
777
779 if not rev2:
778 if not rev2:
780 rev2 = hex(nullid)
779 rev2 = hex(nullid)
781
780
782 wlock = repo.wlock()
781 wlock = repo.wlock()
783 try:
782 try:
784 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
783 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
785 finally:
784 finally:
786 wlock.release()
785 wlock.release()
787
786
788 def debugstate(ui, repo):
787 def debugstate(ui, repo):
789 """show the contents of the current dirstate"""
788 """show the contents of the current dirstate"""
790 dc = repo.dirstate
789 dc = repo.dirstate._map
791 for file_ in dc:
790 k = dc.keys()
791 k.sort()
792 for file_ in k:
792 if dc[file_][3] == -1:
793 if dc[file_][3] == -1:
793 # Pad or slice to locale representation
794 # Pad or slice to locale representation
794 locale_len = len(time.strftime("%x %X", time.localtime(0)))
795 locale_len = len(time.strftime("%x %X", time.localtime(0)))
795 timestr = 'unset'
796 timestr = 'unset'
796 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
797 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
797 else:
798 else:
798 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
799 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
799 ui.write("%c %3o %10d %s %s\n"
800 ui.write("%c %3o %10d %s %s\n"
800 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
801 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
801 timestr, file_))
802 timestr, file_))
802 for f in repo.dirstate.copies():
803 for f in repo.dirstate.copies():
803 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
804 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
804
805
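Editor's note, not part of the changeset: the indexing in debugstate above reads four fields out of each dirstate record. A sketch with illustrative field names; the tuple order is inferred from how indices 0-3 are used there.

    import time

    state, mode, size, mtime = dc[file_]    # assumes dc and file_ as above
    if mtime == -1:                         # -1 marks an unset timestamp
        timestr = 'unset'
    else:
        timestr = time.strftime("%x %X", time.localtime(mtime))
    print("%c %3o %10d %s %s" % (state, mode & 0777, size, timestr, file_))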
805 def debugdata(ui, file_, rev):
806 def debugdata(ui, file_, rev):
806 """dump the contents of a data file revision"""
807 """dump the contents of a data file revision"""
807 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
808 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
808 try:
809 try:
809 ui.write(r.revision(r.lookup(rev)))
810 ui.write(r.revision(r.lookup(rev)))
810 except KeyError:
811 except KeyError:
811 raise util.Abort(_('invalid revision identifier %s') % rev)
812 raise util.Abort(_('invalid revision identifier %s') % rev)
812
813
813 def debugdate(ui, date, range=None, **opts):
814 def debugdate(ui, date, range=None, **opts):
814 """parse and display a date"""
815 """parse and display a date"""
815 if opts["extended"]:
816 if opts["extended"]:
816 d = util.parsedate(date, util.extendeddateformats)
817 d = util.parsedate(date, util.extendeddateformats)
817 else:
818 else:
818 d = util.parsedate(date)
819 d = util.parsedate(date)
819 ui.write("internal: %s %s\n" % d)
820 ui.write("internal: %s %s\n" % d)
820 ui.write("standard: %s\n" % util.datestr(d))
821 ui.write("standard: %s\n" % util.datestr(d))
821 if range:
822 if range:
822 m = util.matchdate(range)
823 m = util.matchdate(range)
823 ui.write("match: %s\n" % m(d[0]))
824 ui.write("match: %s\n" % m(d[0]))
824
825
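Editor's sketch, not part of the changeset: judging from the calls above, util.parsedate returns a (unixtime, timezone-offset) pair and util.matchdate turns a range string into a predicate over the unixtime part. The date and range strings below are illustrative assumptions about the accepted formats.

    from mercurial import util

    when, offset = util.parsedate("2006-12-06 13:18")
    print("internal: %s %s" % (when, offset))
    print("standard: %s" % util.datestr((when, offset)))
    m = util.matchdate(">2006-01-01")
    print("match: %s" % m(when))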
825 def debugindex(ui, file_):
826 def debugindex(ui, file_):
826 """dump the contents of an index file"""
827 """dump the contents of an index file"""
827 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
828 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
828 ui.write(" rev offset length base linkrev" +
829 ui.write(" rev offset length base linkrev" +
829 " nodeid p1 p2\n")
830 " nodeid p1 p2\n")
830 for i in xrange(r.count()):
831 for i in xrange(r.count()):
831 node = r.node(i)
832 node = r.node(i)
832 pp = r.parents(node)
833 pp = r.parents(node)
833 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
834 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
834 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
835 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
835 short(node), short(pp[0]), short(pp[1])))
836 short(node), short(pp[0]), short(pp[1])))
836
837
837 def debugindexdot(ui, file_):
838 def debugindexdot(ui, file_):
838 """dump an index DAG as a .dot file"""
839 """dump an index DAG as a .dot file"""
839 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
840 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
840 ui.write("digraph G {\n")
841 ui.write("digraph G {\n")
841 for i in xrange(r.count()):
842 for i in xrange(r.count()):
842 node = r.node(i)
843 node = r.node(i)
843 pp = r.parents(node)
844 pp = r.parents(node)
844 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
845 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
845 if pp[1] != nullid:
846 if pp[1] != nullid:
846 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
847 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
847 ui.write("}\n")
848 ui.write("}\n")
848
849
849 def debuginstall(ui):
850 def debuginstall(ui):
850 '''test Mercurial installation'''
851 '''test Mercurial installation'''
851
852
852 def writetemp(contents):
853 def writetemp(contents):
853 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
854 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
854 f = os.fdopen(fd, "wb")
855 f = os.fdopen(fd, "wb")
855 f.write(contents)
856 f.write(contents)
856 f.close()
857 f.close()
857 return name
858 return name
858
859
859 problems = 0
860 problems = 0
860
861
861 # encoding
862 # encoding
862 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
863 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
863 try:
864 try:
864 util.fromlocal("test")
865 util.fromlocal("test")
865 except util.Abort, inst:
866 except util.Abort, inst:
866 ui.write(" %s\n" % inst)
867 ui.write(" %s\n" % inst)
867 ui.write(_(" (check that your locale is properly set)\n"))
868 ui.write(_(" (check that your locale is properly set)\n"))
868 problems += 1
869 problems += 1
869
870
870 # compiled modules
871 # compiled modules
871 ui.status(_("Checking extensions...\n"))
872 ui.status(_("Checking extensions...\n"))
872 try:
873 try:
873 import bdiff, mpatch, base85
874 import bdiff, mpatch, base85
874 except Exception, inst:
875 except Exception, inst:
875 ui.write(" %s\n" % inst)
876 ui.write(" %s\n" % inst)
876 ui.write(_(" One or more extensions could not be found"))
877 ui.write(_(" One or more extensions could not be found"))
877 ui.write(_(" (check that you compiled the extensions)\n"))
878 ui.write(_(" (check that you compiled the extensions)\n"))
878 problems += 1
879 problems += 1
879
880
880 # templates
881 # templates
881 ui.status(_("Checking templates...\n"))
882 ui.status(_("Checking templates...\n"))
882 try:
883 try:
883 import templater
884 import templater
884 t = templater.templater(templater.templatepath("map-cmdline.default"))
885 t = templater.templater(templater.templatepath("map-cmdline.default"))
885 except Exception, inst:
886 except Exception, inst:
886 ui.write(" %s\n" % inst)
887 ui.write(" %s\n" % inst)
887 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
888 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
888 problems += 1
889 problems += 1
889
890
890 # patch
891 # patch
891 ui.status(_("Checking patch...\n"))
892 ui.status(_("Checking patch...\n"))
892 patcher = ui.config('ui', 'patch')
893 patcher = ui.config('ui', 'patch')
893 patcher = ((patcher and util.find_exe(patcher)) or
894 patcher = ((patcher and util.find_exe(patcher)) or
894 util.find_exe('gpatch') or
895 util.find_exe('gpatch') or
895 util.find_exe('patch'))
896 util.find_exe('patch'))
896 if not patcher:
897 if not patcher:
897 ui.write(_(" Can't find patch or gpatch in PATH\n"))
898 ui.write(_(" Can't find patch or gpatch in PATH\n"))
898 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
899 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
899 problems += 1
900 problems += 1
900 else:
901 else:
901 # actually attempt a patch here
902 # actually attempt a patch here
902 a = "1\n2\n3\n4\n"
903 a = "1\n2\n3\n4\n"
903 b = "1\n2\n3\ninsert\n4\n"
904 b = "1\n2\n3\ninsert\n4\n"
904 fa = writetemp(a)
905 fa = writetemp(a)
905 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
906 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
906 fd = writetemp(d)
907 fd = writetemp(d)
907
908
908 files = {}
909 files = {}
909 try:
910 try:
910 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
911 except util.Abort, e:
912 except util.Abort, e:
912 ui.write(_(" patch call failed:\n"))
913 ui.write(_(" patch call failed:\n"))
913 ui.write(" " + str(e) + "\n")
914 ui.write(" " + str(e) + "\n")
914 problems += 1
915 problems += 1
915 else:
916 else:
916 if list(files) != [os.path.basename(fa)]:
917 if list(files) != [os.path.basename(fa)]:
917 ui.write(_(" unexpected patch output!"))
918 ui.write(_(" unexpected patch output!"))
918 ui.write(_(" (you may have an incompatible version of patch)\n"))
919 ui.write(_(" (you may have an incompatible version of patch)\n"))
919 problems += 1
920 problems += 1
920 a = file(fa).read()
921 a = file(fa).read()
921 if a != b:
922 if a != b:
922 ui.write(_(" patch test failed!"))
923 ui.write(_(" patch test failed!"))
923 ui.write(_(" (you may have an incompatible version of patch)\n"))
924 ui.write(_(" (you may have an incompatible version of patch)\n"))
924 problems += 1
925 problems += 1
925
926
926 os.unlink(fa)
927 os.unlink(fa)
927 os.unlink(fd)
928 os.unlink(fd)
928
929
929 # merge helper
930 # merge helper
930 ui.status(_("Checking merge helper...\n"))
931 ui.status(_("Checking merge helper...\n"))
931 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
932 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
932 or "hgmerge")
933 or "hgmerge")
933 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
934 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
934 if not cmdpath:
935 if not cmdpath:
935 if cmd == 'hgmerge':
936 if cmd == 'hgmerge':
936 ui.write(_(" No merge helper set and can't find default"
937 ui.write(_(" No merge helper set and can't find default"
937 " hgmerge script in PATH\n"))
938 " hgmerge script in PATH\n"))
938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
939 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
939 else:
940 else:
940 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
941 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
941 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
942 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
942 problems += 1
943 problems += 1
943 else:
944 else:
944 # actually attempt a patch here
945 # actually attempt a patch here
945 fa = writetemp("1\n2\n3\n4\n")
946 fa = writetemp("1\n2\n3\n4\n")
946 fl = writetemp("1\n2\n3\ninsert\n4\n")
947 fl = writetemp("1\n2\n3\ninsert\n4\n")
947 fr = writetemp("begin\n1\n2\n3\n4\n")
948 fr = writetemp("begin\n1\n2\n3\n4\n")
948 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
949 r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
949 if r:
950 if r:
950 ui.write(_(" Got unexpected merge error %d!\n") % r)
951 ui.write(_(" Got unexpected merge error %d!\n") % r)
951 problems += 1
952 problems += 1
952 m = file(fl).read()
953 m = file(fl).read()
953 if m != "begin\n1\n2\n3\ninsert\n4\n":
954 if m != "begin\n1\n2\n3\ninsert\n4\n":
954 ui.write(_(" Got unexpected merge results!\n"))
955 ui.write(_(" Got unexpected merge results!\n"))
955 ui.write(_(" (your merge helper may have the"
956 ui.write(_(" (your merge helper may have the"
956 " wrong argument order)\n"))
957 " wrong argument order)\n"))
957 ui.write(_(" Result: %r\n") % m)
958 ui.write(_(" Result: %r\n") % m)
958 problems += 1
959 problems += 1
959 os.unlink(fa)
960 os.unlink(fa)
960 os.unlink(fl)
961 os.unlink(fl)
961 os.unlink(fr)
962 os.unlink(fr)
962
963
963 # editor
964 # editor
964 ui.status(_("Checking commit editor...\n"))
965 ui.status(_("Checking commit editor...\n"))
965 editor = (os.environ.get("HGEDITOR") or
966 editor = (os.environ.get("HGEDITOR") or
966 ui.config("ui", "editor") or
967 ui.config("ui", "editor") or
967 os.environ.get("EDITOR", "vi"))
968 os.environ.get("EDITOR", "vi"))
968 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
969 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
969 if not cmdpath:
970 if not cmdpath:
970 if editor == 'vi':
971 if editor == 'vi':
971 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
972 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
972 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
973 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
973 else:
974 else:
974 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
975 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
975 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
976 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
976 problems += 1
977 problems += 1
977
978
978 # check username
979 # check username
979 ui.status(_("Checking username...\n"))
980 ui.status(_("Checking username...\n"))
980 user = os.environ.get("HGUSER")
981 user = os.environ.get("HGUSER")
981 if user is None:
982 if user is None:
982 user = ui.config("ui", "username")
983 user = ui.config("ui", "username")
983 if user is None:
984 if user is None:
984 user = os.environ.get("EMAIL")
985 user = os.environ.get("EMAIL")
985 if not user:
986 if not user:
986 ui.warn(" ")
987 ui.warn(" ")
987 ui.username()
988 ui.username()
988 ui.write(_(" (specify a username in your .hgrc file)\n"))
989 ui.write(_(" (specify a username in your .hgrc file)\n"))
989
990
990 if not problems:
991 if not problems:
991 ui.status(_("No problems detected\n"))
992 ui.status(_("No problems detected\n"))
992 else:
993 else:
993 ui.write(_("%s problems detected,"
994 ui.write(_("%s problems detected,"
994 " please check your install!\n") % problems)
995 " please check your install!\n") % problems)
995
996
996 return problems
997 return problems
997
998
998 def debugrename(ui, repo, file1, *pats, **opts):
999 def debugrename(ui, repo, file1, *pats, **opts):
999 """dump rename information"""
1000 """dump rename information"""
1000
1001
1001 ctx = repo.changectx(opts.get('rev', 'tip'))
1002 ctx = repo.changectx(opts.get('rev', 'tip'))
1002 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1003 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
1003 ctx.node()):
1004 ctx.node()):
1004 m = ctx.filectx(abs).renamed()
1005 m = ctx.filectx(abs).renamed()
1005 if m:
1006 if m:
1006 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1007 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1007 else:
1008 else:
1008 ui.write(_("%s not renamed\n") % rel)
1009 ui.write(_("%s not renamed\n") % rel)
1009
1010
1010 def debugwalk(ui, repo, *pats, **opts):
1011 def debugwalk(ui, repo, *pats, **opts):
1011 """show how files match on given patterns"""
1012 """show how files match on given patterns"""
1012 items = list(cmdutil.walk(repo, pats, opts))
1013 items = list(cmdutil.walk(repo, pats, opts))
1013 if not items:
1014 if not items:
1014 return
1015 return
1015 fmt = '%%s %%-%ds %%-%ds %%s' % (
1016 fmt = '%%s %%-%ds %%-%ds %%s' % (
1016 max([len(abs) for (src, abs, rel, exact) in items]),
1017 max([len(abs) for (src, abs, rel, exact) in items]),
1017 max([len(rel) for (src, abs, rel, exact) in items]))
1018 max([len(rel) for (src, abs, rel, exact) in items]))
1018 for src, abs, rel, exact in items:
1019 for src, abs, rel, exact in items:
1019 line = fmt % (src, abs, rel, exact and 'exact' or '')
1020 line = fmt % (src, abs, rel, exact and 'exact' or '')
1020 ui.write("%s\n" % line.rstrip())
1021 ui.write("%s\n" % line.rstrip())
1021
1022
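Editor's aside: the doubled '%%' escapes in debugwalk above first bake the measured column widths into a format string, and that format is then applied per file. For example:

    fmt = '%%s %%-%ds %%-%ds %%s' % (10, 8)   # yields '%s %-10s %-8s %s'
    print(fmt % ('f', 'dir/a.txt', 'a.txt', 'exact'))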
1022 def diff(ui, repo, *pats, **opts):
1023 def diff(ui, repo, *pats, **opts):
1023 """diff repository (or selected files)
1024 """diff repository (or selected files)
1024
1025
1025 Show differences between revisions for the specified files.
1026 Show differences between revisions for the specified files.
1026
1027
1027 Differences between files are shown using the unified diff format.
1028 Differences between files are shown using the unified diff format.
1028
1029
1029 NOTE: diff may generate unexpected results for merges, as it will
1030 NOTE: diff may generate unexpected results for merges, as it will
1030 default to comparing against the working directory's first parent
1031 default to comparing against the working directory's first parent
1031 changeset if no revisions are specified.
1032 changeset if no revisions are specified.
1032
1033
1033 When two revision arguments are given, then changes are shown
1034 When two revision arguments are given, then changes are shown
1034 between those revisions. If only one revision is specified then
1035 between those revisions. If only one revision is specified then
1035 that revision is compared to the working directory, and, when no
1036 that revision is compared to the working directory, and, when no
1036 revisions are specified, the working directory files are compared
1037 revisions are specified, the working directory files are compared
1037 to its parent.
1038 to its parent.
1038
1039
1039 Without the -a option, diff will avoid generating diffs of files
1040 Without the -a option, diff will avoid generating diffs of files
1040 it detects as binary. With -a, diff will generate a diff anyway,
1041 it detects as binary. With -a, diff will generate a diff anyway,
1041 probably with undesirable results.
1042 probably with undesirable results.
1042 """
1043 """
1043 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1044 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1044
1045
1045 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1046 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1046
1047
1047 patch.diff(repo, node1, node2, fns, match=matchfn,
1048 patch.diff(repo, node1, node2, fns, match=matchfn,
1048 opts=patch.diffopts(ui, opts))
1049 opts=patch.diffopts(ui, opts))
1049
1050
1050 def export(ui, repo, *changesets, **opts):
1051 def export(ui, repo, *changesets, **opts):
1051 """dump the header and diffs for one or more changesets
1052 """dump the header and diffs for one or more changesets
1052
1053
1053 Print the changeset header and diffs for one or more revisions.
1054 Print the changeset header and diffs for one or more revisions.
1054
1055
1055 The information shown in the changeset header is: author,
1056 The information shown in the changeset header is: author,
1056 changeset hash, parent(s) and commit comment.
1057 changeset hash, parent(s) and commit comment.
1057
1058
1058 NOTE: export may generate unexpected diff output for merge changesets,
1059 NOTE: export may generate unexpected diff output for merge changesets,
1059 as it will compare the merge changeset against its first parent only.
1060 as it will compare the merge changeset against its first parent only.
1060
1061
1061 Output may be to a file, in which case the name of the file is
1062 Output may be to a file, in which case the name of the file is
1062 given using a format string. The formatting rules are as follows:
1063 given using a format string. The formatting rules are as follows:
1063
1064
1064 %% literal "%" character
1065 %% literal "%" character
1065 %H changeset hash (40 bytes of hexadecimal)
1066 %H changeset hash (40 bytes of hexadecimal)
1066 %N number of patches being generated
1067 %N number of patches being generated
1067 %R changeset revision number
1068 %R changeset revision number
1068 %b basename of the exporting repository
1069 %b basename of the exporting repository
1069 %h short-form changeset hash (12 bytes of hexadecimal)
1070 %h short-form changeset hash (12 bytes of hexadecimal)
1070 %n zero-padded sequence number, starting at 1
1071 %n zero-padded sequence number, starting at 1
1071 %r zero-padded changeset revision number
1072 %r zero-padded changeset revision number
1072
1073
1073 Without the -a option, export will avoid generating diffs of files
1074 Without the -a option, export will avoid generating diffs of files
1074 it detects as binary. With -a, export will generate a diff anyway,
1075 it detects as binary. With -a, export will generate a diff anyway,
1075 probably with undesirable results.
1076 probably with undesirable results.
1076
1077
1077 With the --switch-parent option, the diff will be against the second
1078 With the --switch-parent option, the diff will be against the second
1078 parent. It can be useful to review a merge.
1079 parent. It can be useful to review a merge.
1079 """
1080 """
1080 if not changesets:
1081 if not changesets:
1081 raise util.Abort(_("export requires at least one changeset"))
1082 raise util.Abort(_("export requires at least one changeset"))
1082 revs = cmdutil.revrange(repo, changesets)
1083 revs = cmdutil.revrange(repo, changesets)
1083 if len(revs) > 1:
1084 if len(revs) > 1:
1084 ui.note(_('exporting patches:\n'))
1085 ui.note(_('exporting patches:\n'))
1085 else:
1086 else:
1086 ui.note(_('exporting patch:\n'))
1087 ui.note(_('exporting patch:\n'))
1087 patch.export(repo, revs, template=opts['output'],
1088 patch.export(repo, revs, template=opts['output'],
1088 switch_parent=opts['switch_parent'],
1089 switch_parent=opts['switch_parent'],
1089 opts=patch.diffopts(ui, opts))
1090 opts=patch.diffopts(ui, opts))
1090
1091
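Editor's sketch, not the code patch.export actually runs: a naive expansion of the output-filename escapes listed in the export docstring above. The helper name and the zero-padding widths are illustrative assumptions.

    import os
    from mercurial.node import hex, short

    def expandname(tmpl, repo, ctx, seqno, total):
        subs = {'%': '%',
                'H': hex(ctx.node()), 'h': short(ctx.node()),
                'R': str(ctx.rev()), 'r': '%04d' % ctx.rev(),
                'N': str(total), 'n': '%02d' % seqno,
                'b': os.path.basename(repo.root)}
        out, i = [], 0
        while i < len(tmpl):
            if tmpl[i] == '%' and i + 1 < len(tmpl):
                out.append(subs.get(tmpl[i + 1], '%' + tmpl[i + 1]))
                i += 2
            else:
                out.append(tmpl[i])
                i += 1
        return ''.join(out)

    # e.g. expandname('%b-%r.patch', repo, repo.changectx('tip'), 1, 1)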
1091 def grep(ui, repo, pattern, *pats, **opts):
1092 def grep(ui, repo, pattern, *pats, **opts):
1092 """search for a pattern in specified files and revisions
1093 """search for a pattern in specified files and revisions
1093
1094
1094 Search revisions of files for a regular expression.
1095 Search revisions of files for a regular expression.
1095
1096
1096 This command behaves differently than Unix grep. It only accepts
1097 This command behaves differently than Unix grep. It only accepts
1097 Python/Perl regexps. It searches repository history, not the
1098 Python/Perl regexps. It searches repository history, not the
1098 working directory. It always prints the revision number in which
1099 working directory. It always prints the revision number in which
1099 a match appears.
1100 a match appears.
1100
1101
1101 By default, grep only prints output for the first revision of a
1102 By default, grep only prints output for the first revision of a
1102 file in which it finds a match. To get it to print every revision
1103 file in which it finds a match. To get it to print every revision
1103 that contains a change in match status ("-" for a match that
1104 that contains a change in match status ("-" for a match that
1104 becomes a non-match, or "+" for a non-match that becomes a match),
1105 becomes a non-match, or "+" for a non-match that becomes a match),
1105 use the --all flag.
1106 use the --all flag.
1106 """
1107 """
1107 reflags = 0
1108 reflags = 0
1108 if opts['ignore_case']:
1109 if opts['ignore_case']:
1109 reflags |= re.I
1110 reflags |= re.I
1110 try:
1111 try:
1111 regexp = re.compile(pattern, reflags)
1112 regexp = re.compile(pattern, reflags)
1112 except Exception, inst:
1113 except Exception, inst:
1113 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1114 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1114 return None
1115 return None
1115 sep, eol = ':', '\n'
1116 sep, eol = ':', '\n'
1116 if opts['print0']:
1117 if opts['print0']:
1117 sep = eol = '\0'
1118 sep = eol = '\0'
1118
1119
1119 fcache = {}
1120 fcache = {}
1120 def getfile(fn):
1121 def getfile(fn):
1121 if fn not in fcache:
1122 if fn not in fcache:
1122 fcache[fn] = repo.file(fn)
1123 fcache[fn] = repo.file(fn)
1123 return fcache[fn]
1124 return fcache[fn]
1124
1125
1125 def matchlines(body):
1126 def matchlines(body):
1126 begin = 0
1127 begin = 0
1127 linenum = 0
1128 linenum = 0
1128 while True:
1129 while True:
1129 match = regexp.search(body, begin)
1130 match = regexp.search(body, begin)
1130 if not match:
1131 if not match:
1131 break
1132 break
1132 mstart, mend = match.span()
1133 mstart, mend = match.span()
1133 linenum += body.count('\n', begin, mstart) + 1
1134 linenum += body.count('\n', begin, mstart) + 1
1134 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1135 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1135 lend = body.find('\n', mend)
1136 lend = body.find('\n', mend)
1136 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137 begin = lend + 1
1138 begin = lend + 1
1138
1139
1139 class linestate(object):
1140 class linestate(object):
1140 def __init__(self, line, linenum, colstart, colend):
1141 def __init__(self, line, linenum, colstart, colend):
1141 self.line = line
1142 self.line = line
1142 self.linenum = linenum
1143 self.linenum = linenum
1143 self.colstart = colstart
1144 self.colstart = colstart
1144 self.colend = colend
1145 self.colend = colend
1145
1146
1146 def __eq__(self, other):
1147 def __eq__(self, other):
1147 return self.line == other.line
1148 return self.line == other.line
1148
1149
1149 matches = {}
1150 matches = {}
1150 copies = {}
1151 copies = {}
1151 def grepbody(fn, rev, body):
1152 def grepbody(fn, rev, body):
1152 matches[rev].setdefault(fn, [])
1153 matches[rev].setdefault(fn, [])
1153 m = matches[rev][fn]
1154 m = matches[rev][fn]
1154 for lnum, cstart, cend, line in matchlines(body):
1155 for lnum, cstart, cend, line in matchlines(body):
1155 s = linestate(line, lnum, cstart, cend)
1156 s = linestate(line, lnum, cstart, cend)
1156 m.append(s)
1157 m.append(s)
1157
1158
1158 def difflinestates(a, b):
1159 def difflinestates(a, b):
1159 sm = difflib.SequenceMatcher(None, a, b)
1160 sm = difflib.SequenceMatcher(None, a, b)
1160 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1161 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1161 if tag == 'insert':
1162 if tag == 'insert':
1162 for i in xrange(blo, bhi):
1163 for i in xrange(blo, bhi):
1163 yield ('+', b[i])
1164 yield ('+', b[i])
1164 elif tag == 'delete':
1165 elif tag == 'delete':
1165 for i in xrange(alo, ahi):
1166 for i in xrange(alo, ahi):
1166 yield ('-', a[i])
1167 yield ('-', a[i])
1167 elif tag == 'replace':
1168 elif tag == 'replace':
1168 for i in xrange(alo, ahi):
1169 for i in xrange(alo, ahi):
1169 yield ('-', a[i])
1170 yield ('-', a[i])
1170 for i in xrange(blo, bhi):
1171 for i in xrange(blo, bhi):
1171 yield ('+', b[i])
1172 yield ('+', b[i])
1172
1173
1173 prev = {}
1174 prev = {}
1174 def display(fn, rev, states, prevstates):
1175 def display(fn, rev, states, prevstates):
1175 found = False
1176 found = False
1176 filerevmatches = {}
1177 filerevmatches = {}
1177 r = prev.get(fn, -1)
1178 r = prev.get(fn, -1)
1178 if opts['all']:
1179 if opts['all']:
1179 iter = difflinestates(states, prevstates)
1180 iter = difflinestates(states, prevstates)
1180 else:
1181 else:
1181 iter = [('', l) for l in prevstates]
1182 iter = [('', l) for l in prevstates]
1182 for change, l in iter:
1183 for change, l in iter:
1183 cols = [fn, str(r)]
1184 cols = [fn, str(r)]
1184 if opts['line_number']:
1185 if opts['line_number']:
1185 cols.append(str(l.linenum))
1186 cols.append(str(l.linenum))
1186 if opts['all']:
1187 if opts['all']:
1187 cols.append(change)
1188 cols.append(change)
1188 if opts['user']:
1189 if opts['user']:
1189 cols.append(ui.shortuser(get(r)[1]))
1190 cols.append(ui.shortuser(get(r)[1]))
1190 if opts['files_with_matches']:
1191 if opts['files_with_matches']:
1191 c = (fn, r)
1192 c = (fn, r)
1192 if c in filerevmatches:
1193 if c in filerevmatches:
1193 continue
1194 continue
1194 filerevmatches[c] = 1
1195 filerevmatches[c] = 1
1195 else:
1196 else:
1196 cols.append(l.line)
1197 cols.append(l.line)
1197 ui.write(sep.join(cols), eol)
1198 ui.write(sep.join(cols), eol)
1198 found = True
1199 found = True
1199 return found
1200 return found
1200
1201
1201 fstate = {}
1202 fstate = {}
1202 skip = {}
1203 skip = {}
1203 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1204 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1204 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1205 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1205 found = False
1206 found = False
1206 follow = opts.get('follow')
1207 follow = opts.get('follow')
1207 for st, rev, fns in changeiter:
1208 for st, rev, fns in changeiter:
1208 if st == 'window':
1209 if st == 'window':
1209 matches.clear()
1210 matches.clear()
1210 elif st == 'add':
1211 elif st == 'add':
1211 mf = repo.changectx(rev).manifest()
1212 mf = repo.changectx(rev).manifest()
1212 matches[rev] = {}
1213 matches[rev] = {}
1213 for fn in fns:
1214 for fn in fns:
1214 if fn in skip:
1215 if fn in skip:
1215 continue
1216 continue
1216 fstate.setdefault(fn, {})
1217 fstate.setdefault(fn, {})
1217 try:
1218 try:
1218 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1219 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1219 if follow:
1220 if follow:
1220 copied = getfile(fn).renamed(mf[fn])
1221 copied = getfile(fn).renamed(mf[fn])
1221 if copied:
1222 if copied:
1222 copies.setdefault(rev, {})[fn] = copied[0]
1223 copies.setdefault(rev, {})[fn] = copied[0]
1223 except KeyError:
1224 except KeyError:
1224 pass
1225 pass
1225 elif st == 'iter':
1226 elif st == 'iter':
1226 states = matches[rev].items()
1227 states = matches[rev].items()
1227 states.sort()
1228 states.sort()
1228 for fn, m in states:
1229 for fn, m in states:
1229 copy = copies.get(rev, {}).get(fn)
1230 copy = copies.get(rev, {}).get(fn)
1230 if fn in skip:
1231 if fn in skip:
1231 if copy:
1232 if copy:
1232 skip[copy] = True
1233 skip[copy] = True
1233 continue
1234 continue
1234 if fn in prev or fstate[fn]:
1235 if fn in prev or fstate[fn]:
1235 r = display(fn, rev, m, fstate[fn])
1236 r = display(fn, rev, m, fstate[fn])
1236 found = found or r
1237 found = found or r
1237 if r and not opts['all']:
1238 if r and not opts['all']:
1238 skip[fn] = True
1239 skip[fn] = True
1239 if copy:
1240 if copy:
1240 skip[copy] = True
1241 skip[copy] = True
1241 fstate[fn] = m
1242 fstate[fn] = m
1242 if copy:
1243 if copy:
1243 fstate[copy] = m
1244 fstate[copy] = m
1244 prev[fn] = rev
1245 prev[fn] = rev
1245
1246
1246 fstate = fstate.items()
1247 fstate = fstate.items()
1247 fstate.sort()
1248 fstate.sort()
1248 for fn, state in fstate:
1249 for fn, state in fstate:
1249 if fn in skip:
1250 if fn in skip:
1250 continue
1251 continue
1251 if fn not in copies.get(prev[fn], {}):
1252 if fn not in copies.get(prev[fn], {}):
1252 found = display(fn, rev, {}, state) or found
1253 found = display(fn, rev, {}, state) or found
1253 return (not found and 1) or 0
1254 return (not found and 1) or 0
1254
1255
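Editor's aside, not part of the changeset: difflinestates above uses difflib.SequenceMatcher to produce the "-"/"+" match-status markers that the grep docstring describes for --all. The same walk run on two plain lists of lines, so the markers are easy to see:

    import difflib

    old = ["foo = 1", "bar = 2"]
    new = ["bar = 2", "foo = 3"]
    sm = difflib.SequenceMatcher(None, old, new)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            for line in old[alo:ahi]:
                print('- ' + line)
        if tag in ('insert', 'replace'):
            for line in new[blo:bhi]:
                print('+ ' + line)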
1255 def heads(ui, repo, *branchrevs, **opts):
1256 def heads(ui, repo, *branchrevs, **opts):
1256 """show current repository heads or show branch heads
1257 """show current repository heads or show branch heads
1257
1258
1258 With no arguments, show all repository head changesets.
1259 With no arguments, show all repository head changesets.
1259
1260
1260 If branch or revision names are given, this will show the heads of
1261 If branch or revision names are given, this will show the heads of
1261 the specified branches or the branches those revisions are tagged
1262 the specified branches or the branches those revisions are tagged
1262 with.
1263 with.
1263
1264
1264 Repository "heads" are changesets that don't have child
1265 Repository "heads" are changesets that don't have child
1265 changesets. They are where development generally takes place and
1266 changesets. They are where development generally takes place and
1266 are the usual targets for update and merge operations.
1267 are the usual targets for update and merge operations.
1267
1268
1268 Branch heads are changesets that have a given branch tag, but have
1269 Branch heads are changesets that have a given branch tag, but have
1269 no child changesets with that tag. They are usually where
1270 no child changesets with that tag. They are usually where
1270 development on the given branch takes place.
1271 development on the given branch takes place.
1271 """
1272 """
1272 if opts['rev']:
1273 if opts['rev']:
1273 start = repo.lookup(opts['rev'])
1274 start = repo.lookup(opts['rev'])
1274 else:
1275 else:
1275 start = None
1276 start = None
1276 if not branchrevs:
1277 if not branchrevs:
1277 # Assume we're looking repo-wide heads if no revs were specified.
1278 # Assume we're looking repo-wide heads if no revs were specified.
1278 heads = repo.heads(start)
1279 heads = repo.heads(start)
1279 else:
1280 else:
1280 heads = []
1281 heads = []
1281 visitedset = util.set()
1282 visitedset = util.set()
1282 for branchrev in branchrevs:
1283 for branchrev in branchrevs:
1283 branch = repo.changectx(branchrev).branch()
1284 branch = repo.changectx(branchrev).branch()
1284 if branch in visitedset:
1285 if branch in visitedset:
1285 continue
1286 continue
1286 visitedset.add(branch)
1287 visitedset.add(branch)
1287 bheads = repo.branchheads(branch, start)
1288 bheads = repo.branchheads(branch, start)
1288 if not bheads:
1289 if not bheads:
1289 if branch != branchrev:
1290 if branch != branchrev:
1290 ui.warn(_("no changes on branch %s containing %s are "
1291 ui.warn(_("no changes on branch %s containing %s are "
1291 "reachable from %s\n")
1292 "reachable from %s\n")
1292 % (branch, branchrev, opts['rev']))
1293 % (branch, branchrev, opts['rev']))
1293 else:
1294 else:
1294 ui.warn(_("no changes on branch %s are reachable from %s\n")
1295 ui.warn(_("no changes on branch %s are reachable from %s\n")
1295 % (branch, opts['rev']))
1296 % (branch, opts['rev']))
1296 heads.extend(bheads)
1297 heads.extend(bheads)
1297 if not heads:
1298 if not heads:
1298 return 1
1299 return 1
1299 displayer = cmdutil.show_changeset(ui, repo, opts)
1300 displayer = cmdutil.show_changeset(ui, repo, opts)
1300 for n in heads:
1301 for n in heads:
1301 displayer.show(changenode=n)
1302 displayer.show(changenode=n)
1302
1303
1303 def help_(ui, name=None, with_version=False):
1304 def help_(ui, name=None, with_version=False):
1304 """show help for a command, extension, or list of commands
1305 """show help for a command, extension, or list of commands
1305
1306
1306 With no arguments, print a list of commands and short help.
1307 With no arguments, print a list of commands and short help.
1307
1308
1308 Given a command name, print help for that command.
1309 Given a command name, print help for that command.
1309
1310
1310 Given an extension name, print help for that extension, and the
1311 Given an extension name, print help for that extension, and the
1311 commands it provides."""
1312 commands it provides."""
1312 option_lists = []
1313 option_lists = []
1313
1314
1314 def addglobalopts(aliases):
1315 def addglobalopts(aliases):
1315 if ui.verbose:
1316 if ui.verbose:
1316 option_lists.append((_("global options:"), globalopts))
1317 option_lists.append((_("global options:"), globalopts))
1317 if name == 'shortlist':
1318 if name == 'shortlist':
1318 option_lists.append((_('use "hg help" for the full list '
1319 option_lists.append((_('use "hg help" for the full list '
1319 'of commands'), ()))
1320 'of commands'), ()))
1320 else:
1321 else:
1321 if name == 'shortlist':
1322 if name == 'shortlist':
1322 msg = _('use "hg help" for the full list of commands '
1323 msg = _('use "hg help" for the full list of commands '
1323 'or "hg -v" for details')
1324 'or "hg -v" for details')
1324 elif aliases:
1325 elif aliases:
1325 msg = _('use "hg -v help%s" to show aliases and '
1326 msg = _('use "hg -v help%s" to show aliases and '
1326 'global options') % (name and " " + name or "")
1327 'global options') % (name and " " + name or "")
1327 else:
1328 else:
1328 msg = _('use "hg -v help %s" to show global options') % name
1329 msg = _('use "hg -v help %s" to show global options') % name
1329 option_lists.append((msg, ()))
1330 option_lists.append((msg, ()))
1330
1331
1331 def helpcmd(name):
1332 def helpcmd(name):
1332 if with_version:
1333 if with_version:
1333 version_(ui)
1334 version_(ui)
1334 ui.write('\n')
1335 ui.write('\n')
1335 aliases, i = cmdutil.findcmd(ui, name)
1336 aliases, i = cmdutil.findcmd(ui, name)
1336 # synopsis
1337 # synopsis
1337 ui.write("%s\n\n" % i[2])
1338 ui.write("%s\n\n" % i[2])
1338
1339
1339 # description
1340 # description
1340 doc = i[0].__doc__
1341 doc = i[0].__doc__
1341 if not doc:
1342 if not doc:
1342 doc = _("(No help text available)")
1343 doc = _("(No help text available)")
1343 if ui.quiet:
1344 if ui.quiet:
1344 doc = doc.splitlines(0)[0]
1345 doc = doc.splitlines(0)[0]
1345 ui.write("%s\n" % doc.rstrip())
1346 ui.write("%s\n" % doc.rstrip())
1346
1347
1347 if not ui.quiet:
1348 if not ui.quiet:
1348 # aliases
1349 # aliases
1349 if len(aliases) > 1:
1350 if len(aliases) > 1:
1350 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1351 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1351
1352
1352 # options
1353 # options
1353 if i[1]:
1354 if i[1]:
1354 option_lists.append((_("options:\n"), i[1]))
1355 option_lists.append((_("options:\n"), i[1]))
1355
1356
1356 addglobalopts(False)
1357 addglobalopts(False)
1357
1358
1358 def helplist(select=None):
1359 def helplist(select=None):
1359 h = {}
1360 h = {}
1360 cmds = {}
1361 cmds = {}
1361 for c, e in table.items():
1362 for c, e in table.items():
1362 f = c.split("|", 1)[0]
1363 f = c.split("|", 1)[0]
1363 if select and not select(f):
1364 if select and not select(f):
1364 continue
1365 continue
1365 if name == "shortlist" and not f.startswith("^"):
1366 if name == "shortlist" and not f.startswith("^"):
1366 continue
1367 continue
1367 f = f.lstrip("^")
1368 f = f.lstrip("^")
1368 if not ui.debugflag and f.startswith("debug"):
1369 if not ui.debugflag and f.startswith("debug"):
1369 continue
1370 continue
1370 doc = e[0].__doc__
1371 doc = e[0].__doc__
1371 if not doc:
1372 if not doc:
1372 doc = _("(No help text available)")
1373 doc = _("(No help text available)")
1373 h[f] = doc.splitlines(0)[0].rstrip()
1374 h[f] = doc.splitlines(0)[0].rstrip()
1374 cmds[f] = c.lstrip("^")
1375 cmds[f] = c.lstrip("^")
1375
1376
1376 fns = h.keys()
1377 fns = h.keys()
1377 fns.sort()
1378 fns.sort()
1378 m = max(map(len, fns))
1379 m = max(map(len, fns))
1379 for f in fns:
1380 for f in fns:
1380 if ui.verbose:
1381 if ui.verbose:
1381 commands = cmds[f].replace("|",", ")
1382 commands = cmds[f].replace("|",", ")
1382 ui.write(" %s:\n %s\n"%(commands, h[f]))
1383 ui.write(" %s:\n %s\n"%(commands, h[f]))
1383 else:
1384 else:
1384 ui.write(' %-*s %s\n' % (m, f, h[f]))
1385 ui.write(' %-*s %s\n' % (m, f, h[f]))
1385
1386
1386 if not ui.quiet:
1387 if not ui.quiet:
1387 addglobalopts(True)
1388 addglobalopts(True)
1388
1389
1389 def helptopic(name):
1390 def helptopic(name):
1390 v = None
1391 v = None
1391 for i in help.helptable:
1392 for i in help.helptable:
1392 l = i.split('|')
1393 l = i.split('|')
1393 if name in l:
1394 if name in l:
1394 v = i
1395 v = i
1395 header = l[-1]
1396 header = l[-1]
1396 if not v:
1397 if not v:
1397 raise cmdutil.UnknownCommand(name)
1398 raise cmdutil.UnknownCommand(name)
1398
1399
1399 # description
1400 # description
1400 doc = help.helptable[v]
1401 doc = help.helptable[v]
1401 if not doc:
1402 if not doc:
1402 doc = _("(No help text available)")
1403 doc = _("(No help text available)")
1403 if callable(doc):
1404 if callable(doc):
1404 doc = doc()
1405 doc = doc()
1405
1406
1406 ui.write("%s\n" % header)
1407 ui.write("%s\n" % header)
1407 ui.write("%s\n" % doc.rstrip())
1408 ui.write("%s\n" % doc.rstrip())
1408
1409
1409 def helpext(name):
1410 def helpext(name):
1410 try:
1411 try:
1411 mod = extensions.find(name)
1412 mod = extensions.find(name)
1412 except KeyError:
1413 except KeyError:
1413 raise cmdutil.UnknownCommand(name)
1414 raise cmdutil.UnknownCommand(name)
1414
1415
1415 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1416 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1416 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1417 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1417 for d in doc[1:]:
1418 for d in doc[1:]:
1418 ui.write(d, '\n')
1419 ui.write(d, '\n')
1419
1420
1420 ui.status('\n')
1421 ui.status('\n')
1421
1422
1422 try:
1423 try:
1423 ct = mod.cmdtable
1424 ct = mod.cmdtable
1424 except AttributeError:
1425 except AttributeError:
1425 ct = None
1426 ct = None
1426 if not ct:
1427 if not ct:
1427 ui.status(_('no commands defined\n'))
1428 ui.status(_('no commands defined\n'))
1428 return
1429 return
1429
1430
1430 ui.status(_('list of commands:\n\n'))
1431 ui.status(_('list of commands:\n\n'))
1431 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1432 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1432 helplist(modcmds.has_key)
1433 helplist(modcmds.has_key)
1433
1434
1434 if name and name != 'shortlist':
1435 if name and name != 'shortlist':
1435 i = None
1436 i = None
1436 for f in (helpcmd, helptopic, helpext):
1437 for f in (helpcmd, helptopic, helpext):
1437 try:
1438 try:
1438 f(name)
1439 f(name)
1439 i = None
1440 i = None
1440 break
1441 break
1441 except cmdutil.UnknownCommand, inst:
1442 except cmdutil.UnknownCommand, inst:
1442 i = inst
1443 i = inst
1443 if i:
1444 if i:
1444 raise i
1445 raise i
1445
1446
1446 else:
1447 else:
1447 # program name
1448 # program name
1448 if ui.verbose or with_version:
1449 if ui.verbose or with_version:
1449 version_(ui)
1450 version_(ui)
1450 else:
1451 else:
1451 ui.status(_("Mercurial Distributed SCM\n"))
1452 ui.status(_("Mercurial Distributed SCM\n"))
1452 ui.status('\n')
1453 ui.status('\n')
1453
1454
1454 # list of commands
1455 # list of commands
1455 if name == "shortlist":
1456 if name == "shortlist":
1456 ui.status(_('basic commands:\n\n'))
1457 ui.status(_('basic commands:\n\n'))
1457 else:
1458 else:
1458 ui.status(_('list of commands:\n\n'))
1459 ui.status(_('list of commands:\n\n'))
1459
1460
1460 helplist()
1461 helplist()
1461
1462
1462 # list all option lists
1463 # list all option lists
1463 opt_output = []
1464 opt_output = []
1464 for title, options in option_lists:
1465 for title, options in option_lists:
1465 opt_output.append(("\n%s" % title, None))
1466 opt_output.append(("\n%s" % title, None))
1466 for shortopt, longopt, default, desc in options:
1467 for shortopt, longopt, default, desc in options:
1467 if "DEPRECATED" in desc and not ui.verbose: continue
1468 if "DEPRECATED" in desc and not ui.verbose: continue
1468 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1469 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1469 longopt and " --%s" % longopt),
1470 longopt and " --%s" % longopt),
1470 "%s%s" % (desc,
1471 "%s%s" % (desc,
1471 default
1472 default
1472 and _(" (default: %s)") % default
1473 and _(" (default: %s)") % default
1473 or "")))
1474 or "")))
1474
1475
1475 if opt_output:
1476 if opt_output:
1476 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1477 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1477 for first, second in opt_output:
1478 for first, second in opt_output:
1478 if second:
1479 if second:
1479 ui.write(" %-*s %s\n" % (opts_len, first, second))
1480 ui.write(" %-*s %s\n" % (opts_len, first, second))
1480 else:
1481 else:
1481 ui.write("%s\n" % first)
1482 ui.write("%s\n" % first)
1482
1483
1483 def identify(ui, repo, source=None,
1484 def identify(ui, repo, source=None,
1484 rev=None, num=None, id=None, branch=None, tags=None):
1485 rev=None, num=None, id=None, branch=None, tags=None):
1485 """identify the working copy or specified revision
1486 """identify the working copy or specified revision
1486
1487
1487 With no revision, print a summary of the current state of the repo.
1488 With no revision, print a summary of the current state of the repo.
1488
1489
1489 With a path, do a lookup in another repository.
1490 With a path, do a lookup in another repository.
1490
1491
1491 This summary identifies the repository state using one or two parent
1492 This summary identifies the repository state using one or two parent
1492 hash identifiers, followed by a "+" if there are uncommitted changes
1493 hash identifiers, followed by a "+" if there are uncommitted changes
1493 in the working directory, a list of tags for this revision and a branch
1494 in the working directory, a list of tags for this revision and a branch
1494 name for non-default branches.
1495 name for non-default branches.
1495 """
1496 """
1496
1497
1497 hexfunc = ui.debugflag and hex or short
1498 hexfunc = ui.debugflag and hex or short
1498 default = not (num or id or branch or tags)
1499 default = not (num or id or branch or tags)
1499 output = []
1500 output = []
1500
1501
1501 if source:
1502 if source:
1502 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1503 source, revs = cmdutil.parseurl(ui.expandpath(source), [])
1503 srepo = hg.repository(ui, source)
1504 srepo = hg.repository(ui, source)
1504 if not rev and revs:
1505 if not rev and revs:
1505 rev = revs[0]
1506 rev = revs[0]
1506 if not rev:
1507 if not rev:
1507 rev = "tip"
1508 rev = "tip"
1508 if num or branch or tags:
1509 if num or branch or tags:
1509 raise util.Abort(
1510 raise util.Abort(
1510 "can't query remote revision number, branch, or tags")
1511 "can't query remote revision number, branch, or tags")
1511 output = [hexfunc(srepo.lookup(rev))]
1512 output = [hexfunc(srepo.lookup(rev))]
1512 elif not rev:
1513 elif not rev:
1513 ctx = repo.workingctx()
1514 ctx = repo.workingctx()
1514 parents = ctx.parents()
1515 parents = ctx.parents()
1515 changed = False
1516 changed = False
1516 if default or id or num:
1517 if default or id or num:
1517 changed = ctx.files() + ctx.deleted()
1518 changed = ctx.files() + ctx.deleted()
1518 if default or id:
1519 if default or id:
1519 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1520 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1520 (changed) and "+" or "")]
1521 (changed) and "+" or "")]
1521 if num:
1522 if num:
1522 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1523 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1523 (changed) and "+" or ""))
1524 (changed) and "+" or ""))
1524 else:
1525 else:
1525 ctx = repo.changectx(rev)
1526 ctx = repo.changectx(rev)
1526 if default or id:
1527 if default or id:
1527 output = [hexfunc(ctx.node())]
1528 output = [hexfunc(ctx.node())]
1528 if num:
1529 if num:
1529 output.append(str(ctx.rev()))
1530 output.append(str(ctx.rev()))
1530
1531
1531 if not source and default and not ui.quiet:
1532 if not source and default and not ui.quiet:
1532 b = util.tolocal(ctx.branch())
1533 b = util.tolocal(ctx.branch())
1533 if b != 'default':
1534 if b != 'default':
1534 output.append("(%s)" % b)
1535 output.append("(%s)" % b)
1535
1536
1536 # multiple tags for a single parent separated by '/'
1537 # multiple tags for a single parent separated by '/'
1537 t = "/".join(ctx.tags())
1538 t = "/".join(ctx.tags())
1538 if t:
1539 if t:
1539 output.append(t)
1540 output.append(t)
1540
1541
1541 if branch:
1542 if branch:
1542 output.append(util.tolocal(ctx.branch()))
1543 output.append(util.tolocal(ctx.branch()))
1543
1544
1544 if tags:
1545 if tags:
1545 output.extend(ctx.tags())
1546 output.extend(ctx.tags())
1546
1547
1547 ui.write("%s\n" % ' '.join(output))
1548 ui.write("%s\n" % ' '.join(output))
1548
1549
1549 def import_(ui, repo, patch1, *patches, **opts):
1550 def import_(ui, repo, patch1, *patches, **opts):
1550 """import an ordered set of patches
1551 """import an ordered set of patches
1551
1552
1552 Import a list of patches and commit them individually.
1553 Import a list of patches and commit them individually.
1553
1554
1554 If there are outstanding changes in the working directory, import
1555 If there are outstanding changes in the working directory, import
1555 will abort unless given the -f flag.
1556 will abort unless given the -f flag.
1556
1557
1557 You can import a patch straight from a mail message. Even patches
1558 You can import a patch straight from a mail message. Even patches
1558 as attachments work (body part must be type text/plain or
1559 as attachments work (body part must be type text/plain or
1559 text/x-patch to be used). The From and Subject headers of the email
1560 text/x-patch to be used). The From and Subject headers of the email
1560 message are used as the default committer and commit message. All
1561 message are used as the default committer and commit message. All
1561 text/plain body parts before the first diff are added to the commit
1562 text/plain body parts before the first diff are added to the commit
1562 message.
1563 message.
1563
1564
1564 If the imported patch was generated by hg export, user and description
1565 If the imported patch was generated by hg export, user and description
1565 from patch override values from message headers and body. Values
1566 from patch override values from message headers and body. Values
1566 given on command line with -m and -u override these.
1567 given on command line with -m and -u override these.
1567
1568
1568 If --exact is specified, import will set the working directory
1569 If --exact is specified, import will set the working directory
1569 to the parent of each patch before applying it, and will abort
1570 to the parent of each patch before applying it, and will abort
1570 if the resulting changeset has a different ID than the one
1571 if the resulting changeset has a different ID than the one
1571 recorded in the patch. This may happen due to character set
1572 recorded in the patch. This may happen due to character set
1572 problems or other deficiencies in the text patch format.
1573 problems or other deficiencies in the text patch format.
1573
1574
1574 To read a patch from standard input, use patch name "-".
1575 To read a patch from standard input, use patch name "-".
1575 """
1576 """
1576 patches = (patch1,) + patches
1577 patches = (patch1,) + patches
1577
1578
1578 if opts.get('exact') or not opts['force']:
1579 if opts.get('exact') or not opts['force']:
1579 cmdutil.bail_if_changed(repo)
1580 cmdutil.bail_if_changed(repo)
1580
1581
1581 d = opts["base"]
1582 d = opts["base"]
1582 strip = opts["strip"]
1583 strip = opts["strip"]
1583
1584
1584 wlock = repo.wlock()
1585 wlock = repo.wlock()
1585 lock = repo.lock()
1586 lock = repo.lock()
1586
1587
1587 for p in patches:
1588 for p in patches:
1588 pf = os.path.join(d, p)
1589 pf = os.path.join(d, p)
1589
1590
1590 if pf == '-':
1591 if pf == '-':
1591 ui.status(_("applying patch from stdin\n"))
1592 ui.status(_("applying patch from stdin\n"))
1592 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1593 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1593 else:
1594 else:
1594 ui.status(_("applying %s\n") % p)
1595 ui.status(_("applying %s\n") % p)
1595 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
1596 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))
1596
1597
1597 if tmpname is None:
1598 if tmpname is None:
1598 raise util.Abort(_('no diffs found'))
1599 raise util.Abort(_('no diffs found'))
1599
1600
1600 try:
1601 try:
1601 cmdline_message = cmdutil.logmessage(opts)
1602 cmdline_message = cmdutil.logmessage(opts)
1602 if cmdline_message:
1603 if cmdline_message:
1603 # pickup the cmdline msg
1604 # pickup the cmdline msg
1604 message = cmdline_message
1605 message = cmdline_message
1605 elif message:
1606 elif message:
1606 # pickup the patch msg
1607 # pickup the patch msg
1607 message = message.strip()
1608 message = message.strip()
1608 else:
1609 else:
1609 # launch the editor
1610 # launch the editor
1610 message = None
1611 message = None
1611 ui.debug(_('message:\n%s\n') % message)
1612 ui.debug(_('message:\n%s\n') % message)
1612
1613
1613 wp = repo.workingctx().parents()
1614 wp = repo.workingctx().parents()
1614 if opts.get('exact'):
1615 if opts.get('exact'):
1615 if not nodeid or not p1:
1616 if not nodeid or not p1:
1616 raise util.Abort(_('not a mercurial patch'))
1617 raise util.Abort(_('not a mercurial patch'))
1617 p1 = repo.lookup(p1)
1618 p1 = repo.lookup(p1)
1618 p2 = repo.lookup(p2 or hex(nullid))
1619 p2 = repo.lookup(p2 or hex(nullid))
1619
1620
1620 if p1 != wp[0].node():
1621 if p1 != wp[0].node():
1621 hg.clean(repo, p1, wlock=wlock)
1622 hg.clean(repo, p1, wlock=wlock)
1622 repo.dirstate.setparents(p1, p2)
1623 repo.dirstate.setparents(p1, p2)
1623 elif p2:
1624 elif p2:
1624 try:
1625 try:
1625 p1 = repo.lookup(p1)
1626 p1 = repo.lookup(p1)
1626 p2 = repo.lookup(p2)
1627 p2 = repo.lookup(p2)
1627 if p1 == wp[0].node():
1628 if p1 == wp[0].node():
1628 repo.dirstate.setparents(p1, p2)
1629 repo.dirstate.setparents(p1, p2)
1629 except hg.RepoError:
1630 except hg.RepoError:
1630 pass
1631 pass
1631 if opts.get('exact') or opts.get('import_branch'):
1632 if opts.get('exact') or opts.get('import_branch'):
1632 repo.dirstate.setbranch(branch or 'default')
1633 repo.dirstate.setbranch(branch or 'default')
1633
1634
1634 files = {}
1635 files = {}
1635 try:
1636 try:
1636 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1637 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1637 files=files)
1638 files=files)
1638 finally:
1639 finally:
1639 files = patch.updatedir(ui, repo, files, wlock=wlock)
1640 files = patch.updatedir(ui, repo, files, wlock=wlock)
1640 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1641 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1641 if opts.get('exact'):
1642 if opts.get('exact'):
1642 if hex(n) != nodeid:
1643 if hex(n) != nodeid:
1643 repo.rollback(wlock=wlock, lock=lock)
1644 repo.rollback(wlock=wlock, lock=lock)
1644 raise util.Abort(_('patch is damaged or loses information'))
1645 raise util.Abort(_('patch is damaged or loses information'))
1645 finally:
1646 finally:
1646 os.unlink(tmpname)
1647 os.unlink(tmpname)
1647
1648
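A hedged sketch of the per-patch extraction step import_ performs, relying on the patch.extract() call and the 8-tuple unpacking shown above; the file name 'fix.patch' and the standalone ui.ui() setup are made up for illustration.

import os
from mercurial import ui, patch

u = ui.ui()
# patch.extract() returns the same 8-tuple that import_() unpacks above
tmpname, message, user, date, branch, nodeid, p1, p2 = \
    patch.extract(u, file('fix.patch', 'rb'))    # 'fix.patch' is hypothetical
if tmpname is None:
    raise SystemExit('no diffs found')
try:
    u.write("patch author: %s\n" % (user or "unknown"))
finally:
    os.unlink(tmpname)    # extract() leaves a temporary file behind
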
1648 def incoming(ui, repo, source="default", **opts):
1649 def incoming(ui, repo, source="default", **opts):
1649 """show new changesets found in source
1650 """show new changesets found in source
1650
1651
1651 Show new changesets found in the specified path/URL or the default
1652 Show new changesets found in the specified path/URL or the default
1652 pull location. These are the changesets that would be pulled if a pull
1653 pull location. These are the changesets that would be pulled if a pull
1653 was requested.
1654 was requested.
1654
1655
1655 For a remote repository, using --bundle avoids downloading the changesets
1656 For a remote repository, using --bundle avoids downloading the changesets
1656 twice if the incoming is followed by a pull.
1657 twice if the incoming is followed by a pull.
1657
1658
1658 See pull for valid source format details.
1659 See pull for valid source format details.
1659 """
1660 """
1660 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1661 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1661 cmdutil.setremoteconfig(ui, opts)
1662 cmdutil.setremoteconfig(ui, opts)
1662
1663
1663 other = hg.repository(ui, source)
1664 other = hg.repository(ui, source)
1664 ui.status(_('comparing with %s\n') % source)
1665 ui.status(_('comparing with %s\n') % source)
1665 if revs:
1666 if revs:
1666 if 'lookup' in other.capabilities:
1667 if 'lookup' in other.capabilities:
1667 revs = [other.lookup(rev) for rev in revs]
1668 revs = [other.lookup(rev) for rev in revs]
1668 else:
1669 else:
1669 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1670 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1670 raise util.Abort(error)
1671 raise util.Abort(error)
1671 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1672 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1672 if not incoming:
1673 if not incoming:
1673 try:
1674 try:
1674 os.unlink(opts["bundle"])
1675 os.unlink(opts["bundle"])
1675 except:
1676 except:
1676 pass
1677 pass
1677 ui.status(_("no changes found\n"))
1678 ui.status(_("no changes found\n"))
1678 return 1
1679 return 1
1679
1680
1680 cleanup = None
1681 cleanup = None
1681 try:
1682 try:
1682 fname = opts["bundle"]
1683 fname = opts["bundle"]
1683 if fname or not other.local():
1684 if fname or not other.local():
1684 # create a bundle (uncompressed if other repo is not local)
1685 # create a bundle (uncompressed if other repo is not local)
1685 if revs is None:
1686 if revs is None:
1686 cg = other.changegroup(incoming, "incoming")
1687 cg = other.changegroup(incoming, "incoming")
1687 else:
1688 else:
1688 if 'changegroupsubset' not in other.capabilities:
1689 if 'changegroupsubset' not in other.capabilities:
1689 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1690 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1690 cg = other.changegroupsubset(incoming, revs, 'incoming')
1691 cg = other.changegroupsubset(incoming, revs, 'incoming')
1691 bundletype = other.local() and "HG10BZ" or "HG10UN"
1692 bundletype = other.local() and "HG10BZ" or "HG10UN"
1692 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1693 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1693 # keep written bundle?
1694 # keep written bundle?
1694 if opts["bundle"]:
1695 if opts["bundle"]:
1695 cleanup = None
1696 cleanup = None
1696 if not other.local():
1697 if not other.local():
1697 # use the created uncompressed bundlerepo
1698 # use the created uncompressed bundlerepo
1698 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1699 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1699
1700
1700 o = other.changelog.nodesbetween(incoming, revs)[0]
1701 o = other.changelog.nodesbetween(incoming, revs)[0]
1701 if opts['newest_first']:
1702 if opts['newest_first']:
1702 o.reverse()
1703 o.reverse()
1703 displayer = cmdutil.show_changeset(ui, other, opts)
1704 displayer = cmdutil.show_changeset(ui, other, opts)
1704 for n in o:
1705 for n in o:
1705 parents = [p for p in other.changelog.parents(n) if p != nullid]
1706 parents = [p for p in other.changelog.parents(n) if p != nullid]
1706 if opts['no_merges'] and len(parents) == 2:
1707 if opts['no_merges'] and len(parents) == 2:
1707 continue
1708 continue
1708 displayer.show(changenode=n)
1709 displayer.show(changenode=n)
1709 finally:
1710 finally:
1710 if hasattr(other, 'close'):
1711 if hasattr(other, 'close'):
1711 other.close()
1712 other.close()
1712 if cleanup:
1713 if cleanup:
1713 os.unlink(cleanup)
1714 os.unlink(cleanup)
1714
1715
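A sketch of the core incoming check, assuming a hypothetical path to the source clone; hg.repository and repo.findincoming are the same calls used in the function above.

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')                  # assumes cwd is the local repo
other = hg.repository(u, '../other-clone')    # hypothetical source repository
missing = repo.findincoming(other, force=False)
if not missing:
    u.status("no changes found\n")
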
1715 def init(ui, dest=".", **opts):
1716 def init(ui, dest=".", **opts):
1716 """create a new repository in the given directory
1717 """create a new repository in the given directory
1717
1718
1718 Initialize a new repository in the given directory. If the given
1719 Initialize a new repository in the given directory. If the given
1719 directory does not exist, it is created.
1720 directory does not exist, it is created.
1720
1721
1721 If no directory is given, the current directory is used.
1722 If no directory is given, the current directory is used.
1722
1723
1723 It is possible to specify an ssh:// URL as the destination.
1724 It is possible to specify an ssh:// URL as the destination.
1724 Look at the help text for the pull command for important details
1725 Look at the help text for the pull command for important details
1725 about ssh:// URLs.
1726 about ssh:// URLs.
1726 """
1727 """
1727 cmdutil.setremoteconfig(ui, opts)
1728 cmdutil.setremoteconfig(ui, opts)
1728 hg.repository(ui, dest, create=1)
1729 hg.repository(ui, dest, create=1)
1729
1730
1730 def locate(ui, repo, *pats, **opts):
1731 def locate(ui, repo, *pats, **opts):
1731 """locate files matching specific patterns
1732 """locate files matching specific patterns
1732
1733
1733 Print all files under Mercurial control whose names match the
1734 Print all files under Mercurial control whose names match the
1734 given patterns.
1735 given patterns.
1735
1736
1736 This command searches the entire repository by default. To search
1737 This command searches the entire repository by default. To search
1737 just the current directory and its subdirectories, use
1738 just the current directory and its subdirectories, use
1738 "--include .".
1739 "--include .".
1739
1740
1740 If no patterns are given to match, this command prints all file
1741 If no patterns are given to match, this command prints all file
1741 names.
1742 names.
1742
1743
1743 If you want to feed the output of this command into the "xargs"
1744 If you want to feed the output of this command into the "xargs"
1744 command, use the "-0" option to both this command and "xargs".
1745 command, use the "-0" option to both this command and "xargs".
1745 This will avoid the problem of "xargs" treating single filenames
1746 This will avoid the problem of "xargs" treating single filenames
1746 that contain white space as multiple filenames.
1747 that contain white space as multiple filenames.
1747 """
1748 """
1748 end = opts['print0'] and '\0' or '\n'
1749 end = opts['print0'] and '\0' or '\n'
1749 rev = opts['rev']
1750 rev = opts['rev']
1750 if rev:
1751 if rev:
1751 node = repo.lookup(rev)
1752 node = repo.lookup(rev)
1752 else:
1753 else:
1753 node = None
1754 node = None
1754
1755
1755 ret = 1
1756 ret = 1
1756 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1757 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1757 badmatch=util.always,
1758 badmatch=util.always,
1758 default='relglob'):
1759 default='relglob'):
1759 if src == 'b':
1760 if src == 'b':
1760 continue
1761 continue
1761 if not node and repo.dirstate.state(abs) == '?':
1762 if not node and abs not in repo.dirstate:
1762 continue
1763 continue
1763 if opts['fullpath']:
1764 if opts['fullpath']:
1764 ui.write(os.path.join(repo.root, abs), end)
1765 ui.write(os.path.join(repo.root, abs), end)
1765 else:
1766 else:
1766 ui.write(((pats and rel) or abs), end)
1767 ui.write(((pats and rel) or abs), end)
1767 ret = 0
1768 ret = 0
1768
1769
1769 return ret
1770 return ret
1770
1771
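The dirstate membership test this changeset switches to, shown standalone: "f in repo.dirstate" replaces the old dirstate.state(f) == '?' probe used by locate above. The file name and the standalone setup are hypothetical.

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
f = 'setup.py'                        # hypothetical file name
if f in repo.dirstate:                # new __contains__ support on dirstate
    u.write("%s is tracked\n" % f)
else:
    u.write("%s is unknown to the dirstate\n" % f)
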
1771 def log(ui, repo, *pats, **opts):
1772 def log(ui, repo, *pats, **opts):
1772 """show revision history of entire repository or files
1773 """show revision history of entire repository or files
1773
1774
1774 Print the revision history of the specified files or the entire
1775 Print the revision history of the specified files or the entire
1775 project.
1776 project.
1776
1777
1777 File history is shown without following rename or copy history of
1778 File history is shown without following rename or copy history of
1778 files. Use -f/--follow with a file name to follow history across
1779 files. Use -f/--follow with a file name to follow history across
1779 renames and copies. --follow without a file name will only show
1780 renames and copies. --follow without a file name will only show
1780 ancestors or descendants of the starting revision. --follow-first
1781 ancestors or descendants of the starting revision. --follow-first
1781 only follows the first parent of merge revisions.
1782 only follows the first parent of merge revisions.
1782
1783
1783 If no revision range is specified, the default is tip:0 unless
1784 If no revision range is specified, the default is tip:0 unless
1784 --follow is set, in which case the working directory parent is
1785 --follow is set, in which case the working directory parent is
1785 used as the starting revision.
1786 used as the starting revision.
1786
1787
1787 By default this command outputs: changeset id and hash, tags,
1788 By default this command outputs: changeset id and hash, tags,
1788 non-trivial parents, user, date and time, and a summary for each
1789 non-trivial parents, user, date and time, and a summary for each
1789 commit. When the -v/--verbose switch is used, the list of changed
1790 commit. When the -v/--verbose switch is used, the list of changed
1790 files and full commit message is shown.
1791 files and full commit message is shown.
1791
1792
1792 NOTE: log -p may generate unexpected diff output for merge
1793 NOTE: log -p may generate unexpected diff output for merge
1793 changesets, as it will compare the merge changeset against its
1794 changesets, as it will compare the merge changeset against its
1794 first parent only. Also, the files: list will only reflect files
1795 first parent only. Also, the files: list will only reflect files
1795 that are different from BOTH parents.
1796 that are different from BOTH parents.
1796
1797
1797 """
1798 """
1798
1799
1799 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1800 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1800 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1801 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1801
1802
1802 if opts['limit']:
1803 if opts['limit']:
1803 try:
1804 try:
1804 limit = int(opts['limit'])
1805 limit = int(opts['limit'])
1805 except ValueError:
1806 except ValueError:
1806 raise util.Abort(_('limit must be a positive integer'))
1807 raise util.Abort(_('limit must be a positive integer'))
1807 if limit <= 0: raise util.Abort(_('limit must be positive'))
1808 if limit <= 0: raise util.Abort(_('limit must be positive'))
1808 else:
1809 else:
1809 limit = sys.maxint
1810 limit = sys.maxint
1810 count = 0
1811 count = 0
1811
1812
1812 if opts['copies'] and opts['rev']:
1813 if opts['copies'] and opts['rev']:
1813 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1814 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1814 else:
1815 else:
1815 endrev = repo.changelog.count()
1816 endrev = repo.changelog.count()
1816 rcache = {}
1817 rcache = {}
1817 ncache = {}
1818 ncache = {}
1818 dcache = []
1819 dcache = []
1819 def getrenamed(fn, rev, man):
1820 def getrenamed(fn, rev, man):
1820 '''looks up all renames for a file (up to endrev) the first
1821 '''looks up all renames for a file (up to endrev) the first
1821 time the file is given. It indexes on the changerev and only
1822 time the file is given. It indexes on the changerev and only
1822 parses the manifest if linkrev != changerev.
1823 parses the manifest if linkrev != changerev.
1823 Returns rename info for fn at changerev rev.'''
1824 Returns rename info for fn at changerev rev.'''
1824 if fn not in rcache:
1825 if fn not in rcache:
1825 rcache[fn] = {}
1826 rcache[fn] = {}
1826 ncache[fn] = {}
1827 ncache[fn] = {}
1827 fl = repo.file(fn)
1828 fl = repo.file(fn)
1828 for i in xrange(fl.count()):
1829 for i in xrange(fl.count()):
1829 node = fl.node(i)
1830 node = fl.node(i)
1830 lr = fl.linkrev(node)
1831 lr = fl.linkrev(node)
1831 renamed = fl.renamed(node)
1832 renamed = fl.renamed(node)
1832 rcache[fn][lr] = renamed
1833 rcache[fn][lr] = renamed
1833 if renamed:
1834 if renamed:
1834 ncache[fn][node] = renamed
1835 ncache[fn][node] = renamed
1835 if lr >= endrev:
1836 if lr >= endrev:
1836 break
1837 break
1837 if rev in rcache[fn]:
1838 if rev in rcache[fn]:
1838 return rcache[fn][rev]
1839 return rcache[fn][rev]
1839 mr = repo.manifest.rev(man)
1840 mr = repo.manifest.rev(man)
1840 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1841 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1841 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1842 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1842 if not dcache or dcache[0] != man:
1843 if not dcache or dcache[0] != man:
1843 dcache[:] = [man, repo.manifest.readdelta(man)]
1844 dcache[:] = [man, repo.manifest.readdelta(man)]
1844 if fn in dcache[1]:
1845 if fn in dcache[1]:
1845 return ncache[fn].get(dcache[1][fn])
1846 return ncache[fn].get(dcache[1][fn])
1846 return None
1847 return None
1847
1848
1848 df = False
1849 df = False
1849 if opts["date"]:
1850 if opts["date"]:
1850 df = util.matchdate(opts["date"])
1851 df = util.matchdate(opts["date"])
1851
1852
1852 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1853 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1853 for st, rev, fns in changeiter:
1854 for st, rev, fns in changeiter:
1854 if st == 'add':
1855 if st == 'add':
1855 changenode = repo.changelog.node(rev)
1856 changenode = repo.changelog.node(rev)
1856 parents = [p for p in repo.changelog.parentrevs(rev)
1857 parents = [p for p in repo.changelog.parentrevs(rev)
1857 if p != nullrev]
1858 if p != nullrev]
1858 if opts['no_merges'] and len(parents) == 2:
1859 if opts['no_merges'] and len(parents) == 2:
1859 continue
1860 continue
1860 if opts['only_merges'] and len(parents) != 2:
1861 if opts['only_merges'] and len(parents) != 2:
1861 continue
1862 continue
1862
1863
1863 if df:
1864 if df:
1864 changes = get(rev)
1865 changes = get(rev)
1865 if not df(changes[2][0]):
1866 if not df(changes[2][0]):
1866 continue
1867 continue
1867
1868
1868 if opts['keyword']:
1869 if opts['keyword']:
1869 changes = get(rev)
1870 changes = get(rev)
1870 miss = 0
1871 miss = 0
1871 for k in [kw.lower() for kw in opts['keyword']]:
1872 for k in [kw.lower() for kw in opts['keyword']]:
1872 if not (k in changes[1].lower() or
1873 if not (k in changes[1].lower() or
1873 k in changes[4].lower() or
1874 k in changes[4].lower() or
1874 k in " ".join(changes[3]).lower()):
1875 k in " ".join(changes[3]).lower()):
1875 miss = 1
1876 miss = 1
1876 break
1877 break
1877 if miss:
1878 if miss:
1878 continue
1879 continue
1879
1880
1880 copies = []
1881 copies = []
1881 if opts.get('copies') and rev:
1882 if opts.get('copies') and rev:
1882 mf = get(rev)[0]
1883 mf = get(rev)[0]
1883 for fn in get(rev)[3]:
1884 for fn in get(rev)[3]:
1884 rename = getrenamed(fn, rev, mf)
1885 rename = getrenamed(fn, rev, mf)
1885 if rename:
1886 if rename:
1886 copies.append((fn, rename[0]))
1887 copies.append((fn, rename[0]))
1887 displayer.show(rev, changenode, copies=copies)
1888 displayer.show(rev, changenode, copies=copies)
1888 elif st == 'iter':
1889 elif st == 'iter':
1889 if count == limit: break
1890 if count == limit: break
1890 if displayer.flush(rev):
1891 if displayer.flush(rev):
1891 count += 1
1892 count += 1
1892
1893
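A sketch of the case-insensitive keyword filter log applies above, run against a single changeset tuple; the keyword list and the 'tip' revision are made up, and the tuple indices follow the loop above (1 user, 3 files, 4 description).

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
changes = repo.changectx('tip').changeset()
keywords = ['typo', 'regression']                 # hypothetical keywords
miss = 0
for k in [kw.lower() for kw in keywords]:
    if not (k in changes[1].lower() or            # user
            k in changes[4].lower() or            # description
            k in " ".join(changes[3]).lower()):   # files
        miss = 1
        break
if not miss:
    u.write("tip matches all keywords\n")
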
1893 def manifest(ui, repo, rev=None):
1894 def manifest(ui, repo, rev=None):
1894 """output the current or given revision of the project manifest
1895 """output the current or given revision of the project manifest
1895
1896
1896 Print a list of version controlled files for the given revision.
1897 Print a list of version controlled files for the given revision.
1897 If no revision is given, the parent of the working directory is used,
1898 If no revision is given, the parent of the working directory is used,
1898 or tip if no revision is checked out.
1899 or tip if no revision is checked out.
1899
1900
1900 The manifest is the list of files being version controlled. If no revision
1901 The manifest is the list of files being version controlled. If no revision
1901 is given then the first parent of the working directory is used.
1902 is given then the first parent of the working directory is used.
1902
1903
1903 With the -v flag, print file permissions. With the --debug flag, print
1904 With the -v flag, print file permissions. With the --debug flag, print
1904 file revision hashes.
1905 file revision hashes.
1905 """
1906 """
1906
1907
1907 m = repo.changectx(rev).manifest()
1908 m = repo.changectx(rev).manifest()
1908 files = m.keys()
1909 files = m.keys()
1909 files.sort()
1910 files.sort()
1910
1911
1911 for f in files:
1912 for f in files:
1912 if ui.debugflag:
1913 if ui.debugflag:
1913 ui.write("%40s " % hex(m[f]))
1914 ui.write("%40s " % hex(m[f]))
1914 if ui.verbose:
1915 if ui.verbose:
1915 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1916 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1916 ui.write("%s\n" % f)
1917 ui.write("%s\n" % f)
1917
1918
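A sketch of listing one revision's manifest the way the command above does, in its verbose form; the 'tip' argument is an assumption, while changectx, manifest, and execf are the calls used in the function itself.

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
m = repo.changectx('tip').manifest()          # 'tip' is a hypothetical choice
files = m.keys()
files.sort()
for f in files:
    # verbose form: permissions, then the file name, as in the -v branch above
    u.write("%3s %s\n" % (m.execf(f) and "755" or "644", f))
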
1918 def merge(ui, repo, node=None, force=None, rev=None):
1919 def merge(ui, repo, node=None, force=None, rev=None):
1919 """merge working directory with another revision
1920 """merge working directory with another revision
1920
1921
1921 Merge the contents of the current working directory and the
1922 Merge the contents of the current working directory and the
1922 requested revision. Files that changed between either parent are
1923 requested revision. Files that changed between either parent are
1923 marked as changed for the next commit and a commit must be
1924 marked as changed for the next commit and a commit must be
1924 performed before any further updates are allowed.
1925 performed before any further updates are allowed.
1925
1926
1926 If no revision is specified, the working directory's parent is a
1927 If no revision is specified, the working directory's parent is a
1927 head revision, and the repository contains exactly one other head,
1928 head revision, and the repository contains exactly one other head,
1928 the other head is merged with by default. Otherwise, an explicit
1929 the other head is merged with by default. Otherwise, an explicit
1929 revision to merge with must be provided.
1930 revision to merge with must be provided.
1930 """
1931 """
1931
1932
1932 if rev and node:
1933 if rev and node:
1933 raise util.Abort(_("please specify just one revision"))
1934 raise util.Abort(_("please specify just one revision"))
1934
1935
1935 if not node:
1936 if not node:
1936 node = rev
1937 node = rev
1937
1938
1938 if not node:
1939 if not node:
1939 heads = repo.heads()
1940 heads = repo.heads()
1940 if len(heads) > 2:
1941 if len(heads) > 2:
1941 raise util.Abort(_('repo has %d heads - '
1942 raise util.Abort(_('repo has %d heads - '
1942 'please merge with an explicit rev') %
1943 'please merge with an explicit rev') %
1943 len(heads))
1944 len(heads))
1944 if len(heads) == 1:
1945 if len(heads) == 1:
1945 raise util.Abort(_('there is nothing to merge - '
1946 raise util.Abort(_('there is nothing to merge - '
1946 'use "hg update" instead'))
1947 'use "hg update" instead'))
1947 parent = repo.dirstate.parents()[0]
1948 parent = repo.dirstate.parents()[0]
1948 if parent not in heads:
1949 if parent not in heads:
1949 raise util.Abort(_('working dir not at a head rev - '
1950 raise util.Abort(_('working dir not at a head rev - '
1950 'use "hg update" or merge with an explicit rev'))
1951 'use "hg update" or merge with an explicit rev'))
1951 node = parent == heads[0] and heads[-1] or heads[0]
1952 node = parent == heads[0] and heads[-1] or heads[0]
1952 return hg.merge(repo, node, force=force)
1953 return hg.merge(repo, node, force=force)
1953
1954
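A sketch of the default head selection the merge docstring describes: with exactly two heads and the working directory parked on one of them, the other head is picked. All calls (repo.heads, repo.dirstate.parents, hg.merge) are the ones used in the function above; the standalone setup is assumed.

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
heads = repo.heads()
parent = repo.dirstate.parents()[0]
if len(heads) == 2 and parent in heads:
    # pick whichever head the working directory is not on
    node = parent == heads[0] and heads[-1] or heads[0]
    hg.merge(repo, node, force=False)
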
1954 def outgoing(ui, repo, dest=None, **opts):
1955 def outgoing(ui, repo, dest=None, **opts):
1955 """show changesets not found in destination
1956 """show changesets not found in destination
1956
1957
1957 Show changesets not found in the specified destination repository or
1958 Show changesets not found in the specified destination repository or
1958 the default push location. These are the changesets that would be pushed
1959 the default push location. These are the changesets that would be pushed
1959 if a push was requested.
1960 if a push was requested.
1960
1961
1961 See pull for valid destination format details.
1962 See pull for valid destination format details.
1962 """
1963 """
1963 dest, revs = cmdutil.parseurl(
1964 dest, revs = cmdutil.parseurl(
1964 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1965 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1965 cmdutil.setremoteconfig(ui, opts)
1966 cmdutil.setremoteconfig(ui, opts)
1966 if revs:
1967 if revs:
1967 revs = [repo.lookup(rev) for rev in revs]
1968 revs = [repo.lookup(rev) for rev in revs]
1968
1969
1969 other = hg.repository(ui, dest)
1970 other = hg.repository(ui, dest)
1970 ui.status(_('comparing with %s\n') % dest)
1971 ui.status(_('comparing with %s\n') % dest)
1971 o = repo.findoutgoing(other, force=opts['force'])
1972 o = repo.findoutgoing(other, force=opts['force'])
1972 if not o:
1973 if not o:
1973 ui.status(_("no changes found\n"))
1974 ui.status(_("no changes found\n"))
1974 return 1
1975 return 1
1975 o = repo.changelog.nodesbetween(o, revs)[0]
1976 o = repo.changelog.nodesbetween(o, revs)[0]
1976 if opts['newest_first']:
1977 if opts['newest_first']:
1977 o.reverse()
1978 o.reverse()
1978 displayer = cmdutil.show_changeset(ui, repo, opts)
1979 displayer = cmdutil.show_changeset(ui, repo, opts)
1979 for n in o:
1980 for n in o:
1980 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1981 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1981 if opts['no_merges'] and len(parents) == 2:
1982 if opts['no_merges'] and len(parents) == 2:
1982 continue
1983 continue
1983 displayer.show(changenode=n)
1984 displayer.show(changenode=n)
1984
1985
1985 def parents(ui, repo, file_=None, **opts):
1986 def parents(ui, repo, file_=None, **opts):
1986 """show the parents of the working dir or revision
1987 """show the parents of the working dir or revision
1987
1988
1988 Print the working directory's parent revisions. If a
1989 Print the working directory's parent revisions. If a
1989 revision is given via --rev, the parent of that revision
1990 revision is given via --rev, the parent of that revision
1990 will be printed. If a file argument is given, the revision in
1991 will be printed. If a file argument is given, the revision in
1991 which the file was last changed (before the working directory
1992 which the file was last changed (before the working directory
1992 revision or the argument to --rev if given) is printed.
1993 revision or the argument to --rev if given) is printed.
1993 """
1994 """
1994 rev = opts.get('rev')
1995 rev = opts.get('rev')
1995 if file_:
1996 if file_:
1996 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1997 files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
1997 if anypats or len(files) != 1:
1998 if anypats or len(files) != 1:
1998 raise util.Abort(_('can only specify an explicit file name'))
1999 raise util.Abort(_('can only specify an explicit file name'))
1999 ctx = repo.filectx(files[0], changeid=rev)
2000 ctx = repo.filectx(files[0], changeid=rev)
2000 elif rev:
2001 elif rev:
2001 ctx = repo.changectx(rev)
2002 ctx = repo.changectx(rev)
2002 else:
2003 else:
2003 ctx = repo.workingctx()
2004 ctx = repo.workingctx()
2004 p = [cp.node() for cp in ctx.parents()]
2005 p = [cp.node() for cp in ctx.parents()]
2005
2006
2006 displayer = cmdutil.show_changeset(ui, repo, opts)
2007 displayer = cmdutil.show_changeset(ui, repo, opts)
2007 for n in p:
2008 for n in p:
2008 if n != nullid:
2009 if n != nullid:
2009 displayer.show(changenode=n)
2010 displayer.show(changenode=n)
2010
2011
2011 def paths(ui, repo, search=None):
2012 def paths(ui, repo, search=None):
2012 """show definition of symbolic path names
2013 """show definition of symbolic path names
2013
2014
2014 Show definition of symbolic path name NAME. If no name is given, show
2015 Show definition of symbolic path name NAME. If no name is given, show
2015 definition of available names.
2016 definition of available names.
2016
2017
2017 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2018 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2018 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2019 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2019 """
2020 """
2020 if search:
2021 if search:
2021 for name, path in ui.configitems("paths"):
2022 for name, path in ui.configitems("paths"):
2022 if name == search:
2023 if name == search:
2023 ui.write("%s\n" % path)
2024 ui.write("%s\n" % path)
2024 return
2025 return
2025 ui.warn(_("not found!\n"))
2026 ui.warn(_("not found!\n"))
2026 return 1
2027 return 1
2027 else:
2028 else:
2028 for name, path in ui.configitems("paths"):
2029 for name, path in ui.configitems("paths"):
2029 ui.write("%s = %s\n" % (name, path))
2030 ui.write("%s = %s\n" % (name, path))
2030
2031
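A sketch of resolving one symbolic path name the way paths() does, via ui.configitems("paths"); the searched name 'upstream' is hypothetical, and a bare ui only sees the user/system hgrc (a repository's own .hg/hgrc is read through repo.ui).

from mercurial import ui

u = ui.ui()
search = 'upstream'                    # hypothetical path name
for name, path in u.configitems("paths"):
    if name == search:
        u.write("%s\n" % path)
        break
else:
    u.warn("not found!\n")
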
2031 def postincoming(ui, repo, modheads, optupdate, wasempty):
2032 def postincoming(ui, repo, modheads, optupdate, wasempty):
2032 if modheads == 0:
2033 if modheads == 0:
2033 return
2034 return
2034 if optupdate:
2035 if optupdate:
2035 if wasempty:
2036 if wasempty:
2036 return hg.update(repo, repo.lookup('default'))
2037 return hg.update(repo, repo.lookup('default'))
2037 elif modheads == 1:
2038 elif modheads == 1:
2038 return hg.update(repo, repo.changelog.tip()) # update
2039 return hg.update(repo, repo.changelog.tip()) # update
2039 else:
2040 else:
2040 ui.status(_("not updating, since new heads added\n"))
2041 ui.status(_("not updating, since new heads added\n"))
2041 if modheads > 1:
2042 if modheads > 1:
2042 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2043 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2043 else:
2044 else:
2044 ui.status(_("(run 'hg update' to get a working copy)\n"))
2045 ui.status(_("(run 'hg update' to get a working copy)\n"))
2045
2046
2046 def pull(ui, repo, source="default", **opts):
2047 def pull(ui, repo, source="default", **opts):
2047 """pull changes from the specified source
2048 """pull changes from the specified source
2048
2049
2049 Pull changes from a remote repository to a local one.
2050 Pull changes from a remote repository to a local one.
2050
2051
2051 This finds all changes from the repository at the specified path
2052 This finds all changes from the repository at the specified path
2052 or URL and adds them to the local repository. By default, this
2053 or URL and adds them to the local repository. By default, this
2053 does not update the copy of the project in the working directory.
2054 does not update the copy of the project in the working directory.
2054
2055
2055 Valid URLs are of the form:
2056 Valid URLs are of the form:
2056
2057
2057 local/filesystem/path (or file://local/filesystem/path)
2058 local/filesystem/path (or file://local/filesystem/path)
2058 http://[user@]host[:port]/[path]
2059 http://[user@]host[:port]/[path]
2059 https://[user@]host[:port]/[path]
2060 https://[user@]host[:port]/[path]
2060 ssh://[user@]host[:port]/[path]
2061 ssh://[user@]host[:port]/[path]
2061 static-http://host[:port]/[path]
2062 static-http://host[:port]/[path]
2062
2063
2063 Paths in the local filesystem can either point to Mercurial
2064 Paths in the local filesystem can either point to Mercurial
2064 repositories or to bundle files (as created by 'hg bundle' or
2065 repositories or to bundle files (as created by 'hg bundle' or
2065 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2066 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2066 allows access to a Mercurial repository where you simply use a web
2067 allows access to a Mercurial repository where you simply use a web
2067 server to publish the .hg directory as static content.
2068 server to publish the .hg directory as static content.
2068
2069
2069 An optional identifier after # indicates a particular branch, tag,
2070 An optional identifier after # indicates a particular branch, tag,
2070 or changeset to pull.
2071 or changeset to pull.
2071
2072
2072 Some notes about using SSH with Mercurial:
2073 Some notes about using SSH with Mercurial:
2073 - SSH requires an accessible shell account on the destination machine
2074 - SSH requires an accessible shell account on the destination machine
2074 and a copy of hg in the remote path or specified with remotecmd.
2075 and a copy of hg in the remote path or specified with remotecmd.
2075 - path is relative to the remote user's home directory by default.
2076 - path is relative to the remote user's home directory by default.
2076 Use an extra slash at the start of a path to specify an absolute path:
2077 Use an extra slash at the start of a path to specify an absolute path:
2077 ssh://example.com//tmp/repository
2078 ssh://example.com//tmp/repository
2078 - Mercurial doesn't use its own compression via SSH; the right thing
2079 - Mercurial doesn't use its own compression via SSH; the right thing
2079 to do is to configure it in your ~/.ssh/config, e.g.:
2080 to do is to configure it in your ~/.ssh/config, e.g.:
2080 Host *.mylocalnetwork.example.com
2081 Host *.mylocalnetwork.example.com
2081 Compression no
2082 Compression no
2082 Host *
2083 Host *
2083 Compression yes
2084 Compression yes
2084 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2085 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2085 with the --ssh command line option.
2086 with the --ssh command line option.
2086 """
2087 """
2087 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2088 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
2088 cmdutil.setremoteconfig(ui, opts)
2089 cmdutil.setremoteconfig(ui, opts)
2089
2090
2090 other = hg.repository(ui, source)
2091 other = hg.repository(ui, source)
2091 ui.status(_('pulling from %s\n') % (source))
2092 ui.status(_('pulling from %s\n') % (source))
2092 if revs:
2093 if revs:
2093 if 'lookup' in other.capabilities:
2094 if 'lookup' in other.capabilities:
2094 revs = [other.lookup(rev) for rev in revs]
2095 revs = [other.lookup(rev) for rev in revs]
2095 else:
2096 else:
2096 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2097 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2097 raise util.Abort(error)
2098 raise util.Abort(error)
2098
2099
2099 wasempty = repo.changelog.count() == 0
2100 wasempty = repo.changelog.count() == 0
2100 modheads = repo.pull(other, heads=revs, force=opts['force'])
2101 modheads = repo.pull(other, heads=revs, force=opts['force'])
2101 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2102 return postincoming(ui, repo, modheads, opts['update'], wasempty)
2102
2103
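A sketch of the source resolution and pull performed by the command above, using the same cmdutil.parseurl, ui.expandpath, and repo.pull calls; the 'default' path and the ['tip'] rev list are assumptions, and looking revs up on the remote needs the 'lookup' capability checked above.

from mercurial import ui, hg, cmdutil

u = ui.ui()
repo = hg.repository(u, '.')
source, revs = cmdutil.parseurl(u.expandpath('default'), ['tip'])
other = hg.repository(u, source)
if revs:
    # requires the remote 'lookup' capability, as pull() verifies above
    revs = [other.lookup(r) for r in revs]
modheads = repo.pull(other, heads=revs, force=False)
u.status("%d head(s) added or changed\n" % modheads)
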
2103 def push(ui, repo, dest=None, **opts):
2104 def push(ui, repo, dest=None, **opts):
2104 """push changes to the specified destination
2105 """push changes to the specified destination
2105
2106
2106 Push changes from the local repository to the given destination.
2107 Push changes from the local repository to the given destination.
2107
2108
2108 This is the symmetrical operation for pull. It helps to move
2109 This is the symmetrical operation for pull. It helps to move
2109 changes from the current repository to a different one. If the
2110 changes from the current repository to a different one. If the
2110 destination is local this is identical to a pull in that directory
2111 destination is local this is identical to a pull in that directory
2111 from the current one.
2112 from the current one.
2112
2113
2113 By default, push will refuse to run if it detects the result would
2114 By default, push will refuse to run if it detects the result would
2114 increase the number of remote heads. This generally indicates the
2115 increase the number of remote heads. This generally indicates the
2115 client has forgotten to sync and merge before pushing.
2116 client has forgotten to sync and merge before pushing.
2116
2117
2117 Valid URLs are of the form:
2118 Valid URLs are of the form:
2118
2119
2119 local/filesystem/path (or file://local/filesystem/path)
2120 local/filesystem/path (or file://local/filesystem/path)
2120 ssh://[user@]host[:port]/[path]
2121 ssh://[user@]host[:port]/[path]
2121 http://[user@]host[:port]/[path]
2122 http://[user@]host[:port]/[path]
2122 https://[user@]host[:port]/[path]
2123 https://[user@]host[:port]/[path]
2123
2124
2124 An optional identifier after # indicates a particular branch, tag,
2125 An optional identifier after # indicates a particular branch, tag,
2125 or changeset to push.
2126 or changeset to push.
2126
2127
2127 Look at the help text for the pull command for important details
2128 Look at the help text for the pull command for important details
2128 about ssh:// URLs.
2129 about ssh:// URLs.
2129
2130
2130 Pushing to http:// and https:// URLs is only possible if this
2131 Pushing to http:// and https:// URLs is only possible if this
2131 feature is explicitly enabled on the remote Mercurial server.
2132 feature is explicitly enabled on the remote Mercurial server.
2132 """
2133 """
2133 dest, revs = cmdutil.parseurl(
2134 dest, revs = cmdutil.parseurl(
2134 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2135 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2135 cmdutil.setremoteconfig(ui, opts)
2136 cmdutil.setremoteconfig(ui, opts)
2136
2137
2137 other = hg.repository(ui, dest)
2138 other = hg.repository(ui, dest)
2138 ui.status('pushing to %s\n' % (dest))
2139 ui.status('pushing to %s\n' % (dest))
2139 if revs:
2140 if revs:
2140 revs = [repo.lookup(rev) for rev in revs]
2141 revs = [repo.lookup(rev) for rev in revs]
2141 r = repo.push(other, opts['force'], revs=revs)
2142 r = repo.push(other, opts['force'], revs=revs)
2142 return r == 0
2143 return r == 0
2143
2144
2144 def rawcommit(ui, repo, *pats, **opts):
2145 def rawcommit(ui, repo, *pats, **opts):
2145 """raw commit interface (DEPRECATED)
2146 """raw commit interface (DEPRECATED)
2146
2147
2147 (DEPRECATED)
2148 (DEPRECATED)
2148 Lowlevel commit, for use in helper scripts.
2149 Lowlevel commit, for use in helper scripts.
2149
2150
2150 This command is not intended to be used by normal users, as it is
2151 This command is not intended to be used by normal users, as it is
2151 primarily useful for importing from other SCMs.
2152 primarily useful for importing from other SCMs.
2152
2153
2153 This command is now deprecated and will be removed in a future
2154 This command is now deprecated and will be removed in a future
2154 release; please use debugsetparents and commit instead.
2155 release; please use debugsetparents and commit instead.
2155 """
2156 """
2156
2157
2157 ui.warn(_("(the rawcommit command is deprecated)\n"))
2158 ui.warn(_("(the rawcommit command is deprecated)\n"))
2158
2159
2159 message = cmdutil.logmessage(opts)
2160 message = cmdutil.logmessage(opts)
2160
2161
2161 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2162 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2162 if opts['files']:
2163 if opts['files']:
2163 files += open(opts['files']).read().splitlines()
2164 files += open(opts['files']).read().splitlines()
2164
2165
2165 parents = [repo.lookup(p) for p in opts['parent']]
2166 parents = [repo.lookup(p) for p in opts['parent']]
2166
2167
2167 try:
2168 try:
2168 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2169 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2169 except ValueError, inst:
2170 except ValueError, inst:
2170 raise util.Abort(str(inst))
2171 raise util.Abort(str(inst))
2171
2172
2172 def recover(ui, repo):
2173 def recover(ui, repo):
2173 """roll back an interrupted transaction
2174 """roll back an interrupted transaction
2174
2175
2175 Recover from an interrupted commit or pull.
2176 Recover from an interrupted commit or pull.
2176
2177
2177 This command tries to fix the repository status after an interrupted
2178 This command tries to fix the repository status after an interrupted
2178 operation. It should only be necessary when Mercurial suggests it.
2179 operation. It should only be necessary when Mercurial suggests it.
2179 """
2180 """
2180 if repo.recover():
2181 if repo.recover():
2181 return hg.verify(repo)
2182 return hg.verify(repo)
2182 return 1
2183 return 1
2183
2184
2184 def remove(ui, repo, *pats, **opts):
2185 def remove(ui, repo, *pats, **opts):
2185 """remove the specified files on the next commit
2186 """remove the specified files on the next commit
2186
2187
2187 Schedule the indicated files for removal from the repository.
2188 Schedule the indicated files for removal from the repository.
2188
2189
2189 This only removes files from the current branch, not from the
2190 This only removes files from the current branch, not from the
2190 entire project history. If the files still exist in the working
2191 entire project history. If the files still exist in the working
2191 directory, they will be deleted from it. If invoked with --after,
2192 directory, they will be deleted from it. If invoked with --after,
2192 files are marked as removed, but not actually unlinked unless --force
2193 files are marked as removed, but not actually unlinked unless --force
2193 is also given. Without exact file names, --after will only mark
2194 is also given. Without exact file names, --after will only mark
2194 files as removed if they are no longer in the working directory.
2195 files as removed if they are no longer in the working directory.
2195
2196
2196 This command schedules the files to be removed at the next commit.
2197 This command schedules the files to be removed at the next commit.
2197 To undo a remove before that, see hg revert.
2198 To undo a remove before that, see hg revert.
2198
2199
2199 Modified files and added files are not removed by default. To
2200 Modified files and added files are not removed by default. To
2200 remove them, use the -f/--force option.
2201 remove them, use the -f/--force option.
2201 """
2202 """
2202 names = []
2203 names = []
2203 if not opts['after'] and not pats:
2204 if not opts['after'] and not pats:
2204 raise util.Abort(_('no files specified'))
2205 raise util.Abort(_('no files specified'))
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 exact = dict.fromkeys(files)
2207 exact = dict.fromkeys(files)
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 modified, added, removed, deleted, unknown = mardu
2209 modified, added, removed, deleted, unknown = mardu
2209 remove, forget = [], []
2210 remove, forget = [], []
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 reason = None
2212 reason = None
2212 if abs in modified and not opts['force']:
2213 if abs in modified and not opts['force']:
2213 reason = _('is modified (use -f to force removal)')
2214 reason = _('is modified (use -f to force removal)')
2214 elif abs in added:
2215 elif abs in added:
2215 if opts['force']:
2216 if opts['force']:
2216 forget.append(abs)
2217 forget.append(abs)
2217 continue
2218 continue
2218 reason = _('has been marked for add (use -f to force removal)')
2219 reason = _('has been marked for add (use -f to force removal)')
2219 elif repo.dirstate.state(abs) == '?':
2220 elif abs not in repo.dirstate:
2220 reason = _('is not managed')
2221 reason = _('is not managed')
2221 elif opts['after'] and not exact and abs not in deleted:
2222 elif opts['after'] and not exact and abs not in deleted:
2222 continue
2223 continue
2223 elif abs in removed:
2224 elif abs in removed:
2224 continue
2225 continue
2225 if reason:
2226 if reason:
2226 if exact:
2227 if exact:
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 else:
2229 else:
2229 if ui.verbose or not exact:
2230 if ui.verbose or not exact:
2230 ui.status(_('removing %s\n') % rel)
2231 ui.status(_('removing %s\n') % rel)
2231 remove.append(abs)
2232 remove.append(abs)
2232 repo.forget(forget)
2233 repo.forget(forget)
2233 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2234 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2234
2235
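A sketch of the status-based classification remove() performs before deciding whether a file may be dropped; the file name is hypothetical, and repo.status() is sliced to the same five lists used above.

from mercurial import ui, hg

u = ui.ui()
repo = hg.repository(u, '.')
mardu = map(dict.fromkeys, repo.status()[:5])
modified, added, removed, deleted, unknown = mardu
f = 'obsolete_helper.py'               # hypothetical file name
if f in modified:
    u.warn("not removing %s: file is modified (use -f to force removal)\n" % f)
elif f not in repo.dirstate:
    u.warn("not removing %s: file is not managed\n" % f)
else:
    u.status("removing %s\n" % f)
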
2235 def rename(ui, repo, *pats, **opts):
2236 def rename(ui, repo, *pats, **opts):
2236 """rename files; equivalent of copy + remove
2237 """rename files; equivalent of copy + remove
2237
2238
2238 Mark dest as copies of sources; mark sources for deletion. If
2239 Mark dest as copies of sources; mark sources for deletion. If
2239 dest is a directory, copies are put in that directory. If dest is
2240 dest is a directory, copies are put in that directory. If dest is
2240 a file, there can only be one source.
2241 a file, there can only be one source.
2241
2242
2242 By default, this command copies the contents of files as they
2243 By default, this command copies the contents of files as they
2243 stand in the working directory. If invoked with --after, the
2244 stand in the working directory. If invoked with --after, the
2244 operation is recorded, but no copying is performed.
2245 operation is recorded, but no copying is performed.
2245
2246
2246 This command takes effect in the next commit. To undo a rename
2247 This command takes effect in the next commit. To undo a rename
2247 before that, see hg revert.
2248 before that, see hg revert.
2248 """
2249 """
2249 wlock = repo.wlock(0)
2250 wlock = repo.wlock(0)
2250 errs, copied = docopy(ui, repo, pats, opts, wlock)
2251 errs, copied = docopy(ui, repo, pats, opts, wlock)
2251 names = []
2252 names = []
2252 for abs, rel, exact in copied:
2253 for abs, rel, exact in copied:
2253 if ui.verbose or not exact:
2254 if ui.verbose or not exact:
2254 ui.status(_('removing %s\n') % rel)
2255 ui.status(_('removing %s\n') % rel)
2255 names.append(abs)
2256 names.append(abs)
2256 if not opts.get('dry_run'):
2257 if not opts.get('dry_run'):
2257 repo.remove(names, True, wlock=wlock)
2258 repo.remove(names, True, wlock=wlock)
2258 return errs
2259 return errs
2259
2260
2260 def revert(ui, repo, *pats, **opts):
2261 def revert(ui, repo, *pats, **opts):
2261 """revert files or dirs to their states as of some revision
2262 """revert files or dirs to their states as of some revision
2262
2263
2263 With no revision specified, revert the named files or directories
2264 With no revision specified, revert the named files or directories
2264 to the contents they had in the parent of the working directory.
2265 to the contents they had in the parent of the working directory.
2265 This restores the contents of the affected files to an unmodified
2266 This restores the contents of the affected files to an unmodified
2266 state and unschedules adds, removes, copies, and renames. If the
2267 state and unschedules adds, removes, copies, and renames. If the
2267 working directory has two parents, you must explicitly specify the
2268 working directory has two parents, you must explicitly specify the
2268 revision to revert to.
2269 revision to revert to.
2269
2270
2270 Modified files are saved with a .orig suffix before reverting.
2271 Modified files are saved with a .orig suffix before reverting.
2271 To disable these backups, use --no-backup.
2272 To disable these backups, use --no-backup.
2272
2273
2273 Using the -r option, revert the given files or directories to their
2274 Using the -r option, revert the given files or directories to their
2274 contents as of a specific revision. This can be helpful to "roll
2275 contents as of a specific revision. This can be helpful to "roll
2275 back" some or all of a change that should not have been committed.
2276 back" some or all of a change that should not have been committed.
2276
2277
2277 Revert modifies the working directory. It does not commit any
2278 Revert modifies the working directory. It does not commit any
2278 changes, or change the parent of the working directory. If you
2279 changes, or change the parent of the working directory. If you
2279 revert to a revision other than the parent of the working
2280 revert to a revision other than the parent of the working
2280 directory, the reverted files will thus appear modified
2281 directory, the reverted files will thus appear modified
2281 afterwards.
2282 afterwards.
2282
2283
2283 If a file has been deleted, it is restored. If the executable
2284 If a file has been deleted, it is restored. If the executable
2284 mode of a file was changed, it is reset.
2285 mode of a file was changed, it is reset.
2285
2286
2286 If names are given, all files matching the names are reverted.
2287 If names are given, all files matching the names are reverted.
2287
2288
2288 If no arguments are given, no files are reverted.
2289 If no arguments are given, no files are reverted.
2289 """
2290 """
2290
2291
2291 if opts["date"]:
2292 if opts["date"]:
2292 if opts["rev"]:
2293 if opts["rev"]:
2293 raise util.Abort(_("you can't specify a revision and a date"))
2294 raise util.Abort(_("you can't specify a revision and a date"))
2294 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2295 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2295
2296
2296 if not pats and not opts['all']:
2297 if not pats and not opts['all']:
2297 raise util.Abort(_('no files or directories specified; '
2298 raise util.Abort(_('no files or directories specified; '
2298 'use --all to revert the whole repo'))
2299 'use --all to revert the whole repo'))
2299
2300
2300 parent, p2 = repo.dirstate.parents()
2301 parent, p2 = repo.dirstate.parents()
2301 if not opts['rev'] and p2 != nullid:
2302 if not opts['rev'] and p2 != nullid:
2302 raise util.Abort(_('uncommitted merge - please provide a '
2303 raise util.Abort(_('uncommitted merge - please provide a '
2303 'specific revision'))
2304 'specific revision'))
2304 ctx = repo.changectx(opts['rev'])
2305 ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        target = repo.wjoin(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and util.lexists(target):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(target, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    if not opts.get('dry_run'):
        for f in forget[0]:
            repo.dirstate.forget(f)
        r = hg.revert(repo, node, update.has_key, wlock)
        for f in add[0]:
            repo.dirstate.add(f)
        for f in undelete[0]:
            repo.dirstate.normal(f)
        for f in remove[0]:
            repo.dirstate.remove(f)
        return r

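# Editor's illustration -- not part of commands.py.  A minimal, self-contained sketch of
# the dispatch-table idiom used by revert() above: each row pairs a bucket of file states
# with the action to take when the file is (or is not) present in the target manifest.
# The file names, buckets and manifest below are made-up sample data.
def _revert_dispatch_sketch():
    modified = {'hacked.py': 1}
    added = {'new.txt': 1}
    unknown = {'scratch.txt': 1}
    manifest = {'hacked.py': 'deadbeef'}      # pretend target manifest
    disptable = (
        # (state bucket, action if in manifest, action if not in manifest)
        (modified, 'revert', 'remove'),
        (added, 'revert', 'forget'),
        (unknown, 'add', None),
    )
    actions = []
    for path in ('hacked.py', 'new.txt', 'scratch.txt'):
        for bucket, hit, miss in disptable:
            if path not in bucket:
                continue
            if path in manifest:
                if hit is not None:
                    actions.append((hit, path))
            elif miss is not None:
                actions.append((miss, path))
            break
    return actions    # [('revert', 'hacked.py'), ('forget', 'new.txt')]
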
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

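# Editor's illustration -- not part of commands.py.  A hedged sketch of driving the same
# rollback through the repository API this command wraps; the path "/tmp/demo-repo" is an
# assumption and the function is never called from this module.
def _rollback_sketch():
    from mercurial import ui as _ui, hg as _hg
    repo = _hg.repository(_ui.ui(), "/tmp/demo-repo")
    repo.rollback()    # undo the most recent commit/import/pull/push/unbundle
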
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        if repo is None:
            raise hg.RepoError(_("There is no Mercurial repository here"
                                 " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    parentui = ui.parentui or ui
    optlist = ("name templates style address port ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if repo.ui != parentui:
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise hg.RepoError(_("There is no Mercurial repository here"
                             " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            try:
                self.httpd = hgweb.server.create_server(parentui, repo)
            except socket.error, inst:
                raise util.Abort(_('cannot start server: ') + inst.args[1])

            if not ui.verbose: return

            if self.httpd.port != 80:
                ui.status(_('listening at http://%s:%d/\n') %
                          (self.httpd.addr, self.httpd.port))
            else:
                ui.status(_('listening at http://%s/\n') % self.httpd.addr)

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)

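# Editor's illustration -- not part of commands.py.  cmdutil.service() above is handed two
# callables, initfn and runfn; the toy object below has the same shape (init binds the
# resource, run loops), with a simple counter standing in for a real HTTP server.
class _demo_service(object):
    def init(self):
        self.remaining = 3        # stand-in for creating/binding the server
    def run(self):
        while self.remaining:     # stand-in for serve_forever()
            self.remaining -= 1
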
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = [
        n for n in repo.status(node1=node1, node2=node2, files=files,
                               match=matchfn,
                               list_ignored=all or opts['ignored'],
                               list_clean=all or opts['clean'])]

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    for opt, char, changes in ([ct for ct in explicit_changetypes
                                if all or opts[ct[0]]]
                               or changetypes):
        if opts['no_status']:
            format = "%%s%s" % end
        else:
            format = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(format % repo.pathto(f, cwd))
            if ((all or opts.get('copies')) and not opts.get('no_status')):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))

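# Editor's illustration -- not part of commands.py.  Shows how the changetypes tuples
# above turn status buckets into the one-letter lines documented in the docstring; the
# buckets here are sample data rather than the output of repo.status().
def _status_format_sketch():
    sample = (('modified', 'M', ['commands.py']),
              ('added', 'A', ['newfile.txt']),
              ('unknown', '?', ['scratch.txt']))
    lines = []
    for name, char, files in sample:
        for f in files:
            lines.append("%s %s" % (char, f))
    return lines    # ['M commands.py', 'A newfile.txt', '? scratch.txt']
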
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        if not name in repo.tags():
            raise util.Abort(_('tag %s does not exist') % name)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])

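# Editor's illustration -- not part of commands.py.  A hedged sketch of the repo.tag()
# call made above, roughly what "hg tag -r 0 v1.0" ends up doing; the repository path,
# tag name and the plain False/None arguments (local, user, date) are assumptions.
def _tag_sketch():
    from mercurial import ui as _ui, hg as _hg
    repo = _hg.repository(_ui.ui(), "/tmp/demo-repo")
    node = repo.changectx("0").node()
    repo.tag("v1.0", node, "Added tag v1.0 for changeset 0", False, None, None)
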
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in l:
        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
        except revlog.LookupError:
            r = " ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))

def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())

def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames
    result = None
    wasempty = repo.changelog.count() == 0
    for fname in fnames:
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    return postincoming(ui, repo, modheads, opts['update'], wasempty)

def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)

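# Editor's illustration -- not part of commands.py.  The two code paths above, driven
# through the API; the repository path is an assumption, and passing None mirrors the
# command-line default of updating to the tip of the current branch.
def _update_sketch():
    from mercurial import ui as _ui, hg as _hg
    repo = _hg.repository(_ui.ui(), "/tmp/demo-repo")
    _hg.update(repo, None)   # plain update; refuses to discard local changes
    _hg.clean(repo, None)    # like "hg update -C": overwrites local modifications
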
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    ui.status(_(
        "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

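# Editor's illustration -- not part of commands.py.  Every option above is a
# (short flag, long flag, default, help) tuple: a None default makes a boolean switch,
# '' a string-valued option, [] a repeatable option, and an int a numeric option.  A
# hypothetical extra list would follow the same shape:
exampleopts = [
    ('k', 'keep', None, _('keep working files (illustrative only, never registered)')),
]
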
table = {
    "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
    "addremove":
        (addremove,
         [('s', 'similarity', '',
           _('guess renamed files by similarity (0<=s<=100)')),
         ] + walkopts + dryrunopts,
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('f', 'follow', None, _('follow file copies and renames')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('l', 'line-number', None,
           _('show line number at the first appearance'))
         ] + walkopts,
         _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
         ] + walkopts,
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ('r', 'rev', '', _('revision to backout')),
         ] + walkopts + commitopts,
         _('hg backout [OPTION]... [-r] REV')),
    "branch":
        (branch,
         [('f', 'force', None,
           _('set branch name even if it shadows an existing branch'))],
         _('hg branch [NAME]')),
    "branches":
        (branches,
         [('a', 'active', False,
           _('show only branches that have unmerged heads'))],
         _('hg branches [-a]')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [],
           _('a changeset you would like to bundle')),
          ('', 'base', [],
           _('a base changeset to specify instead of a destination')),
         ] + remoteopts,
         _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
         ] + walkopts,
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
         ] + remoteopts,
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
         ] + walkopts + commitopts,
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
         ] + walkopts + dryrunopts,
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debuginstall": (debuginstall, [], _('debuginstall')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdate":
        (debugdate,
         [('e', 'extended', None, _('try extended date formats'))],
         _('debugdate [-e] DATE [RANGE]')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename":
        (debugrename,
         [('r', 'rev', '', _('revision to debug'))],
         _('debugrename [-r REV] FILE')),
    "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
         ] + walkopts,
         _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('g', 'git', None, _('use git extended diff format')),
          ('', 'nodates', None, _("don't include dates in diff headers")),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
         ] + walkopts,
         _('hg grep [OPTION]... PATTERN [FILE]...')),
2929 "heads":
2930 "heads":
2930 (heads,
2931 (heads,
2931 [('', 'style', '', _('display using template map file')),
2932 [('', 'style', '', _('display using template map file')),
2932 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2933 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2933 ('', 'template', '', _('display with template'))],
2934 ('', 'template', '', _('display with template'))],
2934 _('hg heads [-r REV] [REV]...')),
2935 _('hg heads [-r REV] [REV]...')),
2935 "help": (help_, [], _('hg help [COMMAND]')),
2936 "help": (help_, [], _('hg help [COMMAND]')),
2936 "identify|id":
2937 "identify|id":
2937 (identify,
2938 (identify,
2938 [('r', 'rev', '', _('identify the specified rev')),
2939 [('r', 'rev', '', _('identify the specified rev')),
2939 ('n', 'num', None, _('show local revision number')),
2940 ('n', 'num', None, _('show local revision number')),
2940 ('i', 'id', None, _('show global revision id')),
2941 ('i', 'id', None, _('show global revision id')),
2941 ('b', 'branch', None, _('show branch')),
2942 ('b', 'branch', None, _('show branch')),
2942 ('t', 'tags', None, _('show tags'))],
2943 ('t', 'tags', None, _('show tags'))],
2943 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2944 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2944 "import|patch":
2945 "import|patch":
2945 (import_,
2946 (import_,
2946 [('p', 'strip', 1,
2947 [('p', 'strip', 1,
2947 _('directory strip option for patch. This has the same\n'
2948 _('directory strip option for patch. This has the same\n'
2948 'meaning as the corresponding patch option')),
2949 'meaning as the corresponding patch option')),
2949 ('b', 'base', '', _('base path')),
2950 ('b', 'base', '', _('base path')),
2950 ('f', 'force', None,
2951 ('f', 'force', None,
2951 _('skip check for outstanding uncommitted changes')),
2952 _('skip check for outstanding uncommitted changes')),
2952 ('', 'exact', None,
2953 ('', 'exact', None,
2953 _('apply patch to the nodes from which it was generated')),
2954 _('apply patch to the nodes from which it was generated')),
2954 ('', 'import-branch', None,
2955 ('', 'import-branch', None,
2955 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2956 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2956 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2957 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2957 "incoming|in": (incoming,
2958 "incoming|in": (incoming,
2958 [('M', 'no-merges', None, _('do not show merges')),
2959 [('M', 'no-merges', None, _('do not show merges')),
2959 ('f', 'force', None,
2960 ('f', 'force', None,
2960 _('run even when remote repository is unrelated')),
2961 _('run even when remote repository is unrelated')),
2961 ('', 'style', '', _('display using template map file')),
2962 ('', 'style', '', _('display using template map file')),
2962 ('n', 'newest-first', None, _('show newest record first')),
2963 ('n', 'newest-first', None, _('show newest record first')),
2963 ('', 'bundle', '', _('file to store the bundles into')),
2964 ('', 'bundle', '', _('file to store the bundles into')),
2964 ('p', 'patch', None, _('show patch')),
2965 ('p', 'patch', None, _('show patch')),
2965 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2966 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2966 ('', 'template', '', _('display with template')),
2967 ('', 'template', '', _('display with template')),
2967 ] + remoteopts,
2968 ] + remoteopts,
2968 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2969 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2969 ' [--bundle FILENAME] [SOURCE]')),
2970 ' [--bundle FILENAME] [SOURCE]')),
2970 "^init":
2971 "^init":
2971 (init,
2972 (init,
2972 remoteopts,
2973 remoteopts,
2973 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2974 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2974 "locate":
2975 "locate":
2975 (locate,
2976 (locate,
2976 [('r', 'rev', '', _('search the repository as it stood at rev')),
2977 [('r', 'rev', '', _('search the repository as it stood at rev')),
2977 ('0', 'print0', None,
2978 ('0', 'print0', None,
2978 _('end filenames with NUL, for use with xargs')),
2979 _('end filenames with NUL, for use with xargs')),
2979 ('f', 'fullpath', None,
2980 ('f', 'fullpath', None,
2980 _('print complete paths from the filesystem root')),
2981 _('print complete paths from the filesystem root')),
2981 ] + walkopts,
2982 ] + walkopts,
2982 _('hg locate [OPTION]... [PATTERN]...')),
2983 _('hg locate [OPTION]... [PATTERN]...')),
2983 "^log|history":
2984 "^log|history":
2984 (log,
2985 (log,
2985 [('f', 'follow', None,
2986 [('f', 'follow', None,
2986 _('follow changeset history, or file history across copies and renames')),
2987 _('follow changeset history, or file history across copies and renames')),
2987 ('', 'follow-first', None,
2988 ('', 'follow-first', None,
2988 _('only follow the first parent of merge changesets')),
2989 _('only follow the first parent of merge changesets')),
2989 ('d', 'date', '', _('show revs matching date spec')),
2990 ('d', 'date', '', _('show revs matching date spec')),
2990 ('C', 'copies', None, _('show copied files')),
2991 ('C', 'copies', None, _('show copied files')),
2991 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2992 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2992 ('l', 'limit', '', _('limit number of changes displayed')),
2993 ('l', 'limit', '', _('limit number of changes displayed')),
2993 ('r', 'rev', [], _('show the specified revision or range')),
2994 ('r', 'rev', [], _('show the specified revision or range')),
2994 ('', 'removed', None, _('include revs where files were removed')),
2995 ('', 'removed', None, _('include revs where files were removed')),
2995 ('M', 'no-merges', None, _('do not show merges')),
2996 ('M', 'no-merges', None, _('do not show merges')),
2996 ('', 'style', '', _('display using template map file')),
2997 ('', 'style', '', _('display using template map file')),
2997 ('m', 'only-merges', None, _('show only merges')),
2998 ('m', 'only-merges', None, _('show only merges')),
2998 ('p', 'patch', None, _('show patch')),
2999 ('p', 'patch', None, _('show patch')),
2999 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3000 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3000 ('', 'template', '', _('display with template')),
3001 ('', 'template', '', _('display with template')),
3001 ] + walkopts,
3002 ] + walkopts,
3002 _('hg log [OPTION]... [FILE]')),
3003 _('hg log [OPTION]... [FILE]')),
3003 "manifest": (manifest, [], _('hg manifest [REV]')),
3004 "manifest": (manifest, [], _('hg manifest [REV]')),
3004 "^merge":
3005 "^merge":
3005 (merge,
3006 (merge,
3006 [('f', 'force', None, _('force a merge with outstanding changes')),
3007 [('f', 'force', None, _('force a merge with outstanding changes')),
3007 ('r', 'rev', '', _('revision to merge')),
3008 ('r', 'rev', '', _('revision to merge')),
3008 ],
3009 ],
3009 _('hg merge [-f] [[-r] REV]')),
3010 _('hg merge [-f] [[-r] REV]')),
3010 "outgoing|out": (outgoing,
3011 "outgoing|out": (outgoing,
3011 [('M', 'no-merges', None, _('do not show merges')),
3012 [('M', 'no-merges', None, _('do not show merges')),
3012 ('f', 'force', None,
3013 ('f', 'force', None,
3013 _('run even when remote repository is unrelated')),
3014 _('run even when remote repository is unrelated')),
3014 ('p', 'patch', None, _('show patch')),
3015 ('p', 'patch', None, _('show patch')),
3015 ('', 'style', '', _('display using template map file')),
3016 ('', 'style', '', _('display using template map file')),
3016 ('r', 'rev', [], _('a specific revision you would like to push')),
3017 ('r', 'rev', [], _('a specific revision you would like to push')),
3017 ('n', 'newest-first', None, _('show newest record first')),
3018 ('n', 'newest-first', None, _('show newest record first')),
3018 ('', 'template', '', _('display with template')),
3019 ('', 'template', '', _('display with template')),
3019 ] + remoteopts,
3020 ] + remoteopts,
3020 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3021 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3021 "^parents":
3022 "^parents":
3022 (parents,
3023 (parents,
3023 [('r', 'rev', '', _('show parents from the specified rev')),
3024 [('r', 'rev', '', _('show parents from the specified rev')),
3024 ('', 'style', '', _('display using template map file')),
3025 ('', 'style', '', _('display using template map file')),
3025 ('', 'template', '', _('display with template'))],
3026 ('', 'template', '', _('display with template'))],
3026 _('hg parents [-r REV] [FILE]')),
3027 _('hg parents [-r REV] [FILE]')),
3027 "paths": (paths, [], _('hg paths [NAME]')),
3028 "paths": (paths, [], _('hg paths [NAME]')),
3028 "^pull":
3029 "^pull":
3029 (pull,
3030 (pull,
3030 [('u', 'update', None,
3031 [('u', 'update', None,
3031 _('update to new tip if changesets were pulled')),
3032 _('update to new tip if changesets were pulled')),
3032 ('f', 'force', None,
3033 ('f', 'force', None,
3033 _('run even when remote repository is unrelated')),
3034 _('run even when remote repository is unrelated')),
3034 ('r', 'rev', [],
3035 ('r', 'rev', [],
3035 _('a specific revision up to which you would like to pull')),
3036 _('a specific revision up to which you would like to pull')),
3036 ] + remoteopts,
3037 ] + remoteopts,
3037 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3038 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3038 "^push":
3039 "^push":
3039 (push,
3040 (push,
3040 [('f', 'force', None, _('force push')),
3041 [('f', 'force', None, _('force push')),
3041 ('r', 'rev', [], _('a specific revision you would like to push')),
3042 ('r', 'rev', [], _('a specific revision you would like to push')),
3042 ] + remoteopts,
3043 ] + remoteopts,
3043 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3044 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3044 "debugrawcommit|rawcommit":
3045 "debugrawcommit|rawcommit":
3045 (rawcommit,
3046 (rawcommit,
3046 [('p', 'parent', [], _('parent')),
3047 [('p', 'parent', [], _('parent')),
3047 ('d', 'date', '', _('date code')),
3048 ('d', 'date', '', _('date code')),
3048 ('u', 'user', '', _('user')),
3049 ('u', 'user', '', _('user')),
3049 ('F', 'files', '', _('file list'))
3050 ('F', 'files', '', _('file list'))
3050 ] + commitopts,
3051 ] + commitopts,
3051 _('hg debugrawcommit [OPTION]... [FILE]...')),
3052 _('hg debugrawcommit [OPTION]... [FILE]...')),
3052 "recover": (recover, [], _('hg recover')),
3053 "recover": (recover, [], _('hg recover')),
3053 "^remove|rm":
3054 "^remove|rm":
3054 (remove,
3055 (remove,
3055 [('A', 'after', None, _('record remove that has already occurred')),
3056 [('A', 'after', None, _('record remove that has already occurred')),
3056 ('f', 'force', None, _('remove file even if modified')),
3057 ('f', 'force', None, _('remove file even if modified')),
3057 ] + walkopts,
3058 ] + walkopts,
3058 _('hg remove [OPTION]... FILE...')),
3059 _('hg remove [OPTION]... FILE...')),
3059 "rename|mv":
3060 "rename|mv":
3060 (rename,
3061 (rename,
3061 [('A', 'after', None, _('record a rename that has already occurred')),
3062 [('A', 'after', None, _('record a rename that has already occurred')),
3062 ('f', 'force', None,
3063 ('f', 'force', None,
3063 _('forcibly copy over an existing managed file')),
3064 _('forcibly copy over an existing managed file')),
3064 ] + walkopts + dryrunopts,
3065 ] + walkopts + dryrunopts,
3065 _('hg rename [OPTION]... SOURCE... DEST')),
3066 _('hg rename [OPTION]... SOURCE... DEST')),
3066 "^revert":
3067 "^revert":
3067 (revert,
3068 (revert,
3068 [('a', 'all', None, _('revert all changes when no arguments given')),
3069 [('a', 'all', None, _('revert all changes when no arguments given')),
3069 ('d', 'date', '', _('tipmost revision matching date')),
3070 ('d', 'date', '', _('tipmost revision matching date')),
3070 ('r', 'rev', '', _('revision to revert to')),
3071 ('r', 'rev', '', _('revision to revert to')),
3071 ('', 'no-backup', None, _('do not save backup copies of files')),
3072 ('', 'no-backup', None, _('do not save backup copies of files')),
3072 ] + walkopts + dryrunopts,
3073 ] + walkopts + dryrunopts,
3073 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3074 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3074 "rollback": (rollback, [], _('hg rollback')),
3075 "rollback": (rollback, [], _('hg rollback')),
3075 "root": (root, [], _('hg root')),
3076 "root": (root, [], _('hg root')),
3076 "showconfig|debugconfig":
3077 "showconfig|debugconfig":
3077 (showconfig,
3078 (showconfig,
3078 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3079 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3079 _('showconfig [-u] [NAME]...')),
3080 _('showconfig [-u] [NAME]...')),
3080 "^serve":
3081 "^serve":
3081 (serve,
3082 (serve,
3082 [('A', 'accesslog', '', _('name of access log file to write to')),
3083 [('A', 'accesslog', '', _('name of access log file to write to')),
3083 ('d', 'daemon', None, _('run server in background')),
3084 ('d', 'daemon', None, _('run server in background')),
3084 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3085 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3085 ('E', 'errorlog', '', _('name of error log file to write to')),
3086 ('E', 'errorlog', '', _('name of error log file to write to')),
3086 ('p', 'port', 0, _('port to use (default: 8000)')),
3087 ('p', 'port', 0, _('port to use (default: 8000)')),
3087 ('a', 'address', '', _('address to use')),
3088 ('a', 'address', '', _('address to use')),
3088 ('n', 'name', '',
3089 ('n', 'name', '',
3089 _('name to show in web pages (default: working dir)')),
3090 _('name to show in web pages (default: working dir)')),
3090 ('', 'webdir-conf', '', _('name of the webdir config file'
3091 ('', 'webdir-conf', '', _('name of the webdir config file'
3091 ' (serve more than one repo)')),
3092 ' (serve more than one repo)')),
3092 ('', 'pid-file', '', _('name of file to write process ID to')),
3093 ('', 'pid-file', '', _('name of file to write process ID to')),
3093 ('', 'stdio', None, _('for remote clients')),
3094 ('', 'stdio', None, _('for remote clients')),
3094 ('t', 'templates', '', _('web templates to use')),
3095 ('t', 'templates', '', _('web templates to use')),
3095 ('', 'style', '', _('template style to use')),
3096 ('', 'style', '', _('template style to use')),
3096 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3097 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3097 ('', 'certificate', '', _('SSL certificate file'))],
3098 ('', 'certificate', '', _('SSL certificate file'))],
3098 _('hg serve [OPTION]...')),
3099 _('hg serve [OPTION]...')),
3099 "^status|st":
3100 "^status|st":
3100 (status,
3101 (status,
3101 [('A', 'all', None, _('show status of all files')),
3102 [('A', 'all', None, _('show status of all files')),
3102 ('m', 'modified', None, _('show only modified files')),
3103 ('m', 'modified', None, _('show only modified files')),
3103 ('a', 'added', None, _('show only added files')),
3104 ('a', 'added', None, _('show only added files')),
3104 ('r', 'removed', None, _('show only removed files')),
3105 ('r', 'removed', None, _('show only removed files')),
3105 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3106 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3106 ('c', 'clean', None, _('show only files without changes')),
3107 ('c', 'clean', None, _('show only files without changes')),
3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3108 ('i', 'ignored', None, _('show only ignored files')),
3109 ('i', 'ignored', None, _('show only ignored files')),
3109 ('n', 'no-status', None, _('hide status prefix')),
3110 ('n', 'no-status', None, _('hide status prefix')),
3110 ('C', 'copies', None, _('show source of copied files')),
3111 ('C', 'copies', None, _('show source of copied files')),
3111 ('0', 'print0', None,
3112 ('0', 'print0', None,
3112 _('end filenames with NUL, for use with xargs')),
3113 _('end filenames with NUL, for use with xargs')),
3113 ('', 'rev', [], _('show difference from revision')),
3114 ('', 'rev', [], _('show difference from revision')),
3114 ] + walkopts,
3115 ] + walkopts,
3115 _('hg status [OPTION]... [FILE]...')),
3116 _('hg status [OPTION]... [FILE]...')),
3116 "tag":
3117 "tag":
3117 (tag,
3118 (tag,
3118 [('f', 'force', None, _('replace existing tag')),
3119 [('f', 'force', None, _('replace existing tag')),
3119 ('l', 'local', None, _('make the tag local')),
3120 ('l', 'local', None, _('make the tag local')),
3120 ('m', 'message', '', _('message for tag commit log entry')),
3121 ('m', 'message', '', _('message for tag commit log entry')),
3121 ('d', 'date', '', _('record datecode as commit date')),
3122 ('d', 'date', '', _('record datecode as commit date')),
3122 ('u', 'user', '', _('record user as committer')),
3123 ('u', 'user', '', _('record user as committer')),
3123 ('r', 'rev', '', _('revision to tag')),
3124 ('r', 'rev', '', _('revision to tag')),
3124 ('', 'remove', None, _('remove a tag'))],
3125 ('', 'remove', None, _('remove a tag'))],
3125 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3126 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3126 "tags": (tags, [], _('hg tags')),
3127 "tags": (tags, [], _('hg tags')),
3127 "tip":
3128 "tip":
3128 (tip,
3129 (tip,
3129 [('', 'style', '', _('display using template map file')),
3130 [('', 'style', '', _('display using template map file')),
3130 ('p', 'patch', None, _('show patch')),
3131 ('p', 'patch', None, _('show patch')),
3131 ('', 'template', '', _('display with template'))],
3132 ('', 'template', '', _('display with template'))],
3132 _('hg tip [-p]')),
3133 _('hg tip [-p]')),
3133 "unbundle":
3134 "unbundle":
3134 (unbundle,
3135 (unbundle,
3135 [('u', 'update', None,
3136 [('u', 'update', None,
3136 _('update to new tip if changesets were unbundled'))],
3137 _('update to new tip if changesets were unbundled'))],
3137 _('hg unbundle [-u] FILE...')),
3138 _('hg unbundle [-u] FILE...')),
3138 "^update|up|checkout|co":
3139 "^update|up|checkout|co":
3139 (update,
3140 (update,
3140 [('C', 'clean', None, _('overwrite locally modified files')),
3141 [('C', 'clean', None, _('overwrite locally modified files')),
3141 ('d', 'date', '', _('tipmost revision matching date')),
3142 ('d', 'date', '', _('tipmost revision matching date')),
3142 ('r', 'rev', '', _('revision'))],
3143 ('r', 'rev', '', _('revision'))],
3143 _('hg update [-C] [-d DATE] [[-r] REV]')),
3144 _('hg update [-C] [-d DATE] [[-r] REV]')),
3144 "verify": (verify, [], _('hg verify')),
3145 "verify": (verify, [], _('hg verify')),
3145 "version": (version_, [], _('hg version')),
3146 "version": (version_, [], _('hg version')),
3146 }
3147 }
3147
3148
3148 extensions.commandtable = table
3149 extensions.commandtable = table
3149
3150
3150 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3151 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3151 " debugindex debugindexdot debugdate debuginstall")
3152 " debugindex debugindexdot debugdate debuginstall")
3152 optionalrepo = ("paths serve showconfig")
3153 optionalrepo = ("paths serve showconfig")
3153
3154
3154 def dispatch(args, argv0=None):
3155 def dispatch(args, argv0=None):
3155 try:
3156 try:
3156 u = ui.ui(traceback='--traceback' in args)
3157 u = ui.ui(traceback='--traceback' in args)
3157 except util.Abort, inst:
3158 except util.Abort, inst:
3158 sys.stderr.write(_("abort: %s\n") % inst)
3159 sys.stderr.write(_("abort: %s\n") % inst)
3159 return -1
3160 return -1
3160 return cmdutil.runcatch(u, args, argv0=argv0)
3161 return cmdutil.runcatch(u, args, argv0=argv0)
3161
3162
3162 def run():
3163 def run():
3163 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
3164 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
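dispatch() is the library entry point behind the hg script: it builds a ui object, hands the argument list to cmdutil.runcatch and returns the exit status instead of exiting. A minimal sketch of driving it directly, assuming this module is importable as mercurial.commands in this era's package layout:

from mercurial import commands

# same as running "hg version" from the shell; dispatch() returns the
# exit status (0 or None on success) rather than calling sys.exit()
ret = commands.dispatch(['version'])
print ret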
@@ -1,507 +1,502 @@
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import _
11 from i18n import _
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 import cStringIO
13 import cStringIO
14
14
15 _unknown = ('?', 0, 0, 0)
15 _unknown = ('?', 0, 0, 0)
16 _format = ">cllll"
16 _format = ">cllll"
17
17
18 class dirstate(object):
18 class dirstate(object):
19
19
20 def __init__(self, opener, ui, root):
20 def __init__(self, opener, ui, root):
21 self._opener = opener
21 self._opener = opener
22 self._root = root
22 self._root = root
23 self._dirty = False
23 self._dirty = False
24 self._ui = ui
24 self._ui = ui
25
25
26 def __getattr__(self, name):
26 def __getattr__(self, name):
27 if name == '_map':
27 if name == '_map':
28 self._read()
28 self._read()
29 return self._map
29 return self._map
30 elif name == '_copymap':
30 elif name == '_copymap':
31 self._read()
31 self._read()
32 return self._copymap
32 return self._copymap
33 elif name == '_branch':
33 elif name == '_branch':
34 try:
34 try:
35 self._branch = (self._opener("branch").read().strip()
35 self._branch = (self._opener("branch").read().strip()
36 or "default")
36 or "default")
37 except IOError:
37 except IOError:
38 self._branch = "default"
38 self._branch = "default"
39 return self._branch
39 return self._branch
40 elif name == '_pl':
40 elif name == '_pl':
41 self._pl = [nullid, nullid]
41 self._pl = [nullid, nullid]
42 try:
42 try:
43 st = self._opener("dirstate").read(40)
43 st = self._opener("dirstate").read(40)
44 if len(st) == 40:
44 if len(st) == 40:
45 self._pl = st[:20], st[20:40]
45 self._pl = st[:20], st[20:40]
46 except IOError, err:
46 except IOError, err:
47 if err.errno != errno.ENOENT: raise
47 if err.errno != errno.ENOENT: raise
48 return self._pl
48 return self._pl
49 elif name == '_dirs':
49 elif name == '_dirs':
50 self._dirs = {}
50 self._dirs = {}
51 for f in self._map:
51 for f in self._map:
52 self._incpath(f)
52 self._incpath(f)
53 return self._dirs
53 return self._dirs
54 elif name == '_ignore':
54 elif name == '_ignore':
55 files = [self._join('.hgignore')]
55 files = [self._join('.hgignore')]
56 for name, path in self._ui.configitems("ui"):
56 for name, path in self._ui.configitems("ui"):
57 if name == 'ignore' or name.startswith('ignore.'):
57 if name == 'ignore' or name.startswith('ignore.'):
58 files.append(os.path.expanduser(path))
58 files.append(os.path.expanduser(path))
59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 return self._ignore
60 return self._ignore
61 elif name == '_slash':
61 elif name == '_slash':
62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 return self._slash
63 return self._slash
64 else:
64 else:
65 raise AttributeError, name
65 raise AttributeError, name
66
66
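__getattr__ above works as a lazy-initialization hook: it only fires when normal attribute lookup fails, computes the value once, stores it on the instance, and is never consulted for that name again. A standalone sketch of the same pattern (the attribute name and stand-in value are illustrative only):

class lazy(object):
    def __getattr__(self, name):
        # called only when normal attribute lookup fails
        if name == '_map':
            self._map = {'parsed': True}   # stand-in for the real parse
            return self._map
        raise AttributeError(name)

l = lazy()
print l._map   # first access triggers the computation
print l._map   # now found in l.__dict__, __getattr__ is not called again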
67 def _join(self, f):
67 def _join(self, f):
68 return os.path.join(self._root, f)
68 return os.path.join(self._root, f)
69
69
70 def getcwd(self):
70 def getcwd(self):
71 cwd = os.getcwd()
71 cwd = os.getcwd()
72 if cwd == self._root: return ''
72 if cwd == self._root: return ''
73 # self._root ends with a path separator if self._root is '/' or 'C:\'
73 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 rootsep = self._root
74 rootsep = self._root
75 if not rootsep.endswith(os.sep):
75 if not rootsep.endswith(os.sep):
76 rootsep += os.sep
76 rootsep += os.sep
77 if cwd.startswith(rootsep):
77 if cwd.startswith(rootsep):
78 return cwd[len(rootsep):]
78 return cwd[len(rootsep):]
79 else:
79 else:
80 # we're outside the repo. return an absolute path.
80 # we're outside the repo. return an absolute path.
81 return cwd
81 return cwd
82
82
83 def pathto(self, f, cwd=None):
83 def pathto(self, f, cwd=None):
84 if cwd is None:
84 if cwd is None:
85 cwd = self.getcwd()
85 cwd = self.getcwd()
86 path = util.pathto(self._root, cwd, f)
86 path = util.pathto(self._root, cwd, f)
87 if self._slash:
87 if self._slash:
88 return path.replace(os.sep, '/')
88 return path.replace(os.sep, '/')
89 return path
89 return path
90
90
91 def __del__(self):
92 self.write()
93
94 def __getitem__(self, key):
91 def __getitem__(self, key):
95 return self._map[key]
92 ''' current states:
93 n normal
94 m needs merging
95 r marked for removal
96 a marked for addition
97 ? not tracked'''
98 return self._map.get(key, ("?",))[0]
96
99
97 def __contains__(self, key):
100 def __contains__(self, key):
98 return key in self._map
101 return key in self._map
99
102
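This pair of methods is the point of the changeset: the dirstate now behaves like a read-only mapping, where "f in dirstate" asks whether a file is tracked and dirstate[f] returns its one-letter state, falling back to '?' for unknown files instead of raising KeyError. A hedged usage sketch; the repository path and file name are placeholders:

from mercurial import ui, hg

repo = hg.repository(ui.ui(), '.')         # '.' must be a repository root
if 'setup.py' in repo.dirstate:            # __contains__: tracked at all?
    print repo.dirstate['setup.py']        # __getitem__: 'n', 'm', 'r' or 'a'
else:
    print '?'                              # untracked files report '?'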
100 def __iter__(self):
103 def __iter__(self):
101 a = self._map.keys()
104 a = self._map.keys()
102 a.sort()
105 a.sort()
103 for x in a:
106 for x in a:
104 yield x
107 yield x
105
108
106 def parents(self):
109 def parents(self):
107 return self._pl
110 return self._pl
108
111
109 def branch(self):
112 def branch(self):
110 return self._branch
113 return self._branch
111
114
112 def setparents(self, p1, p2=nullid):
115 def setparents(self, p1, p2=nullid):
113 self._dirty = True
116 self._dirty = True
114 self._pl = p1, p2
117 self._pl = p1, p2
115
118
116 def setbranch(self, branch):
119 def setbranch(self, branch):
117 self._branch = branch
120 self._branch = branch
118 self._opener("branch", "w").write(branch + '\n')
121 self._opener("branch", "w").write(branch + '\n')
119
122
120 def state(self, key):
121 ''' current states:
122 n normal
123 m needs merging
124 r marked for removal
125 a marked for addition'''
126 return self._map.get(key, ("?",))[0]
127
128 def _read(self):
123 def _read(self):
129 self._map = {}
124 self._map = {}
130 self._copymap = {}
125 self._copymap = {}
131 self._pl = [nullid, nullid]
126 self._pl = [nullid, nullid]
132 try:
127 try:
133 st = self._opener("dirstate").read()
128 st = self._opener("dirstate").read()
134 except IOError, err:
129 except IOError, err:
135 if err.errno != errno.ENOENT: raise
130 if err.errno != errno.ENOENT: raise
136 return
131 return
137 if not st:
132 if not st:
138 return
133 return
139
134
140 self._pl = [st[:20], st[20: 40]]
135 self._pl = [st[:20], st[20: 40]]
141
136
142 # deref fields so they will be local in loop
137 # deref fields so they will be local in loop
143 dmap = self._map
138 dmap = self._map
144 copymap = self._copymap
139 copymap = self._copymap
145 unpack = struct.unpack
140 unpack = struct.unpack
146
141
147 pos = 40
142 pos = 40
148 e_size = struct.calcsize(_format)
143 e_size = struct.calcsize(_format)
149
144
150 while pos < len(st):
145 while pos < len(st):
151 newpos = pos + e_size
146 newpos = pos + e_size
152 e = unpack(_format, st[pos:newpos])
147 e = unpack(_format, st[pos:newpos])
153 l = e[4]
148 l = e[4]
154 pos = newpos
149 pos = newpos
155 newpos = pos + l
150 newpos = pos + l
156 f = st[pos:newpos]
151 f = st[pos:newpos]
157 if '\0' in f:
152 if '\0' in f:
158 f, c = f.split('\0')
153 f, c = f.split('\0')
159 copymap[f] = c
154 copymap[f] = c
160 dmap[f] = e[:4]
155 dmap[f] = e[:4]
161 pos = newpos
156 pos = newpos
162
157
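_read() and write() share a simple binary layout: 40 bytes of parent nodes, then one ">cllll" header per entry (state, mode, size, mtime, name length) followed by the file name, with an optional "\0copysource" suffix. A self-contained Python 2 round-trip of a single entry, mirroring the pack/unpack calls above:

import struct

_format = ">cllll"
hlen = struct.calcsize(_format)            # 17 bytes per entry header

name = "a" + "\0" + "b"                    # "a" recorded as copied from "b"
entry = struct.pack(_format, 'n', 0644, 12, 0, len(name)) + name

e = struct.unpack(_format, entry[:hlen])
f = entry[hlen:hlen + e[4]]
print e[:4]                                # ('n', 420, 12, 0)
print f.split('\0')                        # ['a', 'b']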
163 def invalidate(self):
158 def invalidate(self):
164 for a in "_map _copymap _branch _pl _dirs _ignore".split():
159 for a in "_map _copymap _branch _pl _dirs _ignore".split():
165 if hasattr(self, a):
160 if hasattr(self, a):
166 self.__delattr__(a)
161 self.__delattr__(a)
167 self._dirty = False
162 self._dirty = False
168
163
169 def copy(self, source, dest):
164 def copy(self, source, dest):
170 self._dirty = True
165 self._dirty = True
171 self._copymap[dest] = source
166 self._copymap[dest] = source
172
167
173 def copied(self, file):
168 def copied(self, file):
174 return self._copymap.get(file, None)
169 return self._copymap.get(file, None)
175
170
176 def copies(self):
171 def copies(self):
177 return self._copymap
172 return self._copymap
178
173
179 def _incpath(self, path):
174 def _incpath(self, path):
180 for c in strutil.findall(path, '/'):
175 for c in strutil.findall(path, '/'):
181 pc = path[:c]
176 pc = path[:c]
182 self._dirs.setdefault(pc, 0)
177 self._dirs.setdefault(pc, 0)
183 self._dirs[pc] += 1
178 self._dirs[pc] += 1
184
179
185 def _decpath(self, path):
180 def _decpath(self, path):
186 for c in strutil.findall(path, '/'):
181 for c in strutil.findall(path, '/'):
187 pc = path[:c]
182 pc = path[:c]
188 self._dirs.setdefault(pc, 0)
183 self._dirs.setdefault(pc, 0)
189 self._dirs[pc] -= 1
184 self._dirs[pc] -= 1
190
185
191 def _incpathcheck(self, f):
186 def _incpathcheck(self, f):
192 if '\r' in f or '\n' in f:
187 if '\r' in f or '\n' in f:
193 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
188 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
194 # shadows
189 # shadows
195 if f in self._dirs:
190 if f in self._dirs:
196 raise util.Abort(_('directory named %r already in dirstate') % f)
191 raise util.Abort(_('directory named %r already in dirstate') % f)
197 for c in strutil.rfindall(f, '/'):
192 for c in strutil.rfindall(f, '/'):
198 d = f[:c]
193 d = f[:c]
199 if d in self._dirs:
194 if d in self._dirs:
200 break
195 break
201 if d in self._map:
196 if d in self._map:
202 raise util.Abort(_('file named %r already in dirstate') % d)
197 raise util.Abort(_('file named %r already in dirstate') % d)
203 self._incpath(f)
198 self._incpath(f)
204
199
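_incpath/_decpath keep a reference count for every directory prefix of a tracked path, which is what lets _incpathcheck refuse a file that would shadow a tracked directory or live under a tracked file. A standalone sketch of the same bookkeeping, written with split() instead of strutil.findall:

dirs = {}

def incpath(path):
    parts = path.split('/')
    for i in range(1, len(parts)):
        pc = '/'.join(parts[:i])           # every proper directory prefix
        dirs.setdefault(pc, 0)
        dirs[pc] += 1

incpath('a/b/c')
incpath('a/d')
print dirs                                  # {'a': 2, 'a/b': 1}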
205 def normal(self, f):
200 def normal(self, f):
206 'mark a file normal'
201 'mark a file normal'
207 self._dirty = True
202 self._dirty = True
208 s = os.lstat(self._join(f))
203 s = os.lstat(self._join(f))
209 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
204 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
210 if self._copymap.has_key(f):
205 if self._copymap.has_key(f):
211 del self._copymap[f]
206 del self._copymap[f]
212
207
213 def normaldirty(self, f):
208 def normaldirty(self, f):
214 'mark a file normal, but possibly dirty'
209 'mark a file normal, but possibly dirty'
215 self._dirty = True
210 self._dirty = True
216 s = os.lstat(self._join(f))
211 s = os.lstat(self._join(f))
217 self._map[f] = ('n', s.st_mode, -1, -1)
212 self._map[f] = ('n', s.st_mode, -1, -1)
218 if f in self._copymap:
213 if f in self._copymap:
219 del self._copymap[f]
214 del self._copymap[f]
220
215
221 def add(self, f):
216 def add(self, f):
222 'mark a file added'
217 'mark a file added'
223 self._dirty = True
218 self._dirty = True
224 self._incpathcheck(f)
219 self._incpathcheck(f)
225 s = os.lstat(self._join(f))
220 s = os.lstat(self._join(f))
226 self._map[f] = ('a', s.st_mode, s.st_size, s.st_mtime)
221 self._map[f] = ('a', s.st_mode, s.st_size, s.st_mtime)
227 if f in self._copymap:
222 if f in self._copymap:
228 del self._copymap[f]
223 del self._copymap[f]
229
224
230 def remove(self, f):
225 def remove(self, f):
231 'mark a file removed'
226 'mark a file removed'
232 self._dirty = True
227 self._dirty = True
233 self._map[f] = ('r', 0, 0, 0)
228 self._map[f] = ('r', 0, 0, 0)
234 self._decpath(f)
229 self._decpath(f)
235 if f in self._copymap:
230 if f in self._copymap:
236 del self._copymap[f]
231 del self._copymap[f]
237
232
238 def merge(self, f):
233 def merge(self, f):
239 'mark a file merged'
234 'mark a file merged'
240 self._dirty = True
235 self._dirty = True
241 s = os.lstat(self._join(f))
236 s = os.lstat(self._join(f))
242 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
237 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
243 if f in self._copymap:
238 if f in self._copymap:
244 del self._copymap[f]
239 del self._copymap[f]
245
240
246 def forget(self, f):
241 def forget(self, f):
247 'forget a file'
242 'forget a file'
248 self._dirty = True
243 self._dirty = True
249 try:
244 try:
250 del self._map[f]
245 del self._map[f]
251 self._decpath(f)
246 self._decpath(f)
252 except KeyError:
247 except KeyError:
253 self._ui.warn(_("not in dirstate: %s!\n") % f)
248 self._ui.warn(_("not in dirstate: %s!\n") % f)
254
249
255 def rebuild(self, parent, files):
250 def rebuild(self, parent, files):
256 self.invalidate()
251 self.invalidate()
257 for f in files:
252 for f in files:
258 if files.execf(f):
253 if files.execf(f):
259 self._map[f] = ('n', 0777, -1, 0)
254 self._map[f] = ('n', 0777, -1, 0)
260 else:
255 else:
261 self._map[f] = ('n', 0666, -1, 0)
256 self._map[f] = ('n', 0666, -1, 0)
262 self._pl = (parent, nullid)
257 self._pl = (parent, nullid)
263 self._dirty = True
258 self._dirty = True
264
259
265 def write(self):
260 def write(self):
266 if not self._dirty:
261 if not self._dirty:
267 return
262 return
268 cs = cStringIO.StringIO()
263 cs = cStringIO.StringIO()
269 cs.write("".join(self._pl))
264 cs.write("".join(self._pl))
270 for f, e in self._map.iteritems():
265 for f, e in self._map.iteritems():
271 c = self.copied(f)
266 c = self.copied(f)
272 if c:
267 if c:
273 f = f + "\0" + c
268 f = f + "\0" + c
274 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
269 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
275 cs.write(e)
270 cs.write(e)
276 cs.write(f)
271 cs.write(f)
277 st = self._opener("dirstate", "w", atomictemp=True)
272 st = self._opener("dirstate", "w", atomictemp=True)
278 st.write(cs.getvalue())
273 st.write(cs.getvalue())
279 st.rename()
274 st.rename()
280 self._dirty = False
275 self._dirty = False
281
276
282 def filterfiles(self, files):
277 def filterfiles(self, files):
283 ret = {}
278 ret = {}
284 unknown = []
279 unknown = []
285
280
286 for x in files:
281 for x in files:
287 if x == '.':
282 if x == '.':
288 return self._map.copy()
283 return self._map.copy()
289 if x not in self._map:
284 if x not in self._map:
290 unknown.append(x)
285 unknown.append(x)
291 else:
286 else:
292 ret[x] = self._map[x]
287 ret[x] = self._map[x]
293
288
294 if not unknown:
289 if not unknown:
295 return ret
290 return ret
296
291
297 b = self._map.keys()
292 b = self._map.keys()
298 b.sort()
293 b.sort()
299 blen = len(b)
294 blen = len(b)
300
295
301 for x in unknown:
296 for x in unknown:
302 bs = bisect.bisect(b, "%s%s" % (x, '/'))
297 bs = bisect.bisect(b, "%s%s" % (x, '/'))
303 while bs < blen:
298 while bs < blen:
304 s = b[bs]
299 s = b[bs]
305 if len(s) > len(x) and s.startswith(x):
300 if len(s) > len(x) and s.startswith(x):
306 ret[s] = self._map[s]
301 ret[s] = self._map[s]
307 else:
302 else:
308 break
303 break
309 bs += 1
304 bs += 1
310 return ret
305 return ret
311
306
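filterfiles() uses bisect on the sorted key list to collect every tracked file under a directory argument without scanning the whole map. A simplified standalone version of that scan (the sample paths are made up):

import bisect

tracked = ['README', 'docs/index.txt', 'docs/man/hg.1', 'setup.py']  # sorted
x = 'docs'

bs = bisect.bisect(tracked, x + '/')        # first entry at or past "docs/"
while bs < len(tracked) and tracked[bs].startswith(x + '/'):
    print tracked[bs]                       # docs/index.txt, docs/man/hg.1
    bs += 1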
312 def _supported(self, f, st, verbose=False):
307 def _supported(self, f, st, verbose=False):
313 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
308 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
314 return True
309 return True
315 if verbose:
310 if verbose:
316 kind = 'unknown'
311 kind = 'unknown'
317 if stat.S_ISCHR(st.st_mode): kind = _('character device')
312 if stat.S_ISCHR(st.st_mode): kind = _('character device')
318 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
313 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
319 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
314 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
320 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
315 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
321 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
316 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
322 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
317 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
323 % (self.pathto(f), kind))
318 % (self.pathto(f), kind))
324 return False
319 return False
325
320
326 def walk(self, files=None, match=util.always, badmatch=None):
321 def walk(self, files=None, match=util.always, badmatch=None):
327 # filter out the stat
322 # filter out the stat
328 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
323 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
329 yield src, f
324 yield src, f
330
325
331 def statwalk(self, files=None, match=util.always, ignored=False,
326 def statwalk(self, files=None, match=util.always, ignored=False,
332 badmatch=None, directories=False):
327 badmatch=None, directories=False):
333 '''
328 '''
334 walk recursively through the directory tree, finding all files
329 walk recursively through the directory tree, finding all files
335 matched by the match function
330 matched by the match function
336
331
337 results are yielded in a tuple (src, filename, st), where src
332 results are yielded in a tuple (src, filename, st), where src
338 is one of:
333 is one of:
339 'f' the file was found in the directory tree
334 'f' the file was found in the directory tree
340 'd' the file is a directory of the tree
335 'd' the file is a directory of the tree
341 'm' the file was only in the dirstate and not in the tree
336 'm' the file was only in the dirstate and not in the tree
342 'b' file was not found and matched badmatch
337 'b' file was not found and matched badmatch
343
338
344 and st is the stat result if the file was found in the directory.
339 and st is the stat result if the file was found in the directory.
345 '''
340 '''
346
341
347 # walk all files by default
342 # walk all files by default
348 if not files:
343 if not files:
349 files = ['.']
344 files = ['.']
350 dc = self._map.copy()
345 dc = self._map.copy()
351 else:
346 else:
352 files = util.unique(files)
347 files = util.unique(files)
353 dc = self.filterfiles(files)
348 dc = self.filterfiles(files)
354
349
355 def imatch(file_):
350 def imatch(file_):
356 if file_ not in dc and self._ignore(file_):
351 if file_ not in dc and self._ignore(file_):
357 return False
352 return False
358 return match(file_)
353 return match(file_)
359
354
360 ignore = self._ignore
355 ignore = self._ignore
361 if ignored:
356 if ignored:
362 imatch = match
357 imatch = match
363 ignore = util.never
358 ignore = util.never
364
359
365 # self._root may end with a path separator when self._root == '/'
360 # self._root may end with a path separator when self._root == '/'
366 common_prefix_len = len(self._root)
361 common_prefix_len = len(self._root)
367 if not self._root.endswith(os.sep):
362 if not self._root.endswith(os.sep):
368 common_prefix_len += 1
363 common_prefix_len += 1
369 # recursion-free walker, faster than os.walk.
364 # recursion-free walker, faster than os.walk.
370 def findfiles(s):
365 def findfiles(s):
371 work = [s]
366 work = [s]
372 if directories:
367 if directories:
373 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
368 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
374 while work:
369 while work:
375 top = work.pop()
370 top = work.pop()
376 names = os.listdir(top)
371 names = os.listdir(top)
377 names.sort()
372 names.sort()
378 # nd is the top of the repository dir tree
373 # nd is the top of the repository dir tree
379 nd = util.normpath(top[common_prefix_len:])
374 nd = util.normpath(top[common_prefix_len:])
380 if nd == '.':
375 if nd == '.':
381 nd = ''
376 nd = ''
382 else:
377 else:
383 # do not recurse into a repo contained in this
378 # do not recurse into a repo contained in this
384 # one. use bisect to find the .hg directory so speed
379 # one. use bisect to find the .hg directory so speed
385 # stays good in big directories.
380 # stays good in big directories.
386 hg = bisect.bisect_left(names, '.hg')
381 hg = bisect.bisect_left(names, '.hg')
387 if hg < len(names) and names[hg] == '.hg':
382 if hg < len(names) and names[hg] == '.hg':
388 if os.path.isdir(os.path.join(top, '.hg')):
383 if os.path.isdir(os.path.join(top, '.hg')):
389 continue
384 continue
390 for f in names:
385 for f in names:
391 np = util.pconvert(os.path.join(nd, f))
386 np = util.pconvert(os.path.join(nd, f))
392 if seen(np):
387 if seen(np):
393 continue
388 continue
394 p = os.path.join(top, f)
389 p = os.path.join(top, f)
395 # don't trip over symlinks
390 # don't trip over symlinks
396 st = os.lstat(p)
391 st = os.lstat(p)
397 if stat.S_ISDIR(st.st_mode):
392 if stat.S_ISDIR(st.st_mode):
398 if not ignore(np):
393 if not ignore(np):
399 work.append(p)
394 work.append(p)
400 if directories:
395 if directories:
401 yield 'd', np, st
396 yield 'd', np, st
402 if imatch(np) and np in dc:
397 if imatch(np) and np in dc:
403 yield 'm', np, st
398 yield 'm', np, st
404 elif imatch(np):
399 elif imatch(np):
405 if self._supported(np, st):
400 if self._supported(np, st):
406 yield 'f', np, st
401 yield 'f', np, st
407 elif np in dc:
402 elif np in dc:
408 yield 'm', np, st
403 yield 'm', np, st
409
404
410 known = {'.hg': 1}
405 known = {'.hg': 1}
411 def seen(fn):
406 def seen(fn):
412 if fn in known: return True
407 if fn in known: return True
413 known[fn] = 1
408 known[fn] = 1
414
409
415 # step one, find all files that match our criteria
410 # step one, find all files that match our criteria
416 files.sort()
411 files.sort()
417 for ff in files:
412 for ff in files:
418 nf = util.normpath(ff)
413 nf = util.normpath(ff)
419 f = self._join(ff)
414 f = self._join(ff)
420 try:
415 try:
421 st = os.lstat(f)
416 st = os.lstat(f)
422 except OSError, inst:
417 except OSError, inst:
423 found = False
418 found = False
424 for fn in dc:
419 for fn in dc:
425 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
420 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
426 found = True
421 found = True
427 break
422 break
428 if not found:
423 if not found:
429 if inst.errno != errno.ENOENT or not badmatch:
424 if inst.errno != errno.ENOENT or not badmatch:
430 self._ui.warn('%s: %s\n' %
425 self._ui.warn('%s: %s\n' %
431 (self.pathto(ff), inst.strerror))
426 (self.pathto(ff), inst.strerror))
432 elif badmatch and badmatch(ff) and imatch(nf):
427 elif badmatch and badmatch(ff) and imatch(nf):
433 yield 'b', ff, None
428 yield 'b', ff, None
434 continue
429 continue
435 if stat.S_ISDIR(st.st_mode):
430 if stat.S_ISDIR(st.st_mode):
436 cmp1 = (lambda x, y: cmp(x[1], y[1]))
431 cmp1 = (lambda x, y: cmp(x[1], y[1]))
437 sorted_ = [ x for x in findfiles(f) ]
432 sorted_ = [ x for x in findfiles(f) ]
438 sorted_.sort(cmp1)
433 sorted_.sort(cmp1)
439 for e in sorted_:
434 for e in sorted_:
440 yield e
435 yield e
441 else:
436 else:
442 if not seen(nf) and match(nf):
437 if not seen(nf) and match(nf):
443 if self._supported(ff, st, verbose=True):
438 if self._supported(ff, st, verbose=True):
444 yield 'f', nf, st
439 yield 'f', nf, st
445 elif ff in dc:
440 elif ff in dc:
446 yield 'm', nf, st
441 yield 'm', nf, st
447
442
448 # step two, run through anything left in the dc hash and yield
443 # step two, run through anything left in the dc hash and yield
449 # if we haven't already seen it
444 # if we haven't already seen it
450 ks = dc.keys()
445 ks = dc.keys()
451 ks.sort()
446 ks.sort()
452 for k in ks:
447 for k in ks:
453 if not seen(k) and imatch(k):
448 if not seen(k) and imatch(k):
454 yield 'm', k, None
449 yield 'm', k, None
455
450
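walk() and statwalk() are the read side of the dirstate: statwalk yields (src, filename, stat) tuples, with src telling you how the file was found, and walk() simply drops the stat. A hedged sketch, assuming repo was opened as in the earlier example and 'docs' is a directory inside it:

for src, f in repo.dirstate.walk(['docs']):
    # src is 'f' (found on disk), 'm' (only in the dirstate) or 'b' (bad match)
    print src, repo.dirstate.pathto(f)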
456 def status(self, files=None, match=util.always, list_ignored=False,
451 def status(self, files=None, match=util.always, list_ignored=False,
457 list_clean=False):
452 list_clean=False):
458 lookup, modified, added, unknown, ignored = [], [], [], [], []
453 lookup, modified, added, unknown, ignored = [], [], [], [], []
459 removed, deleted, clean = [], [], []
454 removed, deleted, clean = [], [], []
460
455
461 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
456 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
462 try:
457 try:
463 type_, mode, size, time = self[fn]
458 type_, mode, size, time = self._map[fn]
464 except KeyError:
459 except KeyError:
465 if list_ignored and self._ignore(fn):
460 if list_ignored and self._ignore(fn):
466 ignored.append(fn)
461 ignored.append(fn)
467 else:
462 else:
468 unknown.append(fn)
463 unknown.append(fn)
469 continue
464 continue
470 if src == 'm':
465 if src == 'm':
471 nonexistent = True
466 nonexistent = True
472 if not st:
467 if not st:
473 try:
468 try:
474 st = os.lstat(self._join(fn))
469 st = os.lstat(self._join(fn))
475 except OSError, inst:
470 except OSError, inst:
476 if inst.errno != errno.ENOENT:
471 if inst.errno != errno.ENOENT:
477 raise
472 raise
478 st = None
473 st = None
479 # We need to re-check that it is a valid file
474 # We need to re-check that it is a valid file
480 if st and self._supported(fn, st):
475 if st and self._supported(fn, st):
481 nonexistent = False
476 nonexistent = False
482 # XXX: what to do with files no longer present in the fs
477 # XXX: what to do with files no longer present in the fs
483 # that are not removed in the dirstate?
478 # that are not removed in the dirstate?
484 if nonexistent and type_ in "nm":
479 if nonexistent and type_ in "nm":
485 deleted.append(fn)
480 deleted.append(fn)
486 continue
481 continue
487 # check the common case first
482 # check the common case first
488 if type_ == 'n':
483 if type_ == 'n':
489 if not st:
484 if not st:
490 st = os.lstat(self._join(fn))
485 st = os.lstat(self._join(fn))
491 if (size >= 0 and (size != st.st_size
486 if (size >= 0 and (size != st.st_size
492 or (mode ^ st.st_mode) & 0100)
487 or (mode ^ st.st_mode) & 0100)
493 or fn in self._copymap):
488 or fn in self._copymap):
494 modified.append(fn)
489 modified.append(fn)
495 elif time != int(st.st_mtime):
490 elif time != int(st.st_mtime):
496 lookup.append(fn)
491 lookup.append(fn)
497 elif list_clean:
492 elif list_clean:
498 clean.append(fn)
493 clean.append(fn)
499 elif type_ == 'm':
494 elif type_ == 'm':
500 modified.append(fn)
495 modified.append(fn)
501 elif type_ == 'a':
496 elif type_ == 'a':
502 added.append(fn)
497 added.append(fn)
503 elif type_ == 'r':
498 elif type_ == 'r':
504 removed.append(fn)
499 removed.append(fn)
505
500
506 return (lookup, modified, added, removed, deleted, unknown, ignored,
501 return (lookup, modified, added, removed, deleted, unknown, ignored,
507 clean)
502 clean)
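status() folds one statwalk pass into eight lists, which callers unpack positionally. A short sketch of reading the result, again assuming an open repo:

(lookup, modified, added, removed,
 deleted, unknown, ignored, clean) = repo.dirstate.status(list_clean=True)
print '%d modified, %d unknown, %d clean' % (len(modified), len(unknown), len(clean))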
@@ -1,1949 +1,1949 @@
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util, extensions, hook
13 import os, revlog, time, util, extensions, hook
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 self.path = path
23 self.path = path
24 self.root = os.path.realpath(path)
24 self.root = os.path.realpath(path)
25 self.path = os.path.join(self.root, ".hg")
25 self.path = os.path.join(self.root, ".hg")
26 self.origroot = path
26 self.origroot = path
27 self.opener = util.opener(self.path)
27 self.opener = util.opener(self.path)
28 self.wopener = util.opener(self.root)
28 self.wopener = util.opener(self.root)
29
29
30 if not os.path.isdir(self.path):
30 if not os.path.isdir(self.path):
31 if create:
31 if create:
32 if not os.path.exists(path):
32 if not os.path.exists(path):
33 os.mkdir(path)
33 os.mkdir(path)
34 os.mkdir(self.path)
34 os.mkdir(self.path)
35 requirements = ["revlogv1"]
35 requirements = ["revlogv1"]
36 if parentui.configbool('format', 'usestore', True):
36 if parentui.configbool('format', 'usestore', True):
37 os.mkdir(os.path.join(self.path, "store"))
37 os.mkdir(os.path.join(self.path, "store"))
38 requirements.append("store")
38 requirements.append("store")
39 # create an invalid changelog
39 # create an invalid changelog
40 self.opener("00changelog.i", "a").write(
40 self.opener("00changelog.i", "a").write(
41 '\0\0\0\2' # represents revlogv2
41 '\0\0\0\2' # represents revlogv2
42 ' dummy changelog to prevent using the old repo layout'
42 ' dummy changelog to prevent using the old repo layout'
43 )
43 )
44 reqfile = self.opener("requires", "w")
44 reqfile = self.opener("requires", "w")
45 for r in requirements:
45 for r in requirements:
46 reqfile.write("%s\n" % r)
46 reqfile.write("%s\n" % r)
47 reqfile.close()
47 reqfile.close()
48 else:
48 else:
49 raise repo.RepoError(_("repository %s not found") % path)
49 raise repo.RepoError(_("repository %s not found") % path)
50 elif create:
50 elif create:
51 raise repo.RepoError(_("repository %s already exists") % path)
51 raise repo.RepoError(_("repository %s already exists") % path)
52 else:
52 else:
53 # find requirements
53 # find requirements
54 try:
54 try:
55 requirements = self.opener("requires").read().splitlines()
55 requirements = self.opener("requires").read().splitlines()
56 except IOError, inst:
56 except IOError, inst:
57 if inst.errno != errno.ENOENT:
57 if inst.errno != errno.ENOENT:
58 raise
58 raise
59 requirements = []
59 requirements = []
60 # check them
60 # check them
61 for r in requirements:
61 for r in requirements:
62 if r not in self.supported:
62 if r not in self.supported:
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64
64
65 # setup store
65 # setup store
66 if "store" in requirements:
66 if "store" in requirements:
67 self.encodefn = util.encodefilename
67 self.encodefn = util.encodefilename
68 self.decodefn = util.decodefilename
68 self.decodefn = util.decodefilename
69 self.spath = os.path.join(self.path, "store")
69 self.spath = os.path.join(self.path, "store")
70 else:
70 else:
71 self.encodefn = lambda x: x
71 self.encodefn = lambda x: x
72 self.decodefn = lambda x: x
72 self.decodefn = lambda x: x
73 self.spath = self.path
73 self.spath = self.path
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75
75
76 self.ui = ui.ui(parentui=parentui)
76 self.ui = ui.ui(parentui=parentui)
77 try:
77 try:
78 self.ui.readconfig(self.join("hgrc"), self.root)
78 self.ui.readconfig(self.join("hgrc"), self.root)
79 extensions.loadall(self.ui)
79 extensions.loadall(self.ui)
80 except IOError:
80 except IOError:
81 pass
81 pass
82
82
83 self.tagscache = None
83 self.tagscache = None
84 self.branchcache = None
84 self.branchcache = None
85 self.nodetagscache = None
85 self.nodetagscache = None
86 self.filterpats = {}
86 self.filterpats = {}
87 self.transhandle = None
87 self.transhandle = None
88
88
89 def __getattr__(self, name):
89 def __getattr__(self, name):
90 if name == 'changelog':
90 if name == 'changelog':
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 return self.changelog
93 return self.changelog
94 if name == 'manifest':
94 if name == 'manifest':
95 self.changelog
95 self.changelog
96 self.manifest = manifest.manifest(self.sopener)
96 self.manifest = manifest.manifest(self.sopener)
97 return self.manifest
97 return self.manifest
98 if name == 'dirstate':
98 if name == 'dirstate':
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 return self.dirstate
100 return self.dirstate
101 else:
101 else:
102 raise AttributeError, name
102 raise AttributeError, name
103
103
104 def url(self):
104 def url(self):
105 return 'file:' + self.root
105 return 'file:' + self.root
106
106
107 def hook(self, name, throw=False, **args):
107 def hook(self, name, throw=False, **args):
108 return hook.hook(self.ui, self, name, throw, **args)
108 return hook.hook(self.ui, self, name, throw, **args)
109
109
110 tag_disallowed = ':\r\n'
110 tag_disallowed = ':\r\n'
111
111
112 def _tag(self, name, node, message, local, user, date, parent=None,
112 def _tag(self, name, node, message, local, user, date, parent=None,
113 extra={}):
113 extra={}):
114 use_dirstate = parent is None
114 use_dirstate = parent is None
115
115
116 for c in self.tag_disallowed:
116 for c in self.tag_disallowed:
117 if c in name:
117 if c in name:
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119
119
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121
121
122 def writetag(fp, name, munge, prevtags):
122 def writetag(fp, name, munge, prevtags):
123 if prevtags and prevtags[-1] != '\n':
123 if prevtags and prevtags[-1] != '\n':
124 fp.write('\n')
124 fp.write('\n')
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 fp.close()
126 fp.close()
127 self.hook('tag', node=hex(node), tag=name, local=local)
127 self.hook('tag', node=hex(node), tag=name, local=local)
128
128
129 prevtags = ''
129 prevtags = ''
130 if local:
130 if local:
131 try:
131 try:
132 fp = self.opener('localtags', 'r+')
132 fp = self.opener('localtags', 'r+')
133 except IOError, err:
133 except IOError, err:
134 fp = self.opener('localtags', 'a')
134 fp = self.opener('localtags', 'a')
135 else:
135 else:
136 prevtags = fp.read()
136 prevtags = fp.read()
137
137
138 # local tags are stored in the current charset
138 # local tags are stored in the current charset
139 writetag(fp, name, None, prevtags)
139 writetag(fp, name, None, prevtags)
140 return
140 return
141
141
142 if use_dirstate:
142 if use_dirstate:
143 try:
143 try:
144 fp = self.wfile('.hgtags', 'rb+')
144 fp = self.wfile('.hgtags', 'rb+')
145 except IOError, err:
145 except IOError, err:
146 fp = self.wfile('.hgtags', 'ab')
146 fp = self.wfile('.hgtags', 'ab')
147 else:
147 else:
148 prevtags = fp.read()
148 prevtags = fp.read()
149 else:
149 else:
150 try:
150 try:
151 prevtags = self.filectx('.hgtags', parent).data()
151 prevtags = self.filectx('.hgtags', parent).data()
152 except revlog.LookupError:
152 except revlog.LookupError:
153 pass
153 pass
154 fp = self.wfile('.hgtags', 'wb')
154 fp = self.wfile('.hgtags', 'wb')
155
155
156 # committed tags are stored in UTF-8
156 # committed tags are stored in UTF-8
157 writetag(fp, name, util.fromlocal, prevtags)
157 writetag(fp, name, util.fromlocal, prevtags)
158
158
159 if use_dirstate and self.dirstate.state('.hgtags') == '?':
159 if use_dirstate and '.hgtags' not in self.dirstate:
160 self.add(['.hgtags'])
160 self.add(['.hgtags'])
161
161
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 extra=extra)
163 extra=extra)
164
164
165 self.hook('tag', node=hex(node), tag=name, local=local)
165 self.hook('tag', node=hex(node), tag=name, local=local)
166
166
167 return tagnode
167 return tagnode
168
168
169 def tag(self, name, node, message, local, user, date):
169 def tag(self, name, node, message, local, user, date):
170 '''tag a revision with a symbolic name.
170 '''tag a revision with a symbolic name.
171
171
172 if local is True, the tag is stored in a per-repository file.
172 if local is True, the tag is stored in a per-repository file.
173 otherwise, it is stored in the .hgtags file, and a new
173 otherwise, it is stored in the .hgtags file, and a new
174 changeset is committed with the change.
174 changeset is committed with the change.
175
175
176 keyword arguments:
176 keyword arguments:
177
177
178 local: whether to store tag in non-version-controlled file
178 local: whether to store tag in non-version-controlled file
179 (default False)
179 (default False)
180
180
181 message: commit message to use if committing
181 message: commit message to use if committing
182
182
183 user: name of user to use if committing
183 user: name of user to use if committing
184
184
185 date: date tuple to use if committing'''
185 date: date tuple to use if committing'''
186
186
187 for x in self.status()[:5]:
187 for x in self.status()[:5]:
188 if '.hgtags' in x:
188 if '.hgtags' in x:
189 raise util.Abort(_('working copy of .hgtags is changed '
189 raise util.Abort(_('working copy of .hgtags is changed '
190 '(please commit .hgtags manually)'))
190 '(please commit .hgtags manually)'))
191
191
192
192
193 self._tag(name, node, message, local, user, date)
193 self._tag(name, node, message, local, user, date)
194
194
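tag() refuses to run while .hgtags has uncommitted changes and then defers to _tag(), which either appends to the uncommitted .hg/localtags file (local=True) or edits .hgtags and commits. A hedged sketch of creating a local tag on the working directory's first parent; the tag name and user string are placeholders:

node = repo.dirstate.parents()[0]
repo.tag('v0.1-local', node, 'testing a local tag',
         True,                              # local: goes to .hg/localtags only
         'editor <editor@example.com>', None)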
195 def tags(self):
195 def tags(self):
196 '''return a mapping of tag to node'''
196 '''return a mapping of tag to node'''
197 if self.tagscache:
197 if self.tagscache:
198 return self.tagscache
198 return self.tagscache
199
199
200 globaltags = {}
200 globaltags = {}
201
201
202 def readtags(lines, fn):
202 def readtags(lines, fn):
203 filetags = {}
203 filetags = {}
204 count = 0
204 count = 0
205
205
206 def warn(msg):
206 def warn(msg):
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208
208
209 for l in lines:
209 for l in lines:
210 count += 1
210 count += 1
211 if not l:
211 if not l:
212 continue
212 continue
213 s = l.split(" ", 1)
213 s = l.split(" ", 1)
214 if len(s) != 2:
214 if len(s) != 2:
215 warn(_("cannot parse entry"))
215 warn(_("cannot parse entry"))
216 continue
216 continue
217 node, key = s
217 node, key = s
218 key = util.tolocal(key.strip()) # stored in UTF-8
218 key = util.tolocal(key.strip()) # stored in UTF-8
219 try:
219 try:
220 bin_n = bin(node)
220 bin_n = bin(node)
221 except TypeError:
221 except TypeError:
222 warn(_("node '%s' is not well formed") % node)
222 warn(_("node '%s' is not well formed") % node)
223 continue
223 continue
224 if bin_n not in self.changelog.nodemap:
224 if bin_n not in self.changelog.nodemap:
225 warn(_("tag '%s' refers to unknown node") % key)
225 warn(_("tag '%s' refers to unknown node") % key)
226 continue
226 continue
227
227
228 h = []
228 h = []
229 if key in filetags:
229 if key in filetags:
230 n, h = filetags[key]
230 n, h = filetags[key]
231 h.append(n)
231 h.append(n)
232 filetags[key] = (bin_n, h)
232 filetags[key] = (bin_n, h)
233
233
234 for k, nh in filetags.items():
234 for k, nh in filetags.items():
235 if k not in globaltags:
235 if k not in globaltags:
236 globaltags[k] = nh
236 globaltags[k] = nh
237 continue
237 continue
238 # we prefer the global tag if:
238 # we prefer the global tag if:
239 # it supersedes us OR
239 # it supersedes us OR
240 # we supersede each other and it has a higher rank
240 # we supersede each other and it has a higher rank
241 # otherwise we win because we're tip-most
241 # otherwise we win because we're tip-most
242 an, ah = nh
242 an, ah = nh
243 bn, bh = globaltags[k]
243 bn, bh = globaltags[k]
244 if (bn != an and an in bh and
244 if (bn != an and an in bh and
245 (bn not in ah or len(bh) > len(ah))):
245 (bn not in ah or len(bh) > len(ah))):
246 an = bn
246 an = bn
247 ah.extend([n for n in bh if n not in ah])
247 ah.extend([n for n in bh if n not in ah])
248 globaltags[k] = an, ah
248 globaltags[k] = an, ah
249
249
250 # read the tags file from each head, ending with the tip
250 # read the tags file from each head, ending with the tip
251 f = None
251 f = None
252 for rev, node, fnode in self._hgtagsnodes():
252 for rev, node, fnode in self._hgtagsnodes():
253 f = (f and f.filectx(fnode) or
253 f = (f and f.filectx(fnode) or
254 self.filectx('.hgtags', fileid=fnode))
254 self.filectx('.hgtags', fileid=fnode))
255 readtags(f.data().splitlines(), f)
255 readtags(f.data().splitlines(), f)
256
256
257 try:
257 try:
258 data = util.fromlocal(self.opener("localtags").read())
258 data = util.fromlocal(self.opener("localtags").read())
259 # localtags are stored in the local character set
259 # localtags are stored in the local character set
260 # while the internal tag table is stored in UTF-8
260 # while the internal tag table is stored in UTF-8
261 readtags(data.splitlines(), "localtags")
261 readtags(data.splitlines(), "localtags")
262 except IOError:
262 except IOError:
263 pass
263 pass
264
264
265 self.tagscache = {}
265 self.tagscache = {}
266 for k,nh in globaltags.items():
266 for k,nh in globaltags.items():
267 n = nh[0]
267 n = nh[0]
268 if n != nullid:
268 if n != nullid:
269 self.tagscache[k] = n
269 self.tagscache[k] = n
270 self.tagscache['tip'] = self.changelog.tip()
270 self.tagscache['tip'] = self.changelog.tip()
271
271
272 return self.tagscache
272 return self.tagscache
273
273
274 def _hgtagsnodes(self):
274 def _hgtagsnodes(self):
275 heads = self.heads()
275 heads = self.heads()
276 heads.reverse()
276 heads.reverse()
277 last = {}
277 last = {}
278 ret = []
278 ret = []
279 for node in heads:
279 for node in heads:
280 c = self.changectx(node)
280 c = self.changectx(node)
281 rev = c.rev()
281 rev = c.rev()
282 try:
282 try:
283 fnode = c.filenode('.hgtags')
283 fnode = c.filenode('.hgtags')
284 except revlog.LookupError:
284 except revlog.LookupError:
285 continue
285 continue
286 ret.append((rev, node, fnode))
286 ret.append((rev, node, fnode))
287 if fnode in last:
287 if fnode in last:
288 ret[last[fnode]] = None
288 ret[last[fnode]] = None
289 last[fnode] = len(ret) - 1
289 last[fnode] = len(ret) - 1
290 return [item for item in ret if item]
290 return [item for item in ret if item]
291
291
292 def tagslist(self):
292 def tagslist(self):
293 '''return a list of tags ordered by revision'''
293 '''return a list of tags ordered by revision'''
294 l = []
294 l = []
295 for t, n in self.tags().items():
295 for t, n in self.tags().items():
296 try:
296 try:
297 r = self.changelog.rev(n)
297 r = self.changelog.rev(n)
298 except:
298 except:
299 r = -2 # sort to the beginning of the list if unknown
299 r = -2 # sort to the beginning of the list if unknown
300 l.append((r, t, n))
300 l.append((r, t, n))
301 l.sort()
301 l.sort()
302 return [(t, n) for r, t, n in l]
302 return [(t, n) for r, t, n in l]
303
303
304 def nodetags(self, node):
304 def nodetags(self, node):
305 '''return the tags associated with a node'''
305 '''return the tags associated with a node'''
306 if not self.nodetagscache:
306 if not self.nodetagscache:
307 self.nodetagscache = {}
307 self.nodetagscache = {}
308 for t, n in self.tags().items():
308 for t, n in self.tags().items():
309 self.nodetagscache.setdefault(n, []).append(t)
309 self.nodetagscache.setdefault(n, []).append(t)
310 return self.nodetagscache.get(node, [])
310 return self.nodetagscache.get(node, [])
311
311
312 def _branchtags(self):
312 def _branchtags(self):
313 partial, last, lrev = self._readbranchcache()
313 partial, last, lrev = self._readbranchcache()
314
314
315 tiprev = self.changelog.count() - 1
315 tiprev = self.changelog.count() - 1
316 if lrev != tiprev:
316 if lrev != tiprev:
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319
319
320 return partial
320 return partial
321
321
322 def branchtags(self):
322 def branchtags(self):
323 if self.branchcache is not None:
323 if self.branchcache is not None:
324 return self.branchcache
324 return self.branchcache
325
325
326 self.branchcache = {} # avoid recursion in changectx
326 self.branchcache = {} # avoid recursion in changectx
327 partial = self._branchtags()
327 partial = self._branchtags()
328
328
329 # the branch cache is stored on disk as UTF-8, but in the local
329 # the branch cache is stored on disk as UTF-8, but in the local
330 # charset internally
330 # charset internally
331 for k, v in partial.items():
331 for k, v in partial.items():
332 self.branchcache[util.tolocal(k)] = v
332 self.branchcache[util.tolocal(k)] = v
333 return self.branchcache
333 return self.branchcache
334
334
335 def _readbranchcache(self):
335 def _readbranchcache(self):
336 partial = {}
336 partial = {}
337 try:
337 try:
338 f = self.opener("branch.cache")
338 f = self.opener("branch.cache")
339 lines = f.read().split('\n')
339 lines = f.read().split('\n')
340 f.close()
340 f.close()
341 except (IOError, OSError):
341 except (IOError, OSError):
342 return {}, nullid, nullrev
342 return {}, nullid, nullrev
343
343
344 try:
344 try:
345 last, lrev = lines.pop(0).split(" ", 1)
345 last, lrev = lines.pop(0).split(" ", 1)
346 last, lrev = bin(last), int(lrev)
346 last, lrev = bin(last), int(lrev)
347 if not (lrev < self.changelog.count() and
347 if not (lrev < self.changelog.count() and
348 self.changelog.node(lrev) == last): # sanity check
348 self.changelog.node(lrev) == last): # sanity check
349 # invalidate the cache
349 # invalidate the cache
350 raise ValueError('Invalid branch cache: unknown tip')
350 raise ValueError('Invalid branch cache: unknown tip')
351 for l in lines:
351 for l in lines:
352 if not l: continue
352 if not l: continue
353 node, label = l.split(" ", 1)
353 node, label = l.split(" ", 1)
354 partial[label.strip()] = bin(node)
354 partial[label.strip()] = bin(node)
355 except (KeyboardInterrupt, util.SignalInterrupt):
355 except (KeyboardInterrupt, util.SignalInterrupt):
356 raise
356 raise
357 except Exception, inst:
357 except Exception, inst:
358 if self.ui.debugflag:
358 if self.ui.debugflag:
359 self.ui.warn(str(inst), '\n')
359 self.ui.warn(str(inst), '\n')
360 partial, last, lrev = {}, nullid, nullrev
360 partial, last, lrev = {}, nullid, nullrev
361 return partial, last, lrev
361 return partial, last, lrev
362
362
363 def _writebranchcache(self, branches, tip, tiprev):
363 def _writebranchcache(self, branches, tip, tiprev):
364 try:
364 try:
365 f = self.opener("branch.cache", "w", atomictemp=True)
365 f = self.opener("branch.cache", "w", atomictemp=True)
366 f.write("%s %s\n" % (hex(tip), tiprev))
366 f.write("%s %s\n" % (hex(tip), tiprev))
367 for label, node in branches.iteritems():
367 for label, node in branches.iteritems():
368 f.write("%s %s\n" % (hex(node), label))
368 f.write("%s %s\n" % (hex(node), label))
369 f.rename()
369 f.rename()
370 except (IOError, OSError):
370 except (IOError, OSError):
371 pass
371 pass
372
372
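The branch cache on disk is plain text: the first line records the cached tip as "hexnode rev", and every following line maps a branch head to its branch name. A standalone parse of a fabricated cache body, mirroring the loop in _readbranchcache():

sample = ("1" * 40 + " 7\n" +
          "2" * 40 + " default\n" +
          "3" * 40 + " stable\n")

lines = sample.split('\n')
last, lrev = lines.pop(0).split(" ", 1)     # cached tip node and revision
partial = {}
for l in lines:
    if not l: continue
    node, label = l.split(" ", 1)
    partial[label.strip()] = node
print lrev, sorted(partial.keys())          # 7 ['default', 'stable']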
373 def _updatebranchcache(self, partial, start, end):
373 def _updatebranchcache(self, partial, start, end):
374 for r in xrange(start, end):
374 for r in xrange(start, end):
375 c = self.changectx(r)
375 c = self.changectx(r)
376 b = c.branch()
376 b = c.branch()
377 partial[b] = c.node()
377 partial[b] = c.node()
378
378
379 def lookup(self, key):
379 def lookup(self, key):
380 if key == '.':
380 if key == '.':
381 key, second = self.dirstate.parents()
381 key, second = self.dirstate.parents()
382 if key == nullid:
382 if key == nullid:
383 raise repo.RepoError(_("no revision checked out"))
383 raise repo.RepoError(_("no revision checked out"))
384 if second != nullid:
384 if second != nullid:
385 self.ui.warn(_("warning: working directory has two parents, "
385 self.ui.warn(_("warning: working directory has two parents, "
386 "tag '.' uses the first\n"))
386 "tag '.' uses the first\n"))
387 elif key == 'null':
387 elif key == 'null':
388 return nullid
388 return nullid
389 n = self.changelog._match(key)
389 n = self.changelog._match(key)
390 if n:
390 if n:
391 return n
391 return n
392 if key in self.tags():
392 if key in self.tags():
393 return self.tags()[key]
393 return self.tags()[key]
394 if key in self.branchtags():
394 if key in self.branchtags():
395 return self.branchtags()[key]
395 return self.branchtags()[key]
396 n = self.changelog._partialmatch(key)
396 n = self.changelog._partialmatch(key)
397 if n:
397 if n:
398 return n
398 return n
399 raise repo.RepoError(_("unknown revision '%s'") % key)
399 raise repo.RepoError(_("unknown revision '%s'") % key)
400
400
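lookup() is the one-stop revision resolver: '.', 'null', tags, branch names, full nodes and unambiguous prefixes all come back as a 20-byte binary node, or repo.RepoError is raised. A hedged sketch, assuming the hex helper lives in mercurial.node as it does for the modules in this changeset:

from mercurial.node import hex

print hex(repo.lookup('.'))                 # working directory's first parent
print hex(repo.lookup('tip'))               # resolved through the tag table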
401 def dev(self):
401 def dev(self):
402 return os.lstat(self.path).st_dev
402 return os.lstat(self.path).st_dev
403
403
404 def local(self):
404 def local(self):
405 return True
405 return True
406
406
407 def join(self, f):
407 def join(self, f):
408 return os.path.join(self.path, f)
408 return os.path.join(self.path, f)
409
409
410 def sjoin(self, f):
410 def sjoin(self, f):
411 f = self.encodefn(f)
411 f = self.encodefn(f)
412 return os.path.join(self.spath, f)
412 return os.path.join(self.spath, f)
413
413
414 def wjoin(self, f):
414 def wjoin(self, f):
415 return os.path.join(self.root, f)
415 return os.path.join(self.root, f)
416
416
417 def file(self, f):
417 def file(self, f):
418 if f[0] == '/':
418 if f[0] == '/':
419 f = f[1:]
419 f = f[1:]
420 return filelog.filelog(self.sopener, f)
420 return filelog.filelog(self.sopener, f)
421
421
422 def changectx(self, changeid=None):
422 def changectx(self, changeid=None):
423 return context.changectx(self, changeid)
423 return context.changectx(self, changeid)
424
424
425 def workingctx(self):
425 def workingctx(self):
426 return context.workingctx(self)
426 return context.workingctx(self)
427
427
428 def parents(self, changeid=None):
428 def parents(self, changeid=None):
429 '''
429 '''
430 get list of changectxs for parents of changeid or working directory
430 get list of changectxs for parents of changeid or working directory
431 '''
431 '''
432 if changeid is None:
432 if changeid is None:
433 pl = self.dirstate.parents()
433 pl = self.dirstate.parents()
434 else:
434 else:
435 n = self.changelog.lookup(changeid)
435 n = self.changelog.lookup(changeid)
436 pl = self.changelog.parents(n)
436 pl = self.changelog.parents(n)
437 if pl[1] == nullid:
437 if pl[1] == nullid:
438 return [self.changectx(pl[0])]
438 return [self.changectx(pl[0])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
439 return [self.changectx(pl[0]), self.changectx(pl[1])]
440
440
441 def filectx(self, path, changeid=None, fileid=None):
441 def filectx(self, path, changeid=None, fileid=None):
442 """changeid can be a changeset revision, node, or tag.
442 """changeid can be a changeset revision, node, or tag.
443 fileid can be a file revision or node."""
443 fileid can be a file revision or node."""
444 return context.filectx(self, path, changeid, fileid)
444 return context.filectx(self, path, changeid, fileid)
445
445
446 def getcwd(self):
446 def getcwd(self):
447 return self.dirstate.getcwd()
447 return self.dirstate.getcwd()
448
448
449 def pathto(self, f, cwd=None):
449 def pathto(self, f, cwd=None):
450 return self.dirstate.pathto(f, cwd)
450 return self.dirstate.pathto(f, cwd)
451
451
452 def wfile(self, f, mode='r'):
452 def wfile(self, f, mode='r'):
453 return self.wopener(f, mode)
453 return self.wopener(f, mode)
454
454
455 def _link(self, f):
455 def _link(self, f):
456 return os.path.islink(self.wjoin(f))
456 return os.path.islink(self.wjoin(f))
457
457
458 def _filter(self, filter, filename, data):
458 def _filter(self, filter, filename, data):
459 if filter not in self.filterpats:
459 if filter not in self.filterpats:
460 l = []
460 l = []
461 for pat, cmd in self.ui.configitems(filter):
461 for pat, cmd in self.ui.configitems(filter):
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
462 mf = util.matcher(self.root, "", [pat], [], [])[1]
463 l.append((mf, cmd))
463 l.append((mf, cmd))
464 self.filterpats[filter] = l
464 self.filterpats[filter] = l
465
465
466 for mf, cmd in self.filterpats[filter]:
466 for mf, cmd in self.filterpats[filter]:
467 if mf(filename):
467 if mf(filename):
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
469 data = util.filter(data, cmd)
469 data = util.filter(data, cmd)
470 break
470 break
471
471
472 return data
472 return data
473
473
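The _filter() method above builds and caches one matcher per pattern found in the ui's [encode]/[decode] sections and pipes file data through the first command whose pattern matches the filename. A minimal standalone sketch of that first-match dispatch, assuming a hypothetical table of glob patterns mapped to plain Python callables in place of util.matcher and external shell commands:

import fnmatch

# Hypothetical filter table, mirroring what ui.configitems("encode") /
# ui.configitems("decode") would hand back as (pattern, command) pairs.
FILTERS = {
    "encode": [("*.txt", lambda data: data.replace("\r\n", "\n"))],
    "decode": [("*.txt", lambda data: data.replace("\n", "\r\n"))],
}

def run_filter(kind, filename, data):
    # First matching pattern wins, like the for loop in _filter() above.
    for pattern, transform in FILTERS.get(kind, []):
        if fnmatch.fnmatch(filename, pattern):
            return transform(data)
    return data

print(run_filter("encode", "notes.txt", "a\r\nb\r\n"))   # -> 'a\nb\n'
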
474 def wread(self, filename):
474 def wread(self, filename):
475 if self._link(filename):
475 if self._link(filename):
476 data = os.readlink(self.wjoin(filename))
476 data = os.readlink(self.wjoin(filename))
477 else:
477 else:
478 data = self.wopener(filename, 'r').read()
478 data = self.wopener(filename, 'r').read()
479 return self._filter("encode", filename, data)
479 return self._filter("encode", filename, data)
480
480
481 def wwrite(self, filename, data, flags):
481 def wwrite(self, filename, data, flags):
482 data = self._filter("decode", filename, data)
482 data = self._filter("decode", filename, data)
483 if "l" in flags:
483 if "l" in flags:
484 self.wopener.symlink(data, filename)
484 self.wopener.symlink(data, filename)
485 else:
485 else:
486 try:
486 try:
487 if self._link(filename):
487 if self._link(filename):
488 os.unlink(self.wjoin(filename))
488 os.unlink(self.wjoin(filename))
489 except OSError:
489 except OSError:
490 pass
490 pass
491 self.wopener(filename, 'w').write(data)
491 self.wopener(filename, 'w').write(data)
492 util.set_exec(self.wjoin(filename), "x" in flags)
492 util.set_exec(self.wjoin(filename), "x" in flags)
493
493
494 def wwritedata(self, filename, data):
494 def wwritedata(self, filename, data):
495 return self._filter("decode", filename, data)
495 return self._filter("decode", filename, data)
496
496
497 def transaction(self):
497 def transaction(self):
498 tr = self.transhandle
498 tr = self.transhandle
499 if tr != None and tr.running():
499 if tr != None and tr.running():
500 return tr.nest()
500 return tr.nest()
501
501
502 # save dirstate for rollback
502 # save dirstate for rollback
503 try:
503 try:
504 ds = self.opener("dirstate").read()
504 ds = self.opener("dirstate").read()
505 except IOError:
505 except IOError:
506 ds = ""
506 ds = ""
507 self.opener("journal.dirstate", "w").write(ds)
507 self.opener("journal.dirstate", "w").write(ds)
508
508
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
511 tr = transaction.transaction(self.ui.warn, self.sopener,
511 tr = transaction.transaction(self.ui.warn, self.sopener,
512 self.sjoin("journal"),
512 self.sjoin("journal"),
513 aftertrans(renames))
513 aftertrans(renames))
514 self.transhandle = tr
514 self.transhandle = tr
515 return tr
515 return tr
516
516
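transaction() above snapshots the dirstate into journal.dirstate and, through aftertrans(renames), arranges for the journal files to be renamed to undo files once the transaction completes; rollback() later restores from those undo files. A rough sketch of the write-journal-then-promote-to-undo pattern on a plain file, assuming made-up file names and no locking:

import os, tempfile

def replace_with_undo(path, new_data):
    # Snapshot the current contents into a journal file first.
    with open(path) as f:
        old = f.read()
    with open(path + ".journal", "w") as f:
        f.write(old)
    # Do the real write, then promote the journal to an undo file on success,
    # much as aftertrans() renames journal.dirstate to undo.dirstate.
    with open(path, "w") as f:
        f.write(new_data)
    os.rename(path + ".journal", path + ".undo")

tmp = tempfile.mkdtemp()
target = os.path.join(tmp, "dirstate")
with open(target, "w") as f:
    f.write("old contents")
replace_with_undo(target, "new contents")
print(sorted(os.listdir(tmp)))            # ['dirstate', 'dirstate.undo']
print(open(target + ".undo").read())      # old contents
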
517 def recover(self):
517 def recover(self):
518 l = self.lock()
518 l = self.lock()
519 if os.path.exists(self.sjoin("journal")):
519 if os.path.exists(self.sjoin("journal")):
520 self.ui.status(_("rolling back interrupted transaction\n"))
520 self.ui.status(_("rolling back interrupted transaction\n"))
521 transaction.rollback(self.sopener, self.sjoin("journal"))
521 transaction.rollback(self.sopener, self.sjoin("journal"))
522 self.invalidate()
522 self.invalidate()
523 return True
523 return True
524 else:
524 else:
525 self.ui.warn(_("no interrupted transaction available\n"))
525 self.ui.warn(_("no interrupted transaction available\n"))
526 return False
526 return False
527
527
528 def rollback(self, wlock=None, lock=None):
528 def rollback(self, wlock=None, lock=None):
529 if not wlock:
529 if not wlock:
530 wlock = self.wlock()
530 wlock = self.wlock()
531 if not lock:
531 if not lock:
532 lock = self.lock()
532 lock = self.lock()
533 if os.path.exists(self.sjoin("undo")):
533 if os.path.exists(self.sjoin("undo")):
534 self.ui.status(_("rolling back last transaction\n"))
534 self.ui.status(_("rolling back last transaction\n"))
535 transaction.rollback(self.sopener, self.sjoin("undo"))
535 transaction.rollback(self.sopener, self.sjoin("undo"))
536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
537 self.invalidate()
537 self.invalidate()
538 self.dirstate.invalidate()
538 self.dirstate.invalidate()
539 else:
539 else:
540 self.ui.warn(_("no rollback information available\n"))
540 self.ui.warn(_("no rollback information available\n"))
541
541
542 def invalidate(self):
542 def invalidate(self):
543 for a in "changelog manifest".split():
543 for a in "changelog manifest".split():
544 if hasattr(self, a):
544 if hasattr(self, a):
545 self.__delattr__(a)
545 self.__delattr__(a)
546 self.tagscache = None
546 self.tagscache = None
547 self.nodetagscache = None
547 self.nodetagscache = None
548
548
549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
550 desc=None):
550 desc=None):
551 try:
551 try:
552 l = lock.lock(lockname, 0, releasefn, desc=desc)
552 l = lock.lock(lockname, 0, releasefn, desc=desc)
553 except lock.LockHeld, inst:
553 except lock.LockHeld, inst:
554 if not wait:
554 if not wait:
555 raise
555 raise
556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
557 (desc, inst.locker))
557 (desc, inst.locker))
558 # default to 600 seconds timeout
558 # default to 600 seconds timeout
559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
560 releasefn, desc=desc)
560 releasefn, desc=desc)
561 if acquirefn:
561 if acquirefn:
562 acquirefn()
562 acquirefn()
563 return l
563 return l
564
564
565 def lock(self, wait=1):
565 def lock(self, wait=1):
566 return self.do_lock(self.sjoin("lock"), wait,
566 return self.do_lock(self.sjoin("lock"), wait,
567 acquirefn=self.invalidate,
567 acquirefn=self.invalidate,
568 desc=_('repository %s') % self.origroot)
568 desc=_('repository %s') % self.origroot)
569
569
570 def wlock(self, wait=1):
570 def wlock(self, wait=1):
571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
572 self.dirstate.invalidate,
572 self.dirstate.invalidate,
573 desc=_('working directory of %s') % self.origroot)
573 desc=_('working directory of %s') % self.origroot)
574
574
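do_lock() first attempts a non-blocking acquire and, only when wait is set, retries with the timeout read from ui.timeout (600 seconds by default). A small sketch of the same try-fast-then-wait idea using an exclusive lock file; the LockHeld class and polling interval below are assumptions of the sketch, not Mercurial's lock module:

import os, time, errno, tempfile

class LockHeld(Exception):
    pass

def trylock(path):
    # O_EXCL makes the create atomic: it fails if the lock file exists.
    try:
        os.close(os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
    except OSError as err:
        if err.errno == errno.EEXIST:
            raise LockHeld(path)
        raise

def lock_with_timeout(path, wait=True, timeout=600):
    try:
        trylock(path)                    # fast path, like lock.lock(lockname, 0, ...)
        return
    except LockHeld:
        if not wait:
            raise
    deadline = time.time() + timeout     # like ui.config("ui", "timeout", "600")
    while time.time() < deadline:        # keep retrying until the holder lets go
        try:
            trylock(path)
            return
        except LockHeld:
            time.sleep(0.1)
    raise LockHeld(path)

lockfile = os.path.join(tempfile.mkdtemp(), "lock")
lock_with_timeout(lockfile, wait=False)  # acquires: nobody holds it yet
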
575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
576 """
576 """
577 commit an individual file as part of a larger transaction
577 commit an individual file as part of a larger transaction
578 """
578 """
579
579
580 t = self.wread(fn)
580 t = self.wread(fn)
581 fl = self.file(fn)
581 fl = self.file(fn)
582 fp1 = manifest1.get(fn, nullid)
582 fp1 = manifest1.get(fn, nullid)
583 fp2 = manifest2.get(fn, nullid)
583 fp2 = manifest2.get(fn, nullid)
584
584
585 meta = {}
585 meta = {}
586 cp = self.dirstate.copied(fn)
586 cp = self.dirstate.copied(fn)
587 if cp:
587 if cp:
588 # Mark the new revision of this file as a copy of another
588 # Mark the new revision of this file as a copy of another
589 # file. This copy data will effectively act as a parent
589 # file. This copy data will effectively act as a parent
590 # of this new revision. If this is a merge, the first
590 # of this new revision. If this is a merge, the first
591 # parent will be the nullid (meaning "look up the copy data")
591 # parent will be the nullid (meaning "look up the copy data")
592 # and the second one will be the other parent. For example:
592 # and the second one will be the other parent. For example:
593 #
593 #
594 # 0 --- 1 --- 3 rev1 changes file foo
594 # 0 --- 1 --- 3 rev1 changes file foo
595 # \ / rev2 renames foo to bar and changes it
595 # \ / rev2 renames foo to bar and changes it
596 # \- 2 -/ rev3 should have bar with all changes and
596 # \- 2 -/ rev3 should have bar with all changes and
597 # should record that bar descends from
597 # should record that bar descends from
598 # bar in rev2 and foo in rev1
598 # bar in rev2 and foo in rev1
599 #
599 #
600 # this allows this merge to succeed:
600 # this allows this merge to succeed:
601 #
601 #
602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
603 # \ / merging rev3 and rev4 should use bar@rev2
603 # \ / merging rev3 and rev4 should use bar@rev2
604 # \- 2 --- 4 as the merge base
604 # \- 2 --- 4 as the merge base
605 #
605 #
606 meta["copy"] = cp
606 meta["copy"] = cp
607 if not manifest2: # not a branch merge
607 if not manifest2: # not a branch merge
608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
609 fp2 = nullid
609 fp2 = nullid
610 elif fp2 != nullid: # copied on remote side
610 elif fp2 != nullid: # copied on remote side
611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 elif fp1 != nullid: # copied on local side, reversed
612 elif fp1 != nullid: # copied on local side, reversed
613 meta["copyrev"] = hex(manifest2.get(cp))
613 meta["copyrev"] = hex(manifest2.get(cp))
614 fp2 = fp1
614 fp2 = fp1
615 else: # directory rename
615 else: # directory rename
616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
617 self.ui.debug(_(" %s: copy %s:%s\n") %
617 self.ui.debug(_(" %s: copy %s:%s\n") %
618 (fn, cp, meta["copyrev"]))
618 (fn, cp, meta["copyrev"]))
619 fp1 = nullid
619 fp1 = nullid
620 elif fp2 != nullid:
620 elif fp2 != nullid:
621 # is one parent an ancestor of the other?
621 # is one parent an ancestor of the other?
622 fpa = fl.ancestor(fp1, fp2)
622 fpa = fl.ancestor(fp1, fp2)
623 if fpa == fp1:
623 if fpa == fp1:
624 fp1, fp2 = fp2, nullid
624 fp1, fp2 = fp2, nullid
625 elif fpa == fp2:
625 elif fpa == fp2:
626 fp2 = nullid
626 fp2 = nullid
627
627
628 # is the file unmodified from the parent? report existing entry
628 # is the file unmodified from the parent? report existing entry
629 if fp2 == nullid and not fl.cmp(fp1, t):
629 if fp2 == nullid and not fl.cmp(fp1, t):
630 return fp1
630 return fp1
631
631
632 changelist.append(fn)
632 changelist.append(fn)
633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
634
634
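One step in filecommit() above is collapsing the two file parents when one is an ancestor of the other, so that only genuinely divergent parents get recorded. A toy version of just that check, with a made-up ancestor function standing in for filelog.ancestor():

NULL = None   # stands in for nullid

def pick_parents(fp1, fp2, ancestor):
    # If one parent is an ancestor of the other, only the newer one matters,
    # mirroring the fpa checks in filecommit() above.
    if fp2 is not NULL:
        fpa = ancestor(fp1, fp2)
        if fpa == fp1:
            fp1, fp2 = fp2, NULL
        elif fpa == fp2:
            fp2 = NULL
    return fp1, fp2

# Toy history where "A" is an ancestor of "B": merging them keeps only "B".
def toy_ancestor(x, y):
    return "A" if "A" in (x, y) else NULL

print(pick_parents("A", "B", toy_ancestor))   # ('B', None)
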
635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
636 if p1 is None:
636 if p1 is None:
637 p1, p2 = self.dirstate.parents()
637 p1, p2 = self.dirstate.parents()
638 return self.commit(files=files, text=text, user=user, date=date,
638 return self.commit(files=files, text=text, user=user, date=date,
639 p1=p1, p2=p2, wlock=wlock, extra=extra)
639 p1=p1, p2=p2, wlock=wlock, extra=extra)
640
640
641 def commit(self, files=None, text="", user=None, date=None,
641 def commit(self, files=None, text="", user=None, date=None,
642 match=util.always, force=False, lock=None, wlock=None,
642 match=util.always, force=False, lock=None, wlock=None,
643 force_editor=False, p1=None, p2=None, extra={}):
643 force_editor=False, p1=None, p2=None, extra={}):
644
644
645 commit = []
645 commit = []
646 remove = []
646 remove = []
647 changed = []
647 changed = []
648 use_dirstate = (p1 is None) # not rawcommit
648 use_dirstate = (p1 is None) # not rawcommit
649 extra = extra.copy()
649 extra = extra.copy()
650
650
651 if use_dirstate:
651 if use_dirstate:
652 if files:
652 if files:
653 for f in files:
653 for f in files:
654 s = self.dirstate.state(f)
654 s = self.dirstate[f]
655 if s in 'nmai':
655 if s in 'nma':
656 commit.append(f)
656 commit.append(f)
657 elif s == 'r':
657 elif s == 'r':
658 remove.append(f)
658 remove.append(f)
659 else:
659 else:
660 self.ui.warn(_("%s not tracked!\n") % f)
660 self.ui.warn(_("%s not tracked!\n") % f)
661 else:
661 else:
662 changes = self.status(match=match)[:5]
662 changes = self.status(match=match)[:5]
663 modified, added, removed, deleted, unknown = changes
663 modified, added, removed, deleted, unknown = changes
664 commit = modified + added
664 commit = modified + added
665 remove = removed
665 remove = removed
666 else:
666 else:
667 commit = files
667 commit = files
668
668
669 if use_dirstate:
669 if use_dirstate:
670 p1, p2 = self.dirstate.parents()
670 p1, p2 = self.dirstate.parents()
671 update_dirstate = True
671 update_dirstate = True
672 else:
672 else:
673 p1, p2 = p1, p2 or nullid
673 p1, p2 = p1, p2 or nullid
674 update_dirstate = (self.dirstate.parents()[0] == p1)
674 update_dirstate = (self.dirstate.parents()[0] == p1)
675
675
676 c1 = self.changelog.read(p1)
676 c1 = self.changelog.read(p1)
677 c2 = self.changelog.read(p2)
677 c2 = self.changelog.read(p2)
678 m1 = self.manifest.read(c1[0]).copy()
678 m1 = self.manifest.read(c1[0]).copy()
679 m2 = self.manifest.read(c2[0])
679 m2 = self.manifest.read(c2[0])
680
680
681 if use_dirstate:
681 if use_dirstate:
682 branchname = self.workingctx().branch()
682 branchname = self.workingctx().branch()
683 try:
683 try:
684 branchname = branchname.decode('UTF-8').encode('UTF-8')
684 branchname = branchname.decode('UTF-8').encode('UTF-8')
685 except UnicodeDecodeError:
685 except UnicodeDecodeError:
686 raise util.Abort(_('branch name not in UTF-8!'))
686 raise util.Abort(_('branch name not in UTF-8!'))
687 else:
687 else:
688 branchname = ""
688 branchname = ""
689
689
690 if use_dirstate:
690 if use_dirstate:
691 oldname = c1[5].get("branch") # stored in UTF-8
691 oldname = c1[5].get("branch") # stored in UTF-8
692 if (not commit and not remove and not force and p2 == nullid
692 if (not commit and not remove and not force and p2 == nullid
693 and branchname == oldname):
693 and branchname == oldname):
694 self.ui.status(_("nothing changed\n"))
694 self.ui.status(_("nothing changed\n"))
695 return None
695 return None
696
696
697 xp1 = hex(p1)
697 xp1 = hex(p1)
698 if p2 == nullid: xp2 = ''
698 if p2 == nullid: xp2 = ''
699 else: xp2 = hex(p2)
699 else: xp2 = hex(p2)
700
700
701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
702
702
703 if not wlock:
703 if not wlock:
704 wlock = self.wlock()
704 wlock = self.wlock()
705 if not lock:
705 if not lock:
706 lock = self.lock()
706 lock = self.lock()
707 tr = self.transaction()
707 tr = self.transaction()
708
708
709 # check in files
709 # check in files
710 new = {}
710 new = {}
711 linkrev = self.changelog.count()
711 linkrev = self.changelog.count()
712 commit.sort()
712 commit.sort()
713 is_exec = util.execfunc(self.root, m1.execf)
713 is_exec = util.execfunc(self.root, m1.execf)
714 is_link = util.linkfunc(self.root, m1.linkf)
714 is_link = util.linkfunc(self.root, m1.linkf)
715 for f in commit:
715 for f in commit:
716 self.ui.note(f + "\n")
716 self.ui.note(f + "\n")
717 try:
717 try:
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 new_exec = is_exec(f)
719 new_exec = is_exec(f)
720 new_link = is_link(f)
720 new_link = is_link(f)
721 if not changed or changed[-1] != f:
721 if not changed or changed[-1] != f:
722 # mention the file in the changelog if some flag changed,
722 # mention the file in the changelog if some flag changed,
723 # even if there was no content change.
723 # even if there was no content change.
724 old_exec = m1.execf(f)
724 old_exec = m1.execf(f)
725 old_link = m1.linkf(f)
725 old_link = m1.linkf(f)
726 if old_exec != new_exec or old_link != new_link:
726 if old_exec != new_exec or old_link != new_link:
727 changed.append(f)
727 changed.append(f)
728 m1.set(f, new_exec, new_link)
728 m1.set(f, new_exec, new_link)
729 except (OSError, IOError):
729 except (OSError, IOError):
730 if use_dirstate:
730 if use_dirstate:
731 self.ui.warn(_("trouble committing %s!\n") % f)
731 self.ui.warn(_("trouble committing %s!\n") % f)
732 raise
732 raise
733 else:
733 else:
734 remove.append(f)
734 remove.append(f)
735
735
736 # update manifest
736 # update manifest
737 m1.update(new)
737 m1.update(new)
738 remove.sort()
738 remove.sort()
739 removed = []
739 removed = []
740
740
741 for f in remove:
741 for f in remove:
742 if f in m1:
742 if f in m1:
743 del m1[f]
743 del m1[f]
744 removed.append(f)
744 removed.append(f)
745 elif f in m2:
745 elif f in m2:
746 removed.append(f)
746 removed.append(f)
747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
748
748
749 # add changeset
749 # add changeset
750 new = new.keys()
750 new = new.keys()
751 new.sort()
751 new.sort()
752
752
753 user = user or self.ui.username()
753 user = user or self.ui.username()
754 if not text or force_editor:
754 if not text or force_editor:
755 edittext = []
755 edittext = []
756 if text:
756 if text:
757 edittext.append(text)
757 edittext.append(text)
758 edittext.append("")
758 edittext.append("")
759 edittext.append("HG: user: %s" % user)
759 edittext.append("HG: user: %s" % user)
760 if p2 != nullid:
760 if p2 != nullid:
761 edittext.append("HG: branch merge")
761 edittext.append("HG: branch merge")
762 if branchname:
762 if branchname:
763 edittext.append("HG: branch %s" % util.tolocal(branchname))
763 edittext.append("HG: branch %s" % util.tolocal(branchname))
764 edittext.extend(["HG: changed %s" % f for f in changed])
764 edittext.extend(["HG: changed %s" % f for f in changed])
765 edittext.extend(["HG: removed %s" % f for f in removed])
765 edittext.extend(["HG: removed %s" % f for f in removed])
766 if not changed and not remove:
766 if not changed and not remove:
767 edittext.append("HG: no files changed")
767 edittext.append("HG: no files changed")
768 edittext.append("")
768 edittext.append("")
769 # run editor in the repository root
769 # run editor in the repository root
770 olddir = os.getcwd()
770 olddir = os.getcwd()
771 os.chdir(self.root)
771 os.chdir(self.root)
772 text = self.ui.edit("\n".join(edittext), user)
772 text = self.ui.edit("\n".join(edittext), user)
773 os.chdir(olddir)
773 os.chdir(olddir)
774
774
775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
776 while lines and not lines[0]:
776 while lines and not lines[0]:
777 del lines[0]
777 del lines[0]
778 if not lines:
778 if not lines:
779 return None
779 return None
780 text = '\n'.join(lines)
780 text = '\n'.join(lines)
781 if branchname:
781 if branchname:
782 extra["branch"] = branchname
782 extra["branch"] = branchname
783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
784 user, date, extra)
784 user, date, extra)
785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
786 parent2=xp2)
786 parent2=xp2)
787 tr.close()
787 tr.close()
788
788
789 if self.branchcache and "branch" in extra:
789 if self.branchcache and "branch" in extra:
790 self.branchcache[util.tolocal(extra["branch"])] = n
790 self.branchcache[util.tolocal(extra["branch"])] = n
791
791
792 if use_dirstate or update_dirstate:
792 if use_dirstate or update_dirstate:
793 self.dirstate.setparents(n)
793 self.dirstate.setparents(n)
794 if use_dirstate:
794 if use_dirstate:
795 for f in new:
795 for f in new:
796 self.dirstate.normal(f)
796 self.dirstate.normal(f)
797 for f in removed:
797 for f in removed:
798 self.dirstate.forget(f)
798 self.dirstate.forget(f)
799
799
800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
801 return n
801 return n
802
802
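When commit() above is given an explicit file list, it now indexes the dirstate directly and sorts each file by its state letter: 'n', 'm' and 'a' go to the commit list, 'r' to the remove list, and anything else is reported as untracked. The same classification over a plain dict, with invented file names and '?' standing in for the untracked state:

# Hypothetical dirstate snapshot: filename -> one-letter state.
states = {"kept.txt": "n", "new.txt": "a", "gone.txt": "r"}

def classify(files, states):
    commit, remove, untracked = [], [], []
    for f in files:
        s = states.get(f, "?")        # '?' plays the role of "not tracked"
        if s in "nma":                # normal, merged or added: commit it
            commit.append(f)
        elif s == "r":                # marked removed: record the removal
            remove.append(f)
        else:
            untracked.append(f)       # commit() warns about these
    return commit, remove, untracked

print(classify(["kept.txt", "new.txt", "gone.txt", "stray.txt"], states))
# (['kept.txt', 'new.txt'], ['gone.txt'], ['stray.txt'])
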
803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
804 '''
804 '''
805 walk recursively through the directory tree or a given
805 walk recursively through the directory tree or a given
806 changeset, finding all files matched by the match
806 changeset, finding all files matched by the match
807 function
807 function
808
808
809 results are yielded in a tuple (src, filename), where src
809 results are yielded in a tuple (src, filename), where src
810 is one of:
810 is one of:
811 'f' the file was found in the directory tree
811 'f' the file was found in the directory tree
812 'm' the file was only in the dirstate and not in the tree
812 'm' the file was only in the dirstate and not in the tree
813 'b' file was not found and matched badmatch
813 'b' file was not found and matched badmatch
814 '''
814 '''
815
815
816 if node:
816 if node:
817 fdict = dict.fromkeys(files)
817 fdict = dict.fromkeys(files)
818 # for dirstate.walk, files=['.'] means "walk the whole tree".
818 # for dirstate.walk, files=['.'] means "walk the whole tree".
819 # follow that here, too
819 # follow that here, too
820 fdict.pop('.', None)
820 fdict.pop('.', None)
821 mdict = self.manifest.read(self.changelog.read(node)[0])
821 mdict = self.manifest.read(self.changelog.read(node)[0])
822 mfiles = mdict.keys()
822 mfiles = mdict.keys()
823 mfiles.sort()
823 mfiles.sort()
824 for fn in mfiles:
824 for fn in mfiles:
825 for ffn in fdict:
825 for ffn in fdict:
826 # match if the file is the exact name or a directory
826 # match if the file is the exact name or a directory
827 if ffn == fn or fn.startswith("%s/" % ffn):
827 if ffn == fn or fn.startswith("%s/" % ffn):
828 del fdict[ffn]
828 del fdict[ffn]
829 break
829 break
830 if match(fn):
830 if match(fn):
831 yield 'm', fn
831 yield 'm', fn
832 ffiles = fdict.keys()
832 ffiles = fdict.keys()
833 ffiles.sort()
833 ffiles.sort()
834 for fn in ffiles:
834 for fn in ffiles:
835 if badmatch and badmatch(fn):
835 if badmatch and badmatch(fn):
836 if match(fn):
836 if match(fn):
837 yield 'b', fn
837 yield 'b', fn
838 else:
838 else:
839 self.ui.warn(_('%s: No such file in rev %s\n')
839 self.ui.warn(_('%s: No such file in rev %s\n')
840 % (self.pathto(fn), short(node)))
840 % (self.pathto(fn), short(node)))
841 else:
841 else:
842 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
842 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
843 yield src, fn
843 yield src, fn
844
844
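When walking a specific changeset, walk() above treats a requested name as matching a manifest entry either exactly or as a directory prefix (the ffn == fn or fn.startswith("%s/" % ffn) test). That check in isolation, as a tiny sketch with invented paths:

def covers(requested, manifest_file):
    # Exact file name, or a directory name that contains the file.
    return (requested == manifest_file
            or manifest_file.startswith(requested + "/"))

print(covers("src", "src/main.py"))           # True  (directory prefix)
print(covers("src/main.py", "src/main.py"))   # True  (exact match)
print(covers("src", "srclib/util.py"))        # False (not a path component)
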
845 def status(self, node1=None, node2=None, files=[], match=util.always,
845 def status(self, node1=None, node2=None, files=[], match=util.always,
846 wlock=None, list_ignored=False, list_clean=False):
846 wlock=None, list_ignored=False, list_clean=False):
847 """return status of files between two nodes or node and working directory
847 """return status of files between two nodes or node and working directory
848
848
849 If node1 is None, use the first dirstate parent instead.
849 If node1 is None, use the first dirstate parent instead.
850 If node2 is None, compare node1 with working directory.
850 If node2 is None, compare node1 with working directory.
851 """
851 """
852
852
853 def fcmp(fn, getnode):
853 def fcmp(fn, getnode):
854 t1 = self.wread(fn)
854 t1 = self.wread(fn)
855 return self.file(fn).cmp(getnode(fn), t1)
855 return self.file(fn).cmp(getnode(fn), t1)
856
856
857 def mfmatches(node):
857 def mfmatches(node):
858 change = self.changelog.read(node)
858 change = self.changelog.read(node)
859 mf = self.manifest.read(change[0]).copy()
859 mf = self.manifest.read(change[0]).copy()
860 for fn in mf.keys():
860 for fn in mf.keys():
861 if not match(fn):
861 if not match(fn):
862 del mf[fn]
862 del mf[fn]
863 return mf
863 return mf
864
864
865 modified, added, removed, deleted, unknown = [], [], [], [], []
865 modified, added, removed, deleted, unknown = [], [], [], [], []
866 ignored, clean = [], []
866 ignored, clean = [], []
867
867
868 compareworking = False
868 compareworking = False
869 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
869 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
870 compareworking = True
870 compareworking = True
871
871
872 if not compareworking:
872 if not compareworking:
873 # read the manifest from node1 before the manifest from node2,
873 # read the manifest from node1 before the manifest from node2,
874 # so that we'll hit the manifest cache if we're going through
874 # so that we'll hit the manifest cache if we're going through
875 # all the revisions in parent->child order.
875 # all the revisions in parent->child order.
876 mf1 = mfmatches(node1)
876 mf1 = mfmatches(node1)
877
877
878 mywlock = False
878 mywlock = False
879
879
880 # are we comparing the working directory?
880 # are we comparing the working directory?
881 if not node2:
881 if not node2:
882 (lookup, modified, added, removed, deleted, unknown,
882 (lookup, modified, added, removed, deleted, unknown,
883 ignored, clean) = self.dirstate.status(files, match,
883 ignored, clean) = self.dirstate.status(files, match,
884 list_ignored, list_clean)
884 list_ignored, list_clean)
885
885
886 # are we comparing working dir against its parent?
886 # are we comparing working dir against its parent?
887 if compareworking:
887 if compareworking:
888 if lookup:
888 if lookup:
889 # do a full compare of any files that might have changed
889 # do a full compare of any files that might have changed
890 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
890 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
891 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
891 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
892 nullid)
892 nullid)
893 for f in lookup:
893 for f in lookup:
894 if fcmp(f, getnode):
894 if fcmp(f, getnode):
895 modified.append(f)
895 modified.append(f)
896 else:
896 else:
897 if list_clean:
897 if list_clean:
898 clean.append(f)
898 clean.append(f)
899 if not wlock and not mywlock:
899 if not wlock and not mywlock:
900 mywlock = True
900 mywlock = True
901 try:
901 try:
902 wlock = self.wlock(wait=0)
902 wlock = self.wlock(wait=0)
903 except lock.LockException:
903 except lock.LockException:
904 pass
904 pass
905 if wlock:
905 if wlock:
906 self.dirstate.normal(f)
906 self.dirstate.normal(f)
907 else:
907 else:
908 # we are comparing working dir against non-parent
908 # we are comparing working dir against non-parent
909 # generate a pseudo-manifest for the working dir
909 # generate a pseudo-manifest for the working dir
910 # XXX: create it in dirstate.py ?
910 # XXX: create it in dirstate.py ?
911 mf2 = mfmatches(self.dirstate.parents()[0])
911 mf2 = mfmatches(self.dirstate.parents()[0])
912 is_exec = util.execfunc(self.root, mf2.execf)
912 is_exec = util.execfunc(self.root, mf2.execf)
913 is_link = util.linkfunc(self.root, mf2.linkf)
913 is_link = util.linkfunc(self.root, mf2.linkf)
914 for f in lookup + modified + added:
914 for f in lookup + modified + added:
915 mf2[f] = ""
915 mf2[f] = ""
916 mf2.set(f, is_exec(f), is_link(f))
916 mf2.set(f, is_exec(f), is_link(f))
917 for f in removed:
917 for f in removed:
918 if f in mf2:
918 if f in mf2:
919 del mf2[f]
919 del mf2[f]
920
920
921 if mywlock and wlock:
921 if mywlock and wlock:
922 wlock.release()
922 wlock.release()
923 else:
923 else:
924 # we are comparing two revisions
924 # we are comparing two revisions
925 mf2 = mfmatches(node2)
925 mf2 = mfmatches(node2)
926
926
927 if not compareworking:
927 if not compareworking:
928 # flush lists from dirstate before comparing manifests
928 # flush lists from dirstate before comparing manifests
929 modified, added, clean = [], [], []
929 modified, added, clean = [], [], []
930
930
931 # make sure to sort the files so we talk to the disk in a
931 # make sure to sort the files so we talk to the disk in a
932 # reasonable order
932 # reasonable order
933 mf2keys = mf2.keys()
933 mf2keys = mf2.keys()
934 mf2keys.sort()
934 mf2keys.sort()
935 getnode = lambda fn: mf1.get(fn, nullid)
935 getnode = lambda fn: mf1.get(fn, nullid)
936 for fn in mf2keys:
936 for fn in mf2keys:
937 if mf1.has_key(fn):
937 if mf1.has_key(fn):
938 if (mf1.flags(fn) != mf2.flags(fn) or
938 if (mf1.flags(fn) != mf2.flags(fn) or
939 (mf1[fn] != mf2[fn] and
939 (mf1[fn] != mf2[fn] and
940 (mf2[fn] != "" or fcmp(fn, getnode)))):
940 (mf2[fn] != "" or fcmp(fn, getnode)))):
941 modified.append(fn)
941 modified.append(fn)
942 elif list_clean:
942 elif list_clean:
943 clean.append(fn)
943 clean.append(fn)
944 del mf1[fn]
944 del mf1[fn]
945 else:
945 else:
946 added.append(fn)
946 added.append(fn)
947
947
948 removed = mf1.keys()
948 removed = mf1.keys()
949
949
950 # sort and return results:
950 # sort and return results:
951 for l in modified, added, removed, deleted, unknown, ignored, clean:
951 for l in modified, added, removed, deleted, unknown, ignored, clean:
952 l.sort()
952 l.sort()
953 return (modified, added, removed, deleted, unknown, ignored, clean)
953 return (modified, added, removed, deleted, unknown, ignored, clean)
954
954
955 def add(self, list, wlock=None):
955 def add(self, list, wlock=None):
956 if not wlock:
956 if not wlock:
957 wlock = self.wlock()
957 wlock = self.wlock()
958 for f in list:
958 for f in list:
959 p = self.wjoin(f)
959 p = self.wjoin(f)
960 try:
960 try:
961 st = os.lstat(p)
961 st = os.lstat(p)
962 except:
962 except:
963 self.ui.warn(_("%s does not exist!\n") % f)
963 self.ui.warn(_("%s does not exist!\n") % f)
964 continue
964 continue
965 if st.st_size > 10000000:
965 if st.st_size > 10000000:
966 self.ui.warn(_("%s: files over 10MB may cause memory and"
966 self.ui.warn(_("%s: files over 10MB may cause memory and"
967 " performance problems\n"
967 " performance problems\n"
968 "(use 'hg revert %s' to unadd the file)\n")
968 "(use 'hg revert %s' to unadd the file)\n")
969 % (f, f))
969 % (f, f))
970 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
970 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
971 self.ui.warn(_("%s not added: only files and symlinks "
971 self.ui.warn(_("%s not added: only files and symlinks "
972 "supported currently\n") % f)
972 "supported currently\n") % f)
973 elif self.dirstate.state(f) in 'an':
973 elif self.dirstate[f] in 'an':
974 self.ui.warn(_("%s already tracked!\n") % f)
974 self.ui.warn(_("%s already tracked!\n") % f)
975 else:
975 else:
976 self.dirstate.add(f)
976 self.dirstate.add(f)
977
977
978 def forget(self, list, wlock=None):
978 def forget(self, list, wlock=None):
979 if not wlock:
979 if not wlock:
980 wlock = self.wlock()
980 wlock = self.wlock()
981 for f in list:
981 for f in list:
982 if self.dirstate.state(f) not in 'ai':
982 if self.dirstate[f] != 'a':
983 self.ui.warn(_("%s not added!\n") % f)
983 self.ui.warn(_("%s not added!\n") % f)
984 else:
984 else:
985 self.dirstate.forget(f)
985 self.dirstate.forget(f)
986
986
987 def remove(self, list, unlink=False, wlock=None):
987 def remove(self, list, unlink=False, wlock=None):
988 if unlink:
988 if unlink:
989 for f in list:
989 for f in list:
990 try:
990 try:
991 util.unlink(self.wjoin(f))
991 util.unlink(self.wjoin(f))
992 except OSError, inst:
992 except OSError, inst:
993 if inst.errno != errno.ENOENT:
993 if inst.errno != errno.ENOENT:
994 raise
994 raise
995 if not wlock:
995 if not wlock:
996 wlock = self.wlock()
996 wlock = self.wlock()
997 for f in list:
997 for f in list:
998 if unlink and os.path.exists(self.wjoin(f)):
998 if unlink and os.path.exists(self.wjoin(f)):
999 self.ui.warn(_("%s still exists!\n") % f)
999 self.ui.warn(_("%s still exists!\n") % f)
1000 elif self.dirstate.state(f) == 'a':
1000 elif self.dirstate[f] == 'a':
1001 self.dirstate.forget(f)
1001 self.dirstate.forget(f)
1002 elif f not in self.dirstate:
1002 elif f not in self.dirstate:
1003 self.ui.warn(_("%s not tracked!\n") % f)
1003 self.ui.warn(_("%s not tracked!\n") % f)
1004 else:
1004 else:
1005 self.dirstate.remove(f)
1005 self.dirstate.remove(f)
1006
1006
1007 def undelete(self, list, wlock=None):
1007 def undelete(self, list, wlock=None):
1008 p = self.dirstate.parents()[0]
1008 p = self.dirstate.parents()[0]
1009 mn = self.changelog.read(p)[0]
1009 mn = self.changelog.read(p)[0]
1010 m = self.manifest.read(mn)
1010 m = self.manifest.read(mn)
1011 if not wlock:
1011 if not wlock:
1012 wlock = self.wlock()
1012 wlock = self.wlock()
1013 for f in list:
1013 for f in list:
1014 if self.dirstate.state(f) not in "r":
1014 if self.dirstate[f] != 'r':
1015 self.ui.warn("%s not removed!\n" % f)
1015 self.ui.warn("%s not removed!\n" % f)
1016 else:
1016 else:
1017 t = self.file(f).read(m[f])
1017 t = self.file(f).read(m[f])
1018 self.wwrite(f, t, m.flags(f))
1018 self.wwrite(f, t, m.flags(f))
1019 self.dirstate.normal(f)
1019 self.dirstate.normal(f)
1020
1020
1021 def copy(self, source, dest, wlock=None):
1021 def copy(self, source, dest, wlock=None):
1022 p = self.wjoin(dest)
1022 p = self.wjoin(dest)
1023 if not (os.path.exists(p) or os.path.islink(p)):
1023 if not (os.path.exists(p) or os.path.islink(p)):
1024 self.ui.warn(_("%s does not exist!\n") % dest)
1024 self.ui.warn(_("%s does not exist!\n") % dest)
1025 elif not (os.path.isfile(p) or os.path.islink(p)):
1025 elif not (os.path.isfile(p) or os.path.islink(p)):
1026 self.ui.warn(_("copy failed: %s is not a file or a "
1026 self.ui.warn(_("copy failed: %s is not a file or a "
1027 "symbolic link\n") % dest)
1027 "symbolic link\n") % dest)
1028 else:
1028 else:
1029 if not wlock:
1029 if not wlock:
1030 wlock = self.wlock()
1030 wlock = self.wlock()
1031 if self.dirstate.state(dest) == '?':
1031 if dest not in self.dirstate:
1032 self.dirstate.add(dest)
1032 self.dirstate.add(dest)
1033 self.dirstate.copy(source, dest)
1033 self.dirstate.copy(source, dest)
1034
1034
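The add, forget, remove, undelete and copy methods above all rely on the dirstate protocol this changeset introduces: membership via "f in self.dirstate" and a one-letter state via "self.dirstate[f]", with untracked files reading as '?'. A stand-in class sketching that protocol; the _map dict and its contents are assumptions of the sketch, not the real dirstate internals:

class ToyDirstate(object):
    def __init__(self):
        self._map = {}                 # filename -> state character

    def __contains__(self, f):         # enables: f in dirstate
        return f in self._map

    def __getitem__(self, f):          # enables: dirstate[f]
        # Untracked files read as '?' rather than raising KeyError, so callers
        # can write checks like dirstate[f] == 'a' without guarding first.
        return self._map.get(f, "?")

    def add(self, f):
        self._map[f] = "a"

ds = ToyDirstate()
ds.add("hello.py")
print("hello.py" in ds, ds["hello.py"], ds["other.py"])   # True a ?
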
1035 def heads(self, start=None):
1035 def heads(self, start=None):
1036 heads = self.changelog.heads(start)
1036 heads = self.changelog.heads(start)
1037 # sort the output in rev descending order
1037 # sort the output in rev descending order
1038 heads = [(-self.changelog.rev(h), h) for h in heads]
1038 heads = [(-self.changelog.rev(h), h) for h in heads]
1039 heads.sort()
1039 heads.sort()
1040 return [n for (r, n) in heads]
1040 return [n for (r, n) in heads]
1041
1041
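heads() above orders nodes newest-first by decorating each node with the negative of its revision number, sorting, and stripping the decoration. The same idiom on toy data (the node names and revision map are invented):

nodes = ["aaa", "bbb", "ccc"]
rev = {"aaa": 0, "bbb": 2, "ccc": 1}        # invented node -> revision map

# Negate the revision so an ordinary ascending sort yields newest-first.
decorated = [(-rev[n], n) for n in nodes]
decorated.sort()
print([n for (_, n) in decorated])          # ['bbb', 'ccc', 'aaa']
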
1042 def branchheads(self, branch, start=None):
1042 def branchheads(self, branch, start=None):
1043 branches = self.branchtags()
1043 branches = self.branchtags()
1044 if branch not in branches:
1044 if branch not in branches:
1045 return []
1045 return []
1046 # The basic algorithm is this:
1046 # The basic algorithm is this:
1047 #
1047 #
1048 # Start from the branch tip since there are no later revisions that can
1048 # Start from the branch tip since there are no later revisions that can
1049 # possibly be in this branch, and the tip is a guaranteed head.
1049 # possibly be in this branch, and the tip is a guaranteed head.
1050 #
1050 #
1051 # Remember the tip's parents as the first ancestors, since these by
1051 # Remember the tip's parents as the first ancestors, since these by
1052 # definition are not heads.
1052 # definition are not heads.
1053 #
1053 #
1054 # Step backwards from the branch tip through all the revisions. We are
1054 # Step backwards from the branch tip through all the revisions. We are
1055 # guaranteed by the rules of Mercurial that we will now be visiting the
1055 # guaranteed by the rules of Mercurial that we will now be visiting the
1056 # nodes in reverse topological order (children before parents).
1056 # nodes in reverse topological order (children before parents).
1057 #
1057 #
1058 # If a revision is one of the ancestors of a head then we can toss it
1058 # If a revision is one of the ancestors of a head then we can toss it
1059 # out of the ancestors set (we've already found it and won't be
1059 # out of the ancestors set (we've already found it and won't be
1060 # visiting it again) and put its parents in the ancestors set.
1060 # visiting it again) and put its parents in the ancestors set.
1061 #
1061 #
1062 # Otherwise, if a revision is in the branch it's another head, since it
1062 # Otherwise, if a revision is in the branch it's another head, since it
1063 # wasn't in the ancestor list of an existing head. So add it to the
1063 # wasn't in the ancestor list of an existing head. So add it to the
1064 # head list, and add its parents to the ancestor list.
1064 # head list, and add its parents to the ancestor list.
1065 #
1065 #
1066 # If it is not in the branch ignore it.
1066 # If it is not in the branch ignore it.
1067 #
1067 #
1068 # Once we have a list of heads, use nodesbetween to filter out all the
1068 # Once we have a list of heads, use nodesbetween to filter out all the
1069 # heads that cannot be reached from startrev. There may be a more
1069 # heads that cannot be reached from startrev. There may be a more
1070 # efficient way to do this as part of the previous algorithm.
1070 # efficient way to do this as part of the previous algorithm.
1071
1071
1072 set = util.set
1072 set = util.set
1073 heads = [self.changelog.rev(branches[branch])]
1073 heads = [self.changelog.rev(branches[branch])]
1074 # Don't care if ancestors contains nullrev or not.
1074 # Don't care if ancestors contains nullrev or not.
1075 ancestors = set(self.changelog.parentrevs(heads[0]))
1075 ancestors = set(self.changelog.parentrevs(heads[0]))
1076 for rev in xrange(heads[0] - 1, nullrev, -1):
1076 for rev in xrange(heads[0] - 1, nullrev, -1):
1077 if rev in ancestors:
1077 if rev in ancestors:
1078 ancestors.update(self.changelog.parentrevs(rev))
1078 ancestors.update(self.changelog.parentrevs(rev))
1079 ancestors.remove(rev)
1079 ancestors.remove(rev)
1080 elif self.changectx(rev).branch() == branch:
1080 elif self.changectx(rev).branch() == branch:
1081 heads.append(rev)
1081 heads.append(rev)
1082 ancestors.update(self.changelog.parentrevs(rev))
1082 ancestors.update(self.changelog.parentrevs(rev))
1083 heads = [self.changelog.node(rev) for rev in heads]
1083 heads = [self.changelog.node(rev) for rev in heads]
1084 if start is not None:
1084 if start is not None:
1085 heads = self.changelog.nodesbetween([start], heads)[2]
1085 heads = self.changelog.nodesbetween([start], heads)[2]
1086 return heads
1086 return heads
1087
1087
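The comment block in branchheads() describes the algorithm: start at the branch tip, walk revision numbers downwards so children are seen before parents, discard revisions already known to be ancestors of a found head while queueing their parents, and count any other on-branch revision as a new head. A compact sketch on a toy numbered DAG, with plain dicts in place of the changelog:

# Toy history: revision -> parent revisions; -1 plays the part of nullrev.
parents = {0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, -1), 4: (2, -1)}
on_branch = {0, 1, 2, 3, 4}            # every revision is on the branch here
tip = 4

heads = [tip]
ancestors = set(parents[tip])
for rev in range(tip - 1, -1, -1):     # children are visited before parents
    if rev in ancestors:
        ancestors.update(parents[rev]) # known non-head: queue its parents
        ancestors.remove(rev)
    elif rev in on_branch:
        heads.append(rev)              # on-branch rev nobody reached: a head
        ancestors.update(parents[rev])

print(heads)   # [4, 3]: revs 3 and 4 are heads, 0..2 are their ancestors
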
1088 def branches(self, nodes):
1088 def branches(self, nodes):
1089 if not nodes:
1089 if not nodes:
1090 nodes = [self.changelog.tip()]
1090 nodes = [self.changelog.tip()]
1091 b = []
1091 b = []
1092 for n in nodes:
1092 for n in nodes:
1093 t = n
1093 t = n
1094 while 1:
1094 while 1:
1095 p = self.changelog.parents(n)
1095 p = self.changelog.parents(n)
1096 if p[1] != nullid or p[0] == nullid:
1096 if p[1] != nullid or p[0] == nullid:
1097 b.append((t, n, p[0], p[1]))
1097 b.append((t, n, p[0], p[1]))
1098 break
1098 break
1099 n = p[0]
1099 n = p[0]
1100 return b
1100 return b
1101
1101
1102 def between(self, pairs):
1102 def between(self, pairs):
1103 r = []
1103 r = []
1104
1104
1105 for top, bottom in pairs:
1105 for top, bottom in pairs:
1106 n, l, i = top, [], 0
1106 n, l, i = top, [], 0
1107 f = 1
1107 f = 1
1108
1108
1109 while n != bottom:
1109 while n != bottom:
1110 p = self.changelog.parents(n)[0]
1110 p = self.changelog.parents(n)[0]
1111 if i == f:
1111 if i == f:
1112 l.append(n)
1112 l.append(n)
1113 f = f * 2
1113 f = f * 2
1114 n = p
1114 n = p
1115 i += 1
1115 i += 1
1116
1116
1117 r.append(l)
1117 r.append(l)
1118
1118
1119 return r
1119 return r
1120
1120
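between() above walks from top towards bottom along first parents and records nodes at exponentially growing distances (1, 2, 4, 8, ...); findincoming() later binary-searches inside the gaps. The same sampling over an assumed linear history where each revision's first parent is simply rev - 1:

def exponential_samples(top, bottom):
    # Assumed linear history: the first parent of rev n is n - 1.
    samples, n, i, f = [], top, 0, 1
    while n != bottom:
        if i == f:              # record this node and double the stride
            samples.append(n)
            f *= 2
        n -= 1                  # step to the first parent
        i += 1
    return samples

print(exponential_samples(100, 0))   # [99, 98, 96, 92, 84, 68, 36]
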
1121 def findincoming(self, remote, base=None, heads=None, force=False):
1121 def findincoming(self, remote, base=None, heads=None, force=False):
1122 """Return list of roots of the subsets of missing nodes from remote
1122 """Return list of roots of the subsets of missing nodes from remote
1123
1123
1124 If base dict is specified, assume that these nodes and their parents
1124 If base dict is specified, assume that these nodes and their parents
1125 exist on the remote side and that no child of a node of base exists
1125 exist on the remote side and that no child of a node of base exists
1126 in both remote and self.
1126 in both remote and self.
1127 Furthermore, base will be updated to include the nodes that exist in
1127 Furthermore, base will be updated to include the nodes that exist in
1128 both self and remote but none of whose children exist in both.
1128 both self and remote but none of whose children exist in both.
1129 If a list of heads is specified, return only nodes which are heads
1129 If a list of heads is specified, return only nodes which are heads
1130 or ancestors of these heads.
1130 or ancestors of these heads.
1131
1131
1132 All the ancestors of base are in self and in remote.
1132 All the ancestors of base are in self and in remote.
1133 All the descendants of the list returned are missing in self.
1133 All the descendants of the list returned are missing in self.
1134 (and so we know that the rest of the nodes are missing in remote, see
1134 (and so we know that the rest of the nodes are missing in remote, see
1135 outgoing)
1135 outgoing)
1136 """
1136 """
1137 m = self.changelog.nodemap
1137 m = self.changelog.nodemap
1138 search = []
1138 search = []
1139 fetch = {}
1139 fetch = {}
1140 seen = {}
1140 seen = {}
1141 seenbranch = {}
1141 seenbranch = {}
1142 if base == None:
1142 if base == None:
1143 base = {}
1143 base = {}
1144
1144
1145 if not heads:
1145 if not heads:
1146 heads = remote.heads()
1146 heads = remote.heads()
1147
1147
1148 if self.changelog.tip() == nullid:
1148 if self.changelog.tip() == nullid:
1149 base[nullid] = 1
1149 base[nullid] = 1
1150 if heads != [nullid]:
1150 if heads != [nullid]:
1151 return [nullid]
1151 return [nullid]
1152 return []
1152 return []
1153
1153
1154 # assume we're closer to the tip than the root
1154 # assume we're closer to the tip than the root
1155 # and start by examining the heads
1155 # and start by examining the heads
1156 self.ui.status(_("searching for changes\n"))
1156 self.ui.status(_("searching for changes\n"))
1157
1157
1158 unknown = []
1158 unknown = []
1159 for h in heads:
1159 for h in heads:
1160 if h not in m:
1160 if h not in m:
1161 unknown.append(h)
1161 unknown.append(h)
1162 else:
1162 else:
1163 base[h] = 1
1163 base[h] = 1
1164
1164
1165 if not unknown:
1165 if not unknown:
1166 return []
1166 return []
1167
1167
1168 req = dict.fromkeys(unknown)
1168 req = dict.fromkeys(unknown)
1169 reqcnt = 0
1169 reqcnt = 0
1170
1170
1171 # search through remote branches
1171 # search through remote branches
1172 # a 'branch' here is a linear segment of history, with four parts:
1172 # a 'branch' here is a linear segment of history, with four parts:
1173 # head, root, first parent, second parent
1173 # head, root, first parent, second parent
1174 # (a branch always has two parents (or none) by definition)
1174 # (a branch always has two parents (or none) by definition)
1175 unknown = remote.branches(unknown)
1175 unknown = remote.branches(unknown)
1176 while unknown:
1176 while unknown:
1177 r = []
1177 r = []
1178 while unknown:
1178 while unknown:
1179 n = unknown.pop(0)
1179 n = unknown.pop(0)
1180 if n[0] in seen:
1180 if n[0] in seen:
1181 continue
1181 continue
1182
1182
1183 self.ui.debug(_("examining %s:%s\n")
1183 self.ui.debug(_("examining %s:%s\n")
1184 % (short(n[0]), short(n[1])))
1184 % (short(n[0]), short(n[1])))
1185 if n[0] == nullid: # found the end of the branch
1185 if n[0] == nullid: # found the end of the branch
1186 pass
1186 pass
1187 elif n in seenbranch:
1187 elif n in seenbranch:
1188 self.ui.debug(_("branch already found\n"))
1188 self.ui.debug(_("branch already found\n"))
1189 continue
1189 continue
1190 elif n[1] and n[1] in m: # do we know the base?
1190 elif n[1] and n[1] in m: # do we know the base?
1191 self.ui.debug(_("found incomplete branch %s:%s\n")
1191 self.ui.debug(_("found incomplete branch %s:%s\n")
1192 % (short(n[0]), short(n[1])))
1192 % (short(n[0]), short(n[1])))
1193 search.append(n) # schedule branch range for scanning
1193 search.append(n) # schedule branch range for scanning
1194 seenbranch[n] = 1
1194 seenbranch[n] = 1
1195 else:
1195 else:
1196 if n[1] not in seen and n[1] not in fetch:
1196 if n[1] not in seen and n[1] not in fetch:
1197 if n[2] in m and n[3] in m:
1197 if n[2] in m and n[3] in m:
1198 self.ui.debug(_("found new changeset %s\n") %
1198 self.ui.debug(_("found new changeset %s\n") %
1199 short(n[1]))
1199 short(n[1]))
1200 fetch[n[1]] = 1 # earliest unknown
1200 fetch[n[1]] = 1 # earliest unknown
1201 for p in n[2:4]:
1201 for p in n[2:4]:
1202 if p in m:
1202 if p in m:
1203 base[p] = 1 # latest known
1203 base[p] = 1 # latest known
1204
1204
1205 for p in n[2:4]:
1205 for p in n[2:4]:
1206 if p not in req and p not in m:
1206 if p not in req and p not in m:
1207 r.append(p)
1207 r.append(p)
1208 req[p] = 1
1208 req[p] = 1
1209 seen[n[0]] = 1
1209 seen[n[0]] = 1
1210
1210
1211 if r:
1211 if r:
1212 reqcnt += 1
1212 reqcnt += 1
1213 self.ui.debug(_("request %d: %s\n") %
1213 self.ui.debug(_("request %d: %s\n") %
1214 (reqcnt, " ".join(map(short, r))))
1214 (reqcnt, " ".join(map(short, r))))
1215 for p in xrange(0, len(r), 10):
1215 for p in xrange(0, len(r), 10):
1216 for b in remote.branches(r[p:p+10]):
1216 for b in remote.branches(r[p:p+10]):
1217 self.ui.debug(_("received %s:%s\n") %
1217 self.ui.debug(_("received %s:%s\n") %
1218 (short(b[0]), short(b[1])))
1218 (short(b[0]), short(b[1])))
1219 unknown.append(b)
1219 unknown.append(b)
1220
1220
1221 # do binary search on the branches we found
1221 # do binary search on the branches we found
1222 while search:
1222 while search:
1223 n = search.pop(0)
1223 n = search.pop(0)
1224 reqcnt += 1
1224 reqcnt += 1
1225 l = remote.between([(n[0], n[1])])[0]
1225 l = remote.between([(n[0], n[1])])[0]
1226 l.append(n[1])
1226 l.append(n[1])
1227 p = n[0]
1227 p = n[0]
1228 f = 1
1228 f = 1
1229 for i in l:
1229 for i in l:
1230 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1230 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1231 if i in m:
1231 if i in m:
1232 if f <= 2:
1232 if f <= 2:
1233 self.ui.debug(_("found new branch changeset %s\n") %
1233 self.ui.debug(_("found new branch changeset %s\n") %
1234 short(p))
1234 short(p))
1235 fetch[p] = 1
1235 fetch[p] = 1
1236 base[i] = 1
1236 base[i] = 1
1237 else:
1237 else:
1238 self.ui.debug(_("narrowed branch search to %s:%s\n")
1238 self.ui.debug(_("narrowed branch search to %s:%s\n")
1239 % (short(p), short(i)))
1239 % (short(p), short(i)))
1240 search.append((p, i))
1240 search.append((p, i))
1241 break
1241 break
1242 p, f = i, f * 2
1242 p, f = i, f * 2
1243
1243
1244 # sanity check our fetch list
1244 # sanity check our fetch list
1245 for f in fetch.keys():
1245 for f in fetch.keys():
1246 if f in m:
1246 if f in m:
1247 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1247 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1248
1248
1249 if base.keys() == [nullid]:
1249 if base.keys() == [nullid]:
1250 if force:
1250 if force:
1251 self.ui.warn(_("warning: repository is unrelated\n"))
1251 self.ui.warn(_("warning: repository is unrelated\n"))
1252 else:
1252 else:
1253 raise util.Abort(_("repository is unrelated"))
1253 raise util.Abort(_("repository is unrelated"))
1254
1254
1255 self.ui.debug(_("found new changesets starting at ") +
1255 self.ui.debug(_("found new changesets starting at ") +
1256 " ".join([short(f) for f in fetch]) + "\n")
1256 " ".join([short(f) for f in fetch]) + "\n")
1257
1257
1258 self.ui.debug(_("%d total queries\n") % reqcnt)
1258 self.ui.debug(_("%d total queries\n") % reqcnt)
1259
1259
1260 return fetch.keys()
1260 return fetch.keys()
1261
1261
1262 def findoutgoing(self, remote, base=None, heads=None, force=False):
1262 def findoutgoing(self, remote, base=None, heads=None, force=False):
1263 """Return list of nodes that are roots of subsets not in remote
1263 """Return list of nodes that are roots of subsets not in remote
1264
1264
1265 If base dict is specified, assume that these nodes and their parents
1265 If base dict is specified, assume that these nodes and their parents
1266 exist on the remote side.
1266 exist on the remote side.
1267 If a list of heads is specified, return only nodes which are heads
1267 If a list of heads is specified, return only nodes which are heads
1268 or ancestors of these heads, and return a second element which
1268 or ancestors of these heads, and return a second element which
1269 contains all remote heads which get new children.
1269 contains all remote heads which get new children.
1270 """
1270 """
1271 if base == None:
1271 if base == None:
1272 base = {}
1272 base = {}
1273 self.findincoming(remote, base, heads, force=force)
1273 self.findincoming(remote, base, heads, force=force)
1274
1274
1275 self.ui.debug(_("common changesets up to ")
1275 self.ui.debug(_("common changesets up to ")
1276 + " ".join(map(short, base.keys())) + "\n")
1276 + " ".join(map(short, base.keys())) + "\n")
1277
1277
1278 remain = dict.fromkeys(self.changelog.nodemap)
1278 remain = dict.fromkeys(self.changelog.nodemap)
1279
1279
1280 # prune everything remote has from the tree
1280 # prune everything remote has from the tree
1281 del remain[nullid]
1281 del remain[nullid]
1282 remove = base.keys()
1282 remove = base.keys()
1283 while remove:
1283 while remove:
1284 n = remove.pop(0)
1284 n = remove.pop(0)
1285 if n in remain:
1285 if n in remain:
1286 del remain[n]
1286 del remain[n]
1287 for p in self.changelog.parents(n):
1287 for p in self.changelog.parents(n):
1288 remove.append(p)
1288 remove.append(p)
1289
1289
1290 # find every node whose parents have been pruned
1290 # find every node whose parents have been pruned
1291 subset = []
1291 subset = []
1292 # find every remote head that will get new children
1292 # find every remote head that will get new children
1293 updated_heads = {}
1293 updated_heads = {}
1294 for n in remain:
1294 for n in remain:
1295 p1, p2 = self.changelog.parents(n)
1295 p1, p2 = self.changelog.parents(n)
1296 if p1 not in remain and p2 not in remain:
1296 if p1 not in remain and p2 not in remain:
1297 subset.append(n)
1297 subset.append(n)
1298 if heads:
1298 if heads:
1299 if p1 in heads:
1299 if p1 in heads:
1300 updated_heads[p1] = True
1300 updated_heads[p1] = True
1301 if p2 in heads:
1301 if p2 in heads:
1302 updated_heads[p2] = True
1302 updated_heads[p2] = True
1303
1303
1304 # this is the set of all roots we have to push
1304 # this is the set of all roots we have to push
1305 if heads:
1305 if heads:
1306 return subset, updated_heads.keys()
1306 return subset, updated_heads.keys()
1307 else:
1307 else:
1308 return subset
1308 return subset
1309
1309
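findoutgoing() above starts from every local node, prunes everything reachable from the common base by walking parent links, and then reports the surviving nodes both of whose parents were pruned, i.e. the roots of the changesets missing remotely. A toy version over a hand-written parent map (None plays the part of nullid):

# Toy DAG: node -> parents; None plays the part of nullid.
parents = {"a": [None], "b": ["a"], "c": ["b"], "d": ["b"]}

def outgoing_roots(all_nodes, parents, base):
    remain = set(all_nodes)
    queue = list(base)
    while queue:                       # prune everything the remote already has
        n = queue.pop(0)
        if n in remain:
            remain.remove(n)
            queue.extend(parents[n])
    # Roots of the missing subsets: remaining nodes with no remaining parent.
    return [n for n in all_nodes
            if n in remain and all(p not in remain for p in parents[n])]

print(outgoing_roots(["a", "b", "c", "d"], parents, base=["b"]))   # ['c', 'd']
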
1310 def pull(self, remote, heads=None, force=False, lock=None):
1310 def pull(self, remote, heads=None, force=False, lock=None):
1311 mylock = False
1311 mylock = False
1312 if not lock:
1312 if not lock:
1313 lock = self.lock()
1313 lock = self.lock()
1314 mylock = True
1314 mylock = True
1315
1315
1316 try:
1316 try:
1317 fetch = self.findincoming(remote, force=force)
1317 fetch = self.findincoming(remote, force=force)
1318 if fetch == [nullid]:
1318 if fetch == [nullid]:
1319 self.ui.status(_("requesting all changes\n"))
1319 self.ui.status(_("requesting all changes\n"))
1320
1320
1321 if not fetch:
1321 if not fetch:
1322 self.ui.status(_("no changes found\n"))
1322 self.ui.status(_("no changes found\n"))
1323 return 0
1323 return 0
1324
1324
1325 if heads is None:
1325 if heads is None:
1326 cg = remote.changegroup(fetch, 'pull')
1326 cg = remote.changegroup(fetch, 'pull')
1327 else:
1327 else:
1328 if 'changegroupsubset' not in remote.capabilities:
1328 if 'changegroupsubset' not in remote.capabilities:
1329 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1329 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1330 cg = remote.changegroupsubset(fetch, heads, 'pull')
1330 cg = remote.changegroupsubset(fetch, heads, 'pull')
1331 return self.addchangegroup(cg, 'pull', remote.url())
1331 return self.addchangegroup(cg, 'pull', remote.url())
1332 finally:
1332 finally:
1333 if mylock:
1333 if mylock:
1334 lock.release()
1334 lock.release()
1335
1335
1336 def push(self, remote, force=False, revs=None):
1336 def push(self, remote, force=False, revs=None):
1337 # there are two ways to push to remote repo:
1337 # there are two ways to push to remote repo:
1338 #
1338 #
1339 # addchangegroup assumes local user can lock remote
1339 # addchangegroup assumes local user can lock remote
1340 # repo (local filesystem, old ssh servers).
1340 # repo (local filesystem, old ssh servers).
1341 #
1341 #
1342 # unbundle assumes local user cannot lock remote repo (new ssh
1342 # unbundle assumes local user cannot lock remote repo (new ssh
1343 # servers, http servers).
1343 # servers, http servers).
1344
1344
1345 if remote.capable('unbundle'):
1345 if remote.capable('unbundle'):
1346 return self.push_unbundle(remote, force, revs)
1346 return self.push_unbundle(remote, force, revs)
1347 return self.push_addchangegroup(remote, force, revs)
1347 return self.push_addchangegroup(remote, force, revs)
1348
1348
1349 def prepush(self, remote, force, revs):
1349 def prepush(self, remote, force, revs):
1350 base = {}
1350 base = {}
1351 remote_heads = remote.heads()
1351 remote_heads = remote.heads()
1352 inc = self.findincoming(remote, base, remote_heads, force=force)
1352 inc = self.findincoming(remote, base, remote_heads, force=force)
1353
1353
1354 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1354 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1355 if revs is not None:
1355 if revs is not None:
1356 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1356 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1357 else:
1357 else:
1358 bases, heads = update, self.changelog.heads()
1358 bases, heads = update, self.changelog.heads()
1359
1359
1360 if not bases:
1360 if not bases:
1361 self.ui.status(_("no changes found\n"))
1361 self.ui.status(_("no changes found\n"))
1362 return None, 1
1362 return None, 1
1363 elif not force:
1363 elif not force:
1364 # check if we're creating new remote heads
1364 # check if we're creating new remote heads
1365 # to be a remote head after push, node must be either
1365 # to be a remote head after push, node must be either
1366 # - unknown locally
1366 # - unknown locally
1367 # - a local outgoing head descended from update
1367 # - a local outgoing head descended from update
1368 # - a remote head that's known locally and not
1368 # - a remote head that's known locally and not
1369 # ancestral to an outgoing head
1369 # ancestral to an outgoing head
1370
1370
1371 warn = 0
1371 warn = 0
1372
1372
1373 if remote_heads == [nullid]:
1373 if remote_heads == [nullid]:
1374 warn = 0
1374 warn = 0
1375 elif not revs and len(heads) > len(remote_heads):
1375 elif not revs and len(heads) > len(remote_heads):
1376 warn = 1
1376 warn = 1
1377 else:
1377 else:
1378 newheads = list(heads)
1378 newheads = list(heads)
1379 for r in remote_heads:
1379 for r in remote_heads:
1380 if r in self.changelog.nodemap:
1380 if r in self.changelog.nodemap:
1381 desc = self.changelog.heads(r, heads)
1381 desc = self.changelog.heads(r, heads)
1382 l = [h for h in heads if h in desc]
1382 l = [h for h in heads if h in desc]
1383 if not l:
1383 if not l:
1384 newheads.append(r)
1384 newheads.append(r)
1385 else:
1385 else:
1386 newheads.append(r)
1386 newheads.append(r)
1387 if len(newheads) > len(remote_heads):
1387 if len(newheads) > len(remote_heads):
1388 warn = 1
1388 warn = 1
1389
1389
1390 if warn:
1390 if warn:
1391 self.ui.warn(_("abort: push creates new remote branches!\n"))
1391 self.ui.warn(_("abort: push creates new remote branches!\n"))
1392 self.ui.status(_("(did you forget to merge?"
1392 self.ui.status(_("(did you forget to merge?"
1393 " use push -f to force)\n"))
1393 " use push -f to force)\n"))
1394 return None, 1
1394 return None, 1
1395 elif inc:
1395 elif inc:
1396 self.ui.warn(_("note: unsynced remote changes!\n"))
1396 self.ui.warn(_("note: unsynced remote changes!\n"))
1397
1397
1398
1398
1399 if revs is None:
1399 if revs is None:
1400 cg = self.changegroup(update, 'push')
1400 cg = self.changegroup(update, 'push')
1401 else:
1401 else:
1402 cg = self.changegroupsubset(update, revs, 'push')
1402 cg = self.changegroupsubset(update, revs, 'push')
1403 return cg, remote_heads
1403 return cg, remote_heads
1404
1404
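# Illustrative sketch (not part of localrepo.py): the head-counting idea behind
# the prepush() warning above, reduced to plain sets on a toy DAG.  'parents'
# maps each node to its parent nodes; all names here are invented for the
# example only.
def toy_heads(parents, nodes):
    # a head is a node that no other node in the set claims as a parent
    nonheads = set(p for n in nodes for p in parents.get(n, ()))
    return set(nodes) - nonheads

def push_would_add_heads(parents, local_nodes, remote_nodes):
    # after the push the remote holds the union of both node sets; warn when
    # that union has more heads than the remote already had
    before = toy_heads(parents, remote_nodes)
    after = toy_heads(parents, set(local_nodes) | set(remote_nodes))
    return len(after) > len(before)

# example: remote has a->b, local adds a sibling head c on top of a
_parents = {'b': ('a',), 'c': ('a',)}
assert push_would_add_heads(_parents, ['a', 'b', 'c'], ['a', 'b'])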
1405 def push_addchangegroup(self, remote, force, revs):
1405 def push_addchangegroup(self, remote, force, revs):
1406 lock = remote.lock()
1406 lock = remote.lock()
1407
1407
1408 ret = self.prepush(remote, force, revs)
1408 ret = self.prepush(remote, force, revs)
1409 if ret[0] is not None:
1409 if ret[0] is not None:
1410 cg, remote_heads = ret
1410 cg, remote_heads = ret
1411 return remote.addchangegroup(cg, 'push', self.url())
1411 return remote.addchangegroup(cg, 'push', self.url())
1412 return ret[1]
1412 return ret[1]
1413
1413
1414 def push_unbundle(self, remote, force, revs):
1414 def push_unbundle(self, remote, force, revs):
1415 # local repo finds heads on server, finds out what revs it
1415 # local repo finds heads on server, finds out what revs it
1416 # must push. once revs transferred, if server finds it has
1416 # must push. once revs transferred, if server finds it has
1417 # different heads (someone else won commit/push race), server
1417 # different heads (someone else won commit/push race), server
1418 # aborts.
1418 # aborts.
1419
1419
1420 ret = self.prepush(remote, force, revs)
1420 ret = self.prepush(remote, force, revs)
1421 if ret[0] is not None:
1421 if ret[0] is not None:
1422 cg, remote_heads = ret
1422 cg, remote_heads = ret
1423 if force: remote_heads = ['force']
1423 if force: remote_heads = ['force']
1424 return remote.unbundle(cg, remote_heads, 'push')
1424 return remote.unbundle(cg, remote_heads, 'push')
1425 return ret[1]
1425 return ret[1]
1426
1426
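# Illustrative sketch (not part of localrepo.py): the capability dispatch that
# push() performs before calling push_unbundle()/push_addchangegroup().  The
# FakeRemote class is invented purely to make the example self-contained.
class FakeRemote(object):
    def __init__(self, caps):
        self.caps = set(caps)
    def capable(self, name):
        return name in self.caps

def choose_push_method(remote):
    # 'unbundle' lets the server check its own heads, so no remote lock is
    # taken; otherwise fall back to locking plus addchangegroup
    if remote.capable('unbundle'):
        return 'push_unbundle'
    return 'push_addchangegroup'

assert choose_push_method(FakeRemote(['unbundle', 'lookup'])) == 'push_unbundle'
assert choose_push_method(FakeRemote(['lookup'])) == 'push_addchangegroup'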
1427 def changegroupinfo(self, nodes):
1427 def changegroupinfo(self, nodes):
1428 self.ui.note(_("%d changesets found\n") % len(nodes))
1428 self.ui.note(_("%d changesets found\n") % len(nodes))
1429 if self.ui.debugflag:
1429 if self.ui.debugflag:
1430 self.ui.debug(_("List of changesets:\n"))
1430 self.ui.debug(_("List of changesets:\n"))
1431 for node in nodes:
1431 for node in nodes:
1432 self.ui.debug("%s\n" % hex(node))
1432 self.ui.debug("%s\n" % hex(node))
1433
1433
1434 def changegroupsubset(self, bases, heads, source):
1434 def changegroupsubset(self, bases, heads, source):
1435 """This function generates a changegroup consisting of all the nodes
1435 """This function generates a changegroup consisting of all the nodes
1436 that are descendants of any of the bases, and ancestors of any of
1436 that are descendants of any of the bases, and ancestors of any of
1437 the heads.
1437 the heads.
1438
1438
1439 It is fairly complex as determining which filenodes and which
1439 It is fairly complex as determining which filenodes and which
1440 manifest nodes need to be included for the changeset to be complete
1440 manifest nodes need to be included for the changeset to be complete
1441 is non-trivial.
1441 is non-trivial.
1442
1442
1443 Another wrinkle is doing the reverse, figuring out which changeset in
1443 Another wrinkle is doing the reverse, figuring out which changeset in
1444 the changegroup a particular filenode or manifestnode belongs to."""
1444 the changegroup a particular filenode or manifestnode belongs to."""
1445
1445
1446 self.hook('preoutgoing', throw=True, source=source)
1446 self.hook('preoutgoing', throw=True, source=source)
1447
1447
1448 # Set up some initial variables
1448 # Set up some initial variables
1449 # Make it easy to refer to self.changelog
1449 # Make it easy to refer to self.changelog
1450 cl = self.changelog
1450 cl = self.changelog
1451 # msng is short for missing - compute the list of changesets in this
1451 # msng is short for missing - compute the list of changesets in this
1452 # changegroup.
1452 # changegroup.
1453 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1453 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1454 self.changegroupinfo(msng_cl_lst)
1454 self.changegroupinfo(msng_cl_lst)
1455 # Some bases may turn out to be superfluous, and some heads may be
1455 # Some bases may turn out to be superfluous, and some heads may be
1456 # too. nodesbetween will return the minimal set of bases and heads
1456 # too. nodesbetween will return the minimal set of bases and heads
1457 # necessary to re-create the changegroup.
1457 # necessary to re-create the changegroup.
1458
1458
1459 # Known heads are the list of heads that it is assumed the recipient
1459 # Known heads are the list of heads that it is assumed the recipient
1460 # of this changegroup will know about.
1460 # of this changegroup will know about.
1461 knownheads = {}
1461 knownheads = {}
1462 # We assume that all parents of bases are known heads.
1462 # We assume that all parents of bases are known heads.
1463 for n in bases:
1463 for n in bases:
1464 for p in cl.parents(n):
1464 for p in cl.parents(n):
1465 if p != nullid:
1465 if p != nullid:
1466 knownheads[p] = 1
1466 knownheads[p] = 1
1467 knownheads = knownheads.keys()
1467 knownheads = knownheads.keys()
1468 if knownheads:
1468 if knownheads:
1469 # Now that we know what heads are known, we can compute which
1469 # Now that we know what heads are known, we can compute which
1470 # changesets are known. The recipient must know about all
1470 # changesets are known. The recipient must know about all
1471 # changesets required to reach the known heads from the null
1471 # changesets required to reach the known heads from the null
1472 # changeset.
1472 # changeset.
1473 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1473 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1474 junk = None
1474 junk = None
1475 # Transform the list into an ersatz set.
1475 # Transform the list into an ersatz set.
1476 has_cl_set = dict.fromkeys(has_cl_set)
1476 has_cl_set = dict.fromkeys(has_cl_set)
1477 else:
1477 else:
1478 # If there were no known heads, the recipient cannot be assumed to
1478 # If there were no known heads, the recipient cannot be assumed to
1479 # know about any changesets.
1479 # know about any changesets.
1480 has_cl_set = {}
1480 has_cl_set = {}
1481
1481
1482 # Make it easy to refer to self.manifest
1482 # Make it easy to refer to self.manifest
1483 mnfst = self.manifest
1483 mnfst = self.manifest
1484 # We don't know which manifests are missing yet
1484 # We don't know which manifests are missing yet
1485 msng_mnfst_set = {}
1485 msng_mnfst_set = {}
1486 # Nor do we know which filenodes are missing.
1486 # Nor do we know which filenodes are missing.
1487 msng_filenode_set = {}
1487 msng_filenode_set = {}
1488
1488
1489 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1489 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1490 junk = None
1490 junk = None
1491
1491
1492 # A changeset always belongs to itself, so the changenode lookup
1492 # A changeset always belongs to itself, so the changenode lookup
1493 # function for a changenode is identity.
1493 # function for a changenode is identity.
1494 def identity(x):
1494 def identity(x):
1495 return x
1495 return x
1496
1496
1497 # A function generating function. Sets up an environment for the
1497 # A function generating function. Sets up an environment for the
1498 # inner function.
1498 # inner function.
1499 def cmp_by_rev_func(revlog):
1499 def cmp_by_rev_func(revlog):
1500 # Compare two nodes by their revision number in the environment's
1500 # Compare two nodes by their revision number in the environment's
1501 # revision history. Since the revision number both represents the
1501 # revision history. Since the revision number both represents the
1502 # most efficient order to read the nodes in, and represents a
1502 # most efficient order to read the nodes in, and represents a
1503 # topological sorting of the nodes, this function is often useful.
1503 # topological sorting of the nodes, this function is often useful.
1504 def cmp_by_rev(a, b):
1504 def cmp_by_rev(a, b):
1505 return cmp(revlog.rev(a), revlog.rev(b))
1505 return cmp(revlog.rev(a), revlog.rev(b))
1506 return cmp_by_rev
1506 return cmp_by_rev
1507
1507
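# Illustrative sketch (not part of localrepo.py): the closure pattern used by
# cmp_by_rev_func() above, shown on a minimal stand-in for a revlog.  The code
# above builds a Python 2 cmp-style comparator; the same idea expressed as a
# key function is shown here so the snippet also runs on Python 3.
class ToyRevlog(object):
    def __init__(self, nodes):
        self._revs = dict((n, i) for i, n in enumerate(nodes))
    def rev(self, node):
        return self._revs[node]

def rev_key_func(revlog):
    # capture 'revlog' in a closure, exactly as cmp_by_rev_func captures it
    def rev_key(node):
        return revlog.rev(node)
    return rev_key

log = ToyRevlog(['n0', 'n1', 'n2', 'n3'])
assert sorted(['n3', 'n0', 'n2'], key=rev_key_func(log)) == ['n0', 'n2', 'n3']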
1508 # If we determine that a particular file or manifest node must be a
1508 # If we determine that a particular file or manifest node must be a
1509 # node that the recipient of the changegroup will already have, we can
1509 # node that the recipient of the changegroup will already have, we can
1510 # also assume the recipient will have all the parents. This function
1510 # also assume the recipient will have all the parents. This function
1511 # prunes them from the set of missing nodes.
1511 # prunes them from the set of missing nodes.
1512 def prune_parents(revlog, hasset, msngset):
1512 def prune_parents(revlog, hasset, msngset):
1513 haslst = hasset.keys()
1513 haslst = hasset.keys()
1514 haslst.sort(cmp_by_rev_func(revlog))
1514 haslst.sort(cmp_by_rev_func(revlog))
1515 for node in haslst:
1515 for node in haslst:
1516 parentlst = [p for p in revlog.parents(node) if p != nullid]
1516 parentlst = [p for p in revlog.parents(node) if p != nullid]
1517 while parentlst:
1517 while parentlst:
1518 n = parentlst.pop()
1518 n = parentlst.pop()
1519 if n not in hasset:
1519 if n not in hasset:
1520 hasset[n] = 1
1520 hasset[n] = 1
1521 p = [p for p in revlog.parents(n) if p != nullid]
1521 p = [p for p in revlog.parents(n) if p != nullid]
1522 parentlst.extend(p)
1522 parentlst.extend(p)
1523 for n in hasset:
1523 for n in hasset:
1524 msngset.pop(n, None)
1524 msngset.pop(n, None)
1525
1525
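# Illustrative sketch (not part of localrepo.py): what prune_parents() above
# accomplishes, on a toy ancestry table.  Once a node is known to exist on the
# receiving side, all of its ancestors can be dropped from the missing set.
def toy_prune(parents, hasset, msngset):
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p not in hasset:
                hasset.add(p)
                stack.append(p)
    for n in hasset:
        msngset.discard(n)

# 'c' is known to be present remotely, so its ancestors 'a' and 'b' are pruned
_parents = {'b': ('a',), 'c': ('b',), 'd': ('c',)}
missing = set('abcd')
toy_prune(_parents, set('c'), missing)
assert missing == set('d')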
1526 # This is a function generating function used to set up an environment
1526 # This is a function generating function used to set up an environment
1527 # for the inner function to execute in.
1527 # for the inner function to execute in.
1528 def manifest_and_file_collector(changedfileset):
1528 def manifest_and_file_collector(changedfileset):
1529 # This is an information gathering function that gathers
1529 # This is an information gathering function that gathers
1530 # information from each changeset node that goes out as part of
1530 # information from each changeset node that goes out as part of
1531 # the changegroup. The information gathered is a list of which
1531 # the changegroup. The information gathered is a list of which
1532 # manifest nodes are potentially required (the recipient may
1532 # manifest nodes are potentially required (the recipient may
1533 # already have them) and the total list of all files which were
1533 # already have them) and the total list of all files which were
1534 # changed in any changeset in the changegroup.
1534 # changed in any changeset in the changegroup.
1535 #
1535 #
1536 # We also remember the first changenode we saw any manifest
1536 # We also remember the first changenode we saw any manifest
1537 # referenced by so we can later determine which changenode 'owns'
1537 # referenced by so we can later determine which changenode 'owns'
1538 # the manifest.
1538 # the manifest.
1539 def collect_manifests_and_files(clnode):
1539 def collect_manifests_and_files(clnode):
1540 c = cl.read(clnode)
1540 c = cl.read(clnode)
1541 for f in c[3]:
1541 for f in c[3]:
1542 # This is to make sure we only have one instance of each
1542 # This is to make sure we only have one instance of each
1543 # filename string for each filename.
1543 # filename string for each filename.
1544 changedfileset.setdefault(f, f)
1544 changedfileset.setdefault(f, f)
1545 msng_mnfst_set.setdefault(c[0], clnode)
1545 msng_mnfst_set.setdefault(c[0], clnode)
1546 return collect_manifests_and_files
1546 return collect_manifests_and_files
1547
1547
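# Illustrative sketch (not part of localrepo.py): the "first changenode seen
# owns the manifest" bookkeeping that collect_manifests_and_files() above does
# with dict.setdefault().  The node names below are invented placeholders.
owner = {}
for changenode, manifestnode in [('c1', 'm1'), ('c2', 'm1'), ('c3', 'm2')]:
    # setdefault keeps only the first mapping; later references are ignored
    owner.setdefault(manifestnode, changenode)
assert owner == {'m1': 'c1', 'm2': 'c3'}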
1548 # Figure out which manifest nodes (of the ones we think might be part
1548 # Figure out which manifest nodes (of the ones we think might be part
1549 # of the changegroup) the recipient must know about and remove them
1549 # of the changegroup) the recipient must know about and remove them
1550 # from the changegroup.
1550 # from the changegroup.
1551 def prune_manifests():
1551 def prune_manifests():
1552 has_mnfst_set = {}
1552 has_mnfst_set = {}
1553 for n in msng_mnfst_set:
1553 for n in msng_mnfst_set:
1554 # If a 'missing' manifest thinks it belongs to a changenode
1554 # If a 'missing' manifest thinks it belongs to a changenode
1555 # the recipient is assumed to have, obviously the recipient
1555 # the recipient is assumed to have, obviously the recipient
1556 # must have that manifest.
1556 # must have that manifest.
1557 linknode = cl.node(mnfst.linkrev(n))
1557 linknode = cl.node(mnfst.linkrev(n))
1558 if linknode in has_cl_set:
1558 if linknode in has_cl_set:
1559 has_mnfst_set[n] = 1
1559 has_mnfst_set[n] = 1
1560 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1560 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1561
1561
1562 # Use the information collected in collect_manifests_and_files to say
1562 # Use the information collected in collect_manifests_and_files to say
1563 # which changenode any manifestnode belongs to.
1563 # which changenode any manifestnode belongs to.
1564 def lookup_manifest_link(mnfstnode):
1564 def lookup_manifest_link(mnfstnode):
1565 return msng_mnfst_set[mnfstnode]
1565 return msng_mnfst_set[mnfstnode]
1566
1566
1567 # A function generating function that sets up the initial environment
1567 # A function generating function that sets up the initial environment
1568 # for the inner function.
1568 # for the inner function.
1569 def filenode_collector(changedfiles):
1569 def filenode_collector(changedfiles):
1570 next_rev = [0]
1570 next_rev = [0]
1571 # This gathers information from each manifestnode included in the
1571 # This gathers information from each manifestnode included in the
1572 # changegroup about which filenodes the manifest node references
1572 # changegroup about which filenodes the manifest node references
1573 # so we can include those in the changegroup too.
1573 # so we can include those in the changegroup too.
1574 #
1574 #
1575 # It also remembers which changenode each filenode belongs to. It
1575 # It also remembers which changenode each filenode belongs to. It
1576 # does this by assuming that a filenode belongs to the changenode that
1576 # does this by assuming that a filenode belongs to the changenode that
1577 # the first manifest referencing it belongs to.
1577 # the first manifest referencing it belongs to.
1578 def collect_msng_filenodes(mnfstnode):
1578 def collect_msng_filenodes(mnfstnode):
1579 r = mnfst.rev(mnfstnode)
1579 r = mnfst.rev(mnfstnode)
1580 if r == next_rev[0]:
1580 if r == next_rev[0]:
1581 # If the last rev we looked at was the one just previous,
1581 # If the last rev we looked at was the one just previous,
1582 # we only need to see a diff.
1582 # we only need to see a diff.
1583 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1583 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1584 # For each line in the delta
1584 # For each line in the delta
1585 for dline in delta.splitlines():
1585 for dline in delta.splitlines():
1586 # get the filename and filenode for that line
1586 # get the filename and filenode for that line
1587 f, fnode = dline.split('\0')
1587 f, fnode = dline.split('\0')
1588 fnode = bin(fnode[:40])
1588 fnode = bin(fnode[:40])
1589 f = changedfiles.get(f, None)
1589 f = changedfiles.get(f, None)
1590 # And if the file is in the list of files we care
1590 # And if the file is in the list of files we care
1591 # about.
1591 # about.
1592 if f is not None:
1592 if f is not None:
1593 # Get the changenode this manifest belongs to
1593 # Get the changenode this manifest belongs to
1594 clnode = msng_mnfst_set[mnfstnode]
1594 clnode = msng_mnfst_set[mnfstnode]
1595 # Create the set of filenodes for the file if
1595 # Create the set of filenodes for the file if
1596 # there isn't one already.
1596 # there isn't one already.
1597 ndset = msng_filenode_set.setdefault(f, {})
1597 ndset = msng_filenode_set.setdefault(f, {})
1598 # And set the filenode's changelog node to the
1598 # And set the filenode's changelog node to the
1599 # manifest's if it hasn't been set already.
1599 # manifest's if it hasn't been set already.
1600 ndset.setdefault(fnode, clnode)
1600 ndset.setdefault(fnode, clnode)
1601 else:
1601 else:
1602 # Otherwise we need a full manifest.
1602 # Otherwise we need a full manifest.
1603 m = mnfst.read(mnfstnode)
1603 m = mnfst.read(mnfstnode)
1604 # For every file we care about.
1604 # For every file we care about.
1605 for f in changedfiles:
1605 for f in changedfiles:
1606 fnode = m.get(f, None)
1606 fnode = m.get(f, None)
1607 # If it's in the manifest
1607 # If it's in the manifest
1608 if fnode is not None:
1608 if fnode is not None:
1609 # See comments above.
1609 # See comments above.
1610 clnode = msng_mnfst_set[mnfstnode]
1610 clnode = msng_mnfst_set[mnfstnode]
1611 ndset = msng_filenode_set.setdefault(f, {})
1611 ndset = msng_filenode_set.setdefault(f, {})
1612 ndset.setdefault(fnode, clnode)
1612 ndset.setdefault(fnode, clnode)
1613 # Remember the revision we hope to see next.
1613 # Remember the revision we hope to see next.
1614 next_rev[0] = r + 1
1614 next_rev[0] = r + 1
1615 return collect_msng_filenodes
1615 return collect_msng_filenodes
1616
1616
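# Illustrative sketch (not part of localrepo.py): parsing manifest text lines
# the way collect_msng_filenodes() above does, i.e. "<filename>\0<hex node>"
# per line with the node occupying the first 40 hex characters.  The sample
# manifest text is invented; binascii.unhexlify stands in for mercurial's bin().
import binascii

def parse_manifest_lines(text):
    entries = {}
    for line in text.splitlines():
        fname, hexnode = line.split('\0')
        entries[fname] = binascii.unhexlify(hexnode[:40].encode('ascii'))
    return entries

sample = 'a.txt\0' + '11' * 20 + '\n' + 'b.txt\0' + '22' * 20 + '\n'
nodes = parse_manifest_lines(sample)
assert nodes['a.txt'] == b'\x11' * 20 and nodes['b.txt'] == b'\x22' * 20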
1617 # We have a list of filenodes we think we need for a file, let's remove
1617 # We have a list of filenodes we think we need for a file, let's remove
1618 # all those we know the recipient must have.
1618 # all those we know the recipient must have.
1619 def prune_filenodes(f, filerevlog):
1619 def prune_filenodes(f, filerevlog):
1620 msngset = msng_filenode_set[f]
1620 msngset = msng_filenode_set[f]
1621 hasset = {}
1621 hasset = {}
1622 # If a 'missing' filenode thinks it belongs to a changenode we
1622 # If a 'missing' filenode thinks it belongs to a changenode we
1623 # assume the recipient must have, then the recipient must have
1623 # assume the recipient must have, then the recipient must have
1624 # that filenode.
1624 # that filenode.
1625 for n in msngset:
1625 for n in msngset:
1626 clnode = cl.node(filerevlog.linkrev(n))
1626 clnode = cl.node(filerevlog.linkrev(n))
1627 if clnode in has_cl_set:
1627 if clnode in has_cl_set:
1628 hasset[n] = 1
1628 hasset[n] = 1
1629 prune_parents(filerevlog, hasset, msngset)
1629 prune_parents(filerevlog, hasset, msngset)
1630
1630
1631 # A function generating function that sets up a context for the
1631 # A function generating function that sets up a context for the
1632 # inner function.
1632 # inner function.
1633 def lookup_filenode_link_func(fname):
1633 def lookup_filenode_link_func(fname):
1634 msngset = msng_filenode_set[fname]
1634 msngset = msng_filenode_set[fname]
1635 # Lookup the changenode the filenode belongs to.
1635 # Lookup the changenode the filenode belongs to.
1636 def lookup_filenode_link(fnode):
1636 def lookup_filenode_link(fnode):
1637 return msngset[fnode]
1637 return msngset[fnode]
1638 return lookup_filenode_link
1638 return lookup_filenode_link
1639
1639
1640 # Now that we have all these utility functions to help out and
1640 # Now that we have all these utility functions to help out and
1641 # logically divide up the task, generate the group.
1641 # logically divide up the task, generate the group.
1642 def gengroup():
1642 def gengroup():
1643 # The set of changed files starts empty.
1643 # The set of changed files starts empty.
1644 changedfiles = {}
1644 changedfiles = {}
1645 # Create a changenode group generator that will call our functions
1645 # Create a changenode group generator that will call our functions
1646 # back to lookup the owning changenode and collect information.
1646 # back to lookup the owning changenode and collect information.
1647 group = cl.group(msng_cl_lst, identity,
1647 group = cl.group(msng_cl_lst, identity,
1648 manifest_and_file_collector(changedfiles))
1648 manifest_and_file_collector(changedfiles))
1649 for chnk in group:
1649 for chnk in group:
1650 yield chnk
1650 yield chnk
1651
1651
1652 # The list of manifests has been collected by the generator
1652 # The list of manifests has been collected by the generator
1653 # calling our functions back.
1653 # calling our functions back.
1654 prune_manifests()
1654 prune_manifests()
1655 msng_mnfst_lst = msng_mnfst_set.keys()
1655 msng_mnfst_lst = msng_mnfst_set.keys()
1656 # Sort the manifestnodes by revision number.
1656 # Sort the manifestnodes by revision number.
1657 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1657 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1658 # Create a generator for the manifestnodes that calls our lookup
1658 # Create a generator for the manifestnodes that calls our lookup
1659 # and data collection functions back.
1659 # and data collection functions back.
1660 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1660 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1661 filenode_collector(changedfiles))
1661 filenode_collector(changedfiles))
1662 for chnk in group:
1662 for chnk in group:
1663 yield chnk
1663 yield chnk
1664
1664
1665 # These are no longer needed, dereference and toss the memory for
1665 # These are no longer needed, dereference and toss the memory for
1666 # them.
1666 # them.
1667 msng_mnfst_lst = None
1667 msng_mnfst_lst = None
1668 msng_mnfst_set.clear()
1668 msng_mnfst_set.clear()
1669
1669
1670 changedfiles = changedfiles.keys()
1670 changedfiles = changedfiles.keys()
1671 changedfiles.sort()
1671 changedfiles.sort()
1672 # Go through all our files in order sorted by name.
1672 # Go through all our files in order sorted by name.
1673 for fname in changedfiles:
1673 for fname in changedfiles:
1674 filerevlog = self.file(fname)
1674 filerevlog = self.file(fname)
1675 # Toss out the filenodes that the recipient isn't really
1675 # Toss out the filenodes that the recipient isn't really
1676 # missing.
1676 # missing.
1677 if msng_filenode_set.has_key(fname):
1677 if msng_filenode_set.has_key(fname):
1678 prune_filenodes(fname, filerevlog)
1678 prune_filenodes(fname, filerevlog)
1679 msng_filenode_lst = msng_filenode_set[fname].keys()
1679 msng_filenode_lst = msng_filenode_set[fname].keys()
1680 else:
1680 else:
1681 msng_filenode_lst = []
1681 msng_filenode_lst = []
1682 # If any filenodes are left, generate the group for them,
1682 # If any filenodes are left, generate the group for them,
1683 # otherwise don't bother.
1683 # otherwise don't bother.
1684 if len(msng_filenode_lst) > 0:
1684 if len(msng_filenode_lst) > 0:
1685 yield changegroup.genchunk(fname)
1685 yield changegroup.genchunk(fname)
1686 # Sort the filenodes by their revision #
1686 # Sort the filenodes by their revision #
1687 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1687 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1688 # Create a group generator and only pass in a changenode
1688 # Create a group generator and only pass in a changenode
1689 # lookup function as we need to collect no information
1689 # lookup function as we need to collect no information
1690 # from filenodes.
1690 # from filenodes.
1691 group = filerevlog.group(msng_filenode_lst,
1691 group = filerevlog.group(msng_filenode_lst,
1692 lookup_filenode_link_func(fname))
1692 lookup_filenode_link_func(fname))
1693 for chnk in group:
1693 for chnk in group:
1694 yield chnk
1694 yield chnk
1695 if msng_filenode_set.has_key(fname):
1695 if msng_filenode_set.has_key(fname):
1696 # Don't need this anymore, toss it to free memory.
1696 # Don't need this anymore, toss it to free memory.
1697 del msng_filenode_set[fname]
1697 del msng_filenode_set[fname]
1698 # Signal that no more groups are left.
1698 # Signal that no more groups are left.
1699 yield changegroup.closechunk()
1699 yield changegroup.closechunk()
1700
1700
1701 if msng_cl_lst:
1701 if msng_cl_lst:
1702 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1702 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1703
1703
1704 return util.chunkbuffer(gengroup())
1704 return util.chunkbuffer(gengroup())
1705
1705
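# Illustrative sketch (not part of localrepo.py): the shape of the stream that
# gengroup() above yields -- changelog chunks, then manifest chunks, then one
# named group per changed file, and finally a closing marker.  Real chunks are
# length-prefixed binary; plain strings are used here only to show the order.
def toy_gengroup(cl_chunks, mnfst_chunks, file_groups):
    for chunk in cl_chunks:
        yield chunk
    for chunk in mnfst_chunks:
        yield chunk
    for fname, chunks in file_groups:
        yield 'FILE:' + fname          # stands in for changegroup.genchunk(fname)
        for chunk in chunks:
            yield chunk
    yield 'CLOSE'                      # stands in for changegroup.closechunk()

stream = list(toy_gengroup(['cl1'], ['m1'], [('a.txt', ['fa1', 'fa2'])]))
assert stream == ['cl1', 'm1', 'FILE:a.txt', 'fa1', 'fa2', 'CLOSE']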
1706 def changegroup(self, basenodes, source):
1706 def changegroup(self, basenodes, source):
1707 """Generate a changegroup of all nodes that we have that a recipient
1707 """Generate a changegroup of all nodes that we have that a recipient
1708 doesn't.
1708 doesn't.
1709
1709
1710 This is much easier than the previous function as we can assume that
1710 This is much easier than the previous function as we can assume that
1711 the recipient has any changenode we aren't sending them."""
1711 the recipient has any changenode we aren't sending them."""
1712
1712
1713 self.hook('preoutgoing', throw=True, source=source)
1713 self.hook('preoutgoing', throw=True, source=source)
1714
1714
1715 cl = self.changelog
1715 cl = self.changelog
1716 nodes = cl.nodesbetween(basenodes, None)[0]
1716 nodes = cl.nodesbetween(basenodes, None)[0]
1717 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1717 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1718 self.changegroupinfo(nodes)
1718 self.changegroupinfo(nodes)
1719
1719
1720 def identity(x):
1720 def identity(x):
1721 return x
1721 return x
1722
1722
1723 def gennodelst(revlog):
1723 def gennodelst(revlog):
1724 for r in xrange(0, revlog.count()):
1724 for r in xrange(0, revlog.count()):
1725 n = revlog.node(r)
1725 n = revlog.node(r)
1726 if revlog.linkrev(n) in revset:
1726 if revlog.linkrev(n) in revset:
1727 yield n
1727 yield n
1728
1728
1729 def changed_file_collector(changedfileset):
1729 def changed_file_collector(changedfileset):
1730 def collect_changed_files(clnode):
1730 def collect_changed_files(clnode):
1731 c = cl.read(clnode)
1731 c = cl.read(clnode)
1732 for fname in c[3]:
1732 for fname in c[3]:
1733 changedfileset[fname] = 1
1733 changedfileset[fname] = 1
1734 return collect_changed_files
1734 return collect_changed_files
1735
1735
1736 def lookuprevlink_func(revlog):
1736 def lookuprevlink_func(revlog):
1737 def lookuprevlink(n):
1737 def lookuprevlink(n):
1738 return cl.node(revlog.linkrev(n))
1738 return cl.node(revlog.linkrev(n))
1739 return lookuprevlink
1739 return lookuprevlink
1740
1740
1741 def gengroup():
1741 def gengroup():
1742 # construct a list of all changed files
1742 # construct a list of all changed files
1743 changedfiles = {}
1743 changedfiles = {}
1744
1744
1745 for chnk in cl.group(nodes, identity,
1745 for chnk in cl.group(nodes, identity,
1746 changed_file_collector(changedfiles)):
1746 changed_file_collector(changedfiles)):
1747 yield chnk
1747 yield chnk
1748 changedfiles = changedfiles.keys()
1748 changedfiles = changedfiles.keys()
1749 changedfiles.sort()
1749 changedfiles.sort()
1750
1750
1751 mnfst = self.manifest
1751 mnfst = self.manifest
1752 nodeiter = gennodelst(mnfst)
1752 nodeiter = gennodelst(mnfst)
1753 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1753 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1754 yield chnk
1754 yield chnk
1755
1755
1756 for fname in changedfiles:
1756 for fname in changedfiles:
1757 filerevlog = self.file(fname)
1757 filerevlog = self.file(fname)
1758 nodeiter = gennodelst(filerevlog)
1758 nodeiter = gennodelst(filerevlog)
1759 nodeiter = list(nodeiter)
1759 nodeiter = list(nodeiter)
1760 if nodeiter:
1760 if nodeiter:
1761 yield changegroup.genchunk(fname)
1761 yield changegroup.genchunk(fname)
1762 lookup = lookuprevlink_func(filerevlog)
1762 lookup = lookuprevlink_func(filerevlog)
1763 for chnk in filerevlog.group(nodeiter, lookup):
1763 for chnk in filerevlog.group(nodeiter, lookup):
1764 yield chnk
1764 yield chnk
1765
1765
1766 yield changegroup.closechunk()
1766 yield changegroup.closechunk()
1767
1767
1768 if nodes:
1768 if nodes:
1769 self.hook('outgoing', node=hex(nodes[0]), source=source)
1769 self.hook('outgoing', node=hex(nodes[0]), source=source)
1770
1770
1771 return util.chunkbuffer(gengroup())
1771 return util.chunkbuffer(gengroup())
1772
1772
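# Illustrative sketch (not part of localrepo.py): the linkrev filter that
# gennodelst() above applies -- keep only nodes whose linked changelog
# revision is part of the outgoing set.  ToyLog is an invented stand-in
# for a revlog.
class ToyLog(object):
    def __init__(self, nodes, linkrevs):
        self.nodes_ = nodes
        self.linkrevs_ = linkrevs
    def count(self):
        return len(self.nodes_)
    def node(self, r):
        return self.nodes_[r]
    def linkrev(self, node):
        return self.linkrevs_[node]

def toy_gennodelst(revlog, revset):
    for r in range(revlog.count()):
        n = revlog.node(r)
        if revlog.linkrev(n) in revset:
            yield n

log = ToyLog(['f0', 'f1', 'f2'], {'f0': 0, 'f1': 5, 'f2': 7})
assert list(toy_gennodelst(log, set([5, 7]))) == ['f1', 'f2']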
1773 def addchangegroup(self, source, srctype, url):
1773 def addchangegroup(self, source, srctype, url):
1774 """add changegroup to repo.
1774 """add changegroup to repo.
1775
1775
1776 return values:
1776 return values:
1777 - nothing changed or no source: 0
1777 - nothing changed or no source: 0
1778 - more heads than before: 1+added heads (2..n)
1778 - more heads than before: 1+added heads (2..n)
1779 - fewer heads than before: -1-removed heads (-2..-n)
1779 - fewer heads than before: -1-removed heads (-2..-n)
1780 - number of heads stays the same: 1
1780 - number of heads stays the same: 1
1781 """
1781 """
1782 def csmap(x):
1782 def csmap(x):
1783 self.ui.debug(_("add changeset %s\n") % short(x))
1783 self.ui.debug(_("add changeset %s\n") % short(x))
1784 return cl.count()
1784 return cl.count()
1785
1785
1786 def revmap(x):
1786 def revmap(x):
1787 return cl.rev(x)
1787 return cl.rev(x)
1788
1788
1789 if not source:
1789 if not source:
1790 return 0
1790 return 0
1791
1791
1792 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1792 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1793
1793
1794 changesets = files = revisions = 0
1794 changesets = files = revisions = 0
1795
1795
1796 tr = self.transaction()
1796 tr = self.transaction()
1797
1797
1798 # write changelog data to temp files so concurrent readers will not see
1798 # write changelog data to temp files so concurrent readers will not see
1799 # inconsistent view
1799 # inconsistent view
1800 cl = self.changelog
1800 cl = self.changelog
1801 cl.delayupdate()
1801 cl.delayupdate()
1802 oldheads = len(cl.heads())
1802 oldheads = len(cl.heads())
1803
1803
1804 # pull off the changeset group
1804 # pull off the changeset group
1805 self.ui.status(_("adding changesets\n"))
1805 self.ui.status(_("adding changesets\n"))
1806 cor = cl.count() - 1
1806 cor = cl.count() - 1
1807 chunkiter = changegroup.chunkiter(source)
1807 chunkiter = changegroup.chunkiter(source)
1808 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1808 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1809 raise util.Abort(_("received changelog group is empty"))
1809 raise util.Abort(_("received changelog group is empty"))
1810 cnr = cl.count() - 1
1810 cnr = cl.count() - 1
1811 changesets = cnr - cor
1811 changesets = cnr - cor
1812
1812
1813 # pull off the manifest group
1813 # pull off the manifest group
1814 self.ui.status(_("adding manifests\n"))
1814 self.ui.status(_("adding manifests\n"))
1815 chunkiter = changegroup.chunkiter(source)
1815 chunkiter = changegroup.chunkiter(source)
1816 # no need to check for empty manifest group here:
1816 # no need to check for empty manifest group here:
1817 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1817 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1818 # no new manifest will be created and the manifest group will
1818 # no new manifest will be created and the manifest group will
1819 # be empty during the pull
1819 # be empty during the pull
1820 self.manifest.addgroup(chunkiter, revmap, tr)
1820 self.manifest.addgroup(chunkiter, revmap, tr)
1821
1821
1822 # process the files
1822 # process the files
1823 self.ui.status(_("adding file changes\n"))
1823 self.ui.status(_("adding file changes\n"))
1824 while 1:
1824 while 1:
1825 f = changegroup.getchunk(source)
1825 f = changegroup.getchunk(source)
1826 if not f:
1826 if not f:
1827 break
1827 break
1828 self.ui.debug(_("adding %s revisions\n") % f)
1828 self.ui.debug(_("adding %s revisions\n") % f)
1829 fl = self.file(f)
1829 fl = self.file(f)
1830 o = fl.count()
1830 o = fl.count()
1831 chunkiter = changegroup.chunkiter(source)
1831 chunkiter = changegroup.chunkiter(source)
1832 if fl.addgroup(chunkiter, revmap, tr) is None:
1832 if fl.addgroup(chunkiter, revmap, tr) is None:
1833 raise util.Abort(_("received file revlog group is empty"))
1833 raise util.Abort(_("received file revlog group is empty"))
1834 revisions += fl.count() - o
1834 revisions += fl.count() - o
1835 files += 1
1835 files += 1
1836
1836
1837 # make changelog see real files again
1837 # make changelog see real files again
1838 cl.finalize(tr)
1838 cl.finalize(tr)
1839
1839
1840 newheads = len(self.changelog.heads())
1840 newheads = len(self.changelog.heads())
1841 heads = ""
1841 heads = ""
1842 if oldheads and newheads != oldheads:
1842 if oldheads and newheads != oldheads:
1843 heads = _(" (%+d heads)") % (newheads - oldheads)
1843 heads = _(" (%+d heads)") % (newheads - oldheads)
1844
1844
1845 self.ui.status(_("added %d changesets"
1845 self.ui.status(_("added %d changesets"
1846 " with %d changes to %d files%s\n")
1846 " with %d changes to %d files%s\n")
1847 % (changesets, revisions, files, heads))
1847 % (changesets, revisions, files, heads))
1848
1848
1849 if changesets > 0:
1849 if changesets > 0:
1850 self.hook('pretxnchangegroup', throw=True,
1850 self.hook('pretxnchangegroup', throw=True,
1851 node=hex(self.changelog.node(cor+1)), source=srctype,
1851 node=hex(self.changelog.node(cor+1)), source=srctype,
1852 url=url)
1852 url=url)
1853
1853
1854 tr.close()
1854 tr.close()
1855
1855
1856 if changesets > 0:
1856 if changesets > 0:
1857 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1857 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1858 source=srctype, url=url)
1858 source=srctype, url=url)
1859
1859
1860 for i in xrange(cor + 1, cnr + 1):
1860 for i in xrange(cor + 1, cnr + 1):
1861 self.hook("incoming", node=hex(self.changelog.node(i)),
1861 self.hook("incoming", node=hex(self.changelog.node(i)),
1862 source=srctype, url=url)
1862 source=srctype, url=url)
1863
1863
1864 # never return 0 here:
1864 # never return 0 here:
1865 if newheads < oldheads:
1865 if newheads < oldheads:
1866 return newheads - oldheads - 1
1866 return newheads - oldheads - 1
1867 else:
1867 else:
1868 return newheads - oldheads + 1
1868 return newheads - oldheads + 1
1869
1869
1870
1870
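# Illustrative sketch (not part of localrepo.py): decoding the return value
# documented in addchangegroup()'s docstring above.  The helper below is
# invented; it only restates the documented encoding in executable form.
def describe_addchangegroup_result(ret):
    if ret == 0:
        return 'nothing changed (or no source)'
    if ret == 1:
        return 'changes added, head count unchanged'
    if ret > 1:
        return '%d new head(s)' % (ret - 1)
    return '%d head(s) removed' % (-ret - 1)

assert describe_addchangegroup_result(3) == '2 new head(s)'
assert describe_addchangegroup_result(-2) == '1 head(s) removed'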
1871 def stream_in(self, remote):
1871 def stream_in(self, remote):
1872 fp = remote.stream_out()
1872 fp = remote.stream_out()
1873 l = fp.readline()
1873 l = fp.readline()
1874 try:
1874 try:
1875 resp = int(l)
1875 resp = int(l)
1876 except ValueError:
1876 except ValueError:
1877 raise util.UnexpectedOutput(
1877 raise util.UnexpectedOutput(
1878 _('Unexpected response from remote server:'), l)
1878 _('Unexpected response from remote server:'), l)
1879 if resp == 1:
1879 if resp == 1:
1880 raise util.Abort(_('operation forbidden by server'))
1880 raise util.Abort(_('operation forbidden by server'))
1881 elif resp == 2:
1881 elif resp == 2:
1882 raise util.Abort(_('locking the remote repository failed'))
1882 raise util.Abort(_('locking the remote repository failed'))
1883 elif resp != 0:
1883 elif resp != 0:
1884 raise util.Abort(_('the server sent an unknown error code'))
1884 raise util.Abort(_('the server sent an unknown error code'))
1885 self.ui.status(_('streaming all changes\n'))
1885 self.ui.status(_('streaming all changes\n'))
1886 l = fp.readline()
1886 l = fp.readline()
1887 try:
1887 try:
1888 total_files, total_bytes = map(int, l.split(' ', 1))
1888 total_files, total_bytes = map(int, l.split(' ', 1))
1889 except (ValueError, TypeError):
1889 except (ValueError, TypeError):
1890 raise util.UnexpectedOutput(
1890 raise util.UnexpectedOutput(
1891 _('Unexpected response from remote server:'), l)
1891 _('Unexpected response from remote server:'), l)
1892 self.ui.status(_('%d files to transfer, %s of data\n') %
1892 self.ui.status(_('%d files to transfer, %s of data\n') %
1893 (total_files, util.bytecount(total_bytes)))
1893 (total_files, util.bytecount(total_bytes)))
1894 start = time.time()
1894 start = time.time()
1895 for i in xrange(total_files):
1895 for i in xrange(total_files):
1896 # XXX doesn't support '\n' or '\r' in filenames
1896 # XXX doesn't support '\n' or '\r' in filenames
1897 l = fp.readline()
1897 l = fp.readline()
1898 try:
1898 try:
1899 name, size = l.split('\0', 1)
1899 name, size = l.split('\0', 1)
1900 size = int(size)
1900 size = int(size)
1901 except (ValueError, TypeError):
1901 except (ValueError, TypeError):
1902 raise util.UnexpectedOutput(
1902 raise util.UnexpectedOutput(
1903 _('Unexpected response from remote server:'), l)
1903 _('Unexpected response from remote server:'), l)
1904 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1904 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1905 ofp = self.sopener(name, 'w')
1905 ofp = self.sopener(name, 'w')
1906 for chunk in util.filechunkiter(fp, limit=size):
1906 for chunk in util.filechunkiter(fp, limit=size):
1907 ofp.write(chunk)
1907 ofp.write(chunk)
1908 ofp.close()
1908 ofp.close()
1909 elapsed = time.time() - start
1909 elapsed = time.time() - start
1910 if elapsed <= 0:
1910 if elapsed <= 0:
1911 elapsed = 0.001
1911 elapsed = 0.001
1912 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1912 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1913 (util.bytecount(total_bytes), elapsed,
1913 (util.bytecount(total_bytes), elapsed,
1914 util.bytecount(total_bytes / elapsed)))
1914 util.bytecount(total_bytes / elapsed)))
1915 self.invalidate()
1915 self.invalidate()
1916 return len(self.heads()) + 1
1916 return len(self.heads()) + 1
1917
1917
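# Illustrative sketch (not part of localrepo.py): the stream format consumed by
# stream_in() above -- a status line, a "<files> <bytes>" line, then for each
# file a "<name>\0<size>" header followed by exactly <size> bytes of data.  The
# payload below is invented and StringIO stands in for the remote file object.
try:
    from StringIO import StringIO       # Python 2
except ImportError:
    from io import StringIO             # Python 3

def toy_read_stream(fp):
    assert int(fp.readline()) == 0      # 0 means the server accepted the request
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    out = {}
    for _ in range(total_files):
        name, size = fp.readline().rstrip('\n').split('\0', 1)
        out[name] = fp.read(int(size))
    return out

sample = '0\n2 11\ndata/a.i\x005\nAAAAAdata/b.i\x006\nBBBBBB'
assert toy_read_stream(StringIO(sample)) == {'data/a.i': 'AAAAA', 'data/b.i': 'BBBBBB'}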
1918 def clone(self, remote, heads=[], stream=False):
1918 def clone(self, remote, heads=[], stream=False):
1919 '''clone remote repository.
1919 '''clone remote repository.
1920
1920
1921 keyword arguments:
1921 keyword arguments:
1922 heads: list of revs to clone (forces use of pull)
1922 heads: list of revs to clone (forces use of pull)
1923 stream: use streaming clone if possible'''
1923 stream: use streaming clone if possible'''
1924
1924
1925 # now, all clients that can request uncompressed clones can
1925 # now, all clients that can request uncompressed clones can
1926 # read repo formats supported by all servers that can serve
1926 # read repo formats supported by all servers that can serve
1927 # them.
1927 # them.
1928
1928
1929 # if revlog format changes, client will have to check version
1929 # if revlog format changes, client will have to check version
1930 # and format flags on "stream" capability, and use
1930 # and format flags on "stream" capability, and use
1931 # uncompressed only if compatible.
1931 # uncompressed only if compatible.
1932
1932
1933 if stream and not heads and remote.capable('stream'):
1933 if stream and not heads and remote.capable('stream'):
1934 return self.stream_in(remote)
1934 return self.stream_in(remote)
1935 return self.pull(remote, heads)
1935 return self.pull(remote, heads)
1936
1936
1937 # used to avoid circular references so destructors work
1937 # used to avoid circular references so destructors work
1938 def aftertrans(files):
1938 def aftertrans(files):
1939 renamefiles = [tuple(t) for t in files]
1939 renamefiles = [tuple(t) for t in files]
1940 def a():
1940 def a():
1941 for src, dest in renamefiles:
1941 for src, dest in renamefiles:
1942 util.rename(src, dest)
1942 util.rename(src, dest)
1943 return a
1943 return a
1944
1944
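# Illustrative sketch (not part of localrepo.py): the snapshot-plus-closure
# pattern used by aftertrans() above -- copy 'files' into local tuples, then
# return a callback that replays the renames later.  util.rename is replaced
# here by appending to a log so the example has no filesystem side effects.
def make_after(files, log):
    renamefiles = [tuple(t) for t in files]      # snapshot, as aftertrans() does
    def a():
        for src, dest in renamefiles:
            log.append((src, dest))               # stands in for util.rename(src, dest)
    return a

log = []
cb = make_after([('journal', 'undo')], log)
cb()
assert log == [('journal', 'undo')]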
1945 def instance(ui, path, create):
1945 def instance(ui, path, create):
1946 return localrepository(ui, util.drop_scheme('file', path), create)
1946 return localrepository(ui, util.drop_scheme('file', path), create)
1947
1947
1948 def islocal(path):
1948 def islocal(path):
1949 return True
1949 return True