Use better names (hg-{usage}-{random}.{suffix}) for temporary files.
Thomas Arendsen Hein
r2165:d821918e default
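The changeset replaces bare or cryptically named mkstemp() temporaries with names following the hg-{usage}-{random}.{suffix} pattern, so leftover files in the temp directory can be traced back to the Mercurial component that created them. Below is a minimal sketch of how tempfile.mkstemp builds such a name; the prefix/suffix values mirror the ones introduced in this changeset, but the snippet itself is illustrative and not part of the patch:

import os, tempfile

# mkstemp() inserts a random component between prefix and suffix,
# producing names of the form hg-gpg-<random>.sig in the system temp
# directory, and returns an already-open file descriptor.
fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
try:
    print(os.path.basename(sigfile))  # e.g. hg-gpg-Tk3a9x.sig
finally:
    os.close(fd)
    os.unlink(sigfile)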
@@ -1,269 +1,269 @@
# GnuPG signing extension for Mercurial
#
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import os, tempfile, binascii
from mercurial import util
from mercurial import node as hgnode
from mercurial.i18n import gettext as _

class gpg:
    def __init__(self, path, key=None):
        self.path = path
        self.key = (key and " --local-user \"%s\"" % key) or ""

    def sign(self, data):
        gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
        return util.filter(data, gpgcmd)

    def verify(self, data, sig):
        """ returns of the good and bad signatures"""
        try:
            # create temporary files
-           fd, sigfile = tempfile.mkstemp(prefix="hggpgsig")
+           fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
            fp = os.fdopen(fd, 'wb')
            fp.write(sig)
            fp.close()
-           fd, datafile = tempfile.mkstemp(prefix="hggpgdata")
+           fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
            fp = os.fdopen(fd, 'wb')
            fp.write(data)
            fp.close()
            gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
                      "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
            ret = util.filter("", gpgcmd)
        except:
            for f in (sigfile, datafile):
                try:
                    if f: os.unlink(f)
                except: pass
            raise
        keys = []
        key, fingerprint = None, None
        err = ""
        for l in ret.splitlines():
            # see DETAILS in the gnupg documentation
            # filter the logger output
            if not l.startswith("[GNUPG:]"):
                continue
            l = l[9:]
            if l.startswith("ERRSIG"):
                err = _("error while verifying signature")
                break
            elif l.startswith("VALIDSIG"):
                # fingerprint of the primary key
                fingerprint = l.split()[10]
            elif (l.startswith("GOODSIG") or
                  l.startswith("EXPSIG") or
                  l.startswith("EXPKEYSIG") or
                  l.startswith("BADSIG")):
                if key is not None:
                    keys.append(key + [fingerprint])
                key = l.split(" ", 2)
                fingerprint = None
        if err:
            return err, []
        if key is not None:
            keys.append(key + [fingerprint])
        return err, keys

def newgpg(ui, **opts):
    """create a new gpg instance"""
    gpgpath = ui.config("gpg", "cmd", "gpg")
    gpgkey = opts.get('key')
    if not gpgkey:
        gpgkey = ui.config("gpg", "key", None)
    return gpg(gpgpath, gpgkey)

def sigwalk(repo):
    """
    walk over every sigs, yields a couple
    ((node, version, sig), (filename, linenumber))
    """
    def parsefile(fileiter, context):
        ln = 1
        for l in fileiter:
            if not l:
                continue
            yield (l.split(" ", 2), (context, ln))
            ln +=1

    fl = repo.file(".hgsigs")
    h = fl.heads()
    h.reverse()
    # read the heads
    for r in h:
        fn = ".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = "localsigs"
        for item in parsefile(repo.opener(fn), fn):
            yield item
    except IOError:
        pass

def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys who signed a data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln , err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == "BADSIG":
            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys

def sigs(ui, repo):
    """list signed changesets"""
    mygpg = newgpg(ui)
    revs = {}

    for data, context in sigwalk(repo):
        node, version, sig = data
        fn, ln = context
        try:
            n = repo.lookup(node)
        except KeyError:
            ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
            continue
        r = repo.changelog.rev(n)
        keys = getkeys(ui, repo, mygpg, data, context)
        if not keys:
            continue
        revs.setdefault(r, [])
        revs[r].extend(keys)
    nodes = list(revs)
    nodes.reverse()
    for rev in nodes:
        for k in revs[rev]:
            r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
            ui.write("%-30s %s\n" % (keystr(ui, k), r))

def check(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    for data, context in sigwalk(repo):
        node, version, sig = data
        if node == hexrev:
            k = getkeys(ui, repo, mygpg, data, context)
            if k:
                keys.extend(k)

    if not keys:
        ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write("%s is signed by:\n" % hgnode.short(rev))
    for key in keys:
        ui.write(" %s\n" % keystr(ui, key))

def keystr(ui, key):
    """associate a string to a key (username, comment)"""
    keyid, user, fingerprint = key
    comment = ui.config("gpg", fingerprint, None)
    if comment:
        return "%s (%s)" % (user, comment)
    else:
        return user

def sign(ui, repo, *revs, **opts):
    """add a signature for the current tip or a given revision"""
    mygpg = newgpg(ui, **opts)
    sigver = "0"
    sigmessage = ""
    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
                                      hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("Error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        repo.opener("localsigs", "ab").write(sigmessage)
        return

    for x in repo.changes():
        if ".hgsigs" in x and not opts["force"]:
            raise util.Abort(_("working copy of .hgsigs is changed "
                               "(please commit .hgsigs manually "
                               "or use --force)"))

    repo.wfile(".hgsigs", "ab").write(sigmessage)

    if repo.dirstate.state(".hgsigs") == '?':
        repo.add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        message = "\n".join([_("Added signature for changeset %s")
                             % hgnode.hex(n)
                             for n in nodes])
    try:
        repo.commit([".hgsigs"], message, opts['user'], opts['date'])
    except ValueError, inst:
        raise util.Abort(str(inst))

def node2txt(repo, node, ver):
    """map a manifest into some text"""
    if ver == "0":
        return "%s\n" % hgnode.hex(node)
    else:
        raise util.Abort(_("unknown signature version"))

cmdtable = {
    "sign":
        (sign,
         [('l', 'local', None, _("make the signature local")),
          ('f', 'force', None, _("sign even if the sigfile is modified")),
          ('', 'no-commit', None, _("do not commit the sigfile after signing")),
          ('m', 'message', "", _("commit message")),
          ('d', 'date', "", _("date code")),
          ('u', 'user', "", _("user")),
          ('k', 'key', "", _("the key id to sign with"))],
         _("hg sign [OPTION]... [REVISION]...")),
    "sigcheck": (check, [], _('hg sigcheck REVISION')),
    "sigs": (sigs, [], _('hg sigs')),
}
@@ -1,290 +1,290 @@
# Command for sending a collection of Mercurial changesets as a series
# of patch emails.
#
# The series is started off with a "[PATCH 0 of N]" introduction,
# which describes the series as a whole.
#
# Each patch email has a Subject line of "[PATCH M of N] ...", using
# the first line of the changeset description as the subject text.
# The message contains two or three body parts:
#
# The remainder of the changeset description.
#
# [Optional] If the diffstat program is installed, the result of
# running diffstat on the patch.
#
# The patch itself, as generated by "hg export".
#
# Each message refers to all of its predecessors using the In-Reply-To
# and References headers, so they will show up as a sequence in
# threaded mail and news readers, and in mail archives.
#
# For each changeset, you will be prompted with a diffstat summary and
# the changeset summary, so you can be sure you are sending the right
# changes.
#
# It is best to run this script with the "-n" (test only) flag before
# firing it up "for real", in which case it will use your pager to
# display each of the messages that it would send.
#
# The "-m" (mbox) option will create an mbox file instead of sending
# the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
# and finally sent with "formail -s sendmail -bm -t < mbox".
#
# To configure a default mail host, add a section like this to your
# hgrc file:
#
# [smtp]
# host = my_mail_host
# port = 1025
# tls = yes # or omit if not needed
# username = user # if SMTP authentication required
# password = password # if SMTP authentication required - PLAINTEXT
#
# To configure other defaults, add a section like this to your hgrc
# file:
#
# [patchbomb]
# from = My Name <my@email>
# to = recipient1, recipient2, ...
# cc = cc1, cc2, ...

from mercurial.demandload import *
demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
                         mercurial:commands,hg,ui
                         os errno popen2 smtplib socket sys tempfile time''')
from mercurial.i18n import gettext as _

try:
    # readline gives raw_input editing capabilities, but is not
    # present on windows
    import readline
except ImportError: pass

def diffstat(patch):
-   fd, name = tempfile.mkstemp()
+   fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        try:
            for line in patch: print >> p.tochild, line
            p.tochild.close()
            if p.wait(): return
            fp = os.fdopen(fd, 'r')
            stat = []
            for line in fp: stat.append(line.lstrip())
            last = stat.pop()
            stat.insert(0, last)
            stat = ''.join(stat)
            if stat.startswith('0 files'): raise ValueError
            return stat
        except: raise
    finally:
        try: os.unlink(name)
        except: pass

def patchbomb(ui, repo, *revs, **opts):
    '''send changesets as a series of patch emails

    The series starts with a "[PATCH 0 of N]" introduction, which
    describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three body parts. First, the rest of
    the changeset description. Next, (optionally) if the diffstat
    program is installed, the result of running diffstat on the patch.
    Finally, the patch itself, as generated by "hg export".'''
    def prompt(prompt, default = None, rest = ': ', empty_ok = False):
        if default: prompt += ' [%s]' % default
        prompt += rest
        while True:
            r = raw_input(prompt)
            if r: return r
            if default is not None: return default
            if empty_ok: return r
            ui.warn(_('Please enter a valid value.\n'))

    def confirm(s):
        if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
            raise ValueError

    def cdiffstat(summary, patch):
        s = diffstat(patch)
        if s:
            if summary:
                ui.write(summary, '\n')
            ui.write(s, '\n')
            confirm(_('Does the diffstat above look okay'))
        return s

    def makepatch(patch, idx, total):
        desc = []
        node = None
        body = ''
        for line in patch:
            if line.startswith('#'):
                if line.startswith('# Node ID'): node = line.split()[-1]
                continue
            if line.startswith('diff -r'): break
            desc.append(line)
        if not node: raise ValueError

        #body = ('\n'.join(desc[1:]).strip() or
        #        'Patch subject is complete summary.')
        #body += '\n\n\n'

        if opts['plain']:
            while patch and patch[0].startswith('# '): patch.pop(0)
            if patch: patch.pop(0)
            while patch and not patch[0].strip(): patch.pop(0)
        if opts['diffstat']:
            body += cdiffstat('\n'.join(desc), patch) + '\n\n'
        body += '\n'.join(patch)
        msg = email.MIMEText.MIMEText(body)
        if total == 1:
            subj = '[PATCH] ' + desc[0].strip()
        else:
            subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
        if subj.endswith('.'): subj = subj[:-1]
        msg['Subject'] = subj
        msg['X-Mercurial-Node'] = node
        return msg

    start_time = int(time.time())

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())

    patches = []

    class exportee:
        def __init__(self, container):
            self.lines = []
            self.container = container
            self.name = 'email'

        def write(self, data):
            self.lines.append(data)

        def close(self):
            self.container.append(''.join(self.lines).split('\n'))
            self.lines = []

    commands.export(ui, repo, *revs, **{'output': exportee(patches),
                                        'switch_parent': False,
                                        'text': None})

    jumbo = []
    msgs = []

    ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))

    for p, i in zip(patches, range(len(patches))):
        jumbo.extend(p)
        msgs.append(makepatch(p, i + 1, len(patches)))

    sender = (opts['from'] or ui.config('patchbomb', 'from') or
              prompt('From', ui.username()))

    def getaddrs(opt, prpt, default = None):
        addrs = opts[opt] or (ui.config('patchbomb', opt) or
                              prompt(prpt, default = default)).split(',')
        return [a.strip() for a in addrs if a.strip()]
    to = getaddrs('to', 'To')
    cc = getaddrs('cc', 'Cc', '')

    if len(patches) > 1:
        ui.write(_('\nWrite the introductory message for the patch series.\n\n'))

        msg = email.MIMEMultipart.MIMEMultipart()
        msg['Subject'] = '[PATCH 0 of %d] %s' % (
            len(patches),
            opts['subject'] or
            prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))

        ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))

        body = []

        while True:
            try: l = raw_input()
            except EOFError: break
            if l == '.': break
            body.append(l)

        msg.attach(email.MIMEText.MIMEText('\n'.join(body) + '\n'))

        if opts['diffstat']:
            d = cdiffstat(_('Final summary:\n'), jumbo)
            if d: msg.attach(email.MIMEText.MIMEText(d))

        msgs.insert(0, msg)

    ui.write('\n')

    if not opts['test'] and not opts['mbox']:
        s = smtplib.SMTP()
        s.connect(host = ui.config('smtp', 'host', 'mail'),
                  port = int(ui.config('smtp', 'port', 25)))
        if ui.configbool('smtp', 'tls'):
            s.ehlo()
            s.starttls()
            s.ehlo()
        username = ui.config('smtp', 'username')
        password = ui.config('smtp', 'password')
        if username and password:
            s.login(username, password)
    parent = None
    tz = time.strftime('%z')
    sender_addr = email.Utils.parseaddr(sender)[1]
    for m in msgs:
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
        else:
            parent = m['Message-Id']
        m['Date'] = time.strftime('%a, %e %b %Y %T ', time.localtime(start_time)) + tz
        start_time += 1
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc: m['Cc'] = ', '.join(cc)
        if opts['test']:
            ui.status('Displaying ', m['Subject'], ' ...\n')
            fp = os.popen(os.getenv('PAGER', 'more'), 'w')
            try:
                fp.write(m.as_string(0))
                fp.write('\n')
            except IOError, inst:
                if inst.errno != errno.EPIPE:
                    raise
            fp.close()
        elif opts['mbox']:
            ui.status('Writing ', m['Subject'], ' ...\n')
            fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
            date = time.asctime(time.localtime(start_time))
            fp.write('From %s %s\n' % (sender_addr, date))
            fp.write(m.as_string(0))
            fp.write('\n\n')
            fp.close()
        else:
            ui.status('Sending ', m['Subject'], ' ...\n')
            s.sendmail(sender, to + cc, m.as_string(0))
    if not opts['test'] and not opts['mbox']:
        s.close()

cmdtable = {
    'email':
    (patchbomb,
     [('c', 'cc', [], 'email addresses of copy recipients'),
      ('d', 'diffstat', None, 'add diffstat output to messages'),
      ('f', 'from', '', 'email address of sender'),
      ('', 'plain', None, 'omit hg patch header'),
      ('n', 'test', None, 'print messages that would be sent'),
      ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
      ('s', 'subject', '', 'subject of introductory message'),
      ('t', 'to', [], 'email addresses of recipients')],
     "hg email [OPTION]... [REV]...")
}
@@ -1,154 +1,154 @@
# appendfile.py - special classes to make repo updates atomic
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

from demandload import *
demandload(globals(), "cStringIO changelog errno manifest os tempfile")

# writes to metadata files are ordered. reads: changelog, manifest,
# normal files. writes: normal files, manifest, changelog.

# manifest contains pointers to offsets in normal files. changelog
# contains pointers to offsets in manifest. if reader reads old
# changelog while manifest or normal files are written, it has no
# pointers into new parts of those files that are maybe not consistent
# yet, so will not read them.

# localrepo.addchangegroup thinks it writes changelog first, then
# manifest, then normal files (this is order they are available, and
# needed for computing linkrev fields), but uses appendfile to hide
# updates from readers. data not written to manifest or changelog
# until all normal files updated. write manifest first, then
# changelog.

# with this write ordering, readers cannot see inconsistent view of
# repo during update.

class appendfile(object):
    '''implement enough of file protocol to append to revlog file.
    appended data is written to temp file. reads and seeks span real
    file and temp file. readers cannot see appended data until
    writedata called.'''

    def __init__(self, fp, tmpname):
        if tmpname:
            self.tmpname = tmpname
            self.tmpfp = open(self.tmpname, 'ab+')
        else:
-           fd, self.tmpname = tempfile.mkstemp()
+           fd, self.tmpname = tempfile.mkstemp(prefix="hg-appendfile-")
            self.tmpfp = os.fdopen(fd, 'ab+')
        self.realfp = fp
        self.offset = fp.tell()
        # real file is not written by anyone else. cache its size so
        # seek and read can be fast.
        self.realsize = os.fstat(fp.fileno()).st_size

    def end(self):
        self.tmpfp.flush() # make sure the stat is correct
        return self.realsize + os.fstat(self.tmpfp.fileno()).st_size

    def tell(self):
        return self.offset

    def flush(self):
        self.tmpfp.flush()

    def close(self):
        self.realfp.close()
        self.tmpfp.close()

    def seek(self, offset, whence=0):
        '''virtual file offset spans real file and temp file.'''
        if whence == 0:
            self.offset = offset
        elif whence == 1:
            self.offset += offset
        elif whence == 2:
            self.offset = self.end() + offset

        if self.offset < self.realsize:
            self.realfp.seek(self.offset)
        else:
            self.tmpfp.seek(self.offset - self.realsize)

    def read(self, count=-1):
        '''only trick here is reads that span real file and temp file.'''
        fp = cStringIO.StringIO()
        old_offset = self.offset
        if self.offset < self.realsize:
            s = self.realfp.read(count)
            fp.write(s)
            self.offset += len(s)
            if count > 0:
                count -= len(s)
        if count != 0:
            if old_offset != self.offset:
                self.tmpfp.seek(self.offset - self.realsize)
            s = self.tmpfp.read(count)
            fp.write(s)
            self.offset += len(s)
        return fp.getvalue()

    def write(self, s):
        '''append to temp file.'''
        self.tmpfp.seek(0, 2)
        self.tmpfp.write(s)
        # all writes are appends, so offset must go to end of file.
        self.offset = self.realsize + self.tmpfp.tell()

class appendopener(object):
    '''special opener for files that only read or append.'''

    def __init__(self, opener):
        self.realopener = opener
        # key: file name, value: appendfile name
        self.tmpnames = {}

    def __call__(self, name, mode='r'):
        '''open file.'''

        assert mode in 'ra+'
        try:
            realfp = self.realopener(name, 'r')
        except IOError, err:
            if err.errno != errno.ENOENT: raise
            realfp = self.realopener(name, 'w+')
        tmpname = self.tmpnames.get(name)
        fp = appendfile(realfp, tmpname)
        if tmpname is None:
            self.tmpnames[name] = fp.tmpname
        return fp

    def writedata(self):
        '''copy data from temp files to real files.'''
        # write .d file before .i file.
        tmpnames = self.tmpnames.items()
        tmpnames.sort()
        for name, tmpname in tmpnames:
            fp = open(tmpname, 'rb')
            s = fp.read()
            fp.close()
            os.unlink(tmpname)
            fp = self.realopener(name, 'a')
            fp.write(s)
            fp.close()

# files for changelog and manifest are in different appendopeners, so
# not mixed up together.

class appendchangelog(changelog.changelog, appendopener):
    def __init__(self, opener, version):
        appendopener.__init__(self, opener)
        changelog.changelog.__init__(self, self, version)
    def checkinlinesize(self, fp, tr):
        return

class appendmanifest(manifest.manifest, appendopener):
    def __init__(self, opener, version):
        appendopener.__init__(self, opener)
        manifest.manifest.__init__(self, self, version)
    def checkinlinesize(self, fp, tr):
        return
@@ -1,3568 +1,3568
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch hgweb mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival changegroup")
15 demandload(globals(), "archival changegroup")
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted, unknown = repo.changes()
23 modified, added, removed, deleted, unknown = repo.changes()
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def filterfiles(filters, files):
27 def filterfiles(filters, files):
28 l = [x for x in files if x in filters]
28 l = [x for x in files if x in filters]
29
29
30 for t in filters:
30 for t in filters:
31 if t and t[-1] != "/":
31 if t and t[-1] != "/":
32 t += "/"
32 t += "/"
33 l += [x for x in files if x.startswith(t)]
33 l += [x for x in files if x.startswith(t)]
34 return l
34 return l
35
35
36 def relpath(repo, args):
36 def relpath(repo, args):
37 cwd = repo.getcwd()
37 cwd = repo.getcwd()
38 if cwd:
38 if cwd:
39 return [util.normpath(os.path.join(cwd, x)) for x in args]
39 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 return args
40 return args
41
41
42 def matchpats(repo, pats=[], opts={}, head=''):
42 def matchpats(repo, pats=[], opts={}, head=''):
43 cwd = repo.getcwd()
43 cwd = repo.getcwd()
44 if not pats and cwd:
44 if not pats and cwd:
45 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
45 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
46 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
46 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
47 cwd = ''
47 cwd = ''
48 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
48 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
49 opts.get('exclude'), head)
49 opts.get('exclude'), head)
50
50
51 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
51 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
52 files, matchfn, anypats = matchpats(repo, pats, opts, head)
52 files, matchfn, anypats = matchpats(repo, pats, opts, head)
53 exact = dict(zip(files, files))
53 exact = dict(zip(files, files))
54 def walk():
54 def walk():
55 for src, fn in repo.walk(node=node, files=files, match=matchfn,
55 for src, fn in repo.walk(node=node, files=files, match=matchfn,
56 badmatch=badmatch):
56 badmatch=badmatch):
57 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
57 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
58 return files, matchfn, walk()
58 return files, matchfn, walk()
59
59
60 def walk(repo, pats, opts, node=None, head='', badmatch=None):
60 def walk(repo, pats, opts, node=None, head='', badmatch=None):
61 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
61 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
62 for r in results:
62 for r in results:
63 yield r
63 yield r
64
64
65 def walkchangerevs(ui, repo, pats, opts):
65 def walkchangerevs(ui, repo, pats, opts):
66 '''Iterate over files and the revs they changed in.
66 '''Iterate over files and the revs they changed in.
67
67
68 Callers most commonly need to iterate backwards over the history
68 Callers most commonly need to iterate backwards over the history
69 it is interested in. Doing so has awful (quadratic-looking)
69 it is interested in. Doing so has awful (quadratic-looking)
70 performance, so we use iterators in a "windowed" way.
70 performance, so we use iterators in a "windowed" way.
71
71
72 We walk a window of revisions in the desired order. Within the
72 We walk a window of revisions in the desired order. Within the
73 window, we first walk forwards to gather data, then in the desired
73 window, we first walk forwards to gather data, then in the desired
74 order (usually backwards) to display it.
74 order (usually backwards) to display it.
75
75
76 This function returns an (iterator, getchange, matchfn) tuple. The
76 This function returns an (iterator, getchange, matchfn) tuple. The
77 getchange function returns the changelog entry for a numeric
77 getchange function returns the changelog entry for a numeric
78 revision. The iterator yields 3-tuples. They will be of one of
78 revision. The iterator yields 3-tuples. They will be of one of
79 the following forms:
79 the following forms:
80
80
81 "window", incrementing, lastrev: stepping through a window,
81 "window", incrementing, lastrev: stepping through a window,
82 positive if walking forwards through revs, last rev in the
82 positive if walking forwards through revs, last rev in the
83 sequence iterated over - use to reset state for the current window
83 sequence iterated over - use to reset state for the current window
84
84
85 "add", rev, fns: out-of-order traversal of the given file names
85 "add", rev, fns: out-of-order traversal of the given file names
86 fns, which changed during revision rev - use to gather data for
86 fns, which changed during revision rev - use to gather data for
87 possible display
87 possible display
88
88
89 "iter", rev, None: in-order traversal of the revs earlier iterated
89 "iter", rev, None: in-order traversal of the revs earlier iterated
90 over with "add" - use to display data'''
90 over with "add" - use to display data'''
91
91
92 def increasing_windows(start, end, windowsize=8, sizelimit=512):
92 def increasing_windows(start, end, windowsize=8, sizelimit=512):
93 if start < end:
93 if start < end:
94 while start < end:
94 while start < end:
95 yield start, min(windowsize, end-start)
95 yield start, min(windowsize, end-start)
96 start += windowsize
96 start += windowsize
97 if windowsize < sizelimit:
97 if windowsize < sizelimit:
98 windowsize *= 2
98 windowsize *= 2
99 else:
99 else:
100 while start > end:
100 while start > end:
101 yield start, min(windowsize, start-end-1)
101 yield start, min(windowsize, start-end-1)
102 start -= windowsize
102 start -= windowsize
103 if windowsize < sizelimit:
103 if windowsize < sizelimit:
104 windowsize *= 2
104 windowsize *= 2
105
105
106
106
107 files, matchfn, anypats = matchpats(repo, pats, opts)
107 files, matchfn, anypats = matchpats(repo, pats, opts)
108
108
109 if repo.changelog.count() == 0:
109 if repo.changelog.count() == 0:
110 return [], False, matchfn
110 return [], False, matchfn
111
111
112 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
112 revs = map(int, revrange(ui, repo, opts['rev'] or ['tip:0']))
113 wanted = {}
113 wanted = {}
114 slowpath = anypats
114 slowpath = anypats
115 fncache = {}
115 fncache = {}
116
116
117 chcache = {}
117 chcache = {}
118 def getchange(rev):
118 def getchange(rev):
119 ch = chcache.get(rev)
119 ch = chcache.get(rev)
120 if ch is None:
120 if ch is None:
121 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
121 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
122 return ch
122 return ch
123
123
124 if not slowpath and not files:
124 if not slowpath and not files:
125 # No files, no patterns. Display all revs.
125 # No files, no patterns. Display all revs.
126 wanted = dict(zip(revs, revs))
126 wanted = dict(zip(revs, revs))
127 if not slowpath:
127 if not slowpath:
128 # Only files, no patterns. Check the history of each file.
128 # Only files, no patterns. Check the history of each file.
129 def filerevgen(filelog):
129 def filerevgen(filelog):
130 for i, window in increasing_windows(filelog.count()-1, -1):
130 for i, window in increasing_windows(filelog.count()-1, -1):
131 revs = []
131 revs = []
132 for j in xrange(i - window, i + 1):
132 for j in xrange(i - window, i + 1):
133 revs.append(filelog.linkrev(filelog.node(j)))
133 revs.append(filelog.linkrev(filelog.node(j)))
134 revs.reverse()
134 revs.reverse()
135 for rev in revs:
135 for rev in revs:
136 yield rev
136 yield rev
137
137
138 minrev, maxrev = min(revs), max(revs)
138 minrev, maxrev = min(revs), max(revs)
139 for file_ in files:
139 for file_ in files:
140 filelog = repo.file(file_)
140 filelog = repo.file(file_)
141 # A zero count may be a directory or deleted file, so
141 # A zero count may be a directory or deleted file, so
142 # try to find matching entries on the slow path.
142 # try to find matching entries on the slow path.
143 if filelog.count() == 0:
143 if filelog.count() == 0:
144 slowpath = True
144 slowpath = True
145 break
145 break
146 for rev in filerevgen(filelog):
146 for rev in filerevgen(filelog):
147 if rev <= maxrev:
147 if rev <= maxrev:
148 if rev < minrev:
148 if rev < minrev:
149 break
149 break
150 fncache.setdefault(rev, [])
150 fncache.setdefault(rev, [])
151 fncache[rev].append(file_)
151 fncache[rev].append(file_)
152 wanted[rev] = 1
152 wanted[rev] = 1
153 if slowpath:
153 if slowpath:
154 # The slow path checks files modified in every changeset.
154 # The slow path checks files modified in every changeset.
155 def changerevgen():
155 def changerevgen():
156 for i, window in increasing_windows(repo.changelog.count()-1, -1):
156 for i, window in increasing_windows(repo.changelog.count()-1, -1):
157 for j in xrange(i - window, i + 1):
157 for j in xrange(i - window, i + 1):
158 yield j, getchange(j)[3]
158 yield j, getchange(j)[3]
159
159
160 for rev, changefiles in changerevgen():
160 for rev, changefiles in changerevgen():
161 matches = filter(matchfn, changefiles)
161 matches = filter(matchfn, changefiles)
162 if matches:
162 if matches:
163 fncache[rev] = matches
163 fncache[rev] = matches
164 wanted[rev] = 1
164 wanted[rev] = 1
165
165
166 def iterate():
166 def iterate():
167 for i, window in increasing_windows(0, len(revs)):
167 for i, window in increasing_windows(0, len(revs)):
168 yield 'window', revs[0] < revs[-1], revs[-1]
168 yield 'window', revs[0] < revs[-1], revs[-1]
169 nrevs = [rev for rev in revs[i:i+window]
169 nrevs = [rev for rev in revs[i:i+window]
170 if rev in wanted]
170 if rev in wanted]
171 srevs = list(nrevs)
171 srevs = list(nrevs)
172 srevs.sort()
172 srevs.sort()
173 for rev in srevs:
173 for rev in srevs:
174 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
174 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
175 yield 'add', rev, fns
175 yield 'add', rev, fns
176 for rev in nrevs:
176 for rev in nrevs:
177 yield 'iter', rev, None
177 yield 'iter', rev, None
178 return iterate(), getchange, matchfn
178 return iterate(), getchange, matchfn
179
179
180 revrangesep = ':'
180 revrangesep = ':'
181
181
182 def revrange(ui, repo, revs, revlog=None):
182 def revrange(ui, repo, revs, revlog=None):
183 """Yield revision as strings from a list of revision specifications."""
183 """Yield revision as strings from a list of revision specifications."""
184 if revlog is None:
184 if revlog is None:
185 revlog = repo.changelog
185 revlog = repo.changelog
186 revcount = revlog.count()
186 revcount = revlog.count()
187 def fix(val, defval):
187 def fix(val, defval):
188 if not val:
188 if not val:
189 return defval
189 return defval
190 try:
190 try:
191 num = int(val)
191 num = int(val)
192 if str(num) != val:
192 if str(num) != val:
193 raise ValueError
193 raise ValueError
194 if num < 0:
194 if num < 0:
195 num += revcount
195 num += revcount
196 if num < 0:
196 if num < 0:
197 num = 0
197 num = 0
198 elif num >= revcount:
198 elif num >= revcount:
199 raise ValueError
199 raise ValueError
200 except ValueError:
200 except ValueError:
201 try:
201 try:
202 num = repo.changelog.rev(repo.lookup(val))
202 num = repo.changelog.rev(repo.lookup(val))
203 except KeyError:
203 except KeyError:
204 try:
204 try:
205 num = revlog.rev(revlog.lookup(val))
205 num = revlog.rev(revlog.lookup(val))
206 except KeyError:
206 except KeyError:
207 raise util.Abort(_('invalid revision identifier %s'), val)
207 raise util.Abort(_('invalid revision identifier %s'), val)
208 return num
208 return num
209 seen = {}
209 seen = {}
210 for spec in revs:
210 for spec in revs:
211 if spec.find(revrangesep) >= 0:
211 if spec.find(revrangesep) >= 0:
212 start, end = spec.split(revrangesep, 1)
212 start, end = spec.split(revrangesep, 1)
213 start = fix(start, 0)
213 start = fix(start, 0)
214 end = fix(end, revcount - 1)
214 end = fix(end, revcount - 1)
215 step = start > end and -1 or 1
215 step = start > end and -1 or 1
216 for rev in xrange(start, end+step, step):
216 for rev in xrange(start, end+step, step):
217 if rev in seen:
217 if rev in seen:
218 continue
218 continue
219 seen[rev] = 1
219 seen[rev] = 1
220 yield str(rev)
220 yield str(rev)
221 else:
221 else:
222 rev = fix(spec, None)
222 rev = fix(spec, None)
223 if rev in seen:
223 if rev in seen:
224 continue
224 continue
225 seen[rev] = 1
225 seen[rev] = 1
226 yield str(rev)
226 yield str(rev)
227
227
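A minimal standalone sketch of the range handling in revrange() above: numeric specs, negative indexing from the end of history, and start:end ranges that may run backwards. Symbolic-name lookup and error handling are omitted; expand_revspecs and its inputs are illustrative only.

    def expand_revspecs(specs, revcount):
        seen = set()
        def fix(val, default):
            if not val:
                return default
            num = int(val)                   # symbolic names are resolved in revrange()
            if num < 0:
                num += revcount              # -1 means the last revision
            return max(0, min(num, revcount - 1))
        for spec in specs:
            if ':' in spec:
                start, end = spec.split(':', 1)
                start, end = fix(start, 0), fix(end, revcount - 1)
                step = 1 if start <= end else -1
                revs = range(start, end + step, step)
            else:
                revs = [fix(spec, None)]
            for rev in revs:
                if rev not in seen:          # duplicates are yielded only once
                    seen.add(rev)
                    yield rev

    print(list(expand_revspecs(['2', '0:3', '5:4'], revcount=6)))
    # -> [2, 0, 1, 3, 5, 4]  (the already-seen rev 2 is skipped, 5:4 runs backwards)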
228 def make_filename(repo, r, pat, node=None,
228 def make_filename(repo, r, pat, node=None,
229 total=None, seqno=None, revwidth=None, pathname=None):
229 total=None, seqno=None, revwidth=None, pathname=None):
230 node_expander = {
230 node_expander = {
231 'H': lambda: hex(node),
231 'H': lambda: hex(node),
232 'R': lambda: str(r.rev(node)),
232 'R': lambda: str(r.rev(node)),
233 'h': lambda: short(node),
233 'h': lambda: short(node),
234 }
234 }
235 expander = {
235 expander = {
236 '%': lambda: '%',
236 '%': lambda: '%',
237 'b': lambda: os.path.basename(repo.root),
237 'b': lambda: os.path.basename(repo.root),
238 }
238 }
239
239
240 try:
240 try:
241 if node:
241 if node:
242 expander.update(node_expander)
242 expander.update(node_expander)
243 if node and revwidth is not None:
243 if node and revwidth is not None:
244 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
244 expander['r'] = lambda: str(r.rev(node)).zfill(revwidth)
245 if total is not None:
245 if total is not None:
246 expander['N'] = lambda: str(total)
246 expander['N'] = lambda: str(total)
247 if seqno is not None:
247 if seqno is not None:
248 expander['n'] = lambda: str(seqno)
248 expander['n'] = lambda: str(seqno)
249 if total is not None and seqno is not None:
249 if total is not None and seqno is not None:
250 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
250 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
251 if pathname is not None:
251 if pathname is not None:
252 expander['s'] = lambda: os.path.basename(pathname)
252 expander['s'] = lambda: os.path.basename(pathname)
253 expander['d'] = lambda: os.path.dirname(pathname) or '.'
253 expander['d'] = lambda: os.path.dirname(pathname) or '.'
254 expander['p'] = lambda: pathname
254 expander['p'] = lambda: pathname
255
255
256 newname = []
256 newname = []
257 patlen = len(pat)
257 patlen = len(pat)
258 i = 0
258 i = 0
259 while i < patlen:
259 while i < patlen:
260 c = pat[i]
260 c = pat[i]
261 if c == '%':
261 if c == '%':
262 i += 1
262 i += 1
263 c = pat[i]
263 c = pat[i]
264 c = expander[c]()
264 c = expander[c]()
265 newname.append(c)
265 newname.append(c)
266 i += 1
266 i += 1
267 return ''.join(newname)
267 return ''.join(newname)
268 except KeyError, inst:
268 except KeyError, inst:
269 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
269 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
270 inst.args[0])
270 inst.args[0])
271
271
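A standalone sketch of the %-expansion loop in make_filename() above, with a hand-filled expander dict standing in for the repository-derived one; the sample values ('myrepo', '42', and so on) are invented.

    def expand(pat, expander):
        out, i = [], 0
        while i < len(pat):
            c = pat[i]
            if c == '%':                 # two-character escape: '%x'
                i += 1
                c = expander[pat[i]]()   # unknown keys raise KeyError, as above
            out.append(c)
            i += 1
        return ''.join(out)

    expander = {
        '%': lambda: '%',
        'b': lambda: 'myrepo',           # basename of the repository root
        'R': lambda: '42',               # changeset revision number
        'h': lambda: 'abc123def456',     # short changeset hash
        'n': lambda: '03',               # zero-padded sequence number
    }
    print(expand('%b-r%R-%h.patch', expander))   # myrepo-r42-abc123def456.patch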
272 def make_file(repo, r, pat, node=None,
272 def make_file(repo, r, pat, node=None,
273 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
273 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
274 if not pat or pat == '-':
274 if not pat or pat == '-':
275 return 'w' in mode and sys.stdout or sys.stdin
275 return 'w' in mode and sys.stdout or sys.stdin
276 if hasattr(pat, 'write') and 'w' in mode:
276 if hasattr(pat, 'write') and 'w' in mode:
277 return pat
277 return pat
278 if hasattr(pat, 'read') and 'r' in mode:
278 if hasattr(pat, 'read') and 'r' in mode:
279 return pat
279 return pat
280 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
280 return open(make_filename(repo, r, pat, node, total, seqno, revwidth,
281 pathname),
281 pathname),
282 mode)
282 mode)
283
283
284 def write_bundle(cg, filename=None, compress=True):
284 def write_bundle(cg, filename=None, compress=True):
285 """Write a bundle file and return its filename.
285 """Write a bundle file and return its filename.
286
286
287 Existing files will not be overwritten.
287 Existing files will not be overwritten.
288 If no filename is specified, a temporary file is created.
288 If no filename is specified, a temporary file is created.
289 bz2 compression can be turned off.
289 bz2 compression can be turned off.
290 The bundle file will be deleted in case of errors.
290 The bundle file will be deleted in case of errors.
291 """
291 """
292 class nocompress(object):
292 class nocompress(object):
293 def compress(self, x):
293 def compress(self, x):
294 return x
294 return x
295 def flush(self):
295 def flush(self):
296 return ""
296 return ""
297
297
298 fh = None
298 fh = None
299 cleanup = None
299 cleanup = None
300 try:
300 try:
301 if filename:
301 if filename:
302 if os.path.exists(filename):
302 if os.path.exists(filename):
303 raise util.Abort(_("file '%s' already exists"), filename)
303 raise util.Abort(_("file '%s' already exists"), filename)
304 fh = open(filename, "wb")
304 fh = open(filename, "wb")
305 else:
305 else:
306 fd, filename = tempfile.mkstemp(suffix=".hg", prefix="hg-bundle-")
306 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
307 fh = os.fdopen(fd, "wb")
307 fh = os.fdopen(fd, "wb")
308 cleanup = filename
308 cleanup = filename
309
309
310 if compress:
310 if compress:
311 fh.write("HG10")
311 fh.write("HG10")
312 z = bz2.BZ2Compressor(9)
312 z = bz2.BZ2Compressor(9)
313 else:
313 else:
314 fh.write("HG10UN")
314 fh.write("HG10UN")
315 z = nocompress()
315 z = nocompress()
316 # parse the changegroup data, otherwise we will block
316 # parse the changegroup data, otherwise we will block
317 # in case of sshrepo because we don't know the end of the stream
317 # in case of sshrepo because we don't know the end of the stream
318
318
319 # an empty chunkiter is the end of the changegroup
319 # an empty chunkiter is the end of the changegroup
320 empty = False
320 empty = False
321 while not empty:
321 while not empty:
322 empty = True
322 empty = True
323 for chunk in changegroup.chunkiter(cg):
323 for chunk in changegroup.chunkiter(cg):
324 empty = False
324 empty = False
325 fh.write(z.compress(changegroup.genchunk(chunk)))
325 fh.write(z.compress(changegroup.genchunk(chunk)))
326 fh.write(z.compress(changegroup.closechunk()))
326 fh.write(z.compress(changegroup.closechunk()))
327 fh.write(z.flush())
327 fh.write(z.flush())
328 cleanup = None
328 cleanup = None
329 return filename
329 return filename
330 finally:
330 finally:
331 if fh is not None:
331 if fh is not None:
332 fh.close()
332 fh.close()
333 if cleanup is not None:
333 if cleanup is not None:
334 os.unlink(cleanup)
334 os.unlink(cleanup)
335
335
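A standalone sketch of the header and compression choice made by write_bundle() above: "HG10" plus a bz2 compressor, or "HG10UN" with a pass-through compressor. The chunks written here are dummy bytes, not a real changegroup stream.

    import bz2

    def write_demo_bundle(path, chunks, compress=True):
        class nocompress(object):            # pass-through "compressor"
            def compress(self, data):
                return data
            def flush(self):
                return b""
        with open(path, "wb") as fh:
            if compress:
                fh.write(b"HG10")            # header for a bz2-compressed bundle
                z = bz2.BZ2Compressor(9)
            else:
                fh.write(b"HG10UN")          # header for an uncompressed bundle
                z = nocompress()
            for chunk in chunks:
                fh.write(z.compress(chunk))
            fh.write(z.flush())

    write_demo_bundle("demo.hg", [b"fake chunk 1", b"fake chunk 2"])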
336 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
336 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
337 changes=None, text=False, opts={}):
337 changes=None, text=False, opts={}):
338 if not node1:
338 if not node1:
339 node1 = repo.dirstate.parents()[0]
339 node1 = repo.dirstate.parents()[0]
340 # reading the data for node1 early allows it to play nicely
340 # reading the data for node1 early allows it to play nicely
341 # with repo.changes and the revlog cache.
341 # with repo.changes and the revlog cache.
342 change = repo.changelog.read(node1)
342 change = repo.changelog.read(node1)
343 mmap = repo.manifest.read(change[0])
343 mmap = repo.manifest.read(change[0])
344 date1 = util.datestr(change[2])
344 date1 = util.datestr(change[2])
345
345
346 if not changes:
346 if not changes:
347 changes = repo.changes(node1, node2, files, match=match)
347 changes = repo.changes(node1, node2, files, match=match)
348 modified, added, removed, deleted, unknown = changes
348 modified, added, removed, deleted, unknown = changes
349 if files:
349 if files:
350 modified, added, removed = map(lambda x: filterfiles(files, x),
350 modified, added, removed = map(lambda x: filterfiles(files, x),
351 (modified, added, removed))
351 (modified, added, removed))
352
352
353 if not modified and not added and not removed:
353 if not modified and not added and not removed:
354 return
354 return
355
355
356 if node2:
356 if node2:
357 change = repo.changelog.read(node2)
357 change = repo.changelog.read(node2)
358 mmap2 = repo.manifest.read(change[0])
358 mmap2 = repo.manifest.read(change[0])
359 date2 = util.datestr(change[2])
359 date2 = util.datestr(change[2])
360 def read(f):
360 def read(f):
361 return repo.file(f).read(mmap2[f])
361 return repo.file(f).read(mmap2[f])
362 else:
362 else:
363 date2 = util.datestr()
363 date2 = util.datestr()
364 def read(f):
364 def read(f):
365 return repo.wread(f)
365 return repo.wread(f)
366
366
367 if ui.quiet:
367 if ui.quiet:
368 r = None
368 r = None
369 else:
369 else:
370 hexfunc = ui.verbose and hex or short
370 hexfunc = ui.verbose and hex or short
371 r = [hexfunc(node) for node in [node1, node2] if node]
371 r = [hexfunc(node) for node in [node1, node2] if node]
372
372
373 diffopts = ui.diffopts()
373 diffopts = ui.diffopts()
374 showfunc = opts.get('show_function') or diffopts['showfunc']
374 showfunc = opts.get('show_function') or diffopts['showfunc']
375 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
375 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
376 for f in modified:
376 for f in modified:
377 to = None
377 to = None
378 if f in mmap:
378 if f in mmap:
379 to = repo.file(f).read(mmap[f])
379 to = repo.file(f).read(mmap[f])
380 tn = read(f)
380 tn = read(f)
381 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
381 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
382 showfunc=showfunc, ignorews=ignorews))
382 showfunc=showfunc, ignorews=ignorews))
383 for f in added:
383 for f in added:
384 to = None
384 to = None
385 tn = read(f)
385 tn = read(f)
386 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
386 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
387 showfunc=showfunc, ignorews=ignorews))
387 showfunc=showfunc, ignorews=ignorews))
388 for f in removed:
388 for f in removed:
389 to = repo.file(f).read(mmap[f])
389 to = repo.file(f).read(mmap[f])
390 tn = None
390 tn = None
391 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
391 fp.write(mdiff.unidiff(to, date1, tn, date2, f, r, text=text,
392 showfunc=showfunc, ignorews=ignorews))
392 showfunc=showfunc, ignorews=ignorews))
393
393
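A rough standalone illustration of the per-file loop in dodiff() above, using difflib instead of mdiff and in-memory sample file contents; it only mirrors the modified/added/removed triage, not the date headers or whitespace options.

    import difflib, sys

    old = {'hello.txt': 'hello\nworld\n'}             # files as of node1
    new = {'hello.txt': 'hello\nthere\nworld\n',       # files in the working dir
           'added.txt': 'brand new\n'}

    modified = [f for f in new if f in old and new[f] != old[f]]
    added = [f for f in new if f not in old]
    removed = [f for f in old if f not in new]

    for f in modified + added + removed:
        to = old.get(f, '').splitlines(True)           # '' plays the role of "to = None"
        tn = new.get(f, '').splitlines(True)
        for line in difflib.unified_diff(to, tn, 'a/' + f, 'b/' + f):
            sys.stdout.write(line)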
394 def trimuser(ui, name, rev, revcache):
394 def trimuser(ui, name, rev, revcache):
395 """trim the name of the user who committed a change"""
395 """trim the name of the user who committed a change"""
396 user = revcache.get(rev)
396 user = revcache.get(rev)
397 if user is None:
397 if user is None:
398 user = revcache[rev] = ui.shortuser(name)
398 user = revcache[rev] = ui.shortuser(name)
399 return user
399 return user
400
400
401 class changeset_templater(object):
401 class changeset_templater(object):
402 '''use templater module to format changeset information.'''
402 '''use templater module to format changeset information.'''
403
403
404 def __init__(self, ui, repo, mapfile):
404 def __init__(self, ui, repo, mapfile):
405 self.t = templater.templater(mapfile, templater.common_filters,
405 self.t = templater.templater(mapfile, templater.common_filters,
406 cache={'parent': '{rev}:{node|short} ',
406 cache={'parent': '{rev}:{node|short} ',
407 'manifest': '{rev}:{node|short}'})
407 'manifest': '{rev}:{node|short}'})
408 self.ui = ui
408 self.ui = ui
409 self.repo = repo
409 self.repo = repo
410
410
411 def use_template(self, t):
411 def use_template(self, t):
412 '''set template string to use'''
412 '''set template string to use'''
413 self.t.cache['changeset'] = t
413 self.t.cache['changeset'] = t
414
414
415 def write(self, thing, header=False):
415 def write(self, thing, header=False):
416 '''write expanded template.
416 '''write expanded template.
417 uses in-order recursive traversal of iterators.'''
417 uses in-order recursive traversal of iterators.'''
418 for t in thing:
418 for t in thing:
419 if hasattr(t, '__iter__'):
419 if hasattr(t, '__iter__'):
420 self.write(t, header=header)
420 self.write(t, header=header)
421 elif header:
421 elif header:
422 self.ui.write_header(t)
422 self.ui.write_header(t)
423 else:
423 else:
424 self.ui.write(t)
424 self.ui.write(t)
425
425
426 def write_header(self, thing):
426 def write_header(self, thing):
427 self.write(thing, header=True)
427 self.write(thing, header=True)
428
428
429 def show(self, rev=0, changenode=None, brinfo=None):
429 def show(self, rev=0, changenode=None, brinfo=None):
430 '''show a single changeset or file revision'''
430 '''show a single changeset or file revision'''
431 log = self.repo.changelog
431 log = self.repo.changelog
432 if changenode is None:
432 if changenode is None:
433 changenode = log.node(rev)
433 changenode = log.node(rev)
434 elif not rev:
434 elif not rev:
435 rev = log.rev(changenode)
435 rev = log.rev(changenode)
436
436
437 changes = log.read(changenode)
437 changes = log.read(changenode)
438
438
439 def showlist(name, values, plural=None, **args):
439 def showlist(name, values, plural=None, **args):
440 '''expand set of values.
440 '''expand set of values.
441 name is name of key in template map.
441 name is name of key in template map.
442 values is list of strings or dicts.
442 values is list of strings or dicts.
443 plural is plural of name, if not simply name + 's'.
443 plural is plural of name, if not simply name + 's'.
444
444
445 expansion works like this, given name 'foo'.
445 expansion works like this, given name 'foo'.
446
446
447 if values is empty, expand 'no_foos'.
447 if values is empty, expand 'no_foos'.
448
448
449 if 'foo' not in template map, return values as a string,
449 if 'foo' not in template map, return values as a string,
450 joined by space.
450 joined by space.
451
451
452 expand 'start_foos'.
452 expand 'start_foos'.
453
453
454 for each value, expand 'foo'. if 'last_foo' in template
454 for each value, expand 'foo'. if 'last_foo' in template
455 map, expand it instead of 'foo' for last key.
455 map, expand it instead of 'foo' for last key.
456
456
457 expand 'end_foos'.
457 expand 'end_foos'.
458 '''
458 '''
459 if plural: names = plural
459 if plural: names = plural
460 else: names = name + 's'
460 else: names = name + 's'
461 if not values:
461 if not values:
462 noname = 'no_' + names
462 noname = 'no_' + names
463 if noname in self.t:
463 if noname in self.t:
464 yield self.t(noname, **args)
464 yield self.t(noname, **args)
465 return
465 return
466 if name not in self.t:
466 if name not in self.t:
467 if isinstance(values[0], str):
467 if isinstance(values[0], str):
468 yield ' '.join(values)
468 yield ' '.join(values)
469 else:
469 else:
470 for v in values:
470 for v in values:
471 yield dict(v, **args)
471 yield dict(v, **args)
472 return
472 return
473 startname = 'start_' + names
473 startname = 'start_' + names
474 if startname in self.t:
474 if startname in self.t:
475 yield self.t(startname, **args)
475 yield self.t(startname, **args)
476 vargs = args.copy()
476 vargs = args.copy()
477 def one(v, tag=name):
477 def one(v, tag=name):
478 try:
478 try:
479 vargs.update(v)
479 vargs.update(v)
480 except (AttributeError, ValueError):
480 except (AttributeError, ValueError):
481 try:
481 try:
482 for a, b in v:
482 for a, b in v:
483 vargs[a] = b
483 vargs[a] = b
484 except ValueError:
484 except ValueError:
485 vargs[name] = v
485 vargs[name] = v
486 return self.t(tag, **vargs)
486 return self.t(tag, **vargs)
487 lastname = 'last_' + name
487 lastname = 'last_' + name
488 if lastname in self.t:
488 if lastname in self.t:
489 last = values.pop()
489 last = values.pop()
490 else:
490 else:
491 last = None
491 last = None
492 for v in values:
492 for v in values:
493 yield one(v)
493 yield one(v)
494 if last is not None:
494 if last is not None:
495 yield one(last, tag=lastname)
495 yield one(last, tag=lastname)
496 endname = 'end_' + names
496 endname = 'end_' + names
497 if endname in self.t:
497 if endname in self.t:
498 yield self.t(endname, **args)
498 yield self.t(endname, **args)
499
499
500 if brinfo:
500 if brinfo:
501 def showbranches(**args):
501 def showbranches(**args):
502 if changenode in brinfo:
502 if changenode in brinfo:
503 for x in showlist('branch', brinfo[changenode],
503 for x in showlist('branch', brinfo[changenode],
504 plural='branches', **args):
504 plural='branches', **args):
505 yield x
505 yield x
506 else:
506 else:
507 showbranches = ''
507 showbranches = ''
508
508
509 if self.ui.debugflag:
509 if self.ui.debugflag:
510 def showmanifest(**args):
510 def showmanifest(**args):
511 args = args.copy()
511 args = args.copy()
512 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
512 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
513 node=hex(changes[0])))
513 node=hex(changes[0])))
514 yield self.t('manifest', **args)
514 yield self.t('manifest', **args)
515 else:
515 else:
516 showmanifest = ''
516 showmanifest = ''
517
517
518 def showparents(**args):
518 def showparents(**args):
519 parents = [[('rev', log.rev(p)), ('node', hex(p))]
519 parents = [[('rev', log.rev(p)), ('node', hex(p))]
520 for p in log.parents(changenode)
520 for p in log.parents(changenode)
521 if self.ui.debugflag or p != nullid]
521 if self.ui.debugflag or p != nullid]
522 if (not self.ui.debugflag and len(parents) == 1 and
522 if (not self.ui.debugflag and len(parents) == 1 and
523 parents[0][0][1] == rev - 1):
523 parents[0][0][1] == rev - 1):
524 return
524 return
525 for x in showlist('parent', parents, **args):
525 for x in showlist('parent', parents, **args):
526 yield x
526 yield x
527
527
528 def showtags(**args):
528 def showtags(**args):
529 for x in showlist('tag', self.repo.nodetags(changenode), **args):
529 for x in showlist('tag', self.repo.nodetags(changenode), **args):
530 yield x
530 yield x
531
531
532 if self.ui.debugflag:
532 if self.ui.debugflag:
533 files = self.repo.changes(log.parents(changenode)[0], changenode)
533 files = self.repo.changes(log.parents(changenode)[0], changenode)
534 def showfiles(**args):
534 def showfiles(**args):
535 for x in showlist('file', files[0], **args): yield x
535 for x in showlist('file', files[0], **args): yield x
536 def showadds(**args):
536 def showadds(**args):
537 for x in showlist('file_add', files[1], **args): yield x
537 for x in showlist('file_add', files[1], **args): yield x
538 def showdels(**args):
538 def showdels(**args):
539 for x in showlist('file_del', files[2], **args): yield x
539 for x in showlist('file_del', files[2], **args): yield x
540 else:
540 else:
541 def showfiles(**args):
541 def showfiles(**args):
542 for x in showlist('file', changes[3], **args): yield x
542 for x in showlist('file', changes[3], **args): yield x
543 showadds = ''
543 showadds = ''
544 showdels = ''
544 showdels = ''
545
545
546 props = {
546 props = {
547 'author': changes[1],
547 'author': changes[1],
548 'branches': showbranches,
548 'branches': showbranches,
549 'date': changes[2],
549 'date': changes[2],
550 'desc': changes[4],
550 'desc': changes[4],
551 'file_adds': showadds,
551 'file_adds': showadds,
552 'file_dels': showdels,
552 'file_dels': showdels,
553 'files': showfiles,
553 'files': showfiles,
554 'manifest': showmanifest,
554 'manifest': showmanifest,
555 'node': hex(changenode),
555 'node': hex(changenode),
556 'parents': showparents,
556 'parents': showparents,
557 'rev': rev,
557 'rev': rev,
558 'tags': showtags,
558 'tags': showtags,
559 }
559 }
560
560
561 try:
561 try:
562 if self.ui.debugflag and 'header_debug' in self.t:
562 if self.ui.debugflag and 'header_debug' in self.t:
563 key = 'header_debug'
563 key = 'header_debug'
564 elif self.ui.quiet and 'header_quiet' in self.t:
564 elif self.ui.quiet and 'header_quiet' in self.t:
565 key = 'header_quiet'
565 key = 'header_quiet'
566 elif self.ui.verbose and 'header_verbose' in self.t:
566 elif self.ui.verbose and 'header_verbose' in self.t:
567 key = 'header_verbose'
567 key = 'header_verbose'
568 elif 'header' in self.t:
568 elif 'header' in self.t:
569 key = 'header'
569 key = 'header'
570 else:
570 else:
571 key = ''
571 key = ''
572 if key:
572 if key:
573 self.write_header(self.t(key, **props))
573 self.write_header(self.t(key, **props))
574 if self.ui.debugflag and 'changeset_debug' in self.t:
574 if self.ui.debugflag and 'changeset_debug' in self.t:
575 key = 'changeset_debug'
575 key = 'changeset_debug'
576 elif self.ui.quiet and 'changeset_quiet' in self.t:
576 elif self.ui.quiet and 'changeset_quiet' in self.t:
577 key = 'changeset_quiet'
577 key = 'changeset_quiet'
578 elif self.ui.verbose and 'changeset_verbose' in self.t:
578 elif self.ui.verbose and 'changeset_verbose' in self.t:
579 key = 'changeset_verbose'
579 key = 'changeset_verbose'
580 else:
580 else:
581 key = 'changeset'
581 key = 'changeset'
582 self.write(self.t(key, **props))
582 self.write(self.t(key, **props))
583 except KeyError, inst:
583 except KeyError, inst:
584 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
584 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
585 inst.args[0]))
585 inst.args[0]))
586 except SyntaxError, inst:
586 except SyntaxError, inst:
587 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
587 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
588
588
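A standalone sketch of the key-lookup scheme documented in showlist() above, with a plain dict of format strings standing in for the templater map; the 'tag' templates and values are invented sample data.

    tmap = {
        'start_tags': '[',
        'tag': '{tag}, ',
        'last_tag': '{tag}',
        'end_tags': ']',
        'no_tags': '(no tags)',
    }

    def showlist(name, values, plural=None):
        names = plural or name + 's'
        if not values:
            yield tmap.get('no_' + names, '')         # empty list: 'no_foos'
            return
        if name not in tmap:
            yield ' '.join(values)                    # no per-item template: just join
            return
        yield tmap.get('start_' + names, '')
        last = values.pop() if 'last_' + name in tmap else None
        for v in values:
            yield tmap[name].format(**{name: v})
        if last is not None:
            yield tmap['last_' + name].format(**{name: last})
        yield tmap.get('end_' + names, '')

    print(''.join(showlist('tag', ['tip', 'v0.8'])))   # [tip, v0.8]
    print(''.join(showlist('tag', [])))                # (no tags)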
589 class changeset_printer(object):
589 class changeset_printer(object):
590 '''show changeset information when templating is not requested.'''
590 '''show changeset information when templating is not requested.'''
591
591
592 def __init__(self, ui, repo):
592 def __init__(self, ui, repo):
593 self.ui = ui
593 self.ui = ui
594 self.repo = repo
594 self.repo = repo
595
595
596 def show(self, rev=0, changenode=None, brinfo=None):
596 def show(self, rev=0, changenode=None, brinfo=None):
597 '''show a single changeset or file revision'''
597 '''show a single changeset or file revision'''
598 log = self.repo.changelog
598 log = self.repo.changelog
599 if changenode is None:
599 if changenode is None:
600 changenode = log.node(rev)
600 changenode = log.node(rev)
601 elif not rev:
601 elif not rev:
602 rev = log.rev(changenode)
602 rev = log.rev(changenode)
603
603
604 if self.ui.quiet:
604 if self.ui.quiet:
605 self.ui.write("%d:%s\n" % (rev, short(changenode)))
605 self.ui.write("%d:%s\n" % (rev, short(changenode)))
606 return
606 return
607
607
608 changes = log.read(changenode)
608 changes = log.read(changenode)
609 date = util.datestr(changes[2])
609 date = util.datestr(changes[2])
610
610
611 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
611 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
612 for p in log.parents(changenode)
612 for p in log.parents(changenode)
613 if self.ui.debugflag or p != nullid]
613 if self.ui.debugflag or p != nullid]
614 if (not self.ui.debugflag and len(parents) == 1 and
614 if (not self.ui.debugflag and len(parents) == 1 and
615 parents[0][0] == rev-1):
615 parents[0][0] == rev-1):
616 parents = []
616 parents = []
617
617
618 if self.ui.verbose:
618 if self.ui.verbose:
619 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
619 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
620 else:
620 else:
621 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
621 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
622
622
623 for tag in self.repo.nodetags(changenode):
623 for tag in self.repo.nodetags(changenode):
624 self.ui.status(_("tag: %s\n") % tag)
624 self.ui.status(_("tag: %s\n") % tag)
625 for parent in parents:
625 for parent in parents:
626 self.ui.write(_("parent: %d:%s\n") % parent)
626 self.ui.write(_("parent: %d:%s\n") % parent)
627
627
628 if brinfo and changenode in brinfo:
628 if brinfo and changenode in brinfo:
629 br = brinfo[changenode]
629 br = brinfo[changenode]
630 self.ui.write(_("branch: %s\n") % " ".join(br))
630 self.ui.write(_("branch: %s\n") % " ".join(br))
631
631
632 self.ui.debug(_("manifest: %d:%s\n") %
632 self.ui.debug(_("manifest: %d:%s\n") %
633 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
633 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
634 self.ui.status(_("user: %s\n") % changes[1])
634 self.ui.status(_("user: %s\n") % changes[1])
635 self.ui.status(_("date: %s\n") % date)
635 self.ui.status(_("date: %s\n") % date)
636
636
637 if self.ui.debugflag:
637 if self.ui.debugflag:
638 files = self.repo.changes(log.parents(changenode)[0], changenode)
638 files = self.repo.changes(log.parents(changenode)[0], changenode)
639 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
639 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
640 files):
640 files):
641 if value:
641 if value:
642 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
642 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
643 else:
643 else:
644 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
644 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
645
645
646 description = changes[4].strip()
646 description = changes[4].strip()
647 if description:
647 if description:
648 if self.ui.verbose:
648 if self.ui.verbose:
649 self.ui.status(_("description:\n"))
649 self.ui.status(_("description:\n"))
650 self.ui.status(description)
650 self.ui.status(description)
651 self.ui.status("\n\n")
651 self.ui.status("\n\n")
652 else:
652 else:
653 self.ui.status(_("summary: %s\n") %
653 self.ui.status(_("summary: %s\n") %
654 description.splitlines()[0])
654 description.splitlines()[0])
655 self.ui.status("\n")
655 self.ui.status("\n")
656
656
657 def show_changeset(ui, repo, opts):
657 def show_changeset(ui, repo, opts):
658 '''show one changeset. uses template or regular display. caller
658 '''show one changeset. uses template or regular display. caller
659 can pass in 'style' and 'template' options in opts.'''
659 can pass in 'style' and 'template' options in opts.'''
660
660
661 tmpl = opts.get('template')
661 tmpl = opts.get('template')
662 if tmpl:
662 if tmpl:
663 tmpl = templater.parsestring(tmpl, quoted=False)
663 tmpl = templater.parsestring(tmpl, quoted=False)
664 else:
664 else:
665 tmpl = ui.config('ui', 'logtemplate')
665 tmpl = ui.config('ui', 'logtemplate')
666 if tmpl: tmpl = templater.parsestring(tmpl)
666 if tmpl: tmpl = templater.parsestring(tmpl)
667 mapfile = opts.get('style') or ui.config('ui', 'style')
667 mapfile = opts.get('style') or ui.config('ui', 'style')
668 if tmpl or mapfile:
668 if tmpl or mapfile:
669 if mapfile:
669 if mapfile:
670 if not os.path.isfile(mapfile):
670 if not os.path.isfile(mapfile):
671 mapname = templater.templatepath('map-cmdline.' + mapfile)
671 mapname = templater.templatepath('map-cmdline.' + mapfile)
672 if not mapname: mapname = templater.templatepath(mapfile)
672 if not mapname: mapname = templater.templatepath(mapfile)
673 if mapname: mapfile = mapname
673 if mapname: mapfile = mapname
674 try:
674 try:
675 t = changeset_templater(ui, repo, mapfile)
675 t = changeset_templater(ui, repo, mapfile)
676 except SyntaxError, inst:
676 except SyntaxError, inst:
677 raise util.Abort(inst.args[0])
677 raise util.Abort(inst.args[0])
678 if tmpl: t.use_template(tmpl)
678 if tmpl: t.use_template(tmpl)
679 return t
679 return t
680 return changeset_printer(ui, repo)
680 return changeset_printer(ui, repo)
681
681
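A condensed sketch of the precedence applied by show_changeset() above: an explicit template option wins, then the ui.logtemplate setting, then a style map file, and otherwise the plain changeset_printer. The dict-based pick_display helper and its inputs are illustrative only.

    def pick_display(opts, config):
        tmpl = opts.get('template') or config.get('ui.logtemplate')
        mapfile = opts.get('style') or config.get('ui.style')
        if tmpl or mapfile:
            return ('templater', tmpl, mapfile)
        return ('printer', None, None)

    print(pick_display({'template': '{rev}:{node|short}\n'}, {}))
    print(pick_display({}, {'ui.style': 'compact'}))
    print(pick_display({}, {}))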
682 def show_version(ui):
682 def show_version(ui):
683 """output version and copyright information"""
683 """output version and copyright information"""
684 ui.write(_("Mercurial Distributed SCM (version %s)\n")
684 ui.write(_("Mercurial Distributed SCM (version %s)\n")
685 % version.get_version())
685 % version.get_version())
686 ui.status(_(
686 ui.status(_(
687 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
687 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
688 "This is free software; see the source for copying conditions. "
688 "This is free software; see the source for copying conditions. "
689 "There is NO\nwarranty; "
689 "There is NO\nwarranty; "
690 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
690 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
691 ))
691 ))
692
692
693 def help_(ui, cmd=None, with_version=False):
693 def help_(ui, cmd=None, with_version=False):
694 """show help for a given command or all commands"""
694 """show help for a given command or all commands"""
695 option_lists = []
695 option_lists = []
696 if cmd and cmd != 'shortlist':
696 if cmd and cmd != 'shortlist':
697 if with_version:
697 if with_version:
698 show_version(ui)
698 show_version(ui)
699 ui.write('\n')
699 ui.write('\n')
700 aliases, i = find(cmd)
700 aliases, i = find(cmd)
701 # synopsis
701 # synopsis
702 ui.write("%s\n\n" % i[2])
702 ui.write("%s\n\n" % i[2])
703
703
704 # description
704 # description
705 doc = i[0].__doc__
705 doc = i[0].__doc__
706 if not doc:
706 if not doc:
707 doc = _("(No help text available)")
707 doc = _("(No help text available)")
708 if ui.quiet:
708 if ui.quiet:
709 doc = doc.splitlines(0)[0]
709 doc = doc.splitlines(0)[0]
710 ui.write("%s\n" % doc.rstrip())
710 ui.write("%s\n" % doc.rstrip())
711
711
712 if not ui.quiet:
712 if not ui.quiet:
713 # aliases
713 # aliases
714 if len(aliases) > 1:
714 if len(aliases) > 1:
715 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
715 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
716
716
717 # options
717 # options
718 if i[1]:
718 if i[1]:
719 option_lists.append(("options", i[1]))
719 option_lists.append(("options", i[1]))
720
720
721 else:
721 else:
722 # program name
722 # program name
723 if ui.verbose or with_version:
723 if ui.verbose or with_version:
724 show_version(ui)
724 show_version(ui)
725 else:
725 else:
726 ui.status(_("Mercurial Distributed SCM\n"))
726 ui.status(_("Mercurial Distributed SCM\n"))
727 ui.status('\n')
727 ui.status('\n')
728
728
729 # list of commands
729 # list of commands
730 if cmd == "shortlist":
730 if cmd == "shortlist":
731 ui.status(_('basic commands (use "hg help" '
731 ui.status(_('basic commands (use "hg help" '
732 'for the full list or option "-v" for details):\n\n'))
732 'for the full list or option "-v" for details):\n\n'))
733 elif ui.verbose:
733 elif ui.verbose:
734 ui.status(_('list of commands:\n\n'))
734 ui.status(_('list of commands:\n\n'))
735 else:
735 else:
736 ui.status(_('list of commands (use "hg help -v" '
736 ui.status(_('list of commands (use "hg help -v" '
737 'to show aliases and global options):\n\n'))
737 'to show aliases and global options):\n\n'))
738
738
739 h = {}
739 h = {}
740 cmds = {}
740 cmds = {}
741 for c, e in table.items():
741 for c, e in table.items():
742 f = c.split("|")[0]
742 f = c.split("|")[0]
743 if cmd == "shortlist" and not f.startswith("^"):
743 if cmd == "shortlist" and not f.startswith("^"):
744 continue
744 continue
745 f = f.lstrip("^")
745 f = f.lstrip("^")
746 if not ui.debugflag and f.startswith("debug"):
746 if not ui.debugflag and f.startswith("debug"):
747 continue
747 continue
748 doc = e[0].__doc__
748 doc = e[0].__doc__
749 if not doc:
749 if not doc:
750 doc = _("(No help text available)")
750 doc = _("(No help text available)")
751 h[f] = doc.splitlines(0)[0].rstrip()
751 h[f] = doc.splitlines(0)[0].rstrip()
752 cmds[f] = c.lstrip("^")
752 cmds[f] = c.lstrip("^")
753
753
754 fns = h.keys()
754 fns = h.keys()
755 fns.sort()
755 fns.sort()
756 m = max(map(len, fns))
756 m = max(map(len, fns))
757 for f in fns:
757 for f in fns:
758 if ui.verbose:
758 if ui.verbose:
759 commands = cmds[f].replace("|",", ")
759 commands = cmds[f].replace("|",", ")
760 ui.write(" %s:\n %s\n"%(commands, h[f]))
760 ui.write(" %s:\n %s\n"%(commands, h[f]))
761 else:
761 else:
762 ui.write(' %-*s %s\n' % (m, f, h[f]))
762 ui.write(' %-*s %s\n' % (m, f, h[f]))
763
763
764 # global options
764 # global options
765 if ui.verbose:
765 if ui.verbose:
766 option_lists.append(("global options", globalopts))
766 option_lists.append(("global options", globalopts))
767
767
768 # list all option lists
768 # list all option lists
769 opt_output = []
769 opt_output = []
770 for title, options in option_lists:
770 for title, options in option_lists:
771 opt_output.append(("\n%s:\n" % title, None))
771 opt_output.append(("\n%s:\n" % title, None))
772 for shortopt, longopt, default, desc in options:
772 for shortopt, longopt, default, desc in options:
773 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
773 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
774 longopt and " --%s" % longopt),
774 longopt and " --%s" % longopt),
775 "%s%s" % (desc,
775 "%s%s" % (desc,
776 default
776 default
777 and _(" (default: %s)") % default
777 and _(" (default: %s)") % default
778 or "")))
778 or "")))
779
779
780 if opt_output:
780 if opt_output:
781 opts_len = max([len(line[0]) for line in opt_output if line[1]])
781 opts_len = max([len(line[0]) for line in opt_output if line[1]])
782 for first, second in opt_output:
782 for first, second in opt_output:
783 if second:
783 if second:
784 ui.write(" %-*s %s\n" % (opts_len, first, second))
784 ui.write(" %-*s %s\n" % (opts_len, first, second))
785 else:
785 else:
786 ui.write("%s\n" % first)
786 ui.write("%s\n" % first)
787
787
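A standalone sketch of how help_() above interprets the command-table keys: the text before '|' is the canonical name, the remainder are aliases, and a leading '^' marks a command for the short list. The three-entry table is sample data (its one-line summaries are taken from docstrings in this file).

    table = {
        "^add": "add the specified files on the next commit",
        "addremove": "add all new files, delete all missing files",
        "^annotate|blame": "show changeset information per file line",
    }

    def summarize(table, shortlist=False):
        for key, doc in sorted(table.items()):
            name = key.split("|")[0]
            if shortlist and not name.startswith("^"):
                continue                      # only '^' commands make the short list
            name = name.lstrip("^")
            aliases = key.lstrip("^").split("|")[1:]
            yield name, aliases, doc.splitlines()[0]

    for name, aliases, doc in summarize(table, shortlist=True):
        extra = aliases and " (aliases: %s)" % ", ".join(aliases) or ""
        print("%-10s %s%s" % (name, doc, extra))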
788 # Commands start here, listed alphabetically
788 # Commands start here, listed alphabetically
789
789
790 def add(ui, repo, *pats, **opts):
790 def add(ui, repo, *pats, **opts):
791 """add the specified files on the next commit
791 """add the specified files on the next commit
792
792
793 Schedule files to be version controlled and added to the repository.
793 Schedule files to be version controlled and added to the repository.
794
794
795 The files will be added to the repository at the next commit.
795 The files will be added to the repository at the next commit.
796
796
797 If no names are given, add all files in the repository.
797 If no names are given, add all files in the repository.
798 """
798 """
799
799
800 names = []
800 names = []
801 for src, abs, rel, exact in walk(repo, pats, opts):
801 for src, abs, rel, exact in walk(repo, pats, opts):
802 if exact:
802 if exact:
803 if ui.verbose:
803 if ui.verbose:
804 ui.status(_('adding %s\n') % rel)
804 ui.status(_('adding %s\n') % rel)
805 names.append(abs)
805 names.append(abs)
806 elif repo.dirstate.state(abs) == '?':
806 elif repo.dirstate.state(abs) == '?':
807 ui.status(_('adding %s\n') % rel)
807 ui.status(_('adding %s\n') % rel)
808 names.append(abs)
808 names.append(abs)
809 repo.add(names)
809 repo.add(names)
810
810
811 def addremove(ui, repo, *pats, **opts):
811 def addremove(ui, repo, *pats, **opts):
812 """add all new files, delete all missing files
812 """add all new files, delete all missing files
813
813
814 Add all new files and remove all missing files from the repository.
814 Add all new files and remove all missing files from the repository.
815
815
816 New files are ignored if they match any of the patterns in .hgignore. As
816 New files are ignored if they match any of the patterns in .hgignore. As
817 with add, these changes take effect at the next commit.
817 with add, these changes take effect at the next commit.
818 """
818 """
819 return addremove_lock(ui, repo, pats, opts)
819 return addremove_lock(ui, repo, pats, opts)
820
820
821 def addremove_lock(ui, repo, pats, opts, wlock=None):
821 def addremove_lock(ui, repo, pats, opts, wlock=None):
822 add, remove = [], []
822 add, remove = [], []
823 for src, abs, rel, exact in walk(repo, pats, opts):
823 for src, abs, rel, exact in walk(repo, pats, opts):
824 if src == 'f' and repo.dirstate.state(abs) == '?':
824 if src == 'f' and repo.dirstate.state(abs) == '?':
825 add.append(abs)
825 add.append(abs)
826 if ui.verbose or not exact:
826 if ui.verbose or not exact:
827 ui.status(_('adding %s\n') % ((pats and rel) or abs))
827 ui.status(_('adding %s\n') % ((pats and rel) or abs))
828 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
828 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
829 remove.append(abs)
829 remove.append(abs)
830 if ui.verbose or not exact:
830 if ui.verbose or not exact:
831 ui.status(_('removing %s\n') % ((pats and rel) or abs))
831 ui.status(_('removing %s\n') % ((pats and rel) or abs))
832 repo.add(add, wlock=wlock)
832 repo.add(add, wlock=wlock)
833 repo.remove(remove, wlock=wlock)
833 repo.remove(remove, wlock=wlock)
834
834
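A standalone sketch of the per-file decision in addremove_lock() above: unknown files ('?') that exist on disk are added, and tracked files that have disappeared are removed. The dirstate and filesystem are simulated with plain Python containers.

    dirstate = {'old.txt': 'n', 'gone.txt': 'n', 'new.txt': '?'}   # 'n' tracked, '?' unknown
    on_disk = {'old.txt', 'new.txt'}

    add = [f for f, st in sorted(dirstate.items())
           if st == '?' and f in on_disk]
    remove = [f for f, st in sorted(dirstate.items())
              if st != 'r' and f not in on_disk]
    print("adding", add)       # ['new.txt']
    print("removing", remove)  # ['gone.txt']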
835 def annotate(ui, repo, *pats, **opts):
835 def annotate(ui, repo, *pats, **opts):
836 """show changeset information per file line
836 """show changeset information per file line
837
837
838 List changes in files, showing the revision id responsible for each line.
838 List changes in files, showing the revision id responsible for each line.
839
839
840 This command is useful to discover who did a change or when a change took
840 This command is useful to discover who did a change or when a change took
841 place.
841 place.
842
842
843 Without the -a option, annotate will avoid processing files it
843 Without the -a option, annotate will avoid processing files it
844 detects as binary. With -a, annotate will generate an annotation
844 detects as binary. With -a, annotate will generate an annotation
845 anyway, probably with undesirable results.
845 anyway, probably with undesirable results.
846 """
846 """
847 def getnode(rev):
847 def getnode(rev):
848 return short(repo.changelog.node(rev))
848 return short(repo.changelog.node(rev))
849
849
850 ucache = {}
850 ucache = {}
851 def getname(rev):
851 def getname(rev):
852 cl = repo.changelog.read(repo.changelog.node(rev))
852 cl = repo.changelog.read(repo.changelog.node(rev))
853 return trimuser(ui, cl[1], rev, ucache)
853 return trimuser(ui, cl[1], rev, ucache)
854
854
855 dcache = {}
855 dcache = {}
856 def getdate(rev):
856 def getdate(rev):
857 datestr = dcache.get(rev)
857 datestr = dcache.get(rev)
858 if datestr is None:
858 if datestr is None:
859 cl = repo.changelog.read(repo.changelog.node(rev))
859 cl = repo.changelog.read(repo.changelog.node(rev))
860 datestr = dcache[rev] = util.datestr(cl[2])
860 datestr = dcache[rev] = util.datestr(cl[2])
861 return datestr
861 return datestr
862
862
863 if not pats:
863 if not pats:
864 raise util.Abort(_('at least one file name or pattern required'))
864 raise util.Abort(_('at least one file name or pattern required'))
865
865
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 ['date', getdate]]
867 ['date', getdate]]
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 opts['number'] = 1
869 opts['number'] = 1
870
870
871 if opts['rev']:
871 if opts['rev']:
872 node = repo.changelog.lookup(opts['rev'])
872 node = repo.changelog.lookup(opts['rev'])
873 else:
873 else:
874 node = repo.dirstate.parents()[0]
874 node = repo.dirstate.parents()[0]
875 change = repo.changelog.read(node)
875 change = repo.changelog.read(node)
876 mmap = repo.manifest.read(change[0])
876 mmap = repo.manifest.read(change[0])
877
877
878 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
878 for src, abs, rel, exact in walk(repo, pats, opts, node=node):
879 f = repo.file(abs)
879 f = repo.file(abs)
880 if not opts['text'] and util.binary(f.read(mmap[abs])):
880 if not opts['text'] and util.binary(f.read(mmap[abs])):
881 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
881 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
882 continue
882 continue
883
883
884 lines = f.annotate(mmap[abs])
884 lines = f.annotate(mmap[abs])
885 pieces = []
885 pieces = []
886
886
887 for o, f in opmap:
887 for o, f in opmap:
888 if opts[o]:
888 if opts[o]:
889 l = [f(n) for n, dummy in lines]
889 l = [f(n) for n, dummy in lines]
890 if l:
890 if l:
891 m = max(map(len, l))
891 m = max(map(len, l))
892 pieces.append(["%*s" % (m, x) for x in l])
892 pieces.append(["%*s" % (m, x) for x in l])
893
893
894 if pieces:
894 if pieces:
895 for p, l in zip(zip(*pieces), lines):
895 for p, l in zip(zip(*pieces), lines):
896 ui.write("%s: %s" % (" ".join(p), l[1]))
896 ui.write("%s: %s" % (" ".join(p), l[1]))
897
897
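A standalone sketch of the column building in annotate() above: each enabled column is right-padded to its widest entry with "%*s", then the columns are zipped back together line by line. The annotation tuples here are made up.

    import sys

    lines = [((3, 'alice'), 'def main():\n'),
             ((12, 'bob'),  '    return 0\n')]

    columns = []
    for pick in (lambda meta: str(meta[0]),      # revision number column
                 lambda meta: meta[1]):          # user name column
        col = [pick(meta) for meta, _ in lines]
        width = max(len(x) for x in col)
        columns.append(["%*s" % (width, x) for x in col])

    for padded, (_, text) in zip(zip(*columns), lines):
        sys.stdout.write("%s: %s" % (" ".join(padded), text))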
898 def archive(ui, repo, dest, **opts):
898 def archive(ui, repo, dest, **opts):
899 '''create unversioned archive of a repository revision
899 '''create unversioned archive of a repository revision
900
900
901 By default, the revision used is the parent of the working
901 By default, the revision used is the parent of the working
902 directory; use "-r" to specify a different revision.
902 directory; use "-r" to specify a different revision.
903
903
904 To specify the type of archive to create, use "-t". Valid
904 To specify the type of archive to create, use "-t". Valid
905 types are:
905 types are:
906
906
907 "files" (default): a directory full of files
907 "files" (default): a directory full of files
908 "tar": tar archive, uncompressed
908 "tar": tar archive, uncompressed
909 "tbz2": tar archive, compressed using bzip2
909 "tbz2": tar archive, compressed using bzip2
910 "tgz": tar archive, compressed using gzip
910 "tgz": tar archive, compressed using gzip
911 "uzip": zip archive, uncompressed
911 "uzip": zip archive, uncompressed
912 "zip": zip archive, compressed using deflate
912 "zip": zip archive, compressed using deflate
913
913
914 The exact name of the destination archive or directory is given
914 The exact name of the destination archive or directory is given
915 using a format string; see "hg help export" for details.
915 using a format string; see "hg help export" for details.
916
916
917 Each member added to an archive file has a directory prefix
917 Each member added to an archive file has a directory prefix
918 prepended. Use "-p" to specify a format string for the prefix.
918 prepended. Use "-p" to specify a format string for the prefix.
919 The default is the basename of the archive, with suffixes removed.
919 The default is the basename of the archive, with suffixes removed.
920 '''
920 '''
921
921
922 if opts['rev']:
922 if opts['rev']:
923 node = repo.lookup(opts['rev'])
923 node = repo.lookup(opts['rev'])
924 else:
924 else:
925 node, p2 = repo.dirstate.parents()
925 node, p2 = repo.dirstate.parents()
926 if p2 != nullid:
926 if p2 != nullid:
927 raise util.Abort(_('uncommitted merge - please provide a '
927 raise util.Abort(_('uncommitted merge - please provide a '
928 'specific revision'))
928 'specific revision'))
929
929
930 dest = make_filename(repo, repo.changelog, dest, node)
930 dest = make_filename(repo, repo.changelog, dest, node)
931 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
931 prefix = make_filename(repo, repo.changelog, opts['prefix'], node)
932 if os.path.realpath(dest) == repo.root:
932 if os.path.realpath(dest) == repo.root:
933 raise util.Abort(_('repository root cannot be destination'))
933 raise util.Abort(_('repository root cannot be destination'))
934 dummy, matchfn, dummy = matchpats(repo, [], opts)
934 dummy, matchfn, dummy = matchpats(repo, [], opts)
935 archival.archive(repo, dest, node, opts.get('type') or 'files',
935 archival.archive(repo, dest, node, opts.get('type') or 'files',
936 not opts['no_decode'], matchfn, prefix)
936 not opts['no_decode'], matchfn, prefix)
937
937
938 def backout(ui, repo, rev, **opts):
938 def backout(ui, repo, rev, **opts):
939 '''reverse effect of earlier changeset
939 '''reverse effect of earlier changeset
940
940
941 Commit the backed out changes as a new changeset.
941 Commit the backed out changes as a new changeset.
942
942
943 If you back out a changeset other than the tip, a new head is
943 If you back out a changeset other than the tip, a new head is
944 created. The --merge option remembers the parent of the working
944 created. The --merge option remembers the parent of the working
945 directory before starting the backout, then merges the new head
945 directory before starting the backout, then merges the new head
946 with it afterwards, to save you from doing this by hand. The
946 with it afterwards, to save you from doing this by hand. The
947 result of this merge is not committed, as for a normal merge.'''
947 result of this merge is not committed, as for a normal merge.'''
948
948
949 bail_if_changed(repo)
949 bail_if_changed(repo)
950 op1, op2 = repo.dirstate.parents()
950 op1, op2 = repo.dirstate.parents()
951 if op2 != nullid:
951 if op2 != nullid:
952 raise util.Abort(_('outstanding uncommitted merge'))
952 raise util.Abort(_('outstanding uncommitted merge'))
953 node = repo.lookup(rev)
953 node = repo.lookup(rev)
954 parent, p2 = repo.changelog.parents(node)
954 parent, p2 = repo.changelog.parents(node)
955 if parent == nullid:
955 if parent == nullid:
956 raise util.Abort(_('cannot back out a change with no parents'))
956 raise util.Abort(_('cannot back out a change with no parents'))
957 if p2 != nullid:
957 if p2 != nullid:
958 raise util.Abort(_('cannot back out a merge'))
958 raise util.Abort(_('cannot back out a merge'))
959 repo.update(node, force=True)
959 repo.update(node, force=True)
960 revert_opts = opts.copy()
960 revert_opts = opts.copy()
961 revert_opts['rev'] = hex(parent)
961 revert_opts['rev'] = hex(parent)
962 revert(ui, repo, **revert_opts)
962 revert(ui, repo, **revert_opts)
963 commit_opts = opts.copy()
963 commit_opts = opts.copy()
964 commit_opts['addremove'] = False
964 commit_opts['addremove'] = False
965 if not commit_opts['message'] and not commit_opts['logfile']:
965 if not commit_opts['message'] and not commit_opts['logfile']:
966 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
966 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
967 commit(ui, repo, **commit_opts)
967 commit(ui, repo, **commit_opts)
968 def nice(node):
968 def nice(node):
969 return '%d:%s' % (repo.changelog.rev(node), short(node))
969 return '%d:%s' % (repo.changelog.rev(node), short(node))
970 ui.status(_('changeset %s backs out changeset %s\n') %
970 ui.status(_('changeset %s backs out changeset %s\n') %
971 (nice(repo.changelog.tip()), nice(node)))
971 (nice(repo.changelog.tip()), nice(node)))
972 if opts['merge'] and op1 != node:
972 if opts['merge'] and op1 != node:
973 ui.status(_('merging with changeset %s\n') % nice(op1))
973 ui.status(_('merging with changeset %s\n') % nice(op1))
974 update(ui, repo, hex(op1), **opts)
974 update(ui, repo, hex(op1), **opts)
975
975
976 def bundle(ui, repo, fname, dest="default-push", **opts):
976 def bundle(ui, repo, fname, dest="default-push", **opts):
977 """create a changegroup file
977 """create a changegroup file
978
978
979 Generate a compressed changegroup file collecting all changesets
979 Generate a compressed changegroup file collecting all changesets
980 not found in the other repository.
980 not found in the other repository.
981
981
982 This file can then be transferred using conventional means and
982 This file can then be transferred using conventional means and
983 applied to another repository with the unbundle command. This is
983 applied to another repository with the unbundle command. This is
984 useful when native push and pull are not available or when
984 useful when native push and pull are not available or when
985 exporting an entire repository is undesirable. The standard file
985 exporting an entire repository is undesirable. The standard file
986 extension is ".hg".
986 extension is ".hg".
987
987
988 Unlike import/export, this exactly preserves all changeset
988 Unlike import/export, this exactly preserves all changeset
989 contents including permissions, rename data, and revision history.
989 contents including permissions, rename data, and revision history.
990 """
990 """
991 dest = ui.expandpath(dest)
991 dest = ui.expandpath(dest)
992 other = hg.repository(ui, dest)
992 other = hg.repository(ui, dest)
993 o = repo.findoutgoing(other, force=opts['force'])
993 o = repo.findoutgoing(other, force=opts['force'])
994 cg = repo.changegroup(o, 'bundle')
994 cg = repo.changegroup(o, 'bundle')
995 write_bundle(cg, fname)
995 write_bundle(cg, fname)
996
996
997 def cat(ui, repo, file1, *pats, **opts):
997 def cat(ui, repo, file1, *pats, **opts):
998 """output the latest or given revisions of files
998 """output the latest or given revisions of files
999
999
1000 Print the specified files as they were at the given revision.
1000 Print the specified files as they were at the given revision.
1001 If no revision is given then the tip is used.
1001 If no revision is given then the tip is used.
1002
1002
1003 Output may be to a file, in which case the name of the file is
1003 Output may be to a file, in which case the name of the file is
1004 given using a format string. The formatting rules are the same as
1004 given using a format string. The formatting rules are the same as
1005 for the export command, with the following additions:
1005 for the export command, with the following additions:
1006
1006
1007 %s basename of file being printed
1007 %s basename of file being printed
1008 %d dirname of file being printed, or '.' if in repo root
1008 %d dirname of file being printed, or '.' if in repo root
1009 %p root-relative path name of file being printed
1009 %p root-relative path name of file being printed
1010 """
1010 """
1011 mf = {}
1011 mf = {}
1012 rev = opts['rev']
1012 rev = opts['rev']
1013 if rev:
1013 if rev:
1014 node = repo.lookup(rev)
1014 node = repo.lookup(rev)
1015 else:
1015 else:
1016 node = repo.changelog.tip()
1016 node = repo.changelog.tip()
1017 change = repo.changelog.read(node)
1017 change = repo.changelog.read(node)
1018 mf = repo.manifest.read(change[0])
1018 mf = repo.manifest.read(change[0])
1019 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
1019 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, node):
1020 r = repo.file(abs)
1020 r = repo.file(abs)
1021 n = mf[abs]
1021 n = mf[abs]
1022 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
1022 fp = make_file(repo, r, opts['output'], node=n, pathname=abs)
1023 fp.write(r.read(n))
1023 fp.write(r.read(n))
1024
1024
1025 def clone(ui, source, dest=None, **opts):
1025 def clone(ui, source, dest=None, **opts):
1026 """make a copy of an existing repository
1026 """make a copy of an existing repository
1027
1027
1028 Create a copy of an existing repository in a new directory.
1028 Create a copy of an existing repository in a new directory.
1029
1029
1030 If no destination directory name is specified, it defaults to the
1030 If no destination directory name is specified, it defaults to the
1031 basename of the source.
1031 basename of the source.
1032
1032
1033 The location of the source is added to the new repository's
1033 The location of the source is added to the new repository's
1034 .hg/hgrc file, as the default to be used for future pulls.
1034 .hg/hgrc file, as the default to be used for future pulls.
1035
1035
1036 For efficiency, hardlinks are used for cloning whenever the source
1036 For efficiency, hardlinks are used for cloning whenever the source
1037 and destination are on the same filesystem. Some filesystems,
1037 and destination are on the same filesystem. Some filesystems,
1038 such as AFS, implement hardlinking incorrectly, but do not report
1038 such as AFS, implement hardlinking incorrectly, but do not report
1039 errors. In these cases, use the --pull option to avoid
1039 errors. In these cases, use the --pull option to avoid
1040 hardlinking.
1040 hardlinking.
1041
1041
1042 See pull for valid source format details.
1042 See pull for valid source format details.
1043 """
1043 """
1044 if dest is None:
1044 if dest is None:
1045 dest = os.path.basename(os.path.normpath(source))
1045 dest = os.path.basename(os.path.normpath(source))
1046
1046
1047 if os.path.exists(dest):
1047 if os.path.exists(dest):
1048 raise util.Abort(_("destination '%s' already exists"), dest)
1048 raise util.Abort(_("destination '%s' already exists"), dest)
1049
1049
1050 dest = os.path.realpath(dest)
1050 dest = os.path.realpath(dest)
1051
1051
1052 class Dircleanup(object):
1052 class Dircleanup(object):
1053 def __init__(self, dir_):
1053 def __init__(self, dir_):
1054 self.rmtree = shutil.rmtree
1054 self.rmtree = shutil.rmtree
1055 self.dir_ = dir_
1055 self.dir_ = dir_
1056 os.mkdir(dir_)
1056 os.mkdir(dir_)
1057 def close(self):
1057 def close(self):
1058 self.dir_ = None
1058 self.dir_ = None
1059 def __del__(self):
1059 def __del__(self):
1060 if self.dir_:
1060 if self.dir_:
1061 self.rmtree(self.dir_, True)
1061 self.rmtree(self.dir_, True)
1062
1062
1063 if opts['ssh']:
1063 if opts['ssh']:
1064 ui.setconfig("ui", "ssh", opts['ssh'])
1064 ui.setconfig("ui", "ssh", opts['ssh'])
1065 if opts['remotecmd']:
1065 if opts['remotecmd']:
1066 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1066 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1067
1067
1068 source = ui.expandpath(source)
1068 source = ui.expandpath(source)
1069
1069
1070 d = Dircleanup(dest)
1070 d = Dircleanup(dest)
1071 abspath = source
1071 abspath = source
1072 other = hg.repository(ui, source)
1072 other = hg.repository(ui, source)
1073
1073
1074 copy = False
1074 copy = False
1075 if other.dev() != -1:
1075 if other.dev() != -1:
1076 abspath = os.path.abspath(source)
1076 abspath = os.path.abspath(source)
1077 if not opts['pull'] and not opts['rev']:
1077 if not opts['pull'] and not opts['rev']:
1078 copy = True
1078 copy = True
1079
1079
1080 if copy:
1080 if copy:
1081 try:
1081 try:
1082 # we use a lock here because if we race with commit, we
1082 # we use a lock here because if we race with commit, we
1083 # can end up with extra data in the cloned revlogs that's
1083 # can end up with extra data in the cloned revlogs that's
1084 # not pointed to by changesets, thus causing verify to
1084 # not pointed to by changesets, thus causing verify to
1085 # fail
1085 # fail
1086 l1 = other.lock()
1086 l1 = other.lock()
1087 except lock.LockException:
1087 except lock.LockException:
1088 copy = False
1088 copy = False
1089
1089
1090 if copy:
1090 if copy:
1091 # we lock here to avoid premature writing to the target
1091 # we lock here to avoid premature writing to the target
1092 os.mkdir(os.path.join(dest, ".hg"))
1092 os.mkdir(os.path.join(dest, ".hg"))
1093 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
1093 l2 = lock.lock(os.path.join(dest, ".hg", "lock"))
1094
1094
1095 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
1095 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
1096 for f in files.split():
1096 for f in files.split():
1097 src = os.path.join(source, ".hg", f)
1097 src = os.path.join(source, ".hg", f)
1098 dst = os.path.join(dest, ".hg", f)
1098 dst = os.path.join(dest, ".hg", f)
1099 try:
1099 try:
1100 util.copyfiles(src, dst)
1100 util.copyfiles(src, dst)
1101 except OSError, inst:
1101 except OSError, inst:
1102 if inst.errno != errno.ENOENT:
1102 if inst.errno != errno.ENOENT:
1103 raise
1103 raise
1104
1104
1105 repo = hg.repository(ui, dest)
1105 repo = hg.repository(ui, dest)
1106
1106
1107 else:
1107 else:
1108 revs = None
1108 revs = None
1109 if opts['rev']:
1109 if opts['rev']:
1110 if not other.local():
1110 if not other.local():
1111 error = _("clone -r not supported yet for remote repositories.")
1111 error = _("clone -r not supported yet for remote repositories.")
1112 raise util.Abort(error)
1112 raise util.Abort(error)
1113 else:
1113 else:
1114 revs = [other.lookup(rev) for rev in opts['rev']]
1114 revs = [other.lookup(rev) for rev in opts['rev']]
1115 repo = hg.repository(ui, dest, create=1)
1115 repo = hg.repository(ui, dest, create=1)
1116 repo.pull(other, heads = revs)
1116 repo.pull(other, heads = revs)
1117
1117
1118 f = repo.opener("hgrc", "w", text=True)
1118 f = repo.opener("hgrc", "w", text=True)
1119 f.write("[paths]\n")
1119 f.write("[paths]\n")
1120 f.write("default = %s\n" % abspath)
1120 f.write("default = %s\n" % abspath)
1121 f.close()
1121 f.close()
1122
1122
1123 if not opts['noupdate']:
1123 if not opts['noupdate']:
1124 update(repo.ui, repo)
1124 update(repo.ui, repo)
1125
1125
1126 d.close()
1126 d.close()
1127
1127
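# Editor's sketch (illustrative only, not part of the original changeset): the
# clone command above records the clone source in the new repository's
# .hg/hgrc, so later pulls default to it.  For a hypothetical source path the
# written file contains:
#
#   [paths]
#   default = /home/user/src/hg
#
# A standalone reproduction of that write, using plain file I/O instead of
# repo.opener (hypothetical helper name and paths):
def _write_default_path_sketch(hgrc_path, source):
    f = open(hgrc_path, "w")
    f.write("[paths]\n")
    f.write("default = %s\n" % source)
    f.close()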
1128 def commit(ui, repo, *pats, **opts):
1128 def commit(ui, repo, *pats, **opts):
1129 """commit the specified files or all outstanding changes
1129 """commit the specified files or all outstanding changes
1130
1130
1131 Commit changes to the given files into the repository.
1131 Commit changes to the given files into the repository.
1132
1132
1133 If a list of files is omitted, all changes reported by "hg status"
1133 If a list of files is omitted, all changes reported by "hg status"
1134 will be committed.
1134 will be committed.
1135
1135
1136 If no commit message is specified, the editor configured in your hgrc
1136 If no commit message is specified, the editor configured in your hgrc
1137 or in the EDITOR environment variable is started to enter a message.
1137 or in the EDITOR environment variable is started to enter a message.
1138 """
1138 """
1139 message = opts['message']
1139 message = opts['message']
1140 logfile = opts['logfile']
1140 logfile = opts['logfile']
1141
1141
1142 if message and logfile:
1142 if message and logfile:
1143 raise util.Abort(_('options --message and --logfile are mutually '
1143 raise util.Abort(_('options --message and --logfile are mutually '
1144 'exclusive'))
1144 'exclusive'))
1145 if not message and logfile:
1145 if not message and logfile:
1146 try:
1146 try:
1147 if logfile == '-':
1147 if logfile == '-':
1148 message = sys.stdin.read()
1148 message = sys.stdin.read()
1149 else:
1149 else:
1150 message = open(logfile).read()
1150 message = open(logfile).read()
1151 except IOError, inst:
1151 except IOError, inst:
1152 raise util.Abort(_("can't read commit message '%s': %s") %
1152 raise util.Abort(_("can't read commit message '%s': %s") %
1153 (logfile, inst.strerror))
1153 (logfile, inst.strerror))
1154
1154
1155 if opts['addremove']:
1155 if opts['addremove']:
1156 addremove(ui, repo, *pats, **opts)
1156 addremove(ui, repo, *pats, **opts)
1157 fns, match, anypats = matchpats(repo, pats, opts)
1157 fns, match, anypats = matchpats(repo, pats, opts)
1158 if pats:
1158 if pats:
1159 modified, added, removed, deleted, unknown = (
1159 modified, added, removed, deleted, unknown = (
1160 repo.changes(files=fns, match=match))
1160 repo.changes(files=fns, match=match))
1161 files = modified + added + removed
1161 files = modified + added + removed
1162 else:
1162 else:
1163 files = []
1163 files = []
1164 try:
1164 try:
1165 repo.commit(files, message, opts['user'], opts['date'], match)
1165 repo.commit(files, message, opts['user'], opts['date'], match)
1166 except ValueError, inst:
1166 except ValueError, inst:
1167 raise util.Abort(str(inst))
1167 raise util.Abort(str(inst))
1168
1168
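# Editor's sketch (illustrative only, not part of the original changeset):
# commit() above resolves the message from either -m/--message or
# -l/--logfile, never both, and treats a logfile of '-' as "read from stdin".
# The same decision rendered standalone (hypothetical helper name):
import sys

def _resolve_message_sketch(message, logfile):
    if message and logfile:
        raise ValueError("options --message and --logfile are mutually exclusive")
    if not message and logfile:
        if logfile == '-':
            message = sys.stdin.read()
        else:
            message = open(logfile).read()
    return message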
1169 def docopy(ui, repo, pats, opts, wlock):
1169 def docopy(ui, repo, pats, opts, wlock):
1170 # called with the repo lock held
1170 # called with the repo lock held
1171 cwd = repo.getcwd()
1171 cwd = repo.getcwd()
1172 errors = 0
1172 errors = 0
1173 copied = []
1173 copied = []
1174 targets = {}
1174 targets = {}
1175
1175
1176 def okaytocopy(abs, rel, exact):
1176 def okaytocopy(abs, rel, exact):
1177 reasons = {'?': _('is not managed'),
1177 reasons = {'?': _('is not managed'),
1178 'a': _('has been marked for add'),
1178 'a': _('has been marked for add'),
1179 'r': _('has been marked for remove')}
1179 'r': _('has been marked for remove')}
1180 state = repo.dirstate.state(abs)
1180 state = repo.dirstate.state(abs)
1181 reason = reasons.get(state)
1181 reason = reasons.get(state)
1182 if reason:
1182 if reason:
1183 if state == 'a':
1183 if state == 'a':
1184 origsrc = repo.dirstate.copied(abs)
1184 origsrc = repo.dirstate.copied(abs)
1185 if origsrc is not None:
1185 if origsrc is not None:
1186 return origsrc
1186 return origsrc
1187 if exact:
1187 if exact:
1188 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1188 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1189 else:
1189 else:
1190 return abs
1190 return abs
1191
1191
1192 def copy(origsrc, abssrc, relsrc, target, exact):
1192 def copy(origsrc, abssrc, relsrc, target, exact):
1193 abstarget = util.canonpath(repo.root, cwd, target)
1193 abstarget = util.canonpath(repo.root, cwd, target)
1194 reltarget = util.pathto(cwd, abstarget)
1194 reltarget = util.pathto(cwd, abstarget)
1195 prevsrc = targets.get(abstarget)
1195 prevsrc = targets.get(abstarget)
1196 if prevsrc is not None:
1196 if prevsrc is not None:
1197 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1197 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1198 (reltarget, abssrc, prevsrc))
1198 (reltarget, abssrc, prevsrc))
1199 return
1199 return
1200 if (not opts['after'] and os.path.exists(reltarget) or
1200 if (not opts['after'] and os.path.exists(reltarget) or
1201 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1201 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1202 if not opts['force']:
1202 if not opts['force']:
1203 ui.warn(_('%s: not overwriting - file exists\n') %
1203 ui.warn(_('%s: not overwriting - file exists\n') %
1204 reltarget)
1204 reltarget)
1205 return
1205 return
1206 if not opts['after']:
1206 if not opts['after']:
1207 os.unlink(reltarget)
1207 os.unlink(reltarget)
1208 if opts['after']:
1208 if opts['after']:
1209 if not os.path.exists(reltarget):
1209 if not os.path.exists(reltarget):
1210 return
1210 return
1211 else:
1211 else:
1212 targetdir = os.path.dirname(reltarget) or '.'
1212 targetdir = os.path.dirname(reltarget) or '.'
1213 if not os.path.isdir(targetdir):
1213 if not os.path.isdir(targetdir):
1214 os.makedirs(targetdir)
1214 os.makedirs(targetdir)
1215 try:
1215 try:
1216 restore = repo.dirstate.state(abstarget) == 'r'
1216 restore = repo.dirstate.state(abstarget) == 'r'
1217 if restore:
1217 if restore:
1218 repo.undelete([abstarget], wlock)
1218 repo.undelete([abstarget], wlock)
1219 try:
1219 try:
1220 shutil.copyfile(relsrc, reltarget)
1220 shutil.copyfile(relsrc, reltarget)
1221 shutil.copymode(relsrc, reltarget)
1221 shutil.copymode(relsrc, reltarget)
1222 restore = False
1222 restore = False
1223 finally:
1223 finally:
1224 if restore:
1224 if restore:
1225 repo.remove([abstarget], wlock)
1225 repo.remove([abstarget], wlock)
1226 except shutil.Error, inst:
1226 except shutil.Error, inst:
1227 raise util.Abort(str(inst))
1227 raise util.Abort(str(inst))
1228 except IOError, inst:
1228 except IOError, inst:
1229 if inst.errno == errno.ENOENT:
1229 if inst.errno == errno.ENOENT:
1230 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1230 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1231 else:
1231 else:
1232 ui.warn(_('%s: cannot copy - %s\n') %
1232 ui.warn(_('%s: cannot copy - %s\n') %
1233 (relsrc, inst.strerror))
1233 (relsrc, inst.strerror))
1234 errors += 1
1234 errors += 1
1235 return
1235 return
1236 if ui.verbose or not exact:
1236 if ui.verbose or not exact:
1237 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1237 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1238 targets[abstarget] = abssrc
1238 targets[abstarget] = abssrc
1239 if abstarget != origsrc:
1239 if abstarget != origsrc:
1240 repo.copy(origsrc, abstarget, wlock)
1240 repo.copy(origsrc, abstarget, wlock)
1241 copied.append((abssrc, relsrc, exact))
1241 copied.append((abssrc, relsrc, exact))
1242
1242
1243 def targetpathfn(pat, dest, srcs):
1243 def targetpathfn(pat, dest, srcs):
1244 if os.path.isdir(pat):
1244 if os.path.isdir(pat):
1245 abspfx = util.canonpath(repo.root, cwd, pat)
1245 abspfx = util.canonpath(repo.root, cwd, pat)
1246 if destdirexists:
1246 if destdirexists:
1247 striplen = len(os.path.split(abspfx)[0])
1247 striplen = len(os.path.split(abspfx)[0])
1248 else:
1248 else:
1249 striplen = len(abspfx)
1249 striplen = len(abspfx)
1250 if striplen:
1250 if striplen:
1251 striplen += len(os.sep)
1251 striplen += len(os.sep)
1252 res = lambda p: os.path.join(dest, p[striplen:])
1252 res = lambda p: os.path.join(dest, p[striplen:])
1253 elif destdirexists:
1253 elif destdirexists:
1254 res = lambda p: os.path.join(dest, os.path.basename(p))
1254 res = lambda p: os.path.join(dest, os.path.basename(p))
1255 else:
1255 else:
1256 res = lambda p: dest
1256 res = lambda p: dest
1257 return res
1257 return res
1258
1258
1259 def targetpathafterfn(pat, dest, srcs):
1259 def targetpathafterfn(pat, dest, srcs):
1260 if util.patkind(pat, None)[0]:
1260 if util.patkind(pat, None)[0]:
1261 # a mercurial pattern
1261 # a mercurial pattern
1262 res = lambda p: os.path.join(dest, os.path.basename(p))
1262 res = lambda p: os.path.join(dest, os.path.basename(p))
1263 else:
1263 else:
1264 abspfx = util.canonpath(repo.root, cwd, pat)
1264 abspfx = util.canonpath(repo.root, cwd, pat)
1265 if len(abspfx) < len(srcs[0][0]):
1265 if len(abspfx) < len(srcs[0][0]):
1266 # A directory. Either the target path contains the last
1266 # A directory. Either the target path contains the last
1267 # component of the source path or it does not.
1267 # component of the source path or it does not.
1268 def evalpath(striplen):
1268 def evalpath(striplen):
1269 score = 0
1269 score = 0
1270 for s in srcs:
1270 for s in srcs:
1271 t = os.path.join(dest, s[0][striplen:])
1271 t = os.path.join(dest, s[0][striplen:])
1272 if os.path.exists(t):
1272 if os.path.exists(t):
1273 score += 1
1273 score += 1
1274 return score
1274 return score
1275
1275
1276 striplen = len(abspfx)
1276 striplen = len(abspfx)
1277 if striplen:
1277 if striplen:
1278 striplen += len(os.sep)
1278 striplen += len(os.sep)
1279 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1279 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1280 score = evalpath(striplen)
1280 score = evalpath(striplen)
1281 striplen1 = len(os.path.split(abspfx)[0])
1281 striplen1 = len(os.path.split(abspfx)[0])
1282 if striplen1:
1282 if striplen1:
1283 striplen1 += len(os.sep)
1283 striplen1 += len(os.sep)
1284 if evalpath(striplen1) > score:
1284 if evalpath(striplen1) > score:
1285 striplen = striplen1
1285 striplen = striplen1
1286 res = lambda p: os.path.join(dest, p[striplen:])
1286 res = lambda p: os.path.join(dest, p[striplen:])
1287 else:
1287 else:
1288 # a file
1288 # a file
1289 if destdirexists:
1289 if destdirexists:
1290 res = lambda p: os.path.join(dest, os.path.basename(p))
1290 res = lambda p: os.path.join(dest, os.path.basename(p))
1291 else:
1291 else:
1292 res = lambda p: dest
1292 res = lambda p: dest
1293 return res
1293 return res
1294
1294
1295
1295
1296 pats = list(pats)
1296 pats = list(pats)
1297 if not pats:
1297 if not pats:
1298 raise util.Abort(_('no source or destination specified'))
1298 raise util.Abort(_('no source or destination specified'))
1299 if len(pats) == 1:
1299 if len(pats) == 1:
1300 raise util.Abort(_('no destination specified'))
1300 raise util.Abort(_('no destination specified'))
1301 dest = pats.pop()
1301 dest = pats.pop()
1302 destdirexists = os.path.isdir(dest)
1302 destdirexists = os.path.isdir(dest)
1303 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1303 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1304 raise util.Abort(_('with multiple sources, destination must be an '
1304 raise util.Abort(_('with multiple sources, destination must be an '
1305 'existing directory'))
1305 'existing directory'))
1306 if opts['after']:
1306 if opts['after']:
1307 tfn = targetpathafterfn
1307 tfn = targetpathafterfn
1308 else:
1308 else:
1309 tfn = targetpathfn
1309 tfn = targetpathfn
1310 copylist = []
1310 copylist = []
1311 for pat in pats:
1311 for pat in pats:
1312 srcs = []
1312 srcs = []
1313 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1313 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1314 origsrc = okaytocopy(abssrc, relsrc, exact)
1314 origsrc = okaytocopy(abssrc, relsrc, exact)
1315 if origsrc:
1315 if origsrc:
1316 srcs.append((origsrc, abssrc, relsrc, exact))
1316 srcs.append((origsrc, abssrc, relsrc, exact))
1317 if not srcs:
1317 if not srcs:
1318 continue
1318 continue
1319 copylist.append((tfn(pat, dest, srcs), srcs))
1319 copylist.append((tfn(pat, dest, srcs), srcs))
1320 if not copylist:
1320 if not copylist:
1321 raise util.Abort(_('no files to copy'))
1321 raise util.Abort(_('no files to copy'))
1322
1322
1323 for targetpath, srcs in copylist:
1323 for targetpath, srcs in copylist:
1324 for origsrc, abssrc, relsrc, exact in srcs:
1324 for origsrc, abssrc, relsrc, exact in srcs:
1325 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1325 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1326
1326
1327 if errors:
1327 if errors:
1328 ui.warn(_('(consider using --after)\n'))
1328 ui.warn(_('(consider using --after)\n'))
1329 return errors, copied
1329 return errors, copied
1330
1330
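# Editor's sketch (illustrative only, not part of the original changeset):
# targetpathfn() above decides how each source file maps onto the copy
# destination: copying into an existing directory keeps only the basename,
# otherwise the destination name is used as given (directory sources
# additionally strip their common prefix).  A simplified, repository-free
# rendering of the two common cases (hypothetical helper name):
import os

def _copy_target_sketch(src, dest, dest_is_dir):
    # hg cp a/b.txt existingdir/   ->  existingdir/b.txt
    # hg cp a/b.txt newname.txt    ->  newname.txt
    if dest_is_dir:
        return os.path.join(dest, os.path.basename(src))
    return dest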
1331 def copy(ui, repo, *pats, **opts):
1331 def copy(ui, repo, *pats, **opts):
1332 """mark files as copied for the next commit
1332 """mark files as copied for the next commit
1333
1333
1334 Mark dest as having copies of source files. If dest is a
1334 Mark dest as having copies of source files. If dest is a
1335 directory, copies are put in that directory. If dest is a file,
1335 directory, copies are put in that directory. If dest is a file,
1336 there can only be one source.
1336 there can only be one source.
1337
1337
1338 By default, this command copies the contents of files as they
1338 By default, this command copies the contents of files as they
1339 stand in the working directory. If invoked with --after, the
1339 stand in the working directory. If invoked with --after, the
1340 operation is recorded, but no copying is performed.
1340 operation is recorded, but no copying is performed.
1341
1341
1342 This command takes effect in the next commit.
1342 This command takes effect in the next commit.
1343
1343
1344 NOTE: This command should be treated as experimental. While it
1344 NOTE: This command should be treated as experimental. While it
1345 should properly record copied files, this information is not yet
1345 should properly record copied files, this information is not yet
1346 fully used by merge, nor fully reported by log.
1346 fully used by merge, nor fully reported by log.
1347 """
1347 """
1348 wlock = repo.wlock(0)
1348 wlock = repo.wlock(0)
1349 errs, copied = docopy(ui, repo, pats, opts, wlock)
1349 errs, copied = docopy(ui, repo, pats, opts, wlock)
1350 return errs
1350 return errs
1351
1351
1352 def debugancestor(ui, index, rev1, rev2):
1352 def debugancestor(ui, index, rev1, rev2):
1353 """find the ancestor revision of two revisions in a given index"""
1353 """find the ancestor revision of two revisions in a given index"""
1354 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1354 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1355 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1355 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1356 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1356 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1357
1357
1358 def debugcomplete(ui, cmd='', **opts):
1358 def debugcomplete(ui, cmd='', **opts):
1359 """returns the completion list associated with the given command"""
1359 """returns the completion list associated with the given command"""
1360
1360
1361 if opts['options']:
1361 if opts['options']:
1362 options = []
1362 options = []
1363 otables = [globalopts]
1363 otables = [globalopts]
1364 if cmd:
1364 if cmd:
1365 aliases, entry = find(cmd)
1365 aliases, entry = find(cmd)
1366 otables.append(entry[1])
1366 otables.append(entry[1])
1367 for t in otables:
1367 for t in otables:
1368 for o in t:
1368 for o in t:
1369 if o[0]:
1369 if o[0]:
1370 options.append('-%s' % o[0])
1370 options.append('-%s' % o[0])
1371 options.append('--%s' % o[1])
1371 options.append('--%s' % o[1])
1372 ui.write("%s\n" % "\n".join(options))
1372 ui.write("%s\n" % "\n".join(options))
1373 return
1373 return
1374
1374
1375 clist = findpossible(cmd).keys()
1375 clist = findpossible(cmd).keys()
1376 clist.sort()
1376 clist.sort()
1377 ui.write("%s\n" % "\n".join(clist))
1377 ui.write("%s\n" % "\n".join(clist))
1378
1378
1379 def debugrebuildstate(ui, repo, rev=None):
1379 def debugrebuildstate(ui, repo, rev=None):
1380 """rebuild the dirstate as it would look like for the given revision"""
1380 """rebuild the dirstate as it would look like for the given revision"""
1381 if not rev:
1381 if not rev:
1382 rev = repo.changelog.tip()
1382 rev = repo.changelog.tip()
1383 else:
1383 else:
1384 rev = repo.lookup(rev)
1384 rev = repo.lookup(rev)
1385 change = repo.changelog.read(rev)
1385 change = repo.changelog.read(rev)
1386 n = change[0]
1386 n = change[0]
1387 files = repo.manifest.readflags(n)
1387 files = repo.manifest.readflags(n)
1388 wlock = repo.wlock()
1388 wlock = repo.wlock()
1389 repo.dirstate.rebuild(rev, files.iteritems())
1389 repo.dirstate.rebuild(rev, files.iteritems())
1390
1390
1391 def debugcheckstate(ui, repo):
1391 def debugcheckstate(ui, repo):
1392 """validate the correctness of the current dirstate"""
1392 """validate the correctness of the current dirstate"""
1393 parent1, parent2 = repo.dirstate.parents()
1393 parent1, parent2 = repo.dirstate.parents()
1394 repo.dirstate.read()
1394 repo.dirstate.read()
1395 dc = repo.dirstate.map
1395 dc = repo.dirstate.map
1396 keys = dc.keys()
1396 keys = dc.keys()
1397 keys.sort()
1397 keys.sort()
1398 m1n = repo.changelog.read(parent1)[0]
1398 m1n = repo.changelog.read(parent1)[0]
1399 m2n = repo.changelog.read(parent2)[0]
1399 m2n = repo.changelog.read(parent2)[0]
1400 m1 = repo.manifest.read(m1n)
1400 m1 = repo.manifest.read(m1n)
1401 m2 = repo.manifest.read(m2n)
1401 m2 = repo.manifest.read(m2n)
1402 errors = 0
1402 errors = 0
1403 for f in dc:
1403 for f in dc:
1404 state = repo.dirstate.state(f)
1404 state = repo.dirstate.state(f)
1405 if state in "nr" and f not in m1:
1405 if state in "nr" and f not in m1:
1406 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1406 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1407 errors += 1
1407 errors += 1
1408 if state in "a" and f in m1:
1408 if state in "a" and f in m1:
1409 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1409 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1410 errors += 1
1410 errors += 1
1411 if state in "m" and f not in m1 and f not in m2:
1411 if state in "m" and f not in m1 and f not in m2:
1412 ui.warn(_("%s in state %s, but not in either manifest\n") %
1412 ui.warn(_("%s in state %s, but not in either manifest\n") %
1413 (f, state))
1413 (f, state))
1414 errors += 1
1414 errors += 1
1415 for f in m1:
1415 for f in m1:
1416 state = repo.dirstate.state(f)
1416 state = repo.dirstate.state(f)
1417 if state not in "nrm":
1417 if state not in "nrm":
1418 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1418 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1419 errors += 1
1419 errors += 1
1420 if errors:
1420 if errors:
1421 error = _(".hg/dirstate inconsistent with current parent's manifest")
1421 error = _(".hg/dirstate inconsistent with current parent's manifest")
1422 raise util.Abort(error)
1422 raise util.Abort(error)
1423
1423
1424 def debugconfig(ui, repo):
1424 def debugconfig(ui, repo):
1425 """show combined config settings from all hgrc files"""
1425 """show combined config settings from all hgrc files"""
1426 for section, name, value in ui.walkconfig():
1426 for section, name, value in ui.walkconfig():
1427 ui.write('%s.%s=%s\n' % (section, name, value))
1427 ui.write('%s.%s=%s\n' % (section, name, value))
1428
1428
1429 def debugsetparents(ui, repo, rev1, rev2=None):
1429 def debugsetparents(ui, repo, rev1, rev2=None):
1430 """manually set the parents of the current working directory
1430 """manually set the parents of the current working directory
1431
1431
1432 This is useful for writing repository conversion tools, but should
1432 This is useful for writing repository conversion tools, but should
1433 be used with care.
1433 be used with care.
1434 """
1434 """
1435
1435
1436 if not rev2:
1436 if not rev2:
1437 rev2 = hex(nullid)
1437 rev2 = hex(nullid)
1438
1438
1439 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1439 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1440
1440
1441 def debugstate(ui, repo):
1441 def debugstate(ui, repo):
1442 """show the contents of the current dirstate"""
1442 """show the contents of the current dirstate"""
1443 repo.dirstate.read()
1443 repo.dirstate.read()
1444 dc = repo.dirstate.map
1444 dc = repo.dirstate.map
1445 keys = dc.keys()
1445 keys = dc.keys()
1446 keys.sort()
1446 keys.sort()
1447 for file_ in keys:
1447 for file_ in keys:
1448 ui.write("%c %3o %10d %s %s\n"
1448 ui.write("%c %3o %10d %s %s\n"
1449 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1449 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1450 time.strftime("%x %X",
1450 time.strftime("%x %X",
1451 time.localtime(dc[file_][3])), file_))
1451 time.localtime(dc[file_][3])), file_))
1452 for f in repo.dirstate.copies:
1452 for f in repo.dirstate.copies:
1453 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1453 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1454
1454
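# Editor's sketch (illustrative only, not part of the original changeset):
# debugstate() above prints one "state mode size mtime filename" line per
# dirstate entry.  The same format string exercised with made-up values
# (the mtime column depends on the locale's date/time format):
import time

def _dirstate_line_sketch(state='n', mode=0o644, size=12, mtime=0, name='foo.c'):
    return "%c %3o %10d %s %s" % (state, mode, size,
                                  time.strftime("%x %X", time.localtime(mtime)),
                                  name)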
1455 def debugdata(ui, file_, rev):
1455 def debugdata(ui, file_, rev):
1456 """dump the contents of an data file revision"""
1456 """dump the contents of an data file revision"""
1457 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1457 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1458 file_[:-2] + ".i", file_, 0)
1458 file_[:-2] + ".i", file_, 0)
1459 try:
1459 try:
1460 ui.write(r.revision(r.lookup(rev)))
1460 ui.write(r.revision(r.lookup(rev)))
1461 except KeyError:
1461 except KeyError:
1462 raise util.Abort(_('invalid revision identifier %s') % rev)
1462 raise util.Abort(_('invalid revision identifier %s') % rev)
1463
1463
1464 def debugindex(ui, file_):
1464 def debugindex(ui, file_):
1465 """dump the contents of an index file"""
1465 """dump the contents of an index file"""
1466 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1466 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1467 ui.write(" rev offset length base linkrev" +
1467 ui.write(" rev offset length base linkrev" +
1468 " nodeid p1 p2\n")
1468 " nodeid p1 p2\n")
1469 for i in range(r.count()):
1469 for i in range(r.count()):
1470 node = r.node(i)
1470 node = r.node(i)
1471 pp = r.parents(node)
1471 pp = r.parents(node)
1472 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1472 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1473 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1473 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1474 short(node), short(pp[0]), short(pp[1])))
1474 short(node), short(pp[0]), short(pp[1])))
1475
1475
1476 def debugindexdot(ui, file_):
1476 def debugindexdot(ui, file_):
1477 """dump an index DAG as a .dot file"""
1477 """dump an index DAG as a .dot file"""
1478 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1478 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1479 ui.write("digraph G {\n")
1479 ui.write("digraph G {\n")
1480 for i in range(r.count()):
1480 for i in range(r.count()):
1481 e = r.index[i]
1481 e = r.index[i]
1482 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1482 ui.write("\t%d -> %d\n" % (r.rev(e[4]), i))
1483 if e[5] != nullid:
1483 if e[5] != nullid:
1484 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1484 ui.write("\t%d -> %d\n" % (r.rev(e[5]), i))
1485 ui.write("}\n")
1485 ui.write("}\n")
1486
1486
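# Editor's sketch (illustrative only, not part of the original changeset): for
# a linear three-revision history, debugindexdot() above emits edges such as
# "-1 -> 0", "0 -> 1" and "1 -> 2" (nullid shows up as revision -1) inside a
# "digraph G { ... }" wrapper.  A hypothetical standalone emitter over
# (rev, p1rev, p2rev) tuples:
def _index_dot_sketch(entries):
    lines = ["digraph G {"]
    for rev, p1, p2 in entries:
        lines.append("\t%d -> %d" % (p1, rev))      # first parent, always written
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))  # second parent only for merges
    lines.append("}")
    return "\n".join(lines)

# _index_dot_sketch([(0, -1, -1), (1, 0, -1), (2, 1, -1)]) yields the graph above.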
1487 def debugrename(ui, repo, file, rev=None):
1487 def debugrename(ui, repo, file, rev=None):
1488 """dump rename information"""
1488 """dump rename information"""
1489 r = repo.file(relpath(repo, [file])[0])
1489 r = repo.file(relpath(repo, [file])[0])
1490 if rev:
1490 if rev:
1491 try:
1491 try:
1492 # assume all revision numbers are for changesets
1492 # assume all revision numbers are for changesets
1493 n = repo.lookup(rev)
1493 n = repo.lookup(rev)
1494 change = repo.changelog.read(n)
1494 change = repo.changelog.read(n)
1495 m = repo.manifest.read(change[0])
1495 m = repo.manifest.read(change[0])
1496 n = m[relpath(repo, [file])[0]]
1496 n = m[relpath(repo, [file])[0]]
1497 except (hg.RepoError, KeyError):
1497 except (hg.RepoError, KeyError):
1498 n = r.lookup(rev)
1498 n = r.lookup(rev)
1499 else:
1499 else:
1500 n = r.tip()
1500 n = r.tip()
1501 m = r.renamed(n)
1501 m = r.renamed(n)
1502 if m:
1502 if m:
1503 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1503 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1504 else:
1504 else:
1505 ui.write(_("not renamed\n"))
1505 ui.write(_("not renamed\n"))
1506
1506
1507 def debugwalk(ui, repo, *pats, **opts):
1507 def debugwalk(ui, repo, *pats, **opts):
1508 """show how files match on given patterns"""
1508 """show how files match on given patterns"""
1509 items = list(walk(repo, pats, opts))
1509 items = list(walk(repo, pats, opts))
1510 if not items:
1510 if not items:
1511 return
1511 return
1512 fmt = '%%s %%-%ds %%-%ds %%s' % (
1512 fmt = '%%s %%-%ds %%-%ds %%s' % (
1513 max([len(abs) for (src, abs, rel, exact) in items]),
1513 max([len(abs) for (src, abs, rel, exact) in items]),
1514 max([len(rel) for (src, abs, rel, exact) in items]))
1514 max([len(rel) for (src, abs, rel, exact) in items]))
1515 for src, abs, rel, exact in items:
1515 for src, abs, rel, exact in items:
1516 line = fmt % (src, abs, rel, exact and 'exact' or '')
1516 line = fmt % (src, abs, rel, exact and 'exact' or '')
1517 ui.write("%s\n" % line.rstrip())
1517 ui.write("%s\n" % line.rstrip())
1518
1518
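# Editor's sketch (illustrative only, not part of the original changeset):
# debugwalk() above builds its format string in two stages - the column widths
# are substituted into the doubled '%%' template first, then each row is
# formatted.  Standalone, with hypothetical walk() output tuples:
def _walk_format_sketch(items):
    # items: list of (src, abs, rel, exact) tuples as produced by walk()
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max([len(a) for (s, a, r, e) in items]),
        max([len(r) for (s, a, r, e) in items]))
    # e.g. fmt == '%s %-9s %-7s %s' when the longest abs/rel names are 9 and 7 chars
    return [fmt % (s, a, r, e and 'exact' or '') for (s, a, r, e) in items]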
1519 def diff(ui, repo, *pats, **opts):
1519 def diff(ui, repo, *pats, **opts):
1520 """diff repository (or selected files)
1520 """diff repository (or selected files)
1521
1521
1522 Show differences between revisions for the specified files.
1522 Show differences between revisions for the specified files.
1523
1523
1524 Differences between files are shown using the unified diff format.
1524 Differences between files are shown using the unified diff format.
1525
1525
1526 When two revision arguments are given, then changes are shown
1526 When two revision arguments are given, then changes are shown
1527 between those revisions. If only one revision is specified then
1527 between those revisions. If only one revision is specified then
1528 that revision is compared to the working directory, and, when no
1528 that revision is compared to the working directory, and, when no
1529 revisions are specified, the working directory files are compared
1529 revisions are specified, the working directory files are compared
1530 to its first parent.
1530 to its first parent.
1531
1531
1532 Without the -a option, diff will avoid generating diffs of files
1532 Without the -a option, diff will avoid generating diffs of files
1533 it detects as binary. With -a, diff will generate a diff anyway,
1533 it detects as binary. With -a, diff will generate a diff anyway,
1534 probably with undesirable results.
1534 probably with undesirable results.
1535 """
1535 """
1536 node1, node2 = None, None
1536 node1, node2 = None, None
1537 revs = [repo.lookup(x) for x in opts['rev']]
1537 revs = [repo.lookup(x) for x in opts['rev']]
1538
1538
1539 if len(revs) > 0:
1539 if len(revs) > 0:
1540 node1 = revs[0]
1540 node1 = revs[0]
1541 if len(revs) > 1:
1541 if len(revs) > 1:
1542 node2 = revs[1]
1542 node2 = revs[1]
1543 if len(revs) > 2:
1543 if len(revs) > 2:
1544 raise util.Abort(_("too many revisions to diff"))
1544 raise util.Abort(_("too many revisions to diff"))
1545
1545
1546 fns, matchfn, anypats = matchpats(repo, pats, opts)
1546 fns, matchfn, anypats = matchpats(repo, pats, opts)
1547
1547
1548 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1548 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1549 text=opts['text'], opts=opts)
1549 text=opts['text'], opts=opts)
1550
1550
1551 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1551 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1552 node = repo.lookup(changeset)
1552 node = repo.lookup(changeset)
1553 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1553 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1554 if opts['switch_parent']:
1554 if opts['switch_parent']:
1555 parents.reverse()
1555 parents.reverse()
1556 prev = (parents and parents[0]) or nullid
1556 prev = (parents and parents[0]) or nullid
1557 change = repo.changelog.read(node)
1557 change = repo.changelog.read(node)
1558
1558
1559 fp = make_file(repo, repo.changelog, opts['output'],
1559 fp = make_file(repo, repo.changelog, opts['output'],
1560 node=node, total=total, seqno=seqno,
1560 node=node, total=total, seqno=seqno,
1561 revwidth=revwidth)
1561 revwidth=revwidth)
1562 if fp != sys.stdout:
1562 if fp != sys.stdout:
1563 ui.note("%s\n" % fp.name)
1563 ui.note("%s\n" % fp.name)
1564
1564
1565 fp.write("# HG changeset patch\n")
1565 fp.write("# HG changeset patch\n")
1566 fp.write("# User %s\n" % change[1])
1566 fp.write("# User %s\n" % change[1])
1567 fp.write("# Node ID %s\n" % hex(node))
1567 fp.write("# Node ID %s\n" % hex(node))
1568 fp.write("# Parent %s\n" % hex(prev))
1568 fp.write("# Parent %s\n" % hex(prev))
1569 if len(parents) > 1:
1569 if len(parents) > 1:
1570 fp.write("# Parent %s\n" % hex(parents[1]))
1570 fp.write("# Parent %s\n" % hex(parents[1]))
1571 fp.write(change[4].rstrip())
1571 fp.write(change[4].rstrip())
1572 fp.write("\n\n")
1572 fp.write("\n\n")
1573
1573
1574 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1574 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1575 if fp != sys.stdout:
1575 if fp != sys.stdout:
1576 fp.close()
1576 fp.close()
1577
1577
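# Editor's sketch (illustrative only, not part of the original changeset):
# doexport() above writes a "# HG changeset patch" preamble before the diff,
# i.e. user, node ID, first parent (plus a second "# Parent" line for merges)
# and then the commit message.  Assembled standalone with made-up values:
def _patch_header_sketch(user, node_hex, parent_hex, message):
    header = ("# HG changeset patch\n"
              "# User %s\n"
              "# Node ID %s\n"
              "# Parent %s\n" % (user, node_hex, parent_hex))
    return header + message.rstrip() + "\n\n"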
1578 def export(ui, repo, *changesets, **opts):
1578 def export(ui, repo, *changesets, **opts):
1579 """dump the header and diffs for one or more changesets
1579 """dump the header and diffs for one or more changesets
1580
1580
1581 Print the changeset header and diffs for one or more revisions.
1581 Print the changeset header and diffs for one or more revisions.
1582
1582
1583 The information shown in the changeset header is: author,
1583 The information shown in the changeset header is: author,
1584 changeset hash, parent and commit comment.
1584 changeset hash, parent and commit comment.
1585
1585
1586 Output may be to a file, in which case the name of the file is
1586 Output may be to a file, in which case the name of the file is
1587 given using a format string. The formatting rules are as follows:
1587 given using a format string. The formatting rules are as follows:
1588
1588
1589 %% literal "%" character
1589 %% literal "%" character
1590 %H changeset hash (40 bytes of hexadecimal)
1590 %H changeset hash (40 bytes of hexadecimal)
1591 %N number of patches being generated
1591 %N number of patches being generated
1592 %R changeset revision number
1592 %R changeset revision number
1593 %b basename of the exporting repository
1593 %b basename of the exporting repository
1594 %h short-form changeset hash (12 bytes of hexadecimal)
1594 %h short-form changeset hash (12 bytes of hexadecimal)
1595 %n zero-padded sequence number, starting at 1
1595 %n zero-padded sequence number, starting at 1
1596 %r zero-padded changeset revision number
1596 %r zero-padded changeset revision number
1597
1597
1598 Without the -a option, export will avoid generating diffs of files
1598 Without the -a option, export will avoid generating diffs of files
1599 it detects as binary. With -a, export will generate a diff anyway,
1599 it detects as binary. With -a, export will generate a diff anyway,
1600 probably with undesirable results.
1600 probably with undesirable results.
1601
1601
1602 With the --switch-parent option, the diff will be against the second
1602 With the --switch-parent option, the diff will be against the second
1603 parent. This can be useful for reviewing a merge.
1603 parent. This can be useful for reviewing a merge.
1604 """
1604 """
1605 if not changesets:
1605 if not changesets:
1606 raise util.Abort(_("export requires at least one changeset"))
1606 raise util.Abort(_("export requires at least one changeset"))
1607 seqno = 0
1607 seqno = 0
1608 revs = list(revrange(ui, repo, changesets))
1608 revs = list(revrange(ui, repo, changesets))
1609 total = len(revs)
1609 total = len(revs)
1610 revwidth = max(map(len, revs))
1610 revwidth = max(map(len, revs))
1611 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1611 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1612 ui.note(msg)
1612 ui.note(msg)
1613 for cset in revs:
1613 for cset in revs:
1614 seqno += 1
1614 seqno += 1
1615 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1615 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1616
1616
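# Editor's sketch (illustrative only, not part of the original changeset): the
# -o/--output format characters documented in the export docstring are expanded
# by make_file(), which lives outside this hunk.  A hypothetical expansion, just
# to show how the keys combine (all values below are made up):
def _expand_output_name_sketch(pattern, values):
    # values maps a format character to its replacement text,
    # e.g. {'b': 'myrepo', 'r': '0042', 'h': '1234567890ab', 'n': '01'}
    out = []
    i = 0
    while i < len(pattern):
        c = pattern[i]
        if c == '%' and i + 1 < len(pattern):
            key = pattern[i + 1]
            out.append('%' if key == '%' else values.get(key, '%' + key))
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

# _expand_output_name_sketch('%b-r%r.patch', {'b': 'myrepo', 'r': '0042'})
# returns 'myrepo-r0042.patch'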
1617 def forget(ui, repo, *pats, **opts):
1617 def forget(ui, repo, *pats, **opts):
1618 """don't add the specified files on the next commit (DEPRECATED)
1618 """don't add the specified files on the next commit (DEPRECATED)
1619
1619
1620 (DEPRECATED)
1620 (DEPRECATED)
1621 Undo an 'hg add' scheduled for the next commit.
1621 Undo an 'hg add' scheduled for the next commit.
1622
1622
1623 This command is now deprecated and will be removed in a future
1623 This command is now deprecated and will be removed in a future
1624 release. Please use revert instead.
1624 release. Please use revert instead.
1625 """
1625 """
1626 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1626 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1627 forget = []
1627 forget = []
1628 for src, abs, rel, exact in walk(repo, pats, opts):
1628 for src, abs, rel, exact in walk(repo, pats, opts):
1629 if repo.dirstate.state(abs) == 'a':
1629 if repo.dirstate.state(abs) == 'a':
1630 forget.append(abs)
1630 forget.append(abs)
1631 if ui.verbose or not exact:
1631 if ui.verbose or not exact:
1632 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1632 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1633 repo.forget(forget)
1633 repo.forget(forget)
1634
1634
1635 def grep(ui, repo, pattern, *pats, **opts):
1635 def grep(ui, repo, pattern, *pats, **opts):
1636 """search for a pattern in specified files and revisions
1636 """search for a pattern in specified files and revisions
1637
1637
1638 Search revisions of files for a regular expression.
1638 Search revisions of files for a regular expression.
1639
1639
1640 This command behaves differently than Unix grep. It only accepts
1640 This command behaves differently than Unix grep. It only accepts
1641 Python/Perl regexps. It searches repository history, not the
1641 Python/Perl regexps. It searches repository history, not the
1642 working directory. It always prints the revision number in which
1642 working directory. It always prints the revision number in which
1643 a match appears.
1643 a match appears.
1644
1644
1645 By default, grep only prints output for the first revision of a
1645 By default, grep only prints output for the first revision of a
1646 file in which it finds a match. To get it to print every revision
1646 file in which it finds a match. To get it to print every revision
1647 that contains a change in match status ("-" for a match that
1647 that contains a change in match status ("-" for a match that
1648 becomes a non-match, or "+" for a non-match that becomes a match),
1648 becomes a non-match, or "+" for a non-match that becomes a match),
1649 use the --all flag.
1649 use the --all flag.
1650 """
1650 """
1651 reflags = 0
1651 reflags = 0
1652 if opts['ignore_case']:
1652 if opts['ignore_case']:
1653 reflags |= re.I
1653 reflags |= re.I
1654 regexp = re.compile(pattern, reflags)
1654 regexp = re.compile(pattern, reflags)
1655 sep, eol = ':', '\n'
1655 sep, eol = ':', '\n'
1656 if opts['print0']:
1656 if opts['print0']:
1657 sep = eol = '\0'
1657 sep = eol = '\0'
1658
1658
1659 fcache = {}
1659 fcache = {}
1660 def getfile(fn):
1660 def getfile(fn):
1661 if fn not in fcache:
1661 if fn not in fcache:
1662 fcache[fn] = repo.file(fn)
1662 fcache[fn] = repo.file(fn)
1663 return fcache[fn]
1663 return fcache[fn]
1664
1664
1665 def matchlines(body):
1665 def matchlines(body):
1666 begin = 0
1666 begin = 0
1667 linenum = 0
1667 linenum = 0
1668 while True:
1668 while True:
1669 match = regexp.search(body, begin)
1669 match = regexp.search(body, begin)
1670 if not match:
1670 if not match:
1671 break
1671 break
1672 mstart, mend = match.span()
1672 mstart, mend = match.span()
1673 linenum += body.count('\n', begin, mstart) + 1
1673 linenum += body.count('\n', begin, mstart) + 1
1674 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1674 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1675 lend = body.find('\n', mend)
1675 lend = body.find('\n', mend)
1676 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1676 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1677 begin = lend + 1
1677 begin = lend + 1
1678
1678
1679 class linestate(object):
1679 class linestate(object):
1680 def __init__(self, line, linenum, colstart, colend):
1680 def __init__(self, line, linenum, colstart, colend):
1681 self.line = line
1681 self.line = line
1682 self.linenum = linenum
1682 self.linenum = linenum
1683 self.colstart = colstart
1683 self.colstart = colstart
1684 self.colend = colend
1684 self.colend = colend
1685 def __eq__(self, other):
1685 def __eq__(self, other):
1686 return self.line == other.line
1686 return self.line == other.line
1687 def __hash__(self):
1687 def __hash__(self):
1688 return hash(self.line)
1688 return hash(self.line)
1689
1689
1690 matches = {}
1690 matches = {}
1691 def grepbody(fn, rev, body):
1691 def grepbody(fn, rev, body):
1692 matches[rev].setdefault(fn, {})
1692 matches[rev].setdefault(fn, {})
1693 m = matches[rev][fn]
1693 m = matches[rev][fn]
1694 for lnum, cstart, cend, line in matchlines(body):
1694 for lnum, cstart, cend, line in matchlines(body):
1695 s = linestate(line, lnum, cstart, cend)
1695 s = linestate(line, lnum, cstart, cend)
1696 m[s] = s
1696 m[s] = s
1697
1697
1698 # FIXME: prev isn't used, why?
1698 # FIXME: prev isn't used, why?
1699 prev = {}
1699 prev = {}
1700 ucache = {}
1700 ucache = {}
1701 def display(fn, rev, states, prevstates):
1701 def display(fn, rev, states, prevstates):
1702 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1702 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1703 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1703 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1704 counts = {'-': 0, '+': 0}
1704 counts = {'-': 0, '+': 0}
1705 filerevmatches = {}
1705 filerevmatches = {}
1706 for l in diff:
1706 for l in diff:
1707 if incrementing or not opts['all']:
1707 if incrementing or not opts['all']:
1708 change = ((l in prevstates) and '-') or '+'
1708 change = ((l in prevstates) and '-') or '+'
1709 r = rev
1709 r = rev
1710 else:
1710 else:
1711 change = ((l in states) and '-') or '+'
1711 change = ((l in states) and '-') or '+'
1712 r = prev[fn]
1712 r = prev[fn]
1713 cols = [fn, str(rev)]
1713 cols = [fn, str(rev)]
1714 if opts['line_number']:
1714 if opts['line_number']:
1715 cols.append(str(l.linenum))
1715 cols.append(str(l.linenum))
1716 if opts['all']:
1716 if opts['all']:
1717 cols.append(change)
1717 cols.append(change)
1718 if opts['user']:
1718 if opts['user']:
1719 cols.append(trimuser(ui, getchange(rev)[1], rev,
1719 cols.append(trimuser(ui, getchange(rev)[1], rev,
1720 ucache))
1720 ucache))
1721 if opts['files_with_matches']:
1721 if opts['files_with_matches']:
1722 c = (fn, rev)
1722 c = (fn, rev)
1723 if c in filerevmatches:
1723 if c in filerevmatches:
1724 continue
1724 continue
1725 filerevmatches[c] = 1
1725 filerevmatches[c] = 1
1726 else:
1726 else:
1727 cols.append(l.line)
1727 cols.append(l.line)
1728 ui.write(sep.join(cols), eol)
1728 ui.write(sep.join(cols), eol)
1729 counts[change] += 1
1729 counts[change] += 1
1730 return counts['+'], counts['-']
1730 return counts['+'], counts['-']
1731
1731
1732 fstate = {}
1732 fstate = {}
1733 skip = {}
1733 skip = {}
1734 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1734 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1735 count = 0
1735 count = 0
1736 incrementing = False
1736 incrementing = False
1737 for st, rev, fns in changeiter:
1737 for st, rev, fns in changeiter:
1738 if st == 'window':
1738 if st == 'window':
1739 incrementing = rev
1739 incrementing = rev
1740 matches.clear()
1740 matches.clear()
1741 elif st == 'add':
1741 elif st == 'add':
1742 change = repo.changelog.read(repo.lookup(str(rev)))
1742 change = repo.changelog.read(repo.lookup(str(rev)))
1743 mf = repo.manifest.read(change[0])
1743 mf = repo.manifest.read(change[0])
1744 matches[rev] = {}
1744 matches[rev] = {}
1745 for fn in fns:
1745 for fn in fns:
1746 if fn in skip:
1746 if fn in skip:
1747 continue
1747 continue
1748 fstate.setdefault(fn, {})
1748 fstate.setdefault(fn, {})
1749 try:
1749 try:
1750 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1750 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1751 except KeyError:
1751 except KeyError:
1752 pass
1752 pass
1753 elif st == 'iter':
1753 elif st == 'iter':
1754 states = matches[rev].items()
1754 states = matches[rev].items()
1755 states.sort()
1755 states.sort()
1756 for fn, m in states:
1756 for fn, m in states:
1757 if fn in skip:
1757 if fn in skip:
1758 continue
1758 continue
1759 if incrementing or not opts['all'] or fstate[fn]:
1759 if incrementing or not opts['all'] or fstate[fn]:
1760 pos, neg = display(fn, rev, m, fstate[fn])
1760 pos, neg = display(fn, rev, m, fstate[fn])
1761 count += pos + neg
1761 count += pos + neg
1762 if pos and not opts['all']:
1762 if pos and not opts['all']:
1763 skip[fn] = True
1763 skip[fn] = True
1764 fstate[fn] = m
1764 fstate[fn] = m
1765 prev[fn] = rev
1765 prev[fn] = rev
1766
1766
1767 if not incrementing:
1767 if not incrementing:
1768 fstate = fstate.items()
1768 fstate = fstate.items()
1769 fstate.sort()
1769 fstate.sort()
1770 for fn, state in fstate:
1770 for fn, state in fstate:
1771 if fn in skip:
1771 if fn in skip:
1772 continue
1772 continue
1773 display(fn, rev, {}, state)
1773 display(fn, rev, {}, state)
1774 return (count == 0 and 1) or 0
1774 return (count == 0 and 1) or 0
1775
1775
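# Editor's sketch (illustrative only, not part of the original changeset):
# matchlines() inside grep() above scans a file body and yields
# (line number, start column, end column, line text) for every regexp hit.
# The same loop lifted out so it can be run on a plain string:
import re

def _matchlines_sketch(regexp, body):
    begin = 0
    linenum = 0
    while True:
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        lend = body.find('\n', mend)
        yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
        begin = lend + 1

# list(_matchlines_sketch(re.compile('def'), 'def a():\n    pass\ndef b():\n'))
# returns [(1, 0, 3, 'def a():'), (3, 0, 3, 'def b():')]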
1776 def heads(ui, repo, **opts):
1776 def heads(ui, repo, **opts):
1777 """show current repository heads
1777 """show current repository heads
1778
1778
1779 Show all repository head changesets.
1779 Show all repository head changesets.
1780
1780
1781 Repository "heads" are changesets that don't have children
1781 Repository "heads" are changesets that don't have children
1782 changesets. They are where development generally takes place and
1782 changesets. They are where development generally takes place and
1783 are the usual targets for update and merge operations.
1783 are the usual targets for update and merge operations.
1784 """
1784 """
1785 if opts['rev']:
1785 if opts['rev']:
1786 heads = repo.heads(repo.lookup(opts['rev']))
1786 heads = repo.heads(repo.lookup(opts['rev']))
1787 else:
1787 else:
1788 heads = repo.heads()
1788 heads = repo.heads()
1789 br = None
1789 br = None
1790 if opts['branches']:
1790 if opts['branches']:
1791 br = repo.branchlookup(heads)
1791 br = repo.branchlookup(heads)
1792 displayer = show_changeset(ui, repo, opts)
1792 displayer = show_changeset(ui, repo, opts)
1793 for n in heads:
1793 for n in heads:
1794 displayer.show(changenode=n, brinfo=br)
1794 displayer.show(changenode=n, brinfo=br)
1795
1795
1796 def identify(ui, repo):
1796 def identify(ui, repo):
1797 """print information about the working copy
1797 """print information about the working copy
1798
1798
1799 Print a short summary of the current state of the repo.
1799 Print a short summary of the current state of the repo.
1800
1800
1801 This summary identifies the repository state using one or two parent
1801 This summary identifies the repository state using one or two parent
1802 hash identifiers, followed by a "+" if there are uncommitted changes
1802 hash identifiers, followed by a "+" if there are uncommitted changes
1803 in the working directory, followed by a list of tags for this revision.
1803 in the working directory, followed by a list of tags for this revision.
1804 """
1804 """
1805 parents = [p for p in repo.dirstate.parents() if p != nullid]
1805 parents = [p for p in repo.dirstate.parents() if p != nullid]
1806 if not parents:
1806 if not parents:
1807 ui.write(_("unknown\n"))
1807 ui.write(_("unknown\n"))
1808 return
1808 return
1809
1809
1810 hexfunc = ui.verbose and hex or short
1810 hexfunc = ui.verbose and hex or short
1811 modified, added, removed, deleted, unknown = repo.changes()
1811 modified, added, removed, deleted, unknown = repo.changes()
1812 output = ["%s%s" %
1812 output = ["%s%s" %
1813 ('+'.join([hexfunc(parent) for parent in parents]),
1813 ('+'.join([hexfunc(parent) for parent in parents]),
1814 (modified or added or removed or deleted) and "+" or "")]
1814 (modified or added or removed or deleted) and "+" or "")]
1815
1815
1816 if not ui.quiet:
1816 if not ui.quiet:
1817 # multiple tags for a single parent separated by '/'
1817 # multiple tags for a single parent separated by '/'
1818 parenttags = ['/'.join(tags)
1818 parenttags = ['/'.join(tags)
1819 for tags in map(repo.nodetags, parents) if tags]
1819 for tags in map(repo.nodetags, parents) if tags]
1820 # tags for multiple parents separated by ' + '
1820 # tags for multiple parents separated by ' + '
1821 if parenttags:
1821 if parenttags:
1822 output.append(' + '.join(parenttags))
1822 output.append(' + '.join(parenttags))
1823
1823
1824 ui.write("%s\n" % ' '.join(output))
1824 ui.write("%s\n" % ' '.join(output))
1825
1825
1826 def import_(ui, repo, patch1, *patches, **opts):
1826 def import_(ui, repo, patch1, *patches, **opts):
1827 """import an ordered set of patches
1827 """import an ordered set of patches
1828
1828
1829 Import a list of patches and commit them individually.
1829 Import a list of patches and commit them individually.
1830
1830
1831 If there are outstanding changes in the working directory, import
1831 If there are outstanding changes in the working directory, import
1832 will abort unless given the -f flag.
1832 will abort unless given the -f flag.
1833
1833
1834 If a patch looks like a mail message (its first line starts with
1834 If a patch looks like a mail message (its first line starts with
1835 "From " or looks like an RFC822 header), it will not be applied
1835 "From " or looks like an RFC822 header), it will not be applied
1836 unless the -f option is used. The importer neither parses nor
1836 unless the -f option is used. The importer neither parses nor
1837 discards mail headers, so use -f only to override the "mailness"
1837 discards mail headers, so use -f only to override the "mailness"
1838 safety check, not to import a real mail message.
1838 safety check, not to import a real mail message.
1839 """
1839 """
1840 patches = (patch1,) + patches
1840 patches = (patch1,) + patches
1841
1841
1842 if not opts['force']:
1842 if not opts['force']:
1843 bail_if_changed(repo)
1843 bail_if_changed(repo)
1844
1844
1845 d = opts["base"]
1845 d = opts["base"]
1846 strip = opts["strip"]
1846 strip = opts["strip"]
1847
1847
1848 mailre = re.compile(r'(?:From |[\w-]+:)')
1848 mailre = re.compile(r'(?:From |[\w-]+:)')
1849
1849
1850 # attempt to detect the start of a patch
1850 # attempt to detect the start of a patch
1851 # (this heuristic is borrowed from quilt)
1851 # (this heuristic is borrowed from quilt)
1852 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1852 diffre = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1853 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1853 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1854 '(---|\*\*\*)[ \t])')
1854 '(---|\*\*\*)[ \t])')
1855
1855
1856 for patch in patches:
1856 for patch in patches:
1857 ui.status(_("applying %s\n") % patch)
1857 ui.status(_("applying %s\n") % patch)
1858 pf = os.path.join(d, patch)
1858 pf = os.path.join(d, patch)
1859
1859
1860 message = []
1860 message = []
1861 user = None
1861 user = None
1862 hgpatch = False
1862 hgpatch = False
1863 for line in file(pf):
1863 for line in file(pf):
1864 line = line.rstrip()
1864 line = line.rstrip()
1865 if (not message and not hgpatch and
1865 if (not message and not hgpatch and
1866 mailre.match(line) and not opts['force']):
1866 mailre.match(line) and not opts['force']):
1867 if len(line) > 35:
1867 if len(line) > 35:
1868 line = line[:32] + '...'
1868 line = line[:32] + '...'
1869 raise util.Abort(_('first line looks like a '
1869 raise util.Abort(_('first line looks like a '
1870 'mail header: ') + line)
1870 'mail header: ') + line)
1871 if diffre.match(line):
1871 if diffre.match(line):
1872 break
1872 break
1873 elif hgpatch:
1873 elif hgpatch:
1874 # parse values when importing the result of an hg export
1874 # parse values when importing the result of an hg export
1875 if line.startswith("# User "):
1875 if line.startswith("# User "):
1876 user = line[7:]
1876 user = line[7:]
1877 ui.debug(_('User: %s\n') % user)
1877 ui.debug(_('User: %s\n') % user)
1878 elif not line.startswith("# ") and line:
1878 elif not line.startswith("# ") and line:
1879 message.append(line)
1879 message.append(line)
1880 hgpatch = False
1880 hgpatch = False
1881 elif line == '# HG changeset patch':
1881 elif line == '# HG changeset patch':
1882 hgpatch = True
1882 hgpatch = True
1883 message = [] # We may have collected garbage
1883 message = [] # We may have collected garbage
1884 else:
1884 else:
1885 message.append(line)
1885 message.append(line)
1886
1886
1887 # make sure message isn't empty
1887 # make sure message isn't empty
1888 if not message:
1888 if not message:
1889 message = _("imported patch %s\n") % patch
1889 message = _("imported patch %s\n") % patch
1890 else:
1890 else:
1891 message = "%s\n" % '\n'.join(message)
1891 message = "%s\n" % '\n'.join(message)
1892 ui.debug(_('message:\n%s\n') % message)
1892 ui.debug(_('message:\n%s\n') % message)
1893
1893
1894 files = util.patch(strip, pf, ui)
1894 files = util.patch(strip, pf, ui)
1895
1895
1896 if len(files) > 0:
1896 if len(files) > 0:
1897 addremove(ui, repo, *files)
1897 addremove(ui, repo, *files)
1898 repo.commit(files, message, user)
1898 repo.commit(files, message, user)
1899
1899
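# Editor's sketch (illustrative only, not part of the original changeset):
# import_() above refuses (without -f) a patch whose first line looks like a
# mail header, and stops collecting the message once a line looks like the
# start of a diff.  The two regexps reproduced standalone, with hypothetical
# sample lines:
import re

_mailre_sketch = re.compile(r'(?:From |[\w-]+:)')
_diffre_sketch = re.compile(r'(?:Index:[ \t]|diff[ \t]|RCS file: |'
                            r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                            r'(---|\*\*\*)[ \t])')

# _mailre_sketch.match('From: Jane <jane@example.com>')  -> matches (mail-like)
# _mailre_sketch.match('fix the frobnicator')            -> None
# _diffre_sketch.match('diff -r 12ab34cd file.c')        -> matches (diff start)
# _diffre_sketch.match('--- a/file.c')                   -> matches (diff start)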
1900 def incoming(ui, repo, source="default", **opts):
1900 def incoming(ui, repo, source="default", **opts):
1901 """show new changesets found in source
1901 """show new changesets found in source
1902
1902
1903 Show new changesets found in the specified path/URL or the default
1903 Show new changesets found in the specified path/URL or the default
1904 pull location. These are the changesets that would be pulled if a pull
1904 pull location. These are the changesets that would be pulled if a pull
1905 was requested.
1905 was requested.
1906
1906
1907 For remote repositories, using --bundle avoids downloading the changesets
1907 For remote repositories, using --bundle avoids downloading the changesets
1908 twice if the incoming is followed by a pull.
1908 twice if the incoming is followed by a pull.
1909
1909
1910 See pull for valid source format details.
1910 See pull for valid source format details.
1911 """
1911 """
1912 source = ui.expandpath(source)
1912 source = ui.expandpath(source)
1913 if opts['ssh']:
1913 if opts['ssh']:
1914 ui.setconfig("ui", "ssh", opts['ssh'])
1914 ui.setconfig("ui", "ssh", opts['ssh'])
1915 if opts['remotecmd']:
1915 if opts['remotecmd']:
1916 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1916 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
1917
1917
1918 other = hg.repository(ui, source)
1918 other = hg.repository(ui, source)
1919 incoming = repo.findincoming(other, force=opts["force"])
1919 incoming = repo.findincoming(other, force=opts["force"])
1920 if not incoming:
1920 if not incoming:
1921 ui.status(_("no changes found\n"))
1921 ui.status(_("no changes found\n"))
1922 return
1922 return
1923
1923
1924 cleanup = None
1924 cleanup = None
1925 try:
1925 try:
1926 fname = opts["bundle"]
1926 fname = opts["bundle"]
1927 if fname or not other.local():
1927 if fname or not other.local():
1928 # create a bundle (uncompressed if other repo is not local)
1928 # create a bundle (uncompressed if other repo is not local)
1929 cg = other.changegroup(incoming, "incoming")
1929 cg = other.changegroup(incoming, "incoming")
1930 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1930 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1931 # keep written bundle?
1931 # keep written bundle?
1932 if opts["bundle"]:
1932 if opts["bundle"]:
1933 cleanup = None
1933 cleanup = None
1934 if not other.local():
1934 if not other.local():
1935 # use the created uncompressed bundlerepo
1935 # use the created uncompressed bundlerepo
1936 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1936 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1937
1937
1938 o = other.changelog.nodesbetween(incoming)[0]
1938 o = other.changelog.nodesbetween(incoming)[0]
1939 if opts['newest_first']:
1939 if opts['newest_first']:
1940 o.reverse()
1940 o.reverse()
1941 displayer = show_changeset(ui, other, opts)
1941 displayer = show_changeset(ui, other, opts)
1942 for n in o:
1942 for n in o:
1943 parents = [p for p in other.changelog.parents(n) if p != nullid]
1943 parents = [p for p in other.changelog.parents(n) if p != nullid]
1944 if opts['no_merges'] and len(parents) == 2:
1944 if opts['no_merges'] and len(parents) == 2:
1945 continue
1945 continue
1946 displayer.show(changenode=n)
1946 displayer.show(changenode=n)
1947 if opts['patch']:
1947 if opts['patch']:
1948 prev = (parents and parents[0]) or nullid
1948 prev = (parents and parents[0]) or nullid
1949 dodiff(ui, ui, other, prev, n)
1949 dodiff(ui, ui, other, prev, n)
1950 ui.write("\n")
1950 ui.write("\n")
1951 finally:
1951 finally:
1952 if hasattr(other, 'close'):
1952 if hasattr(other, 'close'):
1953 other.close()
1953 other.close()
1954 if cleanup:
1954 if cleanup:
1955 os.unlink(cleanup)
1955 os.unlink(cleanup)
1956
1956
1957 def init(ui, dest="."):
1957 def init(ui, dest="."):
1958 """create a new repository in the given directory
1958 """create a new repository in the given directory
1959
1959
1960 Initialize a new repository in the given directory. If the given
1960 Initialize a new repository in the given directory. If the given
1961 directory does not exist, it is created.
1961 directory does not exist, it is created.
1962
1962
1963 If no directory is given, the current directory is used.
1963 If no directory is given, the current directory is used.
1964 """
1964 """
1965 if not os.path.exists(dest):
1965 if not os.path.exists(dest):
1966 os.mkdir(dest)
1966 os.mkdir(dest)
1967 hg.repository(ui, dest, create=1)
1967 hg.repository(ui, dest, create=1)
1968
1968
1969 def locate(ui, repo, *pats, **opts):
1969 def locate(ui, repo, *pats, **opts):
1970 """locate files matching specific patterns
1970 """locate files matching specific patterns
1971
1971
1972 Print all files under Mercurial control whose names match the
1972 Print all files under Mercurial control whose names match the
1973 given patterns.
1973 given patterns.
1974
1974
1975 This command searches the current directory and its
1975 This command searches the current directory and its
1976 subdirectories. To search an entire repository, move to the root
1976 subdirectories. To search an entire repository, move to the root
1977 of the repository.
1977 of the repository.
1978
1978
1979 If no patterns are given to match, this command prints all file
1979 If no patterns are given to match, this command prints all file
1980 names.
1980 names.
1981
1981
1982 If you want to feed the output of this command into the "xargs"
1982 If you want to feed the output of this command into the "xargs"
1983 command, use the "-0" option to both this command and "xargs".
1983 command, use the "-0" option to both this command and "xargs".
1984 This will avoid the problem of "xargs" treating single filenames
1984 This will avoid the problem of "xargs" treating single filenames
1985 that contain whitespace as multiple filenames.
1985 that contain whitespace as multiple filenames.
1986 """
1986 """
1987 end = opts['print0'] and '\0' or '\n'
1987 end = opts['print0'] and '\0' or '\n'
1988 rev = opts['rev']
1988 rev = opts['rev']
1989 if rev:
1989 if rev:
1990 node = repo.lookup(rev)
1990 node = repo.lookup(rev)
1991 else:
1991 else:
1992 node = None
1992 node = None
1993
1993
1994 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1994 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
1995 head='(?:.*/|)'):
1995 head='(?:.*/|)'):
1996 if not node and repo.dirstate.state(abs) == '?':
1996 if not node and repo.dirstate.state(abs) == '?':
1997 continue
1997 continue
1998 if opts['fullpath']:
1998 if opts['fullpath']:
1999 ui.write(os.path.join(repo.root, abs), end)
1999 ui.write(os.path.join(repo.root, abs), end)
2000 else:
2000 else:
2001 ui.write(((pats and rel) or abs), end)
2001 ui.write(((pats and rel) or abs), end)
2002
2002
2003 def log(ui, repo, *pats, **opts):
2003 def log(ui, repo, *pats, **opts):
2004 """show revision history of entire repository or files
2004 """show revision history of entire repository or files
2005
2005
2006 Print the revision history of the specified files or the entire project.
2006 Print the revision history of the specified files or the entire project.
2007
2007
2008 By default this command outputs: changeset id and hash, tags,
2008 By default this command outputs: changeset id and hash, tags,
2009 non-trivial parents, user, date and time, and a summary for each
2009 non-trivial parents, user, date and time, and a summary for each
2010 commit. When the -v/--verbose switch is used, the list of changed
2010 commit. When the -v/--verbose switch is used, the list of changed
2011 files and full commit message is shown.
2011 files and full commit message is shown.
2012 """
2012 """
2013 class dui(object):
2013 class dui(object):
2014 # Implement and delegate some ui protocol. Save hunks of
2014 # Implement and delegate some ui protocol. Save hunks of
2015 # output for later display in the desired order.
2015 # output for later display in the desired order.
2016 def __init__(self, ui):
2016 def __init__(self, ui):
2017 self.ui = ui
2017 self.ui = ui
2018 self.hunk = {}
2018 self.hunk = {}
2019 self.header = {}
2019 self.header = {}
2020 def bump(self, rev):
2020 def bump(self, rev):
2021 self.rev = rev
2021 self.rev = rev
2022 self.hunk[rev] = []
2022 self.hunk[rev] = []
2023 self.header[rev] = []
2023 self.header[rev] = []
2024 def note(self, *args):
2024 def note(self, *args):
2025 if self.verbose:
2025 if self.verbose:
2026 self.write(*args)
2026 self.write(*args)
2027 def status(self, *args):
2027 def status(self, *args):
2028 if not self.quiet:
2028 if not self.quiet:
2029 self.write(*args)
2029 self.write(*args)
2030 def write(self, *args):
2030 def write(self, *args):
2031 self.hunk[self.rev].append(args)
2031 self.hunk[self.rev].append(args)
2032 def write_header(self, *args):
2032 def write_header(self, *args):
2033 self.header[self.rev].append(args)
2033 self.header[self.rev].append(args)
2034 def debug(self, *args):
2034 def debug(self, *args):
2035 if self.debugflag:
2035 if self.debugflag:
2036 self.write(*args)
2036 self.write(*args)
2037 def __getattr__(self, key):
2037 def __getattr__(self, key):
2038 return getattr(self.ui, key)
2038 return getattr(self.ui, key)
2039
2039
2040 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2040 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2041
2041
2042 if opts['limit']:
2042 if opts['limit']:
2043 try:
2043 try:
2044 limit = int(opts['limit'])
2044 limit = int(opts['limit'])
2045 except ValueError:
2045 except ValueError:
2046 raise util.Abort(_('limit must be a positive integer'))
2046 raise util.Abort(_('limit must be a positive integer'))
2047 if limit <= 0: raise util.Abort(_('limit must be positive'))
2047 if limit <= 0: raise util.Abort(_('limit must be positive'))
2048 else:
2048 else:
2049 limit = sys.maxint
2049 limit = sys.maxint
2050 count = 0
2050 count = 0
2051
2051
2052 displayer = show_changeset(ui, repo, opts)
2052 displayer = show_changeset(ui, repo, opts)
2053 for st, rev, fns in changeiter:
2053 for st, rev, fns in changeiter:
2054 if st == 'window':
2054 if st == 'window':
2055 du = dui(ui)
2055 du = dui(ui)
2056 displayer.ui = du
2056 displayer.ui = du
2057 elif st == 'add':
2057 elif st == 'add':
2058 du.bump(rev)
2058 du.bump(rev)
2059 changenode = repo.changelog.node(rev)
2059 changenode = repo.changelog.node(rev)
2060 parents = [p for p in repo.changelog.parents(changenode)
2060 parents = [p for p in repo.changelog.parents(changenode)
2061 if p != nullid]
2061 if p != nullid]
2062 if opts['no_merges'] and len(parents) == 2:
2062 if opts['no_merges'] and len(parents) == 2:
2063 continue
2063 continue
2064 if opts['only_merges'] and len(parents) != 2:
2064 if opts['only_merges'] and len(parents) != 2:
2065 continue
2065 continue
2066
2066
2067 if opts['keyword']:
2067 if opts['keyword']:
2068 changes = getchange(rev)
2068 changes = getchange(rev)
2069 miss = 0
2069 miss = 0
2070 for k in [kw.lower() for kw in opts['keyword']]:
2070 for k in [kw.lower() for kw in opts['keyword']]:
2071 if not (k in changes[1].lower() or
2071 if not (k in changes[1].lower() or
2072 k in changes[4].lower() or
2072 k in changes[4].lower() or
2073 k in " ".join(changes[3][:20]).lower()):
2073 k in " ".join(changes[3][:20]).lower()):
2074 miss = 1
2074 miss = 1
2075 break
2075 break
2076 if miss:
2076 if miss:
2077 continue
2077 continue
2078
2078
2079 br = None
2079 br = None
2080 if opts['branches']:
2080 if opts['branches']:
2081 br = repo.branchlookup([repo.changelog.node(rev)])
2081 br = repo.branchlookup([repo.changelog.node(rev)])
2082
2082
2083 displayer.show(rev, brinfo=br)
2083 displayer.show(rev, brinfo=br)
2084 if opts['patch']:
2084 if opts['patch']:
2085 prev = (parents and parents[0]) or nullid
2085 prev = (parents and parents[0]) or nullid
2086 dodiff(du, du, repo, prev, changenode, match=matchfn)
2086 dodiff(du, du, repo, prev, changenode, match=matchfn)
2087 du.write("\n\n")
2087 du.write("\n\n")
2088 elif st == 'iter':
2088 elif st == 'iter':
2089 if count == limit: break
2089 if count == limit: break
2090 if du.header[rev]:
2090 if du.header[rev]:
2091 for args in du.header[rev]:
2091 for args in du.header[rev]:
2092 ui.write_header(*args)
2092 ui.write_header(*args)
2093 if du.hunk[rev]:
2093 if du.hunk[rev]:
2094 count += 1
2094 count += 1
2095 for args in du.hunk[rev]:
2095 for args in du.hunk[rev]:
2096 ui.write(*args)
2096 ui.write(*args)
2097
2097
2098 def manifest(ui, repo, rev=None):
2098 def manifest(ui, repo, rev=None):
2099 """output the latest or given revision of the project manifest
2099 """output the latest or given revision of the project manifest
2100
2100
2101 Print a list of version controlled files for the given revision.
2101 Print a list of version controlled files for the given revision.
2102
2102
2103 The manifest is the list of files being version controlled. If no revision
2103 The manifest is the list of files being version controlled. If no revision
2104 is given then the tip is used.
2104 is given then the tip is used.
2105 """
2105 """
2106 if rev:
2106 if rev:
2107 try:
2107 try:
2108 # assume all revision numbers are for changesets
2108 # assume all revision numbers are for changesets
2109 n = repo.lookup(rev)
2109 n = repo.lookup(rev)
2110 change = repo.changelog.read(n)
2110 change = repo.changelog.read(n)
2111 n = change[0]
2111 n = change[0]
2112 except hg.RepoError:
2112 except hg.RepoError:
2113 n = repo.manifest.lookup(rev)
2113 n = repo.manifest.lookup(rev)
2114 else:
2114 else:
2115 n = repo.manifest.tip()
2115 n = repo.manifest.tip()
2116 m = repo.manifest.read(n)
2116 m = repo.manifest.read(n)
2117 mf = repo.manifest.readflags(n)
2117 mf = repo.manifest.readflags(n)
2118 files = m.keys()
2118 files = m.keys()
2119 files.sort()
2119 files.sort()
2120
2120
2121 for f in files:
2121 for f in files:
2122 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2122 ui.write("%40s %3s %s\n" % (hex(m[f]), mf[f] and "755" or "644", f))
2123
2123
2124 def merge(ui, repo, node=None, **opts):
2124 def merge(ui, repo, node=None, **opts):
2125 """Merge working directory with another revision
2125 """Merge working directory with another revision
2126
2126
2127 Merge the contents of the current working directory and the
2127 Merge the contents of the current working directory and the
2128 requested revision. Files that changed between either parent are
2128 requested revision. Files that changed between either parent are
2129 marked as changed for the next commit and a commit must be
2129 marked as changed for the next commit and a commit must be
2130 performed before any further updates are allowed.
2130 performed before any further updates are allowed.
2131 """
2131 """
2132 return update(ui, repo, node=node, merge=True, **opts)
2132 return update(ui, repo, node=node, merge=True, **opts)
2133
2133
2134 def outgoing(ui, repo, dest="default-push", **opts):
2134 def outgoing(ui, repo, dest="default-push", **opts):
2135 """show changesets not found in destination
2135 """show changesets not found in destination
2136
2136
2137 Show changesets not found in the specified destination repository or
2137 Show changesets not found in the specified destination repository or
2138 the default push location. These are the changesets that would be pushed
2138 the default push location. These are the changesets that would be pushed
2139 if a push was requested.
2139 if a push was requested.
2140
2140
2141 See pull for valid destination format details.
2141 See pull for valid destination format details.
2142 """
2142 """
2143 dest = ui.expandpath(dest)
2143 dest = ui.expandpath(dest)
2144 if opts['ssh']:
2144 if opts['ssh']:
2145 ui.setconfig("ui", "ssh", opts['ssh'])
2145 ui.setconfig("ui", "ssh", opts['ssh'])
2146 if opts['remotecmd']:
2146 if opts['remotecmd']:
2147 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2147 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2148
2148
2149 other = hg.repository(ui, dest)
2149 other = hg.repository(ui, dest)
2150 o = repo.findoutgoing(other, force=opts['force'])
2150 o = repo.findoutgoing(other, force=opts['force'])
2151 if not o:
2151 if not o:
2152 ui.status(_("no changes found\n"))
2152 ui.status(_("no changes found\n"))
2153 return
2153 return
2154 o = repo.changelog.nodesbetween(o)[0]
2154 o = repo.changelog.nodesbetween(o)[0]
2155 if opts['newest_first']:
2155 if opts['newest_first']:
2156 o.reverse()
2156 o.reverse()
2157 displayer = show_changeset(ui, repo, opts)
2157 displayer = show_changeset(ui, repo, opts)
2158 for n in o:
2158 for n in o:
2159 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2159 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2160 if opts['no_merges'] and len(parents) == 2:
2160 if opts['no_merges'] and len(parents) == 2:
2161 continue
2161 continue
2162 displayer.show(changenode=n)
2162 displayer.show(changenode=n)
2163 if opts['patch']:
2163 if opts['patch']:
2164 prev = (parents and parents[0]) or nullid
2164 prev = (parents and parents[0]) or nullid
2165 dodiff(ui, ui, repo, prev, n)
2165 dodiff(ui, ui, repo, prev, n)
2166 ui.write("\n")
2166 ui.write("\n")
2167
2167
2168 def parents(ui, repo, rev=None, branches=None, **opts):
2168 def parents(ui, repo, rev=None, branches=None, **opts):
2169 """show the parents of the working dir or revision
2169 """show the parents of the working dir or revision
2170
2170
2171 Print the working directory's parent revisions.
2171 Print the working directory's parent revisions.
2172 """
2172 """
2173 if rev:
2173 if rev:
2174 p = repo.changelog.parents(repo.lookup(rev))
2174 p = repo.changelog.parents(repo.lookup(rev))
2175 else:
2175 else:
2176 p = repo.dirstate.parents()
2176 p = repo.dirstate.parents()
2177
2177
2178 br = None
2178 br = None
2179 if branches is not None:
2179 if branches is not None:
2180 br = repo.branchlookup(p)
2180 br = repo.branchlookup(p)
2181 displayer = show_changeset(ui, repo, opts)
2181 displayer = show_changeset(ui, repo, opts)
2182 for n in p:
2182 for n in p:
2183 if n != nullid:
2183 if n != nullid:
2184 displayer.show(changenode=n, brinfo=br)
2184 displayer.show(changenode=n, brinfo=br)
2185
2185
2186 def paths(ui, repo, search=None):
2186 def paths(ui, repo, search=None):
2187 """show definition of symbolic path names
2187 """show definition of symbolic path names
2188
2188
2189 Show definition of symbolic path name NAME. If no name is given, show
2189 Show definition of symbolic path name NAME. If no name is given, show
2190 definition of available names.
2190 definition of available names.
2191
2191
2192 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2192 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2193 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2193 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2194 """
2194 """
2195 if search:
2195 if search:
2196 for name, path in ui.configitems("paths"):
2196 for name, path in ui.configitems("paths"):
2197 if name == search:
2197 if name == search:
2198 ui.write("%s\n" % path)
2198 ui.write("%s\n" % path)
2199 return
2199 return
2200 ui.warn(_("not found!\n"))
2200 ui.warn(_("not found!\n"))
2201 return 1
2201 return 1
2202 else:
2202 else:
2203 for name, path in ui.configitems("paths"):
2203 for name, path in ui.configitems("paths"):
2204 ui.write("%s = %s\n" % (name, path))
2204 ui.write("%s = %s\n" % (name, path))
2205
2205
2206 def postincoming(ui, repo, modheads, optupdate):
2206 def postincoming(ui, repo, modheads, optupdate):
2207 if modheads == 0:
2207 if modheads == 0:
2208 return
2208 return
2209 if optupdate:
2209 if optupdate:
2210 if modheads == 1:
2210 if modheads == 1:
2211 return update(ui, repo)
2211 return update(ui, repo)
2212 else:
2212 else:
2213 ui.status(_("not updating, since new heads added\n"))
2213 ui.status(_("not updating, since new heads added\n"))
2214 if modheads > 1:
2214 if modheads > 1:
2215 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2215 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2216 else:
2216 else:
2217 ui.status(_("(run 'hg update' to get a working copy)\n"))
2217 ui.status(_("(run 'hg update' to get a working copy)\n"))
2218
2218
2219 def pull(ui, repo, source="default", **opts):
2219 def pull(ui, repo, source="default", **opts):
2220 """pull changes from the specified source
2220 """pull changes from the specified source
2221
2221
2222 Pull changes from a remote repository to a local one.
2222 Pull changes from a remote repository to a local one.
2223
2223
2224 This finds all changes from the repository at the specified path
2224 This finds all changes from the repository at the specified path
2225 or URL and adds them to the local repository. By default, this
2225 or URL and adds them to the local repository. By default, this
2226 does not update the copy of the project in the working directory.
2226 does not update the copy of the project in the working directory.
2227
2227
2228 Valid URLs are of the form:
2228 Valid URLs are of the form:
2229
2229
2230 local/filesystem/path
2230 local/filesystem/path
2231 http://[user@]host[:port][/path]
2231 http://[user@]host[:port][/path]
2232 https://[user@]host[:port][/path]
2232 https://[user@]host[:port][/path]
2233 ssh://[user@]host[:port][/path]
2233 ssh://[user@]host[:port][/path]
2234
2234
2235 Some notes about using SSH with Mercurial:
2235 Some notes about using SSH with Mercurial:
2236 - SSH requires an accessible shell account on the destination machine
2236 - SSH requires an accessible shell account on the destination machine
2237 and a copy of hg in the remote path, or specified with the remotecmd option.
2237 and a copy of hg in the remote path, or specified with the remotecmd option.
2238 - /path is relative to the remote user's home directory by default.
2238 - /path is relative to the remote user's home directory by default.
2239 Use two slashes at the start of a path to specify an absolute path.
2239 Use two slashes at the start of a path to specify an absolute path.
2240 - Mercurial doesn't use its own compression via SSH; the right thing
2240 - Mercurial doesn't use its own compression via SSH; the right thing
2241 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2241 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2242 Host *.mylocalnetwork.example.com
2242 Host *.mylocalnetwork.example.com
2243 Compression off
2243 Compression off
2244 Host *
2244 Host *
2245 Compression on
2245 Compression on
2246 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2246 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2247 with the --ssh command line option.
2247 with the --ssh command line option.
2248 """
2248 """
2249 source = ui.expandpath(source)
2249 source = ui.expandpath(source)
2250 ui.status(_('pulling from %s\n') % (source))
2250 ui.status(_('pulling from %s\n') % (source))
2251
2251
2252 if opts['ssh']:
2252 if opts['ssh']:
2253 ui.setconfig("ui", "ssh", opts['ssh'])
2253 ui.setconfig("ui", "ssh", opts['ssh'])
2254 if opts['remotecmd']:
2254 if opts['remotecmd']:
2255 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2255 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2256
2256
2257 other = hg.repository(ui, source)
2257 other = hg.repository(ui, source)
2258 revs = None
2258 revs = None
2259 if opts['rev'] and not other.local():
2259 if opts['rev'] and not other.local():
2260 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2260 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2261 elif opts['rev']:
2261 elif opts['rev']:
2262 revs = [other.lookup(rev) for rev in opts['rev']]
2262 revs = [other.lookup(rev) for rev in opts['rev']]
2263 modheads = repo.pull(other, heads=revs, force=opts['force'])
2263 modheads = repo.pull(other, heads=revs, force=opts['force'])
2264 return postincoming(ui, repo, modheads, opts['update'])
2264 return postincoming(ui, repo, modheads, opts['update'])
2265
2265
2266 def push(ui, repo, dest="default-push", **opts):
2266 def push(ui, repo, dest="default-push", **opts):
2267 """push changes to the specified destination
2267 """push changes to the specified destination
2268
2268
2269 Push changes from the local repository to the given destination.
2269 Push changes from the local repository to the given destination.
2270
2270
2271 This is the symmetrical operation for pull. It helps to move
2271 This is the symmetrical operation for pull. It helps to move
2272 changes from the current repository to a different one. If the
2272 changes from the current repository to a different one. If the
2273 destination is local this is identical to a pull in that directory
2273 destination is local this is identical to a pull in that directory
2274 from the current one.
2274 from the current one.
2275
2275
2276 By default, push will refuse to run if it detects the result would
2276 By default, push will refuse to run if it detects the result would
2277 increase the number of remote heads. This generally indicates that
2277 increase the number of remote heads. This generally indicates that
2278 the client has forgotten to sync and merge before pushing.
2278 the client has forgotten to sync and merge before pushing.
2279
2279
2280 Valid URLs are of the form:
2280 Valid URLs are of the form:
2281
2281
2282 local/filesystem/path
2282 local/filesystem/path
2283 ssh://[user@]host[:port][/path]
2283 ssh://[user@]host[:port][/path]
2284
2284
2285 Look at the help text for the pull command for important details
2285 Look at the help text for the pull command for important details
2286 about ssh:// URLs.
2286 about ssh:// URLs.
2287 """
2287 """
2288 dest = ui.expandpath(dest)
2288 dest = ui.expandpath(dest)
2289 ui.status('pushing to %s\n' % (dest))
2289 ui.status('pushing to %s\n' % (dest))
2290
2290
2291 if opts['ssh']:
2291 if opts['ssh']:
2292 ui.setconfig("ui", "ssh", opts['ssh'])
2292 ui.setconfig("ui", "ssh", opts['ssh'])
2293 if opts['remotecmd']:
2293 if opts['remotecmd']:
2294 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2294 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
2295
2295
2296 other = hg.repository(ui, dest)
2296 other = hg.repository(ui, dest)
2297 revs = None
2297 revs = None
2298 if opts['rev']:
2298 if opts['rev']:
2299 revs = [repo.lookup(rev) for rev in opts['rev']]
2299 revs = [repo.lookup(rev) for rev in opts['rev']]
2300 r = repo.push(other, opts['force'], revs=revs)
2300 r = repo.push(other, opts['force'], revs=revs)
2301 return r == 0
2301 return r == 0
2302
2302
2303 def rawcommit(ui, repo, *flist, **rc):
2303 def rawcommit(ui, repo, *flist, **rc):
2304 """raw commit interface (DEPRECATED)
2304 """raw commit interface (DEPRECATED)
2305
2305
2306 (DEPRECATED)
2306 (DEPRECATED)
2307 Low-level commit, for use in helper scripts.
2307 Low-level commit, for use in helper scripts.
2308
2308
2309 This command is not intended to be used by normal users, as it is
2309 This command is not intended to be used by normal users, as it is
2310 primarily useful for importing from other SCMs.
2310 primarily useful for importing from other SCMs.
2311
2311
2312 This command is now deprecated and will be removed in a future
2312 This command is now deprecated and will be removed in a future
2313 release; please use debugsetparents and commit instead.
2313 release; please use debugsetparents and commit instead.
2314 """
2314 """
2315
2315
2316 ui.warn(_("(the rawcommit command is deprecated)\n"))
2316 ui.warn(_("(the rawcommit command is deprecated)\n"))
2317
2317
2318 message = rc['message']
2318 message = rc['message']
2319 if not message and rc['logfile']:
2319 if not message and rc['logfile']:
2320 try:
2320 try:
2321 message = open(rc['logfile']).read()
2321 message = open(rc['logfile']).read()
2322 except IOError:
2322 except IOError:
2323 pass
2323 pass
2324 if not message and not rc['logfile']:
2324 if not message and not rc['logfile']:
2325 raise util.Abort(_("missing commit message"))
2325 raise util.Abort(_("missing commit message"))
2326
2326
2327 files = relpath(repo, list(flist))
2327 files = relpath(repo, list(flist))
2328 if rc['files']:
2328 if rc['files']:
2329 files += open(rc['files']).read().splitlines()
2329 files += open(rc['files']).read().splitlines()
2330
2330
2331 rc['parent'] = map(repo.lookup, rc['parent'])
2331 rc['parent'] = map(repo.lookup, rc['parent'])
2332
2332
2333 try:
2333 try:
2334 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2334 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2335 except ValueError, inst:
2335 except ValueError, inst:
2336 raise util.Abort(str(inst))
2336 raise util.Abort(str(inst))
2337
2337
2338 def recover(ui, repo):
2338 def recover(ui, repo):
2339 """roll back an interrupted transaction
2339 """roll back an interrupted transaction
2340
2340
2341 Recover from an interrupted commit or pull.
2341 Recover from an interrupted commit or pull.
2342
2342
2343 This command tries to fix the repository status after an interrupted
2343 This command tries to fix the repository status after an interrupted
2344 operation. It should only be necessary when Mercurial suggests it.
2344 operation. It should only be necessary when Mercurial suggests it.
2345 """
2345 """
2346 if repo.recover():
2346 if repo.recover():
2347 return repo.verify()
2347 return repo.verify()
2348 return 1
2348 return 1
2349
2349
2350 def remove(ui, repo, pat, *pats, **opts):
2350 def remove(ui, repo, pat, *pats, **opts):
2351 """remove the specified files on the next commit
2351 """remove the specified files on the next commit
2352
2352
2353 Schedule the indicated files for removal from the repository.
2353 Schedule the indicated files for removal from the repository.
2354
2354
2355 This command schedules the files to be removed at the next commit.
2355 This command schedules the files to be removed at the next commit.
2356 This only removes files from the current branch, not from the
2356 This only removes files from the current branch, not from the
2357 entire project history. If the files still exist in the working
2357 entire project history. If the files still exist in the working
2358 directory, they will be deleted from it.
2358 directory, they will be deleted from it.
2359 """
2359 """
2360 names = []
2360 names = []
2361 def okaytoremove(abs, rel, exact):
2361 def okaytoremove(abs, rel, exact):
2362 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2362 modified, added, removed, deleted, unknown = repo.changes(files=[abs])
2363 reason = None
2363 reason = None
2364 if modified and not opts['force']:
2364 if modified and not opts['force']:
2365 reason = _('is modified')
2365 reason = _('is modified')
2366 elif added:
2366 elif added:
2367 reason = _('has been marked for add')
2367 reason = _('has been marked for add')
2368 elif unknown:
2368 elif unknown:
2369 reason = _('is not managed')
2369 reason = _('is not managed')
2370 if reason:
2370 if reason:
2371 if exact:
2371 if exact:
2372 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2372 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2373 else:
2373 else:
2374 return True
2374 return True
2375 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
2375 for src, abs, rel, exact in walk(repo, (pat,) + pats, opts):
2376 if okaytoremove(abs, rel, exact):
2376 if okaytoremove(abs, rel, exact):
2377 if ui.verbose or not exact:
2377 if ui.verbose or not exact:
2378 ui.status(_('removing %s\n') % rel)
2378 ui.status(_('removing %s\n') % rel)
2379 names.append(abs)
2379 names.append(abs)
2380 repo.remove(names, unlink=True)
2380 repo.remove(names, unlink=True)
2381
2381
2382 def rename(ui, repo, *pats, **opts):
2382 def rename(ui, repo, *pats, **opts):
2383 """rename files; equivalent of copy + remove
2383 """rename files; equivalent of copy + remove
2384
2384
2385 Mark dest as copies of sources; mark sources for deletion. If
2385 Mark dest as copies of sources; mark sources for deletion. If
2386 dest is a directory, copies are put in that directory. If dest is
2386 dest is a directory, copies are put in that directory. If dest is
2387 a file, there can only be one source.
2387 a file, there can only be one source.
2388
2388
2389 By default, this command copies the contents of files as they
2389 By default, this command copies the contents of files as they
2390 stand in the working directory. If invoked with --after, the
2390 stand in the working directory. If invoked with --after, the
2391 operation is recorded, but no copying is performed.
2391 operation is recorded, but no copying is performed.
2392
2392
2393 This command takes effect in the next commit.
2393 This command takes effect in the next commit.
2394
2394
2395 NOTE: This command should be treated as experimental. While it
2395 NOTE: This command should be treated as experimental. While it
2396 should properly record renamed files, this information is not yet
2396 should properly record renamed files, this information is not yet
2397 fully used by merge, nor fully reported by log.
2397 fully used by merge, nor fully reported by log.
2398 """
2398 """
2399 wlock = repo.wlock(0)
2399 wlock = repo.wlock(0)
2400 errs, copied = docopy(ui, repo, pats, opts, wlock)
2400 errs, copied = docopy(ui, repo, pats, opts, wlock)
2401 names = []
2401 names = []
2402 for abs, rel, exact in copied:
2402 for abs, rel, exact in copied:
2403 if ui.verbose or not exact:
2403 if ui.verbose or not exact:
2404 ui.status(_('removing %s\n') % rel)
2404 ui.status(_('removing %s\n') % rel)
2405 names.append(abs)
2405 names.append(abs)
2406 repo.remove(names, True, wlock)
2406 repo.remove(names, True, wlock)
2407 return errs
2407 return errs
2408
2408
2409 def revert(ui, repo, *pats, **opts):
2409 def revert(ui, repo, *pats, **opts):
2410 """revert modified files or dirs back to their unmodified states
2410 """revert modified files or dirs back to their unmodified states
2411
2411
2412 In its default mode, it reverts any uncommitted modifications made
2412 In its default mode, it reverts any uncommitted modifications made
2413 to the named files or directories. This restores the contents of
2413 to the named files or directories. This restores the contents of
2414 the affected files to an unmodified state.
2414 the affected files to an unmodified state.
2415
2415
2416 Modified files are saved with a .orig suffix before reverting.
2416 Modified files are saved with a .orig suffix before reverting.
2417 To disable these backups, use --no-backup.
2417 To disable these backups, use --no-backup.
2418
2418
2419 Using the -r option, it reverts the given files or directories to
2419 Using the -r option, it reverts the given files or directories to
2420 their state as of an earlier revision. This can be helpful to "roll
2420 their state as of an earlier revision. This can be helpful to "roll
2421 back" some or all of a change that should not have been committed.
2421 back" some or all of a change that should not have been committed.
2422
2422
2423 Revert modifies the working directory. It does not commit any
2423 Revert modifies the working directory. It does not commit any
2424 changes, or change the parent of the current working directory.
2424 changes, or change the parent of the current working directory.
2425
2425
2426 If a file has been deleted, it is recreated. If the executable
2426 If a file has been deleted, it is recreated. If the executable
2427 mode of a file was changed, it is reset.
2427 mode of a file was changed, it is reset.
2428
2428
2429 If names are given, all files matching the names are reverted.
2429 If names are given, all files matching the names are reverted.
2430
2430
2431 If no arguments are given, all files in the repository are reverted.
2431 If no arguments are given, all files in the repository are reverted.
2432 """
2432 """
2433 parent = repo.dirstate.parents()[0]
2433 parent = repo.dirstate.parents()[0]
2434 node = opts['rev'] and repo.lookup(opts['rev']) or parent
2434 node = opts['rev'] and repo.lookup(opts['rev']) or parent
2435 mf = repo.manifest.read(repo.changelog.read(node)[0])
2435 mf = repo.manifest.read(repo.changelog.read(node)[0])
2436
2436
2437 wlock = repo.wlock()
2437 wlock = repo.wlock()
2438
2438
2439 # need all matching names in dirstate and manifest of target rev,
2439 # need all matching names in dirstate and manifest of target rev,
2440 # so have to walk both. do not print errors if files exist in one
2440 # so have to walk both. do not print errors if files exist in one
2441 # but not other.
2441 # but not other.
2442
2442
2443 names = {}
2443 names = {}
2444 target_only = {}
2444 target_only = {}
2445
2445
2446 # walk dirstate.
2446 # walk dirstate.
2447
2447
2448 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2448 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2449 names[abs] = (rel, exact)
2449 names[abs] = (rel, exact)
2450 if src == 'b':
2450 if src == 'b':
2451 target_only[abs] = True
2451 target_only[abs] = True
2452
2452
2453 # walk target manifest.
2453 # walk target manifest.
2454
2454
2455 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2455 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2456 badmatch=names.has_key):
2456 badmatch=names.has_key):
2457 if abs in names: continue
2457 if abs in names: continue
2458 names[abs] = (rel, exact)
2458 names[abs] = (rel, exact)
2459 target_only[abs] = True
2459 target_only[abs] = True
2460
2460
2461 changes = repo.changes(match=names.has_key, wlock=wlock)
2461 changes = repo.changes(match=names.has_key, wlock=wlock)
2462 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2462 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2463
2463
2464 revert = ([], _('reverting %s\n'))
2464 revert = ([], _('reverting %s\n'))
2465 add = ([], _('adding %s\n'))
2465 add = ([], _('adding %s\n'))
2466 remove = ([], _('removing %s\n'))
2466 remove = ([], _('removing %s\n'))
2467 forget = ([], _('forgetting %s\n'))
2467 forget = ([], _('forgetting %s\n'))
2468 undelete = ([], _('undeleting %s\n'))
2468 undelete = ([], _('undeleting %s\n'))
2469 update = {}
2469 update = {}
2470
2470
2471 disptable = (
2471 disptable = (
2472 # dispatch table:
2472 # dispatch table:
2473 # file state
2473 # file state
2474 # action if in target manifest
2474 # action if in target manifest
2475 # action if not in target manifest
2475 # action if not in target manifest
2476 # make backup if in target manifest
2476 # make backup if in target manifest
2477 # make backup if not in target manifest
2477 # make backup if not in target manifest
2478 (modified, revert, remove, True, True),
2478 (modified, revert, remove, True, True),
2479 (added, revert, forget, True, False),
2479 (added, revert, forget, True, False),
2480 (removed, undelete, None, False, False),
2480 (removed, undelete, None, False, False),
2481 (deleted, revert, remove, False, False),
2481 (deleted, revert, remove, False, False),
2482 (unknown, add, None, True, False),
2482 (unknown, add, None, True, False),
2483 (target_only, add, None, False, False),
2483 (target_only, add, None, False, False),
2484 )
2484 )
2485
2485
2486 entries = names.items()
2486 entries = names.items()
2487 entries.sort()
2487 entries.sort()
2488
2488
2489 for abs, (rel, exact) in entries:
2489 for abs, (rel, exact) in entries:
2490 in_mf = abs in mf
2490 in_mf = abs in mf
2491 def handle(xlist, dobackup):
2491 def handle(xlist, dobackup):
2492 xlist[0].append(abs)
2492 xlist[0].append(abs)
2493 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2493 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2494 bakname = "%s.orig" % rel
2494 bakname = "%s.orig" % rel
2495 ui.note(_('saving current version of %s as %s\n') %
2495 ui.note(_('saving current version of %s as %s\n') %
2496 (rel, bakname))
2496 (rel, bakname))
2497 shutil.copyfile(rel, bakname)
2497 shutil.copyfile(rel, bakname)
2498 shutil.copymode(rel, bakname)
2498 shutil.copymode(rel, bakname)
2499 if ui.verbose or not exact:
2499 if ui.verbose or not exact:
2500 ui.status(xlist[1] % rel)
2500 ui.status(xlist[1] % rel)
2501 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2501 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2502 if abs not in table: continue
2502 if abs not in table: continue
2503 # file has changed in dirstate
2503 # file has changed in dirstate
2504 if in_mf:
2504 if in_mf:
2505 handle(hitlist, backuphit)
2505 handle(hitlist, backuphit)
2506 elif misslist is not None:
2506 elif misslist is not None:
2507 handle(misslist, backupmiss)
2507 handle(misslist, backupmiss)
2508 else:
2508 else:
2509 if exact: ui.warn(_('file not managed: %s\n') % rel)
2509 if exact: ui.warn(_('file not managed: %s\n') % rel)
2510 break
2510 break
2511 else:
2511 else:
2512 # file has not changed in dirstate
2512 # file has not changed in dirstate
2513 if node == parent:
2513 if node == parent:
2514 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2514 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2515 continue
2515 continue
2516 if not in_mf:
2516 if not in_mf:
2517 handle(remove, False)
2517 handle(remove, False)
2518 update[abs] = True
2518 update[abs] = True
2519
2519
2520 repo.dirstate.forget(forget[0])
2520 repo.dirstate.forget(forget[0])
2521 r = repo.update(node, False, True, update.has_key, False, wlock=wlock)
2521 r = repo.update(node, False, True, update.has_key, False, wlock=wlock)
2522 repo.dirstate.update(add[0], 'a')
2522 repo.dirstate.update(add[0], 'a')
2523 repo.dirstate.update(undelete[0], 'n')
2523 repo.dirstate.update(undelete[0], 'n')
2524 repo.dirstate.update(remove[0], 'r')
2524 repo.dirstate.update(remove[0], 'r')
2525 return r
2525 return r
2526
2526
2527 def root(ui, repo):
2527 def root(ui, repo):
2528 """print the root (top) of the current working dir
2528 """print the root (top) of the current working dir
2529
2529
2530 Print the root directory of the current repository.
2530 Print the root directory of the current repository.
2531 """
2531 """
2532 ui.write(repo.root + "\n")
2532 ui.write(repo.root + "\n")
2533
2533
2534 def serve(ui, repo, **opts):
2534 def serve(ui, repo, **opts):
2535 """export the repository via HTTP
2535 """export the repository via HTTP
2536
2536
2537 Start a local HTTP repository browser and pull server.
2537 Start a local HTTP repository browser and pull server.
2538
2538
2539 By default, the server logs accesses to stdout and errors to
2539 By default, the server logs accesses to stdout and errors to
2540 stderr. Use the "-A" and "-E" options to log to files.
2540 stderr. Use the "-A" and "-E" options to log to files.
2541 """
2541 """
2542
2542
2543 if opts["stdio"]:
2543 if opts["stdio"]:
2544 if repo is None:
2544 if repo is None:
2545 raise hg.RepoError(_('no repo found'))
2545 raise hg.RepoError(_('no repo found'))
2546 fin, fout = sys.stdin, sys.stdout
2546 fin, fout = sys.stdin, sys.stdout
2547 sys.stdout = sys.stderr
2547 sys.stdout = sys.stderr
2548
2548
2549 # Prevent insertion/deletion of CRs
2549 # Prevent insertion/deletion of CRs
2550 util.set_binary(fin)
2550 util.set_binary(fin)
2551 util.set_binary(fout)
2551 util.set_binary(fout)
2552
2552
2553 def getarg():
2553 def getarg():
2554 argline = fin.readline()[:-1]
2554 argline = fin.readline()[:-1]
2555 arg, l = argline.split()
2555 arg, l = argline.split()
2556 val = fin.read(int(l))
2556 val = fin.read(int(l))
2557 return arg, val
2557 return arg, val
2558 def respond(v):
2558 def respond(v):
2559 fout.write("%d\n" % len(v))
2559 fout.write("%d\n" % len(v))
2560 fout.write(v)
2560 fout.write(v)
2561 fout.flush()
2561 fout.flush()
2562
2562
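# Wire format, as implemented by getarg() and respond() above: each request
# argument arrives as a "name length" line followed by exactly that many
# bytes of value, and every reply is a length line followed by the payload.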
2563 lock = None
2563 lock = None
2564
2564
2565 while 1:
2565 while 1:
2566 cmd = fin.readline()[:-1]
2566 cmd = fin.readline()[:-1]
2567 if cmd == '':
2567 if cmd == '':
2568 return
2568 return
2569 if cmd == "heads":
2569 if cmd == "heads":
2570 h = repo.heads()
2570 h = repo.heads()
2571 respond(" ".join(map(hex, h)) + "\n")
2571 respond(" ".join(map(hex, h)) + "\n")
2572 if cmd == "lock":
2572 if cmd == "lock":
2573 lock = repo.lock()
2573 lock = repo.lock()
2574 respond("")
2574 respond("")
2575 if cmd == "unlock":
2575 if cmd == "unlock":
2576 if lock:
2576 if lock:
2577 lock.release()
2577 lock.release()
2578 lock = None
2578 lock = None
2579 respond("")
2579 respond("")
2580 elif cmd == "branches":
2580 elif cmd == "branches":
2581 arg, nodes = getarg()
2581 arg, nodes = getarg()
2582 nodes = map(bin, nodes.split(" "))
2582 nodes = map(bin, nodes.split(" "))
2583 r = []
2583 r = []
2584 for b in repo.branches(nodes):
2584 for b in repo.branches(nodes):
2585 r.append(" ".join(map(hex, b)) + "\n")
2585 r.append(" ".join(map(hex, b)) + "\n")
2586 respond("".join(r))
2586 respond("".join(r))
2587 elif cmd == "between":
2587 elif cmd == "between":
2588 arg, pairs = getarg()
2588 arg, pairs = getarg()
2589 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2589 pairs = [map(bin, p.split("-")) for p in pairs.split(" ")]
2590 r = []
2590 r = []
2591 for b in repo.between(pairs):
2591 for b in repo.between(pairs):
2592 r.append(" ".join(map(hex, b)) + "\n")
2592 r.append(" ".join(map(hex, b)) + "\n")
2593 respond("".join(r))
2593 respond("".join(r))
2594 elif cmd == "changegroup":
2594 elif cmd == "changegroup":
2595 nodes = []
2595 nodes = []
2596 arg, roots = getarg()
2596 arg, roots = getarg()
2597 nodes = map(bin, roots.split(" "))
2597 nodes = map(bin, roots.split(" "))
2598
2598
2599 cg = repo.changegroup(nodes, 'serve')
2599 cg = repo.changegroup(nodes, 'serve')
2600 while 1:
2600 while 1:
2601 d = cg.read(4096)
2601 d = cg.read(4096)
2602 if not d:
2602 if not d:
2603 break
2603 break
2604 fout.write(d)
2604 fout.write(d)
2605
2605
2606 fout.flush()
2606 fout.flush()
2607
2607
2608 elif cmd == "addchangegroup":
2608 elif cmd == "addchangegroup":
2609 if not lock:
2609 if not lock:
2610 respond("not locked")
2610 respond("not locked")
2611 continue
2611 continue
2612 respond("")
2612 respond("")
2613
2613
2614 r = repo.addchangegroup(fin)
2614 r = repo.addchangegroup(fin)
2615 respond(str(r))
2615 respond(str(r))
2616
2616
2617 optlist = ("name templates style address port ipv6"
2617 optlist = ("name templates style address port ipv6"
2618 " accesslog errorlog webdir_conf")
2618 " accesslog errorlog webdir_conf")
2619 for o in optlist.split():
2619 for o in optlist.split():
2620 if opts[o]:
2620 if opts[o]:
2621 ui.setconfig("web", o, opts[o])
2621 ui.setconfig("web", o, opts[o])
2622
2622
2623 if repo is None and not ui.config("web", "webdir_conf"):
2623 if repo is None and not ui.config("web", "webdir_conf"):
2624 raise hg.RepoError(_('no repo found'))
2624 raise hg.RepoError(_('no repo found'))
2625
2625
2626 if opts['daemon'] and not opts['daemon_pipefds']:
2626 if opts['daemon'] and not opts['daemon_pipefds']:
2627 rfd, wfd = os.pipe()
2627 rfd, wfd = os.pipe()
2628 args = sys.argv[:]
2628 args = sys.argv[:]
2629 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2629 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2630 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2630 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2631 args[0], args)
2631 args[0], args)
2632 os.close(wfd)
2632 os.close(wfd)
2633 os.read(rfd, 1)
2633 os.read(rfd, 1)
2634 os._exit(0)
2634 os._exit(0)
2635
2635
2636 try:
2636 try:
2637 httpd = hgweb.create_server(ui, repo)
2637 httpd = hgweb.create_server(ui, repo)
2638 except socket.error, inst:
2638 except socket.error, inst:
2639 raise util.Abort(_('cannot start server: ') + inst.args[1])
2639 raise util.Abort(_('cannot start server: ') + inst.args[1])
2640
2640
2641 if ui.verbose:
2641 if ui.verbose:
2642 addr, port = httpd.socket.getsockname()
2642 addr, port = httpd.socket.getsockname()
2643 if addr == '0.0.0.0':
2643 if addr == '0.0.0.0':
2644 addr = socket.gethostname()
2644 addr = socket.gethostname()
2645 else:
2645 else:
2646 try:
2646 try:
2647 addr = socket.gethostbyaddr(addr)[0]
2647 addr = socket.gethostbyaddr(addr)[0]
2648 except socket.error:
2648 except socket.error:
2649 pass
2649 pass
2650 if port != 80:
2650 if port != 80:
2651 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2651 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2652 else:
2652 else:
2653 ui.status(_('listening at http://%s/\n') % addr)
2653 ui.status(_('listening at http://%s/\n') % addr)
2654
2654
2655 if opts['pid_file']:
2655 if opts['pid_file']:
2656 fp = open(opts['pid_file'], 'w')
2656 fp = open(opts['pid_file'], 'w')
2657 fp.write(str(os.getpid()))
2657 fp.write(str(os.getpid()))
2658 fp.close()
2658 fp.close()
2659
2659
2660 if opts['daemon_pipefds']:
2660 if opts['daemon_pipefds']:
2661 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2661 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2662 os.close(rfd)
2662 os.close(rfd)
2663 os.write(wfd, 'y')
2663 os.write(wfd, 'y')
2664 os.close(wfd)
2664 os.close(wfd)
2665 sys.stdout.flush()
2665 sys.stdout.flush()
2666 sys.stderr.flush()
2666 sys.stderr.flush()
2667 fd = os.open(util.nulldev, os.O_RDWR)
2667 fd = os.open(util.nulldev, os.O_RDWR)
2668 if fd != 0: os.dup2(fd, 0)
2668 if fd != 0: os.dup2(fd, 0)
2669 if fd != 1: os.dup2(fd, 1)
2669 if fd != 1: os.dup2(fd, 1)
2670 if fd != 2: os.dup2(fd, 2)
2670 if fd != 2: os.dup2(fd, 2)
2671 if fd not in (0, 1, 2): os.close(fd)
2671 if fd not in (0, 1, 2): os.close(fd)
2672
2672
2673 httpd.serve_forever()
2673 httpd.serve_forever()
2674
2674
2675 def status(ui, repo, *pats, **opts):
2675 def status(ui, repo, *pats, **opts):
2676 """show changed files in the working directory
2676 """show changed files in the working directory
2677
2677
2678 Show changed files in the repository. If names are
2678 Show changed files in the repository. If names are
2679 given, only files that match are shown.
2679 given, only files that match are shown.
2680
2680
2681 The codes used to show the status of files are:
2681 The codes used to show the status of files are:
2682 M = modified
2682 M = modified
2683 A = added
2683 A = added
2684 R = removed
2684 R = removed
2685 ! = deleted, but still tracked
2685 ! = deleted, but still tracked
2686 ? = not tracked
2686 ? = not tracked
2687 I = ignored (not shown by default)
2687 I = ignored (not shown by default)
2688 """
2688 """
2689
2689
2690 show_ignored = opts['ignored'] and True or False
2690 show_ignored = opts['ignored'] and True or False
2691 files, matchfn, anypats = matchpats(repo, pats, opts)
2691 files, matchfn, anypats = matchpats(repo, pats, opts)
2692 cwd = (pats and repo.getcwd()) or ''
2692 cwd = (pats and repo.getcwd()) or ''
2693 modified, added, removed, deleted, unknown, ignored = [
2693 modified, added, removed, deleted, unknown, ignored = [
2694 [util.pathto(cwd, x) for x in n]
2694 [util.pathto(cwd, x) for x in n]
2695 for n in repo.changes(files=files, match=matchfn,
2695 for n in repo.changes(files=files, match=matchfn,
2696 show_ignored=show_ignored)]
2696 show_ignored=show_ignored)]
2697
2697
2698 changetypes = [('modified', 'M', modified),
2698 changetypes = [('modified', 'M', modified),
2699 ('added', 'A', added),
2699 ('added', 'A', added),
2700 ('removed', 'R', removed),
2700 ('removed', 'R', removed),
2701 ('deleted', '!', deleted),
2701 ('deleted', '!', deleted),
2702 ('unknown', '?', unknown),
2702 ('unknown', '?', unknown),
2703 ('ignored', 'I', ignored)]
2703 ('ignored', 'I', ignored)]
2704
2704
2705 end = opts['print0'] and '\0' or '\n'
2705 end = opts['print0'] and '\0' or '\n'
2706
2706
2707 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2707 for opt, char, changes in ([ct for ct in changetypes if opts[ct[0]]]
2708 or changetypes):
2708 or changetypes):
2709 if opts['no_status']:
2709 if opts['no_status']:
2710 format = "%%s%s" % end
2710 format = "%%s%s" % end
2711 else:
2711 else:
2712 format = "%s %%s%s" % (char, end)
2712 format = "%s %%s%s" % (char, end)
2713
2713
2714 for f in changes:
2714 for f in changes:
2715 ui.write(format % f)
2715 ui.write(format % f)
2716
2716
2717 def tag(ui, repo, name, rev_=None, **opts):
2717 def tag(ui, repo, name, rev_=None, **opts):
2718 """add a tag for the current tip or a given revision
2718 """add a tag for the current tip or a given revision
2719
2719
2720 Name a particular revision using <name>.
2720 Name a particular revision using <name>.
2721
2721
2722 Tags are used to name particular revisions of the repository and are
2722 Tags are used to name particular revisions of the repository and are
2723 very useful to compare different revisions, to go back to significant
2723 very useful to compare different revisions, to go back to significant
2724 earlier versions, or to mark branch points as releases, etc.
2724 earlier versions, or to mark branch points as releases, etc.
2725
2725
2726 If no revision is given, the tip is used.
2726 If no revision is given, the tip is used.
2727
2727
2728 To facilitate version control, distribution, and merging of tags,
2728 To facilitate version control, distribution, and merging of tags,
2729 they are stored as a file named ".hgtags" which is managed
2729 they are stored as a file named ".hgtags" which is managed
2730 similarly to other project files and can be hand-edited if
2730 similarly to other project files and can be hand-edited if
2731 necessary. The file '.hg/localtags' is used for local tags (not
2731 necessary. The file '.hg/localtags' is used for local tags (not
2732 shared among repositories).
2732 shared among repositories).
2733 """
2733 """
2734 if name == "tip":
2734 if name == "tip":
2735 raise util.Abort(_("the name 'tip' is reserved"))
2735 raise util.Abort(_("the name 'tip' is reserved"))
2736 if rev_ is not None:
2736 if rev_ is not None:
2737 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2737 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2738 "please use 'hg tag [-r REV] NAME' instead\n"))
2738 "please use 'hg tag [-r REV] NAME' instead\n"))
2739 if opts['rev']:
2739 if opts['rev']:
2740 raise util.Abort(_("use only one form to specify the revision"))
2740 raise util.Abort(_("use only one form to specify the revision"))
2741 if opts['rev']:
2741 if opts['rev']:
2742 rev_ = opts['rev']
2742 rev_ = opts['rev']
2743 if rev_:
2743 if rev_:
2744 r = hex(repo.lookup(rev_))
2744 r = hex(repo.lookup(rev_))
2745 else:
2745 else:
2746 r = hex(repo.changelog.tip())
2746 r = hex(repo.changelog.tip())
2747
2747
2748 disallowed = (revrangesep, '\r', '\n')
2748 disallowed = (revrangesep, '\r', '\n')
2749 for c in disallowed:
2749 for c in disallowed:
2750 if name.find(c) >= 0:
2750 if name.find(c) >= 0:
2751 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2751 raise util.Abort(_("%s cannot be used in a tag name") % repr(c))
2752
2752
2753 repo.hook('pretag', throw=True, node=r, tag=name,
2753 repo.hook('pretag', throw=True, node=r, tag=name,
2754 local=int(not not opts['local']))
2754 local=int(not not opts['local']))
2755
2755
2756 if opts['local']:
2756 if opts['local']:
2757 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2757 repo.opener("localtags", "a").write("%s %s\n" % (r, name))
2758 repo.hook('tag', node=r, tag=name, local=1)
2758 repo.hook('tag', node=r, tag=name, local=1)
2759 return
2759 return
2760
2760
2761 for x in repo.changes():
2761 for x in repo.changes():
2762 if ".hgtags" in x:
2762 if ".hgtags" in x:
2763 raise util.Abort(_("working copy of .hgtags is changed "
2763 raise util.Abort(_("working copy of .hgtags is changed "
2764 "(please commit .hgtags manually)"))
2764 "(please commit .hgtags manually)"))
2765
2765
2766 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2766 repo.wfile(".hgtags", "ab").write("%s %s\n" % (r, name))
2767 if repo.dirstate.state(".hgtags") == '?':
2767 if repo.dirstate.state(".hgtags") == '?':
2768 repo.add([".hgtags"])
2768 repo.add([".hgtags"])
2769
2769
2770 message = (opts['message'] or
2770 message = (opts['message'] or
2771 _("Added tag %s for changeset %s") % (name, r))
2771 _("Added tag %s for changeset %s") % (name, r))
2772 try:
2772 try:
2773 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2773 repo.commit([".hgtags"], message, opts['user'], opts['date'])
2774 repo.hook('tag', node=r, tag=name, local=0)
2774 repo.hook('tag', node=r, tag=name, local=0)
2775 except ValueError, inst:
2775 except ValueError, inst:
2776 raise util.Abort(str(inst))
2776 raise util.Abort(str(inst))
2777
2777
2778 def tags(ui, repo):
2778 def tags(ui, repo):
2779 """list repository tags
2779 """list repository tags
2780
2780
2781 List the repository tags.
2781 List the repository tags.
2782
2782
2783 This lists both regular and local tags.
2783 This lists both regular and local tags.
2784 """
2784 """
2785
2785
2786 l = repo.tagslist()
2786 l = repo.tagslist()
2787 l.reverse()
2787 l.reverse()
2788 for t, n in l:
2788 for t, n in l:
2789 try:
2789 try:
2790 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2790 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2791 except KeyError:
2791 except KeyError:
2792 r = " ?:?"
2792 r = " ?:?"
2793 if ui.quiet:
2793 if ui.quiet:
2794 ui.write("%s\n" % t)
2794 ui.write("%s\n" % t)
2795 else:
2795 else:
2796 ui.write("%-30s %s\n" % (t, r))
2796 ui.write("%-30s %s\n" % (t, r))
2797
2797
2798 def tip(ui, repo, **opts):
2798 def tip(ui, repo, **opts):
2799 """show the tip revision
2799 """show the tip revision
2800
2800
2801 Show the tip revision.
2801 Show the tip revision.
2802 """
2802 """
2803 n = repo.changelog.tip()
2803 n = repo.changelog.tip()
2804 br = None
2804 br = None
2805 if opts['branches']:
2805 if opts['branches']:
2806 br = repo.branchlookup([n])
2806 br = repo.branchlookup([n])
2807 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2807 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2808 if opts['patch']:
2808 if opts['patch']:
2809 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2809 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2810
2810
2811 def unbundle(ui, repo, fname, **opts):
2811 def unbundle(ui, repo, fname, **opts):
2812 """apply a changegroup file
2812 """apply a changegroup file
2813
2813
2814 Apply a compressed changegroup file generated by the bundle
2814 Apply a compressed changegroup file generated by the bundle
2815 command.
2815 command.
2816 """
2816 """
2817 f = urllib.urlopen(fname)
2817 f = urllib.urlopen(fname)
2818
2818
2819 header = f.read(6)
2819 header = f.read(6)
2820 if not header.startswith("HG"):
2820 if not header.startswith("HG"):
2821 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2821 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2822 elif not header.startswith("HG10"):
2822 elif not header.startswith("HG10"):
2823 raise util.Abort(_("%s: unknown bundle version") % fname)
2823 raise util.Abort(_("%s: unknown bundle version") % fname)
2824 elif header == "HG10BZ":
2824 elif header == "HG10BZ":
2825 def generator(f):
2825 def generator(f):
2826 zd = bz2.BZ2Decompressor()
2826 zd = bz2.BZ2Decompressor()
2827 zd.decompress("BZ")
2827 zd.decompress("BZ")
2828 for chunk in f:
2828 for chunk in f:
2829 yield zd.decompress(chunk)
2829 yield zd.decompress(chunk)
2830 elif header == "HG10UN":
2830 elif header == "HG10UN":
2831 def generator(f):
2831 def generator(f):
2832 for chunk in f:
2832 for chunk in f:
2833 yield chunk
2833 yield chunk
2834 else:
2834 else:
2835 raise util.Abort(_("%s: unknown bundle compression type")
2835 raise util.Abort(_("%s: unknown bundle compression type")
2836 % fname)
2836 % fname)
2837 gen = generator(util.filechunkiter(f, 4096))
2837 gen = generator(util.filechunkiter(f, 4096))
2838 modheads = repo.addchangegroup(util.chunkbuffer(gen))
2838 modheads = repo.addchangegroup(util.chunkbuffer(gen))
2839 return postincoming(ui, repo, modheads, opts['update'])
2839 return postincoming(ui, repo, modheads, opts['update'])
2840
2840
2841 def undo(ui, repo):
2841 def undo(ui, repo):
2842 """undo the last commit or pull
2842 """undo the last commit or pull
2843
2843
2844 Roll back the last pull or commit transaction on the
2844 Roll back the last pull or commit transaction on the
2845 repository, restoring the project to its earlier state.
2845 repository, restoring the project to its earlier state.
2846
2846
2847 This command should be used with care. There is only one level of
2847 This command should be used with care. There is only one level of
2848 undo and there is no redo.
2848 undo and there is no redo.
2849
2849
2850 This command is not intended for use on public repositories. Once
2850 This command is not intended for use on public repositories. Once
2851 a change is visible for pull by other users, undoing it locally is
2851 a change is visible for pull by other users, undoing it locally is
2852 ineffective. Furthermore, a race is possible with readers of the
2852 ineffective. Furthermore, a race is possible with readers of the
2853 repository; for example, an ongoing pull from the repository will
2853 repository; for example, an ongoing pull from the repository will
2854 fail and roll back.
2854 fail and roll back.
2855 """
2855 """
2856 repo.undo()
2856 repo.undo()
2857
2857
2858 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2858 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2859 branch=None, **opts):
2859 branch=None, **opts):
2860 """update or merge working directory
2860 """update or merge working directory
2861
2861
2862 Update the working directory to the specified revision.
2862 Update the working directory to the specified revision.
2863
2863
2864 If there are no outstanding changes in the working directory and
2864 If there are no outstanding changes in the working directory and
2865 there is a linear relationship between the current version and the
2865 there is a linear relationship between the current version and the
2866 requested version, the result is the requested version.
2866 requested version, the result is the requested version.
2867
2867
2868 Otherwise the result is a merge between the contents of the
2868 Otherwise the result is a merge between the contents of the
2869 current working directory and the requested version. Files that
2869 current working directory and the requested version. Files that
2870 differ from either parent are marked as changed for the next
2870 differ from either parent are marked as changed for the next
2871 commit, and a commit must be performed before any further updates
2871 commit, and a commit must be performed before any further updates
2872 are allowed.
2872 are allowed.
2873
2873
2874 By default, update will refuse to run if doing so would require
2874 By default, update will refuse to run if doing so would require
2875 merging or discarding local changes.
2875 merging or discarding local changes.
2876 """
2876 """
2877 if branch:
2877 if branch:
2878 br = repo.branchlookup(branch=branch)
2878 br = repo.branchlookup(branch=branch)
2879 found = []
2879 found = []
2880 for x in br:
2880 for x in br:
2881 if branch in br[x]:
2881 if branch in br[x]:
2882 found.append(x)
2882 found.append(x)
2883 if len(found) > 1:
2883 if len(found) > 1:
2884 ui.warn(_("Found multiple heads for %s\n") % branch)
2884 ui.warn(_("Found multiple heads for %s\n") % branch)
2885 for x in found:
2885 for x in found:
2886 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2886 show_changeset(ui, repo, opts).show(changenode=x, brinfo=br)
2887 return 1
2887 return 1
2888 if len(found) == 1:
2888 if len(found) == 1:
2889 node = found[0]
2889 node = found[0]
2890 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2890 ui.warn(_("Using head %s for branch %s\n") % (short(node), branch))
2891 else:
2891 else:
2892 ui.warn(_("branch %s not found\n") % (branch))
2892 ui.warn(_("branch %s not found\n") % (branch))
2893 return 1
2893 return 1
2894 else:
2894 else:
2895 node = node and repo.lookup(node) or repo.changelog.tip()
2895 node = node and repo.lookup(node) or repo.changelog.tip()
2896 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2896 return repo.update(node, allow=merge, force=clean, forcemerge=force)
2897
2897
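As a side note on the branch handling just above: repo.branchlookup() yields a mapping from head nodes to the branch names attached to them, and the loop simply keeps every head whose name list contains the requested branch, warning when more than one survives. Below is a standalone sketch of that selection step, with an invented node-to-names dict standing in for the real branchlookup() result.

    # Sketch of the head-selection step in update()'s branch handling.
    # 'branchmap' is a stand-in for repo.branchlookup(branch=...); the node
    # strings and branch names below are invented for illustration only.
    def heads_for_branch(branchmap, branch):
        return [node for node, names in branchmap.items() if branch in names]

    branchmap = {
        "a1b2c3": ["stable"],
        "d4e5f6": ["default", "stable"],
        "0f9e8d": ["default"],
    }
    print(heads_for_branch(branchmap, "stable"))   # two heads -> ambiguous, warn
    print(heads_for_branch(branchmap, "crew"))     # no head -> branch not found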
2898 def verify(ui, repo):
2898 def verify(ui, repo):
2899 """verify the integrity of the repository
2899 """verify the integrity of the repository
2900
2900
2901 Verify the integrity of the current repository.
2901 Verify the integrity of the current repository.
2902
2902
2903 This will perform an extensive check of the repository's
2903 This will perform an extensive check of the repository's
2904 integrity, validating the hashes and checksums of each entry in
2904 integrity, validating the hashes and checksums of each entry in
2905 the changelog, manifest, and tracked files, as well as the
2905 the changelog, manifest, and tracked files, as well as the
2906 integrity of their crosslinks and indices.
2906 integrity of their crosslinks and indices.
2907 """
2907 """
2908 return repo.verify()
2908 return repo.verify()
2909
2909
2910 # Command options and aliases are listed here, alphabetically
2910 # Command options and aliases are listed here, alphabetically
2911
2911
2912 table = {
2912 table = {
2913 "^add":
2913 "^add":
2914 (add,
2914 (add,
2915 [('I', 'include', [], _('include names matching the given patterns')),
2915 [('I', 'include', [], _('include names matching the given patterns')),
2916 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2916 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2917 _('hg add [OPTION]... [FILE]...')),
2917 _('hg add [OPTION]... [FILE]...')),
2918 "addremove":
2918 "addremove":
2919 (addremove,
2919 (addremove,
2920 [('I', 'include', [], _('include names matching the given patterns')),
2920 [('I', 'include', [], _('include names matching the given patterns')),
2921 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2921 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2922 _('hg addremove [OPTION]... [FILE]...')),
2922 _('hg addremove [OPTION]... [FILE]...')),
2923 "^annotate":
2923 "^annotate":
2924 (annotate,
2924 (annotate,
2925 [('r', 'rev', '', _('annotate the specified revision')),
2925 [('r', 'rev', '', _('annotate the specified revision')),
2926 ('a', 'text', None, _('treat all files as text')),
2926 ('a', 'text', None, _('treat all files as text')),
2927 ('u', 'user', None, _('list the author')),
2927 ('u', 'user', None, _('list the author')),
2928 ('d', 'date', None, _('list the date')),
2928 ('d', 'date', None, _('list the date')),
2929 ('n', 'number', None, _('list the revision number (default)')),
2929 ('n', 'number', None, _('list the revision number (default)')),
2930 ('c', 'changeset', None, _('list the changeset')),
2930 ('c', 'changeset', None, _('list the changeset')),
2931 ('I', 'include', [], _('include names matching the given patterns')),
2931 ('I', 'include', [], _('include names matching the given patterns')),
2932 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2932 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2933 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2933 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2934 "archive":
2934 "archive":
2935 (archive,
2935 (archive,
2936 [('', 'no-decode', None, _('do not pass files through decoders')),
2936 [('', 'no-decode', None, _('do not pass files through decoders')),
2937 ('p', 'prefix', '', _('directory prefix for files in archive')),
2937 ('p', 'prefix', '', _('directory prefix for files in archive')),
2938 ('r', 'rev', '', _('revision to distribute')),
2938 ('r', 'rev', '', _('revision to distribute')),
2939 ('t', 'type', '', _('type of distribution to create')),
2939 ('t', 'type', '', _('type of distribution to create')),
2940 ('I', 'include', [], _('include names matching the given patterns')),
2940 ('I', 'include', [], _('include names matching the given patterns')),
2941 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2941 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2942 _('hg archive [OPTION]... DEST')),
2942 _('hg archive [OPTION]... DEST')),
2943 "backout":
2943 "backout":
2944 (backout,
2944 (backout,
2945 [('', 'merge', None,
2945 [('', 'merge', None,
2946 _('merge with old dirstate parent after backout')),
2946 _('merge with old dirstate parent after backout')),
2947 ('m', 'message', '', _('use <text> as commit message')),
2947 ('m', 'message', '', _('use <text> as commit message')),
2948 ('l', 'logfile', '', _('read commit message from <file>')),
2948 ('l', 'logfile', '', _('read commit message from <file>')),
2949 ('d', 'date', '', _('record datecode as commit date')),
2949 ('d', 'date', '', _('record datecode as commit date')),
2950 ('u', 'user', '', _('record user as committer')),
2950 ('u', 'user', '', _('record user as committer')),
2951 ('I', 'include', [], _('include names matching the given patterns')),
2951 ('I', 'include', [], _('include names matching the given patterns')),
2952 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2952 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2953 _('hg backout [OPTION]... REV')),
2953 _('hg backout [OPTION]... REV')),
2954 "bundle":
2954 "bundle":
2955 (bundle,
2955 (bundle,
2956 [('f', 'force', None,
2956 [('f', 'force', None,
2957 _('run even when remote repository is unrelated'))],
2957 _('run even when remote repository is unrelated'))],
2958 _('hg bundle FILE DEST')),
2958 _('hg bundle FILE DEST')),
2959 "cat":
2959 "cat":
2960 (cat,
2960 (cat,
2961 [('o', 'output', '', _('print output to file with formatted name')),
2961 [('o', 'output', '', _('print output to file with formatted name')),
2962 ('r', 'rev', '', _('print the given revision')),
2962 ('r', 'rev', '', _('print the given revision')),
2963 ('I', 'include', [], _('include names matching the given patterns')),
2963 ('I', 'include', [], _('include names matching the given patterns')),
2964 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2964 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2965 _('hg cat [OPTION]... FILE...')),
2965 _('hg cat [OPTION]... FILE...')),
2966 "^clone":
2966 "^clone":
2967 (clone,
2967 (clone,
2968 [('U', 'noupdate', None, _('do not update the new working directory')),
2968 [('U', 'noupdate', None, _('do not update the new working directory')),
2969 ('r', 'rev', [],
2969 ('r', 'rev', [],
2970 _('a changeset you would like to have after cloning')),
2970 _('a changeset you would like to have after cloning')),
2971 ('', 'pull', None, _('use pull protocol to copy metadata')),
2971 ('', 'pull', None, _('use pull protocol to copy metadata')),
2972 ('e', 'ssh', '', _('specify ssh command to use')),
2972 ('e', 'ssh', '', _('specify ssh command to use')),
2973 ('', 'remotecmd', '',
2973 ('', 'remotecmd', '',
2974 _('specify hg command to run on the remote side'))],
2974 _('specify hg command to run on the remote side'))],
2975 _('hg clone [OPTION]... SOURCE [DEST]')),
2975 _('hg clone [OPTION]... SOURCE [DEST]')),
2976 "^commit|ci":
2976 "^commit|ci":
2977 (commit,
2977 (commit,
2978 [('A', 'addremove', None, _('run addremove during commit')),
2978 [('A', 'addremove', None, _('run addremove during commit')),
2979 ('m', 'message', '', _('use <text> as commit message')),
2979 ('m', 'message', '', _('use <text> as commit message')),
2980 ('l', 'logfile', '', _('read the commit message from <file>')),
2980 ('l', 'logfile', '', _('read the commit message from <file>')),
2981 ('d', 'date', '', _('record datecode as commit date')),
2981 ('d', 'date', '', _('record datecode as commit date')),
2982 ('u', 'user', '', _('record user as committer')),
2982 ('u', 'user', '', _('record user as committer')),
2983 ('I', 'include', [], _('include names matching the given patterns')),
2983 ('I', 'include', [], _('include names matching the given patterns')),
2984 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2984 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2985 _('hg commit [OPTION]... [FILE]...')),
2985 _('hg commit [OPTION]... [FILE]...')),
2986 "copy|cp":
2986 "copy|cp":
2987 (copy,
2987 (copy,
2988 [('A', 'after', None, _('record a copy that has already occurred')),
2988 [('A', 'after', None, _('record a copy that has already occurred')),
2989 ('f', 'force', None,
2989 ('f', 'force', None,
2990 _('forcibly copy over an existing managed file')),
2990 _('forcibly copy over an existing managed file')),
2991 ('I', 'include', [], _('include names matching the given patterns')),
2991 ('I', 'include', [], _('include names matching the given patterns')),
2992 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2992 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2993 _('hg copy [OPTION]... [SOURCE]... DEST')),
2993 _('hg copy [OPTION]... [SOURCE]... DEST')),
2994 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2994 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2995 "debugcomplete":
2995 "debugcomplete":
2996 (debugcomplete,
2996 (debugcomplete,
2997 [('o', 'options', None, _('show the command options'))],
2997 [('o', 'options', None, _('show the command options'))],
2998 _('debugcomplete [-o] CMD')),
2998 _('debugcomplete [-o] CMD')),
2999 "debugrebuildstate":
2999 "debugrebuildstate":
3000 (debugrebuildstate,
3000 (debugrebuildstate,
3001 [('r', 'rev', '', _('revision to rebuild to'))],
3001 [('r', 'rev', '', _('revision to rebuild to'))],
3002 _('debugrebuildstate [-r REV] [REV]')),
3002 _('debugrebuildstate [-r REV] [REV]')),
3003 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3003 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3004 "debugconfig": (debugconfig, [], _('debugconfig')),
3004 "debugconfig": (debugconfig, [], _('debugconfig')),
3005 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3005 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3006 "debugstate": (debugstate, [], _('debugstate')),
3006 "debugstate": (debugstate, [], _('debugstate')),
3007 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3007 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3008 "debugindex": (debugindex, [], _('debugindex FILE')),
3008 "debugindex": (debugindex, [], _('debugindex FILE')),
3009 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3009 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3010 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3010 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3011 "debugwalk":
3011 "debugwalk":
3012 (debugwalk,
3012 (debugwalk,
3013 [('I', 'include', [], _('include names matching the given patterns')),
3013 [('I', 'include', [], _('include names matching the given patterns')),
3014 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3014 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3015 _('debugwalk [OPTION]... [FILE]...')),
3015 _('debugwalk [OPTION]... [FILE]...')),
3016 "^diff":
3016 "^diff":
3017 (diff,
3017 (diff,
3018 [('r', 'rev', [], _('revision')),
3018 [('r', 'rev', [], _('revision')),
3019 ('a', 'text', None, _('treat all files as text')),
3019 ('a', 'text', None, _('treat all files as text')),
3020 ('p', 'show-function', None,
3020 ('p', 'show-function', None,
3021 _('show which function each change is in')),
3021 _('show which function each change is in')),
3022 ('w', 'ignore-all-space', None,
3022 ('w', 'ignore-all-space', None,
3023 _('ignore white space when comparing lines')),
3023 _('ignore white space when comparing lines')),
3024 ('I', 'include', [], _('include names matching the given patterns')),
3024 ('I', 'include', [], _('include names matching the given patterns')),
3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3025 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3026 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3026 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3027 "^export":
3027 "^export":
3028 (export,
3028 (export,
3029 [('o', 'output', '', _('print output to file with formatted name')),
3029 [('o', 'output', '', _('print output to file with formatted name')),
3030 ('a', 'text', None, _('treat all files as text')),
3030 ('a', 'text', None, _('treat all files as text')),
3031 ('', 'switch-parent', None, _('diff against the second parent'))],
3031 ('', 'switch-parent', None, _('diff against the second parent'))],
3032 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3032 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3033 "debugforget|forget":
3033 "debugforget|forget":
3034 (forget,
3034 (forget,
3035 [('I', 'include', [], _('include names matching the given patterns')),
3035 [('I', 'include', [], _('include names matching the given patterns')),
3036 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3036 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3037 _('hg forget [OPTION]... FILE...')),
3037 _('hg forget [OPTION]... FILE...')),
3038 "grep":
3038 "grep":
3039 (grep,
3039 (grep,
3040 [('0', 'print0', None, _('end fields with NUL')),
3040 [('0', 'print0', None, _('end fields with NUL')),
3041 ('', 'all', None, _('print all revisions that match')),
3041 ('', 'all', None, _('print all revisions that match')),
3042 ('i', 'ignore-case', None, _('ignore case when matching')),
3042 ('i', 'ignore-case', None, _('ignore case when matching')),
3043 ('l', 'files-with-matches', None,
3043 ('l', 'files-with-matches', None,
3044 _('print only filenames and revs that match')),
3044 _('print only filenames and revs that match')),
3045 ('n', 'line-number', None, _('print matching line numbers')),
3045 ('n', 'line-number', None, _('print matching line numbers')),
3046 ('r', 'rev', [], _('search in given revision range')),
3046 ('r', 'rev', [], _('search in given revision range')),
3047 ('u', 'user', None, _('print user who committed change')),
3047 ('u', 'user', None, _('print user who committed change')),
3048 ('I', 'include', [], _('include names matching the given patterns')),
3048 ('I', 'include', [], _('include names matching the given patterns')),
3049 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3049 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3050 _('hg grep [OPTION]... PATTERN [FILE]...')),
3050 _('hg grep [OPTION]... PATTERN [FILE]...')),
3051 "heads":
3051 "heads":
3052 (heads,
3052 (heads,
3053 [('b', 'branches', None, _('show branches')),
3053 [('b', 'branches', None, _('show branches')),
3054 ('', 'style', '', _('display using template map file')),
3054 ('', 'style', '', _('display using template map file')),
3055 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3055 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3056 ('', 'template', '', _('display with template'))],
3056 ('', 'template', '', _('display with template'))],
3057 _('hg heads [-b] [-r <rev>]')),
3057 _('hg heads [-b] [-r <rev>]')),
3058 "help": (help_, [], _('hg help [COMMAND]')),
3058 "help": (help_, [], _('hg help [COMMAND]')),
3059 "identify|id": (identify, [], _('hg identify')),
3059 "identify|id": (identify, [], _('hg identify')),
3060 "import|patch":
3060 "import|patch":
3061 (import_,
3061 (import_,
3062 [('p', 'strip', 1,
3062 [('p', 'strip', 1,
3063 _('directory strip option for patch. This has the same\n'
3063 _('directory strip option for patch. This has the same\n'
3064 'meaning as the corresponding patch option')),
3064 'meaning as the corresponding patch option')),
3065 ('b', 'base', '', _('base path')),
3065 ('b', 'base', '', _('base path')),
3066 ('f', 'force', None,
3066 ('f', 'force', None,
3067 _('skip check for outstanding uncommitted changes'))],
3067 _('skip check for outstanding uncommitted changes'))],
3068 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
3068 _('hg import [-p NUM] [-b BASE] [-f] PATCH...')),
3069 "incoming|in": (incoming,
3069 "incoming|in": (incoming,
3070 [('M', 'no-merges', None, _('do not show merges')),
3070 [('M', 'no-merges', None, _('do not show merges')),
3071 ('f', 'force', None,
3071 ('f', 'force', None,
3072 _('run even when remote repository is unrelated')),
3072 _('run even when remote repository is unrelated')),
3073 ('', 'style', '', _('display using template map file')),
3073 ('', 'style', '', _('display using template map file')),
3074 ('n', 'newest-first', None, _('show newest record first')),
3074 ('n', 'newest-first', None, _('show newest record first')),
3075 ('', 'bundle', '', _('file to store the bundles into')),
3075 ('', 'bundle', '', _('file to store the bundles into')),
3076 ('p', 'patch', None, _('show patch')),
3076 ('p', 'patch', None, _('show patch')),
3077 ('', 'template', '', _('display with template')),
3077 ('', 'template', '', _('display with template')),
3078 ('e', 'ssh', '', _('specify ssh command to use')),
3078 ('e', 'ssh', '', _('specify ssh command to use')),
3079 ('', 'remotecmd', '',
3079 ('', 'remotecmd', '',
3080 _('specify hg command to run on the remote side'))],
3080 _('specify hg command to run on the remote side'))],
3081 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
3081 _('hg incoming [-p] [-n] [-M] [--bundle FILENAME] [SOURCE]')),
3082 "^init": (init, [], _('hg init [DEST]')),
3082 "^init": (init, [], _('hg init [DEST]')),
3083 "locate":
3083 "locate":
3084 (locate,
3084 (locate,
3085 [('r', 'rev', '', _('search the repository as it stood at rev')),
3085 [('r', 'rev', '', _('search the repository as it stood at rev')),
3086 ('0', 'print0', None,
3086 ('0', 'print0', None,
3087 _('end filenames with NUL, for use with xargs')),
3087 _('end filenames with NUL, for use with xargs')),
3088 ('f', 'fullpath', None,
3088 ('f', 'fullpath', None,
3089 _('print complete paths from the filesystem root')),
3089 _('print complete paths from the filesystem root')),
3090 ('I', 'include', [], _('include names matching the given patterns')),
3090 ('I', 'include', [], _('include names matching the given patterns')),
3091 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3091 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3092 _('hg locate [OPTION]... [PATTERN]...')),
3092 _('hg locate [OPTION]... [PATTERN]...')),
3093 "^log|history":
3093 "^log|history":
3094 (log,
3094 (log,
3095 [('b', 'branches', None, _('show branches')),
3095 [('b', 'branches', None, _('show branches')),
3096 ('k', 'keyword', [], _('search for a keyword')),
3096 ('k', 'keyword', [], _('search for a keyword')),
3097 ('l', 'limit', '', _('limit number of changes displayed')),
3097 ('l', 'limit', '', _('limit number of changes displayed')),
3098 ('r', 'rev', [], _('show the specified revision or range')),
3098 ('r', 'rev', [], _('show the specified revision or range')),
3099 ('M', 'no-merges', None, _('do not show merges')),
3099 ('M', 'no-merges', None, _('do not show merges')),
3100 ('', 'style', '', _('display using template map file')),
3100 ('', 'style', '', _('display using template map file')),
3101 ('m', 'only-merges', None, _('show only merges')),
3101 ('m', 'only-merges', None, _('show only merges')),
3102 ('p', 'patch', None, _('show patch')),
3102 ('p', 'patch', None, _('show patch')),
3103 ('', 'template', '', _('display with template')),
3103 ('', 'template', '', _('display with template')),
3104 ('I', 'include', [], _('include names matching the given patterns')),
3104 ('I', 'include', [], _('include names matching the given patterns')),
3105 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3105 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3106 _('hg log [OPTION]... [FILE]')),
3106 _('hg log [OPTION]... [FILE]')),
3107 "manifest": (manifest, [], _('hg manifest [REV]')),
3107 "manifest": (manifest, [], _('hg manifest [REV]')),
3108 "merge":
3108 "merge":
3109 (merge,
3109 (merge,
3110 [('b', 'branch', '', _('merge with head of a specific branch')),
3110 [('b', 'branch', '', _('merge with head of a specific branch')),
3111 ('f', 'force', None, _('force a merge with outstanding changes'))],
3111 ('f', 'force', None, _('force a merge with outstanding changes'))],
3112 _('hg merge [-b TAG] [-f] [REV]')),
3112 _('hg merge [-b TAG] [-f] [REV]')),
3113 "outgoing|out": (outgoing,
3113 "outgoing|out": (outgoing,
3114 [('M', 'no-merges', None, _('do not show merges')),
3114 [('M', 'no-merges', None, _('do not show merges')),
3115 ('f', 'force', None,
3115 ('f', 'force', None,
3116 _('run even when remote repository is unrelated')),
3116 _('run even when remote repository is unrelated')),
3117 ('p', 'patch', None, _('show patch')),
3117 ('p', 'patch', None, _('show patch')),
3118 ('', 'style', '', _('display using template map file')),
3118 ('', 'style', '', _('display using template map file')),
3119 ('n', 'newest-first', None, _('show newest record first')),
3119 ('n', 'newest-first', None, _('show newest record first')),
3120 ('', 'template', '', _('display with template')),
3120 ('', 'template', '', _('display with template')),
3121 ('e', 'ssh', '', _('specify ssh command to use')),
3121 ('e', 'ssh', '', _('specify ssh command to use')),
3122 ('', 'remotecmd', '',
3122 ('', 'remotecmd', '',
3123 _('specify hg command to run on the remote side'))],
3123 _('specify hg command to run on the remote side'))],
3124 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3124 _('hg outgoing [-M] [-p] [-n] [DEST]')),
3125 "^parents":
3125 "^parents":
3126 (parents,
3126 (parents,
3127 [('b', 'branches', None, _('show branches')),
3127 [('b', 'branches', None, _('show branches')),
3128 ('', 'style', '', _('display using template map file')),
3128 ('', 'style', '', _('display using template map file')),
3129 ('', 'template', '', _('display with template'))],
3129 ('', 'template', '', _('display with template'))],
3130 _('hg parents [-b] [REV]')),
3130 _('hg parents [-b] [REV]')),
3131 "paths": (paths, [], _('hg paths [NAME]')),
3131 "paths": (paths, [], _('hg paths [NAME]')),
3132 "^pull":
3132 "^pull":
3133 (pull,
3133 (pull,
3134 [('u', 'update', None,
3134 [('u', 'update', None,
3135 _('update the working directory to tip after pull')),
3135 _('update the working directory to tip after pull')),
3136 ('e', 'ssh', '', _('specify ssh command to use')),
3136 ('e', 'ssh', '', _('specify ssh command to use')),
3137 ('f', 'force', None,
3137 ('f', 'force', None,
3138 _('run even when remote repository is unrelated')),
3138 _('run even when remote repository is unrelated')),
3139 ('r', 'rev', [], _('a specific revision you would like to pull')),
3139 ('r', 'rev', [], _('a specific revision you would like to pull')),
3140 ('', 'remotecmd', '',
3140 ('', 'remotecmd', '',
3141 _('specify hg command to run on the remote side'))],
3141 _('specify hg command to run on the remote side'))],
3142 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3142 _('hg pull [-u] [-e FILE] [-r REV]... [--remotecmd FILE] [SOURCE]')),
3143 "^push":
3143 "^push":
3144 (push,
3144 (push,
3145 [('f', 'force', None, _('force push')),
3145 [('f', 'force', None, _('force push')),
3146 ('e', 'ssh', '', _('specify ssh command to use')),
3146 ('e', 'ssh', '', _('specify ssh command to use')),
3147 ('r', 'rev', [], _('a specific revision you would like to push')),
3147 ('r', 'rev', [], _('a specific revision you would like to push')),
3148 ('', 'remotecmd', '',
3148 ('', 'remotecmd', '',
3149 _('specify hg command to run on the remote side'))],
3149 _('specify hg command to run on the remote side'))],
3150 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3150 _('hg push [-f] [-e FILE] [-r REV]... [--remotecmd FILE] [DEST]')),
3151 "debugrawcommit|rawcommit":
3151 "debugrawcommit|rawcommit":
3152 (rawcommit,
3152 (rawcommit,
3153 [('p', 'parent', [], _('parent')),
3153 [('p', 'parent', [], _('parent')),
3154 ('d', 'date', '', _('date code')),
3154 ('d', 'date', '', _('date code')),
3155 ('u', 'user', '', _('user')),
3155 ('u', 'user', '', _('user')),
3156 ('F', 'files', '', _('file list')),
3156 ('F', 'files', '', _('file list')),
3157 ('m', 'message', '', _('commit message')),
3157 ('m', 'message', '', _('commit message')),
3158 ('l', 'logfile', '', _('commit message file'))],
3158 ('l', 'logfile', '', _('commit message file'))],
3159 _('hg debugrawcommit [OPTION]... [FILE]...')),
3159 _('hg debugrawcommit [OPTION]... [FILE]...')),
3160 "recover": (recover, [], _('hg recover')),
3160 "recover": (recover, [], _('hg recover')),
3161 "^remove|rm":
3161 "^remove|rm":
3162 (remove,
3162 (remove,
3163 [('f', 'force', None, _('remove file even if modified')),
3163 [('f', 'force', None, _('remove file even if modified')),
3164 ('I', 'include', [], _('include names matching the given patterns')),
3164 ('I', 'include', [], _('include names matching the given patterns')),
3165 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3165 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3166 _('hg remove [OPTION]... FILE...')),
3166 _('hg remove [OPTION]... FILE...')),
3167 "rename|mv":
3167 "rename|mv":
3168 (rename,
3168 (rename,
3169 [('A', 'after', None, _('record a rename that has already occurred')),
3169 [('A', 'after', None, _('record a rename that has already occurred')),
3170 ('f', 'force', None,
3170 ('f', 'force', None,
3171 _('forcibly copy over an existing managed file')),
3171 _('forcibly copy over an existing managed file')),
3172 ('I', 'include', [], _('include names matching the given patterns')),
3172 ('I', 'include', [], _('include names matching the given patterns')),
3173 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3173 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3174 _('hg rename [OPTION]... SOURCE... DEST')),
3174 _('hg rename [OPTION]... SOURCE... DEST')),
3175 "^revert":
3175 "^revert":
3176 (revert,
3176 (revert,
3177 [('r', 'rev', '', _('revision to revert to')),
3177 [('r', 'rev', '', _('revision to revert to')),
3178 ('', 'no-backup', None, _('do not save backup copies of files')),
3178 ('', 'no-backup', None, _('do not save backup copies of files')),
3179 ('I', 'include', [], _('include names matching given patterns')),
3179 ('I', 'include', [], _('include names matching given patterns')),
3180 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3180 ('X', 'exclude', [], _('exclude names matching given patterns'))],
3181 _('hg revert [-r REV] [NAME]...')),
3181 _('hg revert [-r REV] [NAME]...')),
3182 "root": (root, [], _('hg root')),
3182 "root": (root, [], _('hg root')),
3183 "^serve":
3183 "^serve":
3184 (serve,
3184 (serve,
3185 [('A', 'accesslog', '', _('name of access log file to write to')),
3185 [('A', 'accesslog', '', _('name of access log file to write to')),
3186 ('d', 'daemon', None, _('run server in background')),
3186 ('d', 'daemon', None, _('run server in background')),
3187 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3187 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3188 ('E', 'errorlog', '', _('name of error log file to write to')),
3188 ('E', 'errorlog', '', _('name of error log file to write to')),
3189 ('p', 'port', 0, _('port to use (default: 8000)')),
3189 ('p', 'port', 0, _('port to use (default: 8000)')),
3190 ('a', 'address', '', _('address to use')),
3190 ('a', 'address', '', _('address to use')),
3191 ('n', 'name', '',
3191 ('n', 'name', '',
3192 _('name to show in web pages (default: working dir)')),
3192 _('name to show in web pages (default: working dir)')),
3193 ('', 'webdir-conf', '', _('name of the webdir config file'
3193 ('', 'webdir-conf', '', _('name of the webdir config file'
3194 ' (serve more than one repo)')),
3194 ' (serve more than one repo)')),
3195 ('', 'pid-file', '', _('name of file to write process ID to')),
3195 ('', 'pid-file', '', _('name of file to write process ID to')),
3196 ('', 'stdio', None, _('for remote clients')),
3196 ('', 'stdio', None, _('for remote clients')),
3197 ('t', 'templates', '', _('web templates to use')),
3197 ('t', 'templates', '', _('web templates to use')),
3198 ('', 'style', '', _('template style to use')),
3198 ('', 'style', '', _('template style to use')),
3199 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3199 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3200 _('hg serve [OPTION]...')),
3200 _('hg serve [OPTION]...')),
3201 "^status|st":
3201 "^status|st":
3202 (status,
3202 (status,
3203 [('m', 'modified', None, _('show only modified files')),
3203 [('m', 'modified', None, _('show only modified files')),
3204 ('a', 'added', None, _('show only added files')),
3204 ('a', 'added', None, _('show only added files')),
3205 ('r', 'removed', None, _('show only removed files')),
3205 ('r', 'removed', None, _('show only removed files')),
3206 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3206 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3207 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3207 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3208 ('i', 'ignored', None, _('show ignored files')),
3208 ('i', 'ignored', None, _('show ignored files')),
3209 ('n', 'no-status', None, _('hide status prefix')),
3209 ('n', 'no-status', None, _('hide status prefix')),
3210 ('0', 'print0', None,
3210 ('0', 'print0', None,
3211 _('end filenames with NUL, for use with xargs')),
3211 _('end filenames with NUL, for use with xargs')),
3212 ('I', 'include', [], _('include names matching the given patterns')),
3212 ('I', 'include', [], _('include names matching the given patterns')),
3213 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3213 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3214 _('hg status [OPTION]... [FILE]...')),
3214 _('hg status [OPTION]... [FILE]...')),
3215 "tag":
3215 "tag":
3216 (tag,
3216 (tag,
3217 [('l', 'local', None, _('make the tag local')),
3217 [('l', 'local', None, _('make the tag local')),
3218 ('m', 'message', '', _('message for tag commit log entry')),
3218 ('m', 'message', '', _('message for tag commit log entry')),
3219 ('d', 'date', '', _('record datecode as commit date')),
3219 ('d', 'date', '', _('record datecode as commit date')),
3220 ('u', 'user', '', _('record user as committer')),
3220 ('u', 'user', '', _('record user as committer')),
3221 ('r', 'rev', '', _('revision to tag'))],
3221 ('r', 'rev', '', _('revision to tag'))],
3222 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3222 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3223 "tags": (tags, [], _('hg tags')),
3223 "tags": (tags, [], _('hg tags')),
3224 "tip":
3224 "tip":
3225 (tip,
3225 (tip,
3226 [('b', 'branches', None, _('show branches')),
3226 [('b', 'branches', None, _('show branches')),
3227 ('', 'style', '', _('display using template map file')),
3227 ('', 'style', '', _('display using template map file')),
3228 ('p', 'patch', None, _('show patch')),
3228 ('p', 'patch', None, _('show patch')),
3229 ('', 'template', '', _('display with template'))],
3229 ('', 'template', '', _('display with template'))],
3230 _('hg tip [-b] [-p]')),
3230 _('hg tip [-b] [-p]')),
3231 "unbundle":
3231 "unbundle":
3232 (unbundle,
3232 (unbundle,
3233 [('u', 'update', None,
3233 [('u', 'update', None,
3234 _('update the working directory to tip after unbundle'))],
3234 _('update the working directory to tip after unbundle'))],
3235 _('hg unbundle [-u] FILE')),
3235 _('hg unbundle [-u] FILE')),
3236 "undo": (undo, [], _('hg undo')),
3236 "undo": (undo, [], _('hg undo')),
3237 "^update|up|checkout|co":
3237 "^update|up|checkout|co":
3238 (update,
3238 (update,
3239 [('b', 'branch', '', _('checkout the head of a specific branch')),
3239 [('b', 'branch', '', _('checkout the head of a specific branch')),
3240 ('m', 'merge', None, _('allow merging of branches')),
3240 ('m', 'merge', None, _('allow merging of branches')),
3241 ('C', 'clean', None, _('overwrite locally modified files')),
3241 ('C', 'clean', None, _('overwrite locally modified files')),
3242 ('f', 'force', None, _('force a merge with outstanding changes'))],
3242 ('f', 'force', None, _('force a merge with outstanding changes'))],
3243 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3243 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3244 "verify": (verify, [], _('hg verify')),
3244 "verify": (verify, [], _('hg verify')),
3245 "version": (show_version, [], _('hg version')),
3245 "version": (show_version, [], _('hg version')),
3246 }
3246 }
3247
3247
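For readers skimming the table above: each key is a command spec where a leading '^' puts the command on the short help list and '|' separates aliases, and each value is a (function, options, synopsis) tuple whose option entries are (short, long, default, help) tuples; the type of the default is what tells the option parser whether to expect a flag, a string, or a repeatable list. A reduced, hypothetical table in the same shape (the 'hello' command and its handler are invented purely to show the layout):

    # Hypothetical mini command table in the same (func, opts, synopsis) shape.
    import sys

    def hello(write, name, **opts):
        greeting = "Hello, %s!" % name
        if opts.get('shout'):
            greeting = greeting.upper()
        write(greeting + "\n")

    minitable = {
        "^hello|hi":                 # '^' -> shortlist entry, 'hi' is an alias
            (hello,
             [('s', 'shout', None, 'print the greeting in upper case')],
             'hello [-s] NAME'),
    }

    func, opts, synopsis = minitable["^hello|hi"]
    func(sys.stdout.write, "world", shout=True)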
3248 globalopts = [
3248 globalopts = [
3249 ('R', 'repository', '',
3249 ('R', 'repository', '',
3250 _('repository root directory or symbolic path name')),
3250 _('repository root directory or symbolic path name')),
3251 ('', 'cwd', '', _('change working directory')),
3251 ('', 'cwd', '', _('change working directory')),
3252 ('y', 'noninteractive', None,
3252 ('y', 'noninteractive', None,
3253 _('do not prompt, assume \'yes\' for any required answers')),
3253 _('do not prompt, assume \'yes\' for any required answers')),
3254 ('q', 'quiet', None, _('suppress output')),
3254 ('q', 'quiet', None, _('suppress output')),
3255 ('v', 'verbose', None, _('enable additional output')),
3255 ('v', 'verbose', None, _('enable additional output')),
3256 ('', 'debug', None, _('enable debugging output')),
3256 ('', 'debug', None, _('enable debugging output')),
3257 ('', 'debugger', None, _('start debugger')),
3257 ('', 'debugger', None, _('start debugger')),
3258 ('', 'traceback', None, _('print traceback on exception')),
3258 ('', 'traceback', None, _('print traceback on exception')),
3259 ('', 'time', None, _('time how long the command takes')),
3259 ('', 'time', None, _('time how long the command takes')),
3260 ('', 'profile', None, _('print command execution profile')),
3260 ('', 'profile', None, _('print command execution profile')),
3261 ('', 'version', None, _('output version information and exit')),
3261 ('', 'version', None, _('output version information and exit')),
3262 ('h', 'help', None, _('display help and exit')),
3262 ('h', 'help', None, _('display help and exit')),
3263 ]
3263 ]
3264
3264
3265 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3265 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3266 " debugindex debugindexdot")
3266 " debugindex debugindexdot")
3267 optionalrepo = ("paths serve debugconfig")
3267 optionalrepo = ("paths serve debugconfig")
3268
3268
3269 def findpossible(cmd):
3269 def findpossible(cmd):
3270 """
3270 """
3271 Return cmd -> (aliases, command table entry)
3271 Return cmd -> (aliases, command table entry)
3272 for each matching command.
3272 for each matching command.
3273 Return debug commands (or their aliases) only if no normal command matches.
3273 Return debug commands (or their aliases) only if no normal command matches.
3274 """
3274 """
3275 choice = {}
3275 choice = {}
3276 debugchoice = {}
3276 debugchoice = {}
3277 for e in table.keys():
3277 for e in table.keys():
3278 aliases = e.lstrip("^").split("|")
3278 aliases = e.lstrip("^").split("|")
3279 found = None
3279 found = None
3280 if cmd in aliases:
3280 if cmd in aliases:
3281 found = cmd
3281 found = cmd
3282 else:
3282 else:
3283 for a in aliases:
3283 for a in aliases:
3284 if a.startswith(cmd):
3284 if a.startswith(cmd):
3285 found = a
3285 found = a
3286 break
3286 break
3287 if found is not None:
3287 if found is not None:
3288 if aliases[0].startswith("debug"):
3288 if aliases[0].startswith("debug"):
3289 debugchoice[found] = (aliases, table[e])
3289 debugchoice[found] = (aliases, table[e])
3290 else:
3290 else:
3291 choice[found] = (aliases, table[e])
3291 choice[found] = (aliases, table[e])
3292
3292
3293 if not choice and debugchoice:
3293 if not choice and debugchoice:
3294 choice = debugchoice
3294 choice = debugchoice
3295
3295
3296 return choice
3296 return choice
3297
3297
3298 def find(cmd):
3298 def find(cmd):
3299 """Return (aliases, command table entry) for command string."""
3299 """Return (aliases, command table entry) for command string."""
3300 choice = findpossible(cmd)
3300 choice = findpossible(cmd)
3301
3301
3302 if choice.has_key(cmd):
3302 if choice.has_key(cmd):
3303 return choice[cmd]
3303 return choice[cmd]
3304
3304
3305 if len(choice) > 1:
3305 if len(choice) > 1:
3306 clist = choice.keys()
3306 clist = choice.keys()
3307 clist.sort()
3307 clist.sort()
3308 raise AmbiguousCommand(cmd, clist)
3308 raise AmbiguousCommand(cmd, clist)
3309
3309
3310 if choice:
3310 if choice:
3311 return choice.values()[0]
3311 return choice.values()[0]
3312
3312
3313 raise UnknownCommand(cmd)
3313 raise UnknownCommand(cmd)
3314
3314
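The two helpers above implement hg's command abbreviation: an exact name or alias wins outright, otherwise any unambiguous prefix is accepted, debug commands are only offered when nothing else matches, and more than one surviving candidate raises AmbiguousCommand. A self-contained sketch of the core prefix rule follows; the command names and exception classes here are simplified stand-ins, not the real table or the real alias/debug handling.

    # Simplified sketch of the prefix matching behind findpossible()/find().
    class AmbiguousCommand(Exception): pass
    class UnknownCommand(Exception): pass

    def findcmd(cmd, names):
        exact = [n for n in names if n == cmd]
        if exact:
            return exact[0]                       # exact match always wins
        matches = sorted(n for n in names if n.startswith(cmd))
        if len(matches) > 1:
            raise AmbiguousCommand(cmd, matches)  # prefix matches several names
        if matches:
            return matches[0]                     # unique prefix is accepted
        raise UnknownCommand(cmd)

    names = ["status", "serve", "summary", "update"]   # invented name list
    print(findcmd("st", names))    # -> 'status' (unique prefix)
    print(findcmd("up", names))    # -> 'update'
    # findcmd("s", names) would raise AmbiguousCommand('s', [...])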
3315 def catchterm(*args):
3315 def catchterm(*args):
3316 raise util.SignalInterrupt
3316 raise util.SignalInterrupt
3317
3317
3318 def run():
3318 def run():
3319 sys.exit(dispatch(sys.argv[1:]))
3319 sys.exit(dispatch(sys.argv[1:]))
3320
3320
3321 class ParseError(Exception):
3321 class ParseError(Exception):
3322 """Exception raised on errors in parsing the command line."""
3322 """Exception raised on errors in parsing the command line."""
3323
3323
3324 def parse(ui, args):
3324 def parse(ui, args):
3325 options = {}
3325 options = {}
3326 cmdoptions = {}
3326 cmdoptions = {}
3327
3327
3328 try:
3328 try:
3329 args = fancyopts.fancyopts(args, globalopts, options)
3329 args = fancyopts.fancyopts(args, globalopts, options)
3330 except fancyopts.getopt.GetoptError, inst:
3330 except fancyopts.getopt.GetoptError, inst:
3331 raise ParseError(None, inst)
3331 raise ParseError(None, inst)
3332
3332
3333 if args:
3333 if args:
3334 cmd, args = args[0], args[1:]
3334 cmd, args = args[0], args[1:]
3335 aliases, i = find(cmd)
3335 aliases, i = find(cmd)
3336 cmd = aliases[0]
3336 cmd = aliases[0]
3337 defaults = ui.config("defaults", cmd)
3337 defaults = ui.config("defaults", cmd)
3338 if defaults:
3338 if defaults:
3339 args = defaults.split() + args
3339 args = defaults.split() + args
3340 c = list(i[1])
3340 c = list(i[1])
3341 else:
3341 else:
3342 cmd = None
3342 cmd = None
3343 c = []
3343 c = []
3344
3344
3345 # combine global options into local
3345 # combine global options into local
3346 for o in globalopts:
3346 for o in globalopts:
3347 c.append((o[0], o[1], options[o[1]], o[3]))
3347 c.append((o[0], o[1], options[o[1]], o[3]))
3348
3348
3349 try:
3349 try:
3350 args = fancyopts.fancyopts(args, c, cmdoptions)
3350 args = fancyopts.fancyopts(args, c, cmdoptions)
3351 except fancyopts.getopt.GetoptError, inst:
3351 except fancyopts.getopt.GetoptError, inst:
3352 raise ParseError(cmd, inst)
3352 raise ParseError(cmd, inst)
3353
3353
3354 # separate global options back out
3354 # separate global options back out
3355 for o in globalopts:
3355 for o in globalopts:
3356 n = o[1]
3356 n = o[1]
3357 options[n] = cmdoptions[n]
3357 options[n] = cmdoptions[n]
3358 del cmdoptions[n]
3358 del cmdoptions[n]
3359
3359
3360 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3360 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3361
3361
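parse() above does its option handling in two passes: fancyopts is first run with only the global options so the command word can be found, then the command's own option list is extended with the globals (seeded with the values already parsed) for a second pass, and finally the global values are split back out of cmdoptions. The fragment below sketches just that fold-and-split bookkeeping with plain dicts; fancyopts itself is not reproduced and the option values are invented.

    # Sketch of the global-option fold/split done around the second
    # fancyopts pass in parse().  Values are invented for illustration.
    globalopts_demo = [('v', 'verbose', None, 'enable additional output')]
    cmdopts_demo    = [('r', 'rev', '', 'revision')]

    options = {'verbose': True}        # result of the first (global) pass

    # combine global options into the per-command option list
    combined = list(cmdopts_demo)
    for short, name, default, helptext in globalopts_demo:
        combined.append((short, name, options[name], helptext))

    # ... the second fancyopts pass would fill cmdoptions from 'combined';
    # pretend it produced the following:
    cmdoptions = {'rev': 'tip', 'verbose': True}

    # separate global options back out, exactly as parse() does
    for short, name, default, helptext in globalopts_demo:
        options[name] = cmdoptions.pop(name)

    print(options)     # {'verbose': True}
    print(cmdoptions)  # {'rev': 'tip'}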
3362 def dispatch(args):
3362 def dispatch(args):
3363 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3363 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3364 num = getattr(signal, name, None)
3364 num = getattr(signal, name, None)
3365 if num: signal.signal(num, catchterm)
3365 if num: signal.signal(num, catchterm)
3366
3366
3367 try:
3367 try:
3368 u = ui.ui()
3368 u = ui.ui()
3369 except util.Abort, inst:
3369 except util.Abort, inst:
3370 sys.stderr.write(_("abort: %s\n") % inst)
3370 sys.stderr.write(_("abort: %s\n") % inst)
3371 return -1
3371 return -1
3372
3372
3373 external = []
3373 external = []
3374 for x in u.extensions():
3374 for x in u.extensions():
3375 try:
3375 try:
3376 if x[1]:
3376 if x[1]:
3377 mod = imp.load_source(x[0], x[1])
3377 mod = imp.load_source(x[0], x[1])
3378 else:
3378 else:
3379 def importh(name):
3379 def importh(name):
3380 mod = __import__(name)
3380 mod = __import__(name)
3381 components = name.split('.')
3381 components = name.split('.')
3382 for comp in components[1:]:
3382 for comp in components[1:]:
3383 mod = getattr(mod, comp)
3383 mod = getattr(mod, comp)
3384 return mod
3384 return mod
3385 try:
3385 try:
3386 mod = importh("hgext." + x[0])
3386 mod = importh("hgext." + x[0])
3387 except ImportError:
3387 except ImportError:
3388 mod = importh(x[0])
3388 mod = importh(x[0])
3389 external.append(mod)
3389 external.append(mod)
3390 except Exception, inst:
3390 except Exception, inst:
3391 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3391 u.warn(_("*** failed to import extension %s: %s\n") % (x[0], inst))
3392 if "--traceback" in sys.argv[1:]:
3392 if "--traceback" in sys.argv[1:]:
3393 traceback.print_exc()
3393 traceback.print_exc()
3394 return 1
3394 return 1
3395 continue
3395 continue
3396
3396
3397 for x in external:
3397 for x in external:
3398 cmdtable = getattr(x, 'cmdtable', {})
3398 cmdtable = getattr(x, 'cmdtable', {})
3399 for t in cmdtable:
3399 for t in cmdtable:
3400 if t in table:
3400 if t in table:
3401 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3401 u.warn(_("module %s overrides %s\n") % (x.__name__, t))
3402 table.update(cmdtable)
3402 table.update(cmdtable)
3403
3403
3404 try:
3404 try:
3405 cmd, func, args, options, cmdoptions = parse(u, args)
3405 cmd, func, args, options, cmdoptions = parse(u, args)
3406 if options["time"]:
3406 if options["time"]:
3407 def get_times():
3407 def get_times():
3408 t = os.times()
3408 t = os.times()
3409 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3409 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3410 t = (t[0], t[1], t[2], t[3], time.clock())
3410 t = (t[0], t[1], t[2], t[3], time.clock())
3411 return t
3411 return t
3412 s = get_times()
3412 s = get_times()
3413 def print_time():
3413 def print_time():
3414 t = get_times()
3414 t = get_times()
3415 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3415 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3416 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3416 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3417 atexit.register(print_time)
3417 atexit.register(print_time)
3418
3418
3419 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3419 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3420 not options["noninteractive"])
3420 not options["noninteractive"])
3421
3421
3422 # enter the debugger before command execution
3422 # enter the debugger before command execution
3423 if options['debugger']:
3423 if options['debugger']:
3424 pdb.set_trace()
3424 pdb.set_trace()
3425
3425
3426 try:
3426 try:
3427 if options['cwd']:
3427 if options['cwd']:
3428 try:
3428 try:
3429 os.chdir(options['cwd'])
3429 os.chdir(options['cwd'])
3430 except OSError, inst:
3430 except OSError, inst:
3431 raise util.Abort('%s: %s' %
3431 raise util.Abort('%s: %s' %
3432 (options['cwd'], inst.strerror))
3432 (options['cwd'], inst.strerror))
3433
3433
3434 path = u.expandpath(options["repository"]) or ""
3434 path = u.expandpath(options["repository"]) or ""
3435 repo = path and hg.repository(u, path=path) or None
3435 repo = path and hg.repository(u, path=path) or None
3436
3436
3437 if options['help']:
3437 if options['help']:
3438 return help_(u, cmd, options['version'])
3438 return help_(u, cmd, options['version'])
3439 elif options['version']:
3439 elif options['version']:
3440 return show_version(u)
3440 return show_version(u)
3441 elif not cmd:
3441 elif not cmd:
3442 return help_(u, 'shortlist')
3442 return help_(u, 'shortlist')
3443
3443
3444 if cmd not in norepo.split():
3444 if cmd not in norepo.split():
3445 try:
3445 try:
3446 if not repo:
3446 if not repo:
3447 repo = hg.repository(u, path=path)
3447 repo = hg.repository(u, path=path)
3448 u = repo.ui
3448 u = repo.ui
3449 for x in external:
3449 for x in external:
3450 if hasattr(x, 'reposetup'):
3450 if hasattr(x, 'reposetup'):
3451 x.reposetup(u, repo)
3451 x.reposetup(u, repo)
3452 except hg.RepoError:
3452 except hg.RepoError:
3453 if cmd not in optionalrepo.split():
3453 if cmd not in optionalrepo.split():
3454 raise
3454 raise
3455 d = lambda: func(u, repo, *args, **cmdoptions)
3455 d = lambda: func(u, repo, *args, **cmdoptions)
3456 else:
3456 else:
3457 d = lambda: func(u, *args, **cmdoptions)
3457 d = lambda: func(u, *args, **cmdoptions)
3458
3458
3459 try:
3459 try:
3460 if options['profile']:
3460 if options['profile']:
3461 import hotshot, hotshot.stats
3461 import hotshot, hotshot.stats
3462 prof = hotshot.Profile("hg.prof")
3462 prof = hotshot.Profile("hg.prof")
3463 try:
3463 try:
3464 try:
3464 try:
3465 return prof.runcall(d)
3465 return prof.runcall(d)
3466 except:
3466 except:
3467 try:
3467 try:
3468 u.warn(_('exception raised - generating '
3468 u.warn(_('exception raised - generating '
3469 'profile anyway\n'))
3469 'profile anyway\n'))
3470 except:
3470 except:
3471 pass
3471 pass
3472 raise
3472 raise
3473 finally:
3473 finally:
3474 prof.close()
3474 prof.close()
3475 stats = hotshot.stats.load("hg.prof")
3475 stats = hotshot.stats.load("hg.prof")
3476 stats.strip_dirs()
3476 stats.strip_dirs()
3477 stats.sort_stats('time', 'calls')
3477 stats.sort_stats('time', 'calls')
3478 stats.print_stats(40)
3478 stats.print_stats(40)
3479 else:
3479 else:
3480 return d()
3480 return d()
3481 finally:
3481 finally:
3482 u.flush()
3482 u.flush()
3483 except:
3483 except:
3484 # enter the debugger when we hit an exception
3484 # enter the debugger when we hit an exception
3485 if options['debugger']:
3485 if options['debugger']:
3486 pdb.post_mortem(sys.exc_info()[2])
3486 pdb.post_mortem(sys.exc_info()[2])
3487 if options['traceback']:
3487 if options['traceback']:
3488 traceback.print_exc()
3488 traceback.print_exc()
3489 raise
3489 raise
3490 except ParseError, inst:
3490 except ParseError, inst:
3491 if inst.args[0]:
3491 if inst.args[0]:
3492 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3492 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3493 help_(u, inst.args[0])
3493 help_(u, inst.args[0])
3494 else:
3494 else:
3495 u.warn(_("hg: %s\n") % inst.args[1])
3495 u.warn(_("hg: %s\n") % inst.args[1])
3496 help_(u, 'shortlist')
3496 help_(u, 'shortlist')
3497 except AmbiguousCommand, inst:
3497 except AmbiguousCommand, inst:
3498 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3498 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3499 (inst.args[0], " ".join(inst.args[1])))
3499 (inst.args[0], " ".join(inst.args[1])))
3500 except UnknownCommand, inst:
3500 except UnknownCommand, inst:
3501 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3501 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3502 help_(u, 'shortlist')
3502 help_(u, 'shortlist')
3503 except hg.RepoError, inst:
3503 except hg.RepoError, inst:
3504 u.warn(_("abort: %s!\n") % inst)
3504 u.warn(_("abort: %s!\n") % inst)
3505 except lock.LockHeld, inst:
3505 except lock.LockHeld, inst:
3506 if inst.errno == errno.ETIMEDOUT:
3506 if inst.errno == errno.ETIMEDOUT:
3507 reason = _('timed out waiting for lock held by %s') % inst.locker
3507 reason = _('timed out waiting for lock held by %s') % inst.locker
3508 else:
3508 else:
3509 reason = _('lock held by %s') % inst.locker
3509 reason = _('lock held by %s') % inst.locker
3510 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3510 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3511 except lock.LockUnavailable, inst:
3511 except lock.LockUnavailable, inst:
3512 u.warn(_("abort: could not lock %s: %s\n") %
3512 u.warn(_("abort: could not lock %s: %s\n") %
3513 (inst.desc or inst.filename, inst.strerror))
3513 (inst.desc or inst.filename, inst.strerror))
3514 except revlog.RevlogError, inst:
3514 except revlog.RevlogError, inst:
3515 u.warn(_("abort: "), inst, "!\n")
3515 u.warn(_("abort: "), inst, "!\n")
3516 except util.SignalInterrupt:
3516 except util.SignalInterrupt:
3517 u.warn(_("killed!\n"))
3517 u.warn(_("killed!\n"))
3518 except KeyboardInterrupt:
3518 except KeyboardInterrupt:
3519 try:
3519 try:
3520 u.warn(_("interrupted!\n"))
3520 u.warn(_("interrupted!\n"))
3521 except IOError, inst:
3521 except IOError, inst:
3522 if inst.errno == errno.EPIPE:
3522 if inst.errno == errno.EPIPE:
3523 if u.debugflag:
3523 if u.debugflag:
3524 u.warn(_("\nbroken pipe\n"))
3524 u.warn(_("\nbroken pipe\n"))
3525 else:
3525 else:
3526 raise
3526 raise
3527 except IOError, inst:
3527 except IOError, inst:
3528 if hasattr(inst, "code"):
3528 if hasattr(inst, "code"):
3529 u.warn(_("abort: %s\n") % inst)
3529 u.warn(_("abort: %s\n") % inst)
3530 elif hasattr(inst, "reason"):
3530 elif hasattr(inst, "reason"):
3531 u.warn(_("abort: error: %s\n") % inst.reason[1])
3531 u.warn(_("abort: error: %s\n") % inst.reason[1])
3532 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3532 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3533 if u.debugflag:
3533 if u.debugflag:
3534 u.warn(_("broken pipe\n"))
3534 u.warn(_("broken pipe\n"))
3535 elif getattr(inst, "strerror", None):
3535 elif getattr(inst, "strerror", None):
3536 if getattr(inst, "filename", None):
3536 if getattr(inst, "filename", None):
3537 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3537 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3538 else:
3538 else:
3539 u.warn(_("abort: %s\n") % inst.strerror)
3539 u.warn(_("abort: %s\n") % inst.strerror)
3540 else:
3540 else:
3541 raise
3541 raise
3542 except OSError, inst:
3542 except OSError, inst:
3543 if hasattr(inst, "filename"):
3543 if hasattr(inst, "filename"):
3544 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3544 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3545 else:
3545 else:
3546 u.warn(_("abort: %s\n") % inst.strerror)
3546 u.warn(_("abort: %s\n") % inst.strerror)
3547 except util.Abort, inst:
3547 except util.Abort, inst:
3548 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3548 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3549 except TypeError, inst:
3549 except TypeError, inst:
3550 # was this an argument error?
3550 # was this an argument error?
3551 tb = traceback.extract_tb(sys.exc_info()[2])
3551 tb = traceback.extract_tb(sys.exc_info()[2])
3552 if len(tb) > 2: # no
3552 if len(tb) > 2: # no
3553 raise
3553 raise
3554 u.debug(inst, "\n")
3554 u.debug(inst, "\n")
3555 u.warn(_("%s: invalid arguments\n") % cmd)
3555 u.warn(_("%s: invalid arguments\n") % cmd)
3556 help_(u, cmd)
3556 help_(u, cmd)
3557 except SystemExit, inst:
3557 except SystemExit, inst:
3558 # Commands shouldn't sys.exit directly, but give a return code.
3558 # Commands shouldn't sys.exit directly, but give a return code.
3559 # Just in case, catch this and pass the exit code to the caller.
3559 # Just in case, catch this and pass the exit code to the caller.
3560 return inst.code
3560 return inst.code
3561 except:
3561 except:
3562 u.warn(_("** unknown exception encountered, details follow\n"))
3562 u.warn(_("** unknown exception encountered, details follow\n"))
3563 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3563 u.warn(_("** report bug details to mercurial@selenic.com\n"))
3564 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3564 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3565 % version.get_version())
3565 % version.get_version())
3566 raise
3566 raise
3567
3567
3568 return -1
3568 return -1
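One detail of the extension loading in dispatch() that is easy to miss: __import__('a.b.c') returns the top-level package 'a', so the inner importh() helper has to walk the dotted path with getattr to reach the leaf module before it can be appended to the 'external' list. The same helper, lifted out so it can be tried on its own (the 'os.path' argument is just a standard-library example, not an hg extension):

    # The dotted-name import helper from dispatch(), as standalone code.
    def importh(name):
        mod = __import__(name)              # returns the top-level package
        for comp in name.split('.')[1:]:
            mod = getattr(mod, comp)        # descend to the requested submodule
        return mod

    os_path = importh("os.path")            # stdlib example, for illustration
    print(os_path.join("tmp", "demo"))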
@@ -1,2056 +1,2056
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "revlog sys traceback")
15 demandload(globals(), "revlog sys traceback")
16
16
17 class localrepository(object):
17 class localrepository(object):
18 def __del__(self):
18 def __del__(self):
19 self.transhandle = None
19 self.transhandle = None
20 def __init__(self, parentui, path=None, create=0):
20 def __init__(self, parentui, path=None, create=0):
21 if not path:
21 if not path:
22 p = os.getcwd()
22 p = os.getcwd()
23 while not os.path.isdir(os.path.join(p, ".hg")):
23 while not os.path.isdir(os.path.join(p, ".hg")):
24 oldp = p
24 oldp = p
25 p = os.path.dirname(p)
25 p = os.path.dirname(p)
26 if p == oldp:
26 if p == oldp:
27 raise repo.RepoError(_("no repo found"))
27 raise repo.RepoError(_("no repo found"))
28 path = p
28 path = p
29 self.path = os.path.join(path, ".hg")
29 self.path = os.path.join(path, ".hg")
30
30
31 if not create and not os.path.isdir(self.path):
31 if not create and not os.path.isdir(self.path):
32 raise repo.RepoError(_("repository %s not found") % path)
32 raise repo.RepoError(_("repository %s not found") % path)
33
33
34 self.root = os.path.abspath(path)
34 self.root = os.path.abspath(path)
35 self.origroot = path
35 self.origroot = path
36 self.ui = ui.ui(parentui=parentui)
36 self.ui = ui.ui(parentui=parentui)
37 self.opener = util.opener(self.path)
37 self.opener = util.opener(self.path)
38 self.wopener = util.opener(self.root)
38 self.wopener = util.opener(self.root)
39
39
40 try:
40 try:
41 self.ui.readconfig(self.join("hgrc"), self.root)
41 self.ui.readconfig(self.join("hgrc"), self.root)
42 except IOError:
42 except IOError:
43 pass
43 pass
44
44
45 v = self.ui.revlogopts
45 v = self.ui.revlogopts
46 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
46 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
47 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
48 flags = 0
48 flags = 0
49 for x in v.get('flags', "").split():
49 for x in v.get('flags', "").split():
50 flags |= revlog.flagstr(x)
50 flags |= revlog.flagstr(x)
51
51
52 v = self.revlogversion | flags
52 v = self.revlogversion | flags
53 self.manifest = manifest.manifest(self.opener, v)
53 self.manifest = manifest.manifest(self.opener, v)
54 self.changelog = changelog.changelog(self.opener, v)
54 self.changelog = changelog.changelog(self.opener, v)
55
55
56 # the changelog might not have the inline index flag
56 # the changelog might not have the inline index flag
57 # on. If the format of the changelog is the same as found in
57 # on. If the format of the changelog is the same as found in
58 # .hgrc, apply any flags found in the .hgrc as well.
58 # .hgrc, apply any flags found in the .hgrc as well.
59 # Otherwise, just use the version found in the changelog
59 # Otherwise, just use the version found in the changelog
60 v = self.changelog.version
60 v = self.changelog.version
61 if v == self.revlogversion:
61 if v == self.revlogversion:
62 v |= flags
62 v |= flags
63 self.revlogversion = v
63 self.revlogversion = v
64
64
65 self.tagscache = None
65 self.tagscache = None
66 self.nodetagscache = None
66 self.nodetagscache = None
67 self.encodepats = None
67 self.encodepats = None
68 self.decodepats = None
68 self.decodepats = None
69 self.transhandle = None
69 self.transhandle = None
70
70
71 if create:
71 if create:
72 os.mkdir(self.path)
72 os.mkdir(self.path)
73 os.mkdir(self.join("data"))
73 os.mkdir(self.join("data"))
74
74
75 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
75 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
76
76
77 def hook(self, name, throw=False, **args):
77 def hook(self, name, throw=False, **args):
78 def callhook(hname, funcname):
78 def callhook(hname, funcname):
79 '''call python hook. hook is callable object, looked up as
79 '''call python hook. hook is callable object, looked up as
80 name in python module. if callable returns "true", hook
80 name in python module. if callable returns "true", hook
81 passes, else fails. if hook raises exception, treated as
81 passes, else fails. if hook raises exception, treated as
82 hook failure. exception propagates if throw is "true".'''
82 hook failure. exception propagates if throw is "true".'''
83
83
84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
84 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
85 d = funcname.rfind('.')
85 d = funcname.rfind('.')
86 if d == -1:
86 if d == -1:
87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
87 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
88 % (hname, funcname))
88 % (hname, funcname))
89 modname = funcname[:d]
89 modname = funcname[:d]
90 try:
90 try:
91 obj = __import__(modname)
91 obj = __import__(modname)
92 except ImportError:
92 except ImportError:
93 raise util.Abort(_('%s hook is invalid '
93 raise util.Abort(_('%s hook is invalid '
94 '(import of "%s" failed)') %
94 '(import of "%s" failed)') %
95 (hname, modname))
95 (hname, modname))
96 try:
96 try:
97 for p in funcname.split('.')[1:]:
97 for p in funcname.split('.')[1:]:
98 obj = getattr(obj, p)
98 obj = getattr(obj, p)
99 except AttributeError, err:
99 except AttributeError, err:
100 raise util.Abort(_('%s hook is invalid '
100 raise util.Abort(_('%s hook is invalid '
101 '("%s" is not defined)') %
101 '("%s" is not defined)') %
102 (hname, funcname))
102 (hname, funcname))
103 if not callable(obj):
103 if not callable(obj):
104 raise util.Abort(_('%s hook is invalid '
104 raise util.Abort(_('%s hook is invalid '
105 '("%s" is not callable)') %
105 '("%s" is not callable)') %
106 (hname, funcname))
106 (hname, funcname))
107 try:
107 try:
108 r = obj(ui=ui, repo=repo, hooktype=name, **args)
108 r = obj(ui=ui, repo=repo, hooktype=name, **args)
109 except (KeyboardInterrupt, util.SignalInterrupt):
109 except (KeyboardInterrupt, util.SignalInterrupt):
110 raise
110 raise
111 except Exception, exc:
111 except Exception, exc:
112 if isinstance(exc, util.Abort):
112 if isinstance(exc, util.Abort):
113 self.ui.warn(_('error: %s hook failed: %s\n') %
113 self.ui.warn(_('error: %s hook failed: %s\n') %
114 (hname, exc.args[0] % exc.args[1:]))
114 (hname, exc.args[0] % exc.args[1:]))
115 else:
115 else:
116 self.ui.warn(_('error: %s hook raised an exception: '
116 self.ui.warn(_('error: %s hook raised an exception: '
117 '%s\n') % (hname, exc))
117 '%s\n') % (hname, exc))
118 if throw:
118 if throw:
119 raise
119 raise
120 if "--traceback" in sys.argv[1:]:
120 if "--traceback" in sys.argv[1:]:
121 traceback.print_exc()
121 traceback.print_exc()
122 return False
122 return False
123 if not r:
123 if not r:
124 if throw:
124 if throw:
125 raise util.Abort(_('%s hook failed') % hname)
125 raise util.Abort(_('%s hook failed') % hname)
126 self.ui.warn(_('error: %s hook failed\n') % hname)
126 self.ui.warn(_('error: %s hook failed\n') % hname)
127 return r
127 return r
128
128
129 def runhook(name, cmd):
129 def runhook(name, cmd):
130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
130 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
131 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
132 [(k.upper(), v) for k, v in args.iteritems()])
132 [(k.upper(), v) for k, v in args.iteritems()])
133 r = util.system(cmd, environ=env, cwd=self.root)
133 r = util.system(cmd, environ=env, cwd=self.root)
134 if r:
134 if r:
135 desc, r = util.explain_exit(r)
135 desc, r = util.explain_exit(r)
136 if throw:
136 if throw:
137 raise util.Abort(_('%s hook %s') % (name, desc))
137 raise util.Abort(_('%s hook %s') % (name, desc))
138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
138 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
139 return False
139 return False
140 return True
140 return True
141
141
142 r = True
142 r = True
143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
143 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
144 if hname.split(".", 1)[0] == name and cmd]
144 if hname.split(".", 1)[0] == name and cmd]
145 hooks.sort()
145 hooks.sort()
146 for hname, cmd in hooks:
146 for hname, cmd in hooks:
147 if cmd.startswith('python:'):
147 if cmd.startswith('python:'):
148 r = callhook(hname, cmd[7:].strip()) and r
148 r = callhook(hname, cmd[7:].strip()) and r
149 else:
149 else:
150 r = runhook(hname, cmd) and r
150 r = runhook(hname, cmd) and r
151 return r
151 return r
152
152
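The hook() method above supports two kinds of "[hooks]" entries: a value prefixed with "python:" names an in-process callable looked up as module.attribute, and anything else is run as a shell command with the hook arguments exported as HG_* environment variables. The standalone sketch below restates that dispatch logic outside Mercurial; run_hooks, its hooks mapping, and the sample commands are invented for illustration, and real Python hooks additionally receive ui and repo.

import os
import subprocess

def run_hooks(hooks, hooktype, **args):
    # hooks: mapping of "[hooks]"-style keys to values,
    # e.g. {"commit.notify": "python:mymod.notify", "commit": "echo $HG_NODE"}
    ok = True
    for name, cmd in sorted(hooks.items()):
        if name.split(".", 1)[0] != hooktype or not cmd:
            continue
        if cmd.startswith("python:"):
            modname, funcname = cmd[7:].strip().rsplit(".", 1)
            mod = __import__(modname, fromlist=[funcname])
            # in this version a truthy return value means the hook passed
            ok = bool(getattr(mod, funcname)(hooktype=hooktype, **args)) and ok
        else:
            env = dict(os.environ)
            env.update(("HG_" + k.upper(), str(v)) for k, v in args.items())
            ok = (subprocess.call(cmd, shell=True, env=env) == 0) and ok
    return ok

# run_hooks({"commit": "echo committed $HG_NODE"}, "commit", node="abc123")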
153 def tags(self):
153 def tags(self):
154 '''return a mapping of tag to node'''
154 '''return a mapping of tag to node'''
155 if not self.tagscache:
155 if not self.tagscache:
156 self.tagscache = {}
156 self.tagscache = {}
157
157
158 def parsetag(line, context):
158 def parsetag(line, context):
159 if not line:
159 if not line:
160 return
160 return
161 s = line.split(" ", 1)
161 s = line.split(" ", 1)
162 if len(s) != 2:
162 if len(s) != 2:
163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
163 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
164 return
164 return
165 node, key = s
165 node, key = s
166 try:
166 try:
167 bin_n = bin(node)
167 bin_n = bin(node)
168 except TypeError:
168 except TypeError:
169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
169 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
170 return
170 return
171 if bin_n not in self.changelog.nodemap:
171 if bin_n not in self.changelog.nodemap:
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
172 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
173 return
173 return
174 self.tagscache[key.strip()] = bin_n
174 self.tagscache[key.strip()] = bin_n
175
175
176 # read each head of the tags file, ending with the tip
176 # read each head of the tags file, ending with the tip
177 # and add each tag found to the map, with "newer" ones
177 # and add each tag found to the map, with "newer" ones
178 # taking precedence
178 # taking precedence
179 fl = self.file(".hgtags")
179 fl = self.file(".hgtags")
180 h = fl.heads()
180 h = fl.heads()
181 h.reverse()
181 h.reverse()
182 for r in h:
182 for r in h:
183 count = 0
183 count = 0
184 for l in fl.read(r).splitlines():
184 for l in fl.read(r).splitlines():
185 count += 1
185 count += 1
186 parsetag(l, ".hgtags:%d" % count)
186 parsetag(l, ".hgtags:%d" % count)
187
187
188 try:
188 try:
189 f = self.opener("localtags")
189 f = self.opener("localtags")
190 count = 0
190 count = 0
191 for l in f:
191 for l in f:
192 count += 1
192 count += 1
193 parsetag(l, "localtags:%d" % count)
193 parsetag(l, "localtags:%d" % count)
194 except IOError:
194 except IOError:
195 pass
195 pass
196
196
197 self.tagscache['tip'] = self.changelog.tip()
197 self.tagscache['tip'] = self.changelog.tip()
198
198
199 return self.tagscache
199 return self.tagscache
200
200
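tags() reads every head of .hgtags oldest first, so entries from newer heads silently override older bindings of the same tag name, with localtags and the implicit "tip" applied last. A minimal standalone parser showing just that precedence rule; parse_tag_lines and its inputs are illustrative, not the repository API.

def parse_tag_lines(lines, known_nodes):
    tags = {}
    for line in lines:
        parts = line.strip().split(" ", 1)
        if len(parts) != 2 or parts[0] not in known_nodes:
            continue  # the real code warns about and skips invalid entries
        node, name = parts
        tags[name.strip()] = node  # later ("newer") lines simply overwrite
    return tags

print(parse_tag_lines(["abc123 v1.0", "def456 v1.0"], {"abc123", "def456"}))
# {'v1.0': 'def456'}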
201 def tagslist(self):
201 def tagslist(self):
202 '''return a list of tags ordered by revision'''
202 '''return a list of tags ordered by revision'''
203 l = []
203 l = []
204 for t, n in self.tags().items():
204 for t, n in self.tags().items():
205 try:
205 try:
206 r = self.changelog.rev(n)
206 r = self.changelog.rev(n)
207 except:
207 except:
208 r = -2 # sort to the beginning of the list if unknown
208 r = -2 # sort to the beginning of the list if unknown
209 l.append((r, t, n))
209 l.append((r, t, n))
210 l.sort()
210 l.sort()
211 return [(t, n) for r, t, n in l]
211 return [(t, n) for r, t, n in l]
212
212
213 def nodetags(self, node):
213 def nodetags(self, node):
214 '''return the tags associated with a node'''
214 '''return the tags associated with a node'''
215 if not self.nodetagscache:
215 if not self.nodetagscache:
216 self.nodetagscache = {}
216 self.nodetagscache = {}
217 for t, n in self.tags().items():
217 for t, n in self.tags().items():
218 self.nodetagscache.setdefault(n, []).append(t)
218 self.nodetagscache.setdefault(n, []).append(t)
219 return self.nodetagscache.get(node, [])
219 return self.nodetagscache.get(node, [])
220
220
221 def lookup(self, key):
221 def lookup(self, key):
222 try:
222 try:
223 return self.tags()[key]
223 return self.tags()[key]
224 except KeyError:
224 except KeyError:
225 try:
225 try:
226 return self.changelog.lookup(key)
226 return self.changelog.lookup(key)
227 except:
227 except:
228 raise repo.RepoError(_("unknown revision '%s'") % key)
228 raise repo.RepoError(_("unknown revision '%s'") % key)
229
229
230 def dev(self):
230 def dev(self):
231 return os.stat(self.path).st_dev
231 return os.stat(self.path).st_dev
232
232
233 def local(self):
233 def local(self):
234 return True
234 return True
235
235
236 def join(self, f):
236 def join(self, f):
237 return os.path.join(self.path, f)
237 return os.path.join(self.path, f)
238
238
239 def wjoin(self, f):
239 def wjoin(self, f):
240 return os.path.join(self.root, f)
240 return os.path.join(self.root, f)
241
241
242 def file(self, f):
242 def file(self, f):
243 if f[0] == '/':
243 if f[0] == '/':
244 f = f[1:]
244 f = f[1:]
245 return filelog.filelog(self.opener, f, self.revlogversion)
245 return filelog.filelog(self.opener, f, self.revlogversion)
246
246
247 def getcwd(self):
247 def getcwd(self):
248 return self.dirstate.getcwd()
248 return self.dirstate.getcwd()
249
249
250 def wfile(self, f, mode='r'):
250 def wfile(self, f, mode='r'):
251 return self.wopener(f, mode)
251 return self.wopener(f, mode)
252
252
253 def wread(self, filename):
253 def wread(self, filename):
254 if self.encodepats == None:
254 if self.encodepats == None:
255 l = []
255 l = []
256 for pat, cmd in self.ui.configitems("encode"):
256 for pat, cmd in self.ui.configitems("encode"):
257 mf = util.matcher(self.root, "", [pat], [], [])[1]
257 mf = util.matcher(self.root, "", [pat], [], [])[1]
258 l.append((mf, cmd))
258 l.append((mf, cmd))
259 self.encodepats = l
259 self.encodepats = l
260
260
261 data = self.wopener(filename, 'r').read()
261 data = self.wopener(filename, 'r').read()
262
262
263 for mf, cmd in self.encodepats:
263 for mf, cmd in self.encodepats:
264 if mf(filename):
264 if mf(filename):
265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
265 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
266 data = util.filter(data, cmd)
266 data = util.filter(data, cmd)
267 break
267 break
268
268
269 return data
269 return data
270
270
271 def wwrite(self, filename, data, fd=None):
271 def wwrite(self, filename, data, fd=None):
272 if self.decodepats == None:
272 if self.decodepats == None:
273 l = []
273 l = []
274 for pat, cmd in self.ui.configitems("decode"):
274 for pat, cmd in self.ui.configitems("decode"):
275 mf = util.matcher(self.root, "", [pat], [], [])[1]
275 mf = util.matcher(self.root, "", [pat], [], [])[1]
276 l.append((mf, cmd))
276 l.append((mf, cmd))
277 self.decodepats = l
277 self.decodepats = l
278
278
279 for mf, cmd in self.decodepats:
279 for mf, cmd in self.decodepats:
280 if mf(filename):
280 if mf(filename):
281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
281 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
282 data = util.filter(data, cmd)
282 data = util.filter(data, cmd)
283 break
283 break
284
284
285 if fd:
285 if fd:
286 return fd.write(data)
286 return fd.write(data)
287 return self.wopener(filename, 'w').write(data)
287 return self.wopener(filename, 'w').write(data)
288
288
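wread() and wwrite() pipe file contents through the first "[encode]" or "[decode]" command whose pattern matches the file name. The sketch below imitates that with fnmatch globs and a shell pipe; it is not util.filter or util.matcher, and the tr command is just a convenient example on POSIX systems.

import fnmatch
import subprocess

def filter_data(filename, data, patterns):
    # patterns: list of (glob, shell_command) pairs; the first match wins
    for pat, cmd in patterns:
        if fnmatch.fnmatch(filename, pat):
            proc = subprocess.run(cmd, shell=True, input=data,
                                  stdout=subprocess.PIPE, check=True)
            return proc.stdout
    return data

print(filter_data("notes.txt", b"hello\n", [("*.txt", "tr a-z A-Z")]))  # b'HELLO\n'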
289 def transaction(self):
289 def transaction(self):
290 tr = self.transhandle
290 tr = self.transhandle
291 if tr != None and tr.running():
291 if tr != None and tr.running():
292 return tr.nest()
292 return tr.nest()
293
293
294 # save dirstate for undo
294 # save dirstate for undo
295 try:
295 try:
296 ds = self.opener("dirstate").read()
296 ds = self.opener("dirstate").read()
297 except IOError:
297 except IOError:
298 ds = ""
298 ds = ""
299 self.opener("journal.dirstate", "w").write(ds)
299 self.opener("journal.dirstate", "w").write(ds)
300
300
301 tr = transaction.transaction(self.ui.warn, self.opener,
301 tr = transaction.transaction(self.ui.warn, self.opener,
302 self.join("journal"),
302 self.join("journal"),
303 aftertrans(self.path))
303 aftertrans(self.path))
304 self.transhandle = tr
304 self.transhandle = tr
305 return tr
305 return tr
306
306
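transaction() snapshots the dirstate to journal.dirstate and opens a journal so that an interrupted or failed write can be rolled back (see recover() and undo() just below). The toy class here only illustrates the journalling idea, recording each file's size before it is appended to and truncating back on abort; none of these names come from Mercurial.

import os

class toyjournal(object):
    def __init__(self, path="journal"):
        self.path, self.entries = path, []
    def add(self, filename):
        # record the length the file had before we touch it
        size = os.path.getsize(filename) if os.path.exists(filename) else 0
        self.entries.append((filename, size))
        with open(self.path, "a") as j:
            j.write("%s\0%d\n" % (filename, size))
    def abort(self):
        # undo in reverse order by truncating back to the recorded lengths
        for filename, size in reversed(self.entries):
            with open(filename, "ab") as f:
                f.truncate(size)
        self.close()
    def close(self):
        if os.path.exists(self.path):
            os.unlink(self.path)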
307 def recover(self):
307 def recover(self):
308 l = self.lock()
308 l = self.lock()
309 if os.path.exists(self.join("journal")):
309 if os.path.exists(self.join("journal")):
310 self.ui.status(_("rolling back interrupted transaction\n"))
310 self.ui.status(_("rolling back interrupted transaction\n"))
311 transaction.rollback(self.opener, self.join("journal"))
311 transaction.rollback(self.opener, self.join("journal"))
312 self.reload()
312 self.reload()
313 return True
313 return True
314 else:
314 else:
315 self.ui.warn(_("no interrupted transaction available\n"))
315 self.ui.warn(_("no interrupted transaction available\n"))
316 return False
316 return False
317
317
318 def undo(self, wlock=None):
318 def undo(self, wlock=None):
319 if not wlock:
319 if not wlock:
320 wlock = self.wlock()
320 wlock = self.wlock()
321 l = self.lock()
321 l = self.lock()
322 if os.path.exists(self.join("undo")):
322 if os.path.exists(self.join("undo")):
323 self.ui.status(_("rolling back last transaction\n"))
323 self.ui.status(_("rolling back last transaction\n"))
324 transaction.rollback(self.opener, self.join("undo"))
324 transaction.rollback(self.opener, self.join("undo"))
325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
325 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
326 self.reload()
326 self.reload()
327 self.wreload()
327 self.wreload()
328 else:
328 else:
329 self.ui.warn(_("no undo information available\n"))
329 self.ui.warn(_("no undo information available\n"))
330
330
331 def wreload(self):
331 def wreload(self):
332 self.dirstate.read()
332 self.dirstate.read()
333
333
334 def reload(self):
334 def reload(self):
335 self.changelog.load()
335 self.changelog.load()
336 self.manifest.load()
336 self.manifest.load()
337 self.tagscache = None
337 self.tagscache = None
338 self.nodetagscache = None
338 self.nodetagscache = None
339
339
340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
340 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
341 desc=None):
341 desc=None):
342 try:
342 try:
343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
343 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
344 except lock.LockHeld, inst:
344 except lock.LockHeld, inst:
345 if not wait:
345 if not wait:
346 raise
346 raise
347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
347 self.ui.warn(_("waiting for lock on %s held by %s\n") %
348 (desc, inst.args[0]))
348 (desc, inst.args[0]))
349 # default to 600 seconds timeout
349 # default to 600 seconds timeout
350 l = lock.lock(self.join(lockname),
350 l = lock.lock(self.join(lockname),
351 int(self.ui.config("ui", "timeout") or 600),
351 int(self.ui.config("ui", "timeout") or 600),
352 releasefn, desc=desc)
352 releasefn, desc=desc)
353 if acquirefn:
353 if acquirefn:
354 acquirefn()
354 acquirefn()
355 return l
355 return l
356
356
357 def lock(self, wait=1):
357 def lock(self, wait=1):
358 return self.do_lock("lock", wait, acquirefn=self.reload,
358 return self.do_lock("lock", wait, acquirefn=self.reload,
359 desc=_('repository %s') % self.origroot)
359 desc=_('repository %s') % self.origroot)
360
360
361 def wlock(self, wait=1):
361 def wlock(self, wait=1):
362 return self.do_lock("wlock", wait, self.dirstate.write,
362 return self.do_lock("wlock", wait, self.dirstate.write,
363 self.wreload,
363 self.wreload,
364 desc=_('working directory of %s') % self.origroot)
364 desc=_('working directory of %s') % self.origroot)
365
365
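do_lock() tries to take the lock once and, only when wait is set, retries until the ui.timeout setting expires (600 seconds by default). A self-contained version of that pattern using an exclusive lock file; acquire_lock and its parameters are made up for the example, and the real lock module also records the locker's identity for the "held by" message.

import errno
import os
import time

def acquire_lock(path, wait=True, timeout=600, poll=1.0):
    deadline = time.time() + timeout
    while True:
        try:
            fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            os.write(fd, str(os.getpid()).encode())
            os.close(fd)
            return path               # unlink the file to release the lock
        except OSError as err:
            if err.errno != errno.EEXIST or not wait or time.time() > deadline:
                raise                 # comparable to LockHeld with wait=0
            time.sleep(poll)          # keep waiting, as the warning above says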
366 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
366 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
367 "determine whether a new filenode is needed"
367 "determine whether a new filenode is needed"
368 fp1 = manifest1.get(filename, nullid)
368 fp1 = manifest1.get(filename, nullid)
369 fp2 = manifest2.get(filename, nullid)
369 fp2 = manifest2.get(filename, nullid)
370
370
371 if fp2 != nullid:
371 if fp2 != nullid:
372 # is one parent an ancestor of the other?
372 # is one parent an ancestor of the other?
373 fpa = filelog.ancestor(fp1, fp2)
373 fpa = filelog.ancestor(fp1, fp2)
374 if fpa == fp1:
374 if fpa == fp1:
375 fp1, fp2 = fp2, nullid
375 fp1, fp2 = fp2, nullid
376 elif fpa == fp2:
376 elif fpa == fp2:
377 fp2 = nullid
377 fp2 = nullid
378
378
379 # is the file unmodified from the parent? report existing entry
379 # is the file unmodified from the parent? report existing entry
380 if fp2 == nullid and text == filelog.read(fp1):
380 if fp2 == nullid and text == filelog.read(fp1):
381 return (fp1, None, None)
381 return (fp1, None, None)
382
382
383 return (None, fp1, fp2)
383 return (None, fp1, fp2)
384
384
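checkfilemerge() decides whether committing a file really needs a new filenode: if one parent filenode is an ancestor of the other, only the newer one counts, and if the text is unchanged from that remaining parent the existing entry is reused. A compact restatement with stand-in ancestor/read callables and dummy node ids (all names invented):

NULL = "null"

def check(text, fp1, fp2, ancestor, read):
    if fp2 != NULL:
        a = ancestor(fp1, fp2)
        if a == fp1:
            fp1, fp2 = fp2, NULL      # fp1 is an ancestor of fp2: keep fp2 only
        elif a == fp2:
            fp2 = NULL                # fp2 is an ancestor of fp1: drop it
    if fp2 == NULL and text == read(fp1):
        return (fp1, None, None)      # unchanged: reuse the existing filenode
    return (None, fp1, fp2)           # record a new filenode with these parents

contents = {"n1": "old", "n2": "new"}
print(check("new", "n1", "n2", ancestor=lambda a, b: "n1", read=contents.get))
# ('n2', None, None) -- n1 is an ancestor of n2 and the text matches n2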
385 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
385 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
386 orig_parent = self.dirstate.parents()[0] or nullid
386 orig_parent = self.dirstate.parents()[0] or nullid
387 p1 = p1 or self.dirstate.parents()[0] or nullid
387 p1 = p1 or self.dirstate.parents()[0] or nullid
388 p2 = p2 or self.dirstate.parents()[1] or nullid
388 p2 = p2 or self.dirstate.parents()[1] or nullid
389 c1 = self.changelog.read(p1)
389 c1 = self.changelog.read(p1)
390 c2 = self.changelog.read(p2)
390 c2 = self.changelog.read(p2)
391 m1 = self.manifest.read(c1[0])
391 m1 = self.manifest.read(c1[0])
392 mf1 = self.manifest.readflags(c1[0])
392 mf1 = self.manifest.readflags(c1[0])
393 m2 = self.manifest.read(c2[0])
393 m2 = self.manifest.read(c2[0])
394 changed = []
394 changed = []
395
395
396 if orig_parent == p1:
396 if orig_parent == p1:
397 update_dirstate = 1
397 update_dirstate = 1
398 else:
398 else:
399 update_dirstate = 0
399 update_dirstate = 0
400
400
401 if not wlock:
401 if not wlock:
402 wlock = self.wlock()
402 wlock = self.wlock()
403 l = self.lock()
403 l = self.lock()
404 tr = self.transaction()
404 tr = self.transaction()
405 mm = m1.copy()
405 mm = m1.copy()
406 mfm = mf1.copy()
406 mfm = mf1.copy()
407 linkrev = self.changelog.count()
407 linkrev = self.changelog.count()
408 for f in files:
408 for f in files:
409 try:
409 try:
410 t = self.wread(f)
410 t = self.wread(f)
411 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
411 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
412 r = self.file(f)
412 r = self.file(f)
413 mfm[f] = tm
413 mfm[f] = tm
414
414
415 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
415 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
416 if entry:
416 if entry:
417 mm[f] = entry
417 mm[f] = entry
418 continue
418 continue
419
419
420 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
420 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
421 changed.append(f)
421 changed.append(f)
422 if update_dirstate:
422 if update_dirstate:
423 self.dirstate.update([f], "n")
423 self.dirstate.update([f], "n")
424 except IOError:
424 except IOError:
425 try:
425 try:
426 del mm[f]
426 del mm[f]
427 del mfm[f]
427 del mfm[f]
428 if update_dirstate:
428 if update_dirstate:
429 self.dirstate.forget([f])
429 self.dirstate.forget([f])
430 except:
430 except:
431 # deleted from p2?
431 # deleted from p2?
432 pass
432 pass
433
433
434 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
434 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
435 user = user or self.ui.username()
435 user = user or self.ui.username()
436 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
436 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
437 tr.close()
437 tr.close()
438 if update_dirstate:
438 if update_dirstate:
439 self.dirstate.setparents(n, nullid)
439 self.dirstate.setparents(n, nullid)
440
440
441 def commit(self, files=None, text="", user=None, date=None,
441 def commit(self, files=None, text="", user=None, date=None,
442 match=util.always, force=False, lock=None, wlock=None):
442 match=util.always, force=False, lock=None, wlock=None):
443 commit = []
443 commit = []
444 remove = []
444 remove = []
445 changed = []
445 changed = []
446
446
447 if files:
447 if files:
448 for f in files:
448 for f in files:
449 s = self.dirstate.state(f)
449 s = self.dirstate.state(f)
450 if s in 'nmai':
450 if s in 'nmai':
451 commit.append(f)
451 commit.append(f)
452 elif s == 'r':
452 elif s == 'r':
453 remove.append(f)
453 remove.append(f)
454 else:
454 else:
455 self.ui.warn(_("%s not tracked!\n") % f)
455 self.ui.warn(_("%s not tracked!\n") % f)
456 else:
456 else:
457 modified, added, removed, deleted, unknown = self.changes(match=match)
457 modified, added, removed, deleted, unknown = self.changes(match=match)
458 commit = modified + added
458 commit = modified + added
459 remove = removed
459 remove = removed
460
460
461 p1, p2 = self.dirstate.parents()
461 p1, p2 = self.dirstate.parents()
462 c1 = self.changelog.read(p1)
462 c1 = self.changelog.read(p1)
463 c2 = self.changelog.read(p2)
463 c2 = self.changelog.read(p2)
464 m1 = self.manifest.read(c1[0])
464 m1 = self.manifest.read(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
465 mf1 = self.manifest.readflags(c1[0])
466 m2 = self.manifest.read(c2[0])
466 m2 = self.manifest.read(c2[0])
467
467
468 if not commit and not remove and not force and p2 == nullid:
468 if not commit and not remove and not force and p2 == nullid:
469 self.ui.status(_("nothing changed\n"))
469 self.ui.status(_("nothing changed\n"))
470 return None
470 return None
471
471
472 xp1 = hex(p1)
472 xp1 = hex(p1)
473 if p2 == nullid: xp2 = ''
473 if p2 == nullid: xp2 = ''
474 else: xp2 = hex(p2)
474 else: xp2 = hex(p2)
475
475
476 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
476 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
477
477
478 if not wlock:
478 if not wlock:
479 wlock = self.wlock()
479 wlock = self.wlock()
480 if not lock:
480 if not lock:
481 lock = self.lock()
481 lock = self.lock()
482 tr = self.transaction()
482 tr = self.transaction()
483
483
484 # check in files
484 # check in files
485 new = {}
485 new = {}
486 linkrev = self.changelog.count()
486 linkrev = self.changelog.count()
487 commit.sort()
487 commit.sort()
488 for f in commit:
488 for f in commit:
489 self.ui.note(f + "\n")
489 self.ui.note(f + "\n")
490 try:
490 try:
491 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
491 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
492 t = self.wread(f)
492 t = self.wread(f)
493 except IOError:
493 except IOError:
494 self.ui.warn(_("trouble committing %s!\n") % f)
494 self.ui.warn(_("trouble committing %s!\n") % f)
495 raise
495 raise
496
496
497 r = self.file(f)
497 r = self.file(f)
498
498
499 meta = {}
499 meta = {}
500 cp = self.dirstate.copied(f)
500 cp = self.dirstate.copied(f)
501 if cp:
501 if cp:
502 meta["copy"] = cp
502 meta["copy"] = cp
503 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
503 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
504 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
504 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
505 fp1, fp2 = nullid, nullid
505 fp1, fp2 = nullid, nullid
506 else:
506 else:
507 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
507 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
508 if entry:
508 if entry:
509 new[f] = entry
509 new[f] = entry
510 continue
510 continue
511
511
512 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
512 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
513 # remember what we've added so that we can later calculate
513 # remember what we've added so that we can later calculate
514 # the files to pull from a set of changesets
514 # the files to pull from a set of changesets
515 changed.append(f)
515 changed.append(f)
516
516
517 # update manifest
517 # update manifest
518 m1 = m1.copy()
518 m1 = m1.copy()
519 m1.update(new)
519 m1.update(new)
520 for f in remove:
520 for f in remove:
521 if f in m1:
521 if f in m1:
522 del m1[f]
522 del m1[f]
523 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
523 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
524 (new, remove))
524 (new, remove))
525
525
526 # add changeset
526 # add changeset
527 new = new.keys()
527 new = new.keys()
528 new.sort()
528 new.sort()
529
529
530 user = user or self.ui.username()
530 user = user or self.ui.username()
531 if not text:
531 if not text:
532 edittext = [""]
532 edittext = [""]
533 if p2 != nullid:
533 if p2 != nullid:
534 edittext.append("HG: branch merge")
534 edittext.append("HG: branch merge")
535 edittext.extend(["HG: changed %s" % f for f in changed])
535 edittext.extend(["HG: changed %s" % f for f in changed])
536 edittext.extend(["HG: removed %s" % f for f in remove])
536 edittext.extend(["HG: removed %s" % f for f in remove])
537 if not changed and not remove:
537 if not changed and not remove:
538 edittext.append("HG: no files changed")
538 edittext.append("HG: no files changed")
539 edittext.append("")
539 edittext.append("")
540 # run editor in the repository root
540 # run editor in the repository root
541 olddir = os.getcwd()
541 olddir = os.getcwd()
542 os.chdir(self.root)
542 os.chdir(self.root)
543 edittext = self.ui.edit("\n".join(edittext), user)
543 edittext = self.ui.edit("\n".join(edittext), user)
544 os.chdir(olddir)
544 os.chdir(olddir)
545 if not edittext.rstrip():
545 if not edittext.rstrip():
546 return None
546 return None
547 text = edittext
547 text = edittext
548
548
549 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
549 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
550 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
550 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
551 parent2=xp2)
551 parent2=xp2)
552 tr.close()
552 tr.close()
553
553
554 self.dirstate.setparents(n)
554 self.dirstate.setparents(n)
555 self.dirstate.update(new, "n")
555 self.dirstate.update(new, "n")
556 self.dirstate.forget(remove)
556 self.dirstate.forget(remove)
557
557
558 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
558 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
559 return n
559 return n
560
560
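When commit() is called without a message it builds the editor template from "HG:" lines and aborts if the edited text comes back empty. A small helper reproducing just the template assembly; commit_template is not a Mercurial function.

def commit_template(changed, removed, merge=False):
    lines = [""]
    if merge:
        lines.append("HG: branch merge")
    lines += ["HG: changed %s" % f for f in changed]
    lines += ["HG: removed %s" % f for f in removed]
    if not changed and not removed:
        lines.append("HG: no files changed")
    lines.append("")
    return "\n".join(lines)

print(commit_template(["a.txt"], [], merge=True))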
561 def walk(self, node=None, files=[], match=util.always, badmatch=None):
561 def walk(self, node=None, files=[], match=util.always, badmatch=None):
562 if node:
562 if node:
563 fdict = dict.fromkeys(files)
563 fdict = dict.fromkeys(files)
564 for fn in self.manifest.read(self.changelog.read(node)[0]):
564 for fn in self.manifest.read(self.changelog.read(node)[0]):
565 fdict.pop(fn, None)
565 fdict.pop(fn, None)
566 if match(fn):
566 if match(fn):
567 yield 'm', fn
567 yield 'm', fn
568 for fn in fdict:
568 for fn in fdict:
569 if badmatch and badmatch(fn):
569 if badmatch and badmatch(fn):
570 if match(fn):
570 if match(fn):
571 yield 'b', fn
571 yield 'b', fn
572 else:
572 else:
573 self.ui.warn(_('%s: No such file in rev %s\n') % (
573 self.ui.warn(_('%s: No such file in rev %s\n') % (
574 util.pathto(self.getcwd(), fn), short(node)))
574 util.pathto(self.getcwd(), fn), short(node)))
575 else:
575 else:
576 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
576 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
577 yield src, fn
577 yield src, fn
578
578
579 def changes(self, node1=None, node2=None, files=[], match=util.always,
579 def changes(self, node1=None, node2=None, files=[], match=util.always,
580 wlock=None, show_ignored=None):
580 wlock=None, show_ignored=None):
581 """return changes between two nodes or node and working directory
581 """return changes between two nodes or node and working directory
582
582
583 If node1 is None, use the first dirstate parent instead.
583 If node1 is None, use the first dirstate parent instead.
584 If node2 is None, compare node1 with working directory.
584 If node2 is None, compare node1 with working directory.
585 """
585 """
586
586
587 def fcmp(fn, mf):
587 def fcmp(fn, mf):
588 t1 = self.wread(fn)
588 t1 = self.wread(fn)
589 t2 = self.file(fn).read(mf.get(fn, nullid))
589 t2 = self.file(fn).read(mf.get(fn, nullid))
590 return cmp(t1, t2)
590 return cmp(t1, t2)
591
591
592 def mfmatches(node):
592 def mfmatches(node):
593 change = self.changelog.read(node)
593 change = self.changelog.read(node)
594 mf = dict(self.manifest.read(change[0]))
594 mf = dict(self.manifest.read(change[0]))
595 for fn in mf.keys():
595 for fn in mf.keys():
596 if not match(fn):
596 if not match(fn):
597 del mf[fn]
597 del mf[fn]
598 return mf
598 return mf
599
599
600 if node1:
600 if node1:
601 # read the manifest from node1 before the manifest from node2,
601 # read the manifest from node1 before the manifest from node2,
602 # so that we'll hit the manifest cache if we're going through
602 # so that we'll hit the manifest cache if we're going through
603 # all the revisions in parent->child order.
603 # all the revisions in parent->child order.
604 mf1 = mfmatches(node1)
604 mf1 = mfmatches(node1)
605
605
606 # are we comparing the working directory?
606 # are we comparing the working directory?
607 if not node2:
607 if not node2:
608 if not wlock:
608 if not wlock:
609 try:
609 try:
610 wlock = self.wlock(wait=0)
610 wlock = self.wlock(wait=0)
611 except lock.LockException:
611 except lock.LockException:
612 wlock = None
612 wlock = None
613 lookup, modified, added, removed, deleted, unknown, ignored = (
613 lookup, modified, added, removed, deleted, unknown, ignored = (
614 self.dirstate.changes(files, match, show_ignored))
614 self.dirstate.changes(files, match, show_ignored))
615
615
616 # are we comparing working dir against its parent?
616 # are we comparing working dir against its parent?
617 if not node1:
617 if not node1:
618 if lookup:
618 if lookup:
619 # do a full compare of any files that might have changed
619 # do a full compare of any files that might have changed
620 mf2 = mfmatches(self.dirstate.parents()[0])
620 mf2 = mfmatches(self.dirstate.parents()[0])
621 for f in lookup:
621 for f in lookup:
622 if fcmp(f, mf2):
622 if fcmp(f, mf2):
623 modified.append(f)
623 modified.append(f)
624 elif wlock is not None:
624 elif wlock is not None:
625 self.dirstate.update([f], "n")
625 self.dirstate.update([f], "n")
626 else:
626 else:
627 # we are comparing working dir against non-parent
627 # we are comparing working dir against non-parent
628 # generate a pseudo-manifest for the working dir
628 # generate a pseudo-manifest for the working dir
629 mf2 = mfmatches(self.dirstate.parents()[0])
629 mf2 = mfmatches(self.dirstate.parents()[0])
630 for f in lookup + modified + added:
630 for f in lookup + modified + added:
631 mf2[f] = ""
631 mf2[f] = ""
632 for f in removed:
632 for f in removed:
633 if f in mf2:
633 if f in mf2:
634 del mf2[f]
634 del mf2[f]
635 else:
635 else:
636 # we are comparing two revisions
636 # we are comparing two revisions
637 deleted, unknown, ignored = [], [], []
637 deleted, unknown, ignored = [], [], []
638 mf2 = mfmatches(node2)
638 mf2 = mfmatches(node2)
639
639
640 if node1:
640 if node1:
641 # flush lists from dirstate before comparing manifests
641 # flush lists from dirstate before comparing manifests
642 modified, added = [], []
642 modified, added = [], []
643
643
644 for fn in mf2:
644 for fn in mf2:
645 if mf1.has_key(fn):
645 if mf1.has_key(fn):
646 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
646 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
647 modified.append(fn)
647 modified.append(fn)
648 del mf1[fn]
648 del mf1[fn]
649 else:
649 else:
650 added.append(fn)
650 added.append(fn)
651
651
652 removed = mf1.keys()
652 removed = mf1.keys()
653
653
654 # sort and return results:
654 # sort and return results:
655 for l in modified, added, removed, deleted, unknown, ignored:
655 for l in modified, added, removed, deleted, unknown, ignored:
656 l.sort()
656 l.sort()
657 if show_ignored is None:
657 if show_ignored is None:
658 return (modified, added, removed, deleted, unknown)
658 return (modified, added, removed, deleted, unknown)
659 else:
659 else:
660 return (modified, added, removed, deleted, unknown, ignored)
660 return (modified, added, removed, deleted, unknown, ignored)
661
661
662 def add(self, list, wlock=None):
662 def add(self, list, wlock=None):
663 if not wlock:
663 if not wlock:
664 wlock = self.wlock()
664 wlock = self.wlock()
665 for f in list:
665 for f in list:
666 p = self.wjoin(f)
666 p = self.wjoin(f)
667 if not os.path.exists(p):
667 if not os.path.exists(p):
668 self.ui.warn(_("%s does not exist!\n") % f)
668 self.ui.warn(_("%s does not exist!\n") % f)
669 elif not os.path.isfile(p):
669 elif not os.path.isfile(p):
670 self.ui.warn(_("%s not added: only files supported currently\n")
670 self.ui.warn(_("%s not added: only files supported currently\n")
671 % f)
671 % f)
672 elif self.dirstate.state(f) in 'an':
672 elif self.dirstate.state(f) in 'an':
673 self.ui.warn(_("%s already tracked!\n") % f)
673 self.ui.warn(_("%s already tracked!\n") % f)
674 else:
674 else:
675 self.dirstate.update([f], "a")
675 self.dirstate.update([f], "a")
676
676
677 def forget(self, list, wlock=None):
677 def forget(self, list, wlock=None):
678 if not wlock:
678 if not wlock:
679 wlock = self.wlock()
679 wlock = self.wlock()
680 for f in list:
680 for f in list:
681 if self.dirstate.state(f) not in 'ai':
681 if self.dirstate.state(f) not in 'ai':
682 self.ui.warn(_("%s not added!\n") % f)
682 self.ui.warn(_("%s not added!\n") % f)
683 else:
683 else:
684 self.dirstate.forget([f])
684 self.dirstate.forget([f])
685
685
686 def remove(self, list, unlink=False, wlock=None):
686 def remove(self, list, unlink=False, wlock=None):
687 if unlink:
687 if unlink:
688 for f in list:
688 for f in list:
689 try:
689 try:
690 util.unlink(self.wjoin(f))
690 util.unlink(self.wjoin(f))
691 except OSError, inst:
691 except OSError, inst:
692 if inst.errno != errno.ENOENT:
692 if inst.errno != errno.ENOENT:
693 raise
693 raise
694 if not wlock:
694 if not wlock:
695 wlock = self.wlock()
695 wlock = self.wlock()
696 for f in list:
696 for f in list:
697 p = self.wjoin(f)
697 p = self.wjoin(f)
698 if os.path.exists(p):
698 if os.path.exists(p):
699 self.ui.warn(_("%s still exists!\n") % f)
699 self.ui.warn(_("%s still exists!\n") % f)
700 elif self.dirstate.state(f) == 'a':
700 elif self.dirstate.state(f) == 'a':
701 self.dirstate.forget([f])
701 self.dirstate.forget([f])
702 elif f not in self.dirstate:
702 elif f not in self.dirstate:
703 self.ui.warn(_("%s not tracked!\n") % f)
703 self.ui.warn(_("%s not tracked!\n") % f)
704 else:
704 else:
705 self.dirstate.update([f], "r")
705 self.dirstate.update([f], "r")
706
706
707 def undelete(self, list, wlock=None):
707 def undelete(self, list, wlock=None):
708 p = self.dirstate.parents()[0]
708 p = self.dirstate.parents()[0]
709 mn = self.changelog.read(p)[0]
709 mn = self.changelog.read(p)[0]
710 mf = self.manifest.readflags(mn)
710 mf = self.manifest.readflags(mn)
711 m = self.manifest.read(mn)
711 m = self.manifest.read(mn)
712 if not wlock:
712 if not wlock:
713 wlock = self.wlock()
713 wlock = self.wlock()
714 for f in list:
714 for f in list:
715 if self.dirstate.state(f) not in "r":
715 if self.dirstate.state(f) not in "r":
716 self.ui.warn("%s not removed!\n" % f)
716 self.ui.warn("%s not removed!\n" % f)
717 else:
717 else:
718 t = self.file(f).read(m[f])
718 t = self.file(f).read(m[f])
719 self.wwrite(f, t)
719 self.wwrite(f, t)
720 util.set_exec(self.wjoin(f), mf[f])
720 util.set_exec(self.wjoin(f), mf[f])
721 self.dirstate.update([f], "n")
721 self.dirstate.update([f], "n")
722
722
723 def copy(self, source, dest, wlock=None):
723 def copy(self, source, dest, wlock=None):
724 p = self.wjoin(dest)
724 p = self.wjoin(dest)
725 if not os.path.exists(p):
725 if not os.path.exists(p):
726 self.ui.warn(_("%s does not exist!\n") % dest)
726 self.ui.warn(_("%s does not exist!\n") % dest)
727 elif not os.path.isfile(p):
727 elif not os.path.isfile(p):
728 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
728 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
729 else:
729 else:
730 if not wlock:
730 if not wlock:
731 wlock = self.wlock()
731 wlock = self.wlock()
732 if self.dirstate.state(dest) == '?':
732 if self.dirstate.state(dest) == '?':
733 self.dirstate.update([dest], "a")
733 self.dirstate.update([dest], "a")
734 self.dirstate.copy(source, dest)
734 self.dirstate.copy(source, dest)
735
735
736 def heads(self, start=None):
736 def heads(self, start=None):
737 heads = self.changelog.heads(start)
737 heads = self.changelog.heads(start)
738 # sort the output in rev descending order
738 # sort the output in rev descending order
739 heads = [(-self.changelog.rev(h), h) for h in heads]
739 heads = [(-self.changelog.rev(h), h) for h in heads]
740 heads.sort()
740 heads.sort()
741 return [n for (r, n) in heads]
741 return [n for (r, n) in heads]
742
742
743 # branchlookup returns a dict giving a list of branches for
743 # branchlookup returns a dict giving a list of branches for
744 # each head. A branch is defined as the tag of a node or
744 # each head. A branch is defined as the tag of a node or
745 # the branch of the node's parents. If a node has multiple
745 # the branch of the node's parents. If a node has multiple
746 # branch tags, tags are eliminated if they are visible from other
746 # branch tags, tags are eliminated if they are visible from other
747 # branch tags.
747 # branch tags.
748 #
748 #
749 # So, for this graph:  a->b->c->d->e
749 # So, for this graph:  a->b->c->d->e
750 #                       \         /
750 #                       \         /
751 #                        aa -----/
751 #                        aa -----/
752 # a has tag 2.6.12
752 # a has tag 2.6.12
753 # d has tag 2.6.13
753 # d has tag 2.6.13
754 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
754 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
755 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
755 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
756 # from the list.
756 # from the list.
757 #
757 #
758 # It is possible that more than one head will have the same branch tag.
758 # It is possible that more than one head will have the same branch tag.
759 # Callers need to check the result for multiple heads under the same
759 # Callers need to check the result for multiple heads under the same
760 # branch tag if that is a problem for them (i.e. checkout of a specific
760 # branch tag if that is a problem for them (i.e. checkout of a specific
761 # branch).
761 # branch).
762 #
762 #
763 # passing in a specific branch will limit the depth of the search
763 # passing in a specific branch will limit the depth of the search
764 # through the parents. It won't limit the branches returned in the
764 # through the parents. It won't limit the branches returned in the
765 # result though.
765 # result though.
766 def branchlookup(self, heads=None, branch=None):
766 def branchlookup(self, heads=None, branch=None):
767 if not heads:
767 if not heads:
768 heads = self.heads()
768 heads = self.heads()
769 headt = [ h for h in heads ]
769 headt = [ h for h in heads ]
770 chlog = self.changelog
770 chlog = self.changelog
771 branches = {}
771 branches = {}
772 merges = []
772 merges = []
773 seenmerge = {}
773 seenmerge = {}
774
774
775 # traverse the tree once for each head, recording in the branches
775 # traverse the tree once for each head, recording in the branches
776 # dict which tags are visible from this head. The branches
776 # dict which tags are visible from this head. The branches
777 # dict also records which tags are visible from each tag
777 # dict also records which tags are visible from each tag
778 # while we traverse.
778 # while we traverse.
779 while headt or merges:
779 while headt or merges:
780 if merges:
780 if merges:
781 n, found = merges.pop()
781 n, found = merges.pop()
782 visit = [n]
782 visit = [n]
783 else:
783 else:
784 h = headt.pop()
784 h = headt.pop()
785 visit = [h]
785 visit = [h]
786 found = [h]
786 found = [h]
787 seen = {}
787 seen = {}
788 while visit:
788 while visit:
789 n = visit.pop()
789 n = visit.pop()
790 if n in seen:
790 if n in seen:
791 continue
791 continue
792 pp = chlog.parents(n)
792 pp = chlog.parents(n)
793 tags = self.nodetags(n)
793 tags = self.nodetags(n)
794 if tags:
794 if tags:
795 for x in tags:
795 for x in tags:
796 if x == 'tip':
796 if x == 'tip':
797 continue
797 continue
798 for f in found:
798 for f in found:
799 branches.setdefault(f, {})[n] = 1
799 branches.setdefault(f, {})[n] = 1
800 branches.setdefault(n, {})[n] = 1
800 branches.setdefault(n, {})[n] = 1
801 break
801 break
802 if n not in found:
802 if n not in found:
803 found.append(n)
803 found.append(n)
804 if branch in tags:
804 if branch in tags:
805 continue
805 continue
806 seen[n] = 1
806 seen[n] = 1
807 if pp[1] != nullid and n not in seenmerge:
807 if pp[1] != nullid and n not in seenmerge:
808 merges.append((pp[1], [x for x in found]))
808 merges.append((pp[1], [x for x in found]))
809 seenmerge[n] = 1
809 seenmerge[n] = 1
810 if pp[0] != nullid:
810 if pp[0] != nullid:
811 visit.append(pp[0])
811 visit.append(pp[0])
812 # traverse the branches dict, eliminating branch tags from each
812 # traverse the branches dict, eliminating branch tags from each
813 # head that are visible from another branch tag for that head.
813 # head that are visible from another branch tag for that head.
814 out = {}
814 out = {}
815 viscache = {}
815 viscache = {}
816 for h in heads:
816 for h in heads:
817 def visible(node):
817 def visible(node):
818 if node in viscache:
818 if node in viscache:
819 return viscache[node]
819 return viscache[node]
820 ret = {}
820 ret = {}
821 visit = [node]
821 visit = [node]
822 while visit:
822 while visit:
823 x = visit.pop()
823 x = visit.pop()
824 if x in viscache:
824 if x in viscache:
825 ret.update(viscache[x])
825 ret.update(viscache[x])
826 elif x not in ret:
826 elif x not in ret:
827 ret[x] = 1
827 ret[x] = 1
828 if x in branches:
828 if x in branches:
829 visit[len(visit):] = branches[x].keys()
829 visit[len(visit):] = branches[x].keys()
830 viscache[node] = ret
830 viscache[node] = ret
831 return ret
831 return ret
832 if h not in branches:
832 if h not in branches:
833 continue
833 continue
834 # O(n^2), but somewhat limited. This only searches the
834 # O(n^2), but somewhat limited. This only searches the
835 # tags visible from a specific head, not all the tags in the
835 # tags visible from a specific head, not all the tags in the
836 # whole repo.
836 # whole repo.
837 for b in branches[h]:
837 for b in branches[h]:
838 vis = False
838 vis = False
839 for bb in branches[h].keys():
839 for bb in branches[h].keys():
840 if b != bb:
840 if b != bb:
841 if b in visible(bb):
841 if b in visible(bb):
842 vis = True
842 vis = True
843 break
843 break
844 if not vis:
844 if not vis:
845 l = out.setdefault(h, [])
845 l = out.setdefault(h, [])
846 l[len(l):] = self.nodetags(b)
846 l[len(l):] = self.nodetags(b)
847 return out
847 return out
848
848
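The comment above branchlookup() is easiest to see on its own example graph: both 2.6.12 and 2.6.13 are reachable from head e, but the 2.6.12 node is an ancestor of the 2.6.13 node, so only 2.6.13 survives as e's branch tag. A standalone computation of that rule using plain dicts and a reachability helper, nothing from the class above:

parents = {"b": ["a"], "c": ["b"], "d": ["c"], "e": ["d", "aa"], "a": [], "aa": []}
tags = {"a": "2.6.12", "d": "2.6.13"}

def ancestors(node):
    seen, stack = set(), [node]
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(parents.get(n, []))
    return seen

tagged = [n for n in ancestors("e") if n in tags]
visible = [tags[n] for n in tagged
           if not any(n in ancestors(m) and m != n for m in tagged)]
print(visible)   # ['2.6.13'] -- 2.6.12 is eliminated because 2.6.13 can see it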
849 def branches(self, nodes):
849 def branches(self, nodes):
850 if not nodes:
850 if not nodes:
851 nodes = [self.changelog.tip()]
851 nodes = [self.changelog.tip()]
852 b = []
852 b = []
853 for n in nodes:
853 for n in nodes:
854 t = n
854 t = n
855 while n:
855 while n:
856 p = self.changelog.parents(n)
856 p = self.changelog.parents(n)
857 if p[1] != nullid or p[0] == nullid:
857 if p[1] != nullid or p[0] == nullid:
858 b.append((t, n, p[0], p[1]))
858 b.append((t, n, p[0], p[1]))
859 break
859 break
860 n = p[0]
860 n = p[0]
861 return b
861 return b
862
862
863 def between(self, pairs):
863 def between(self, pairs):
864 r = []
864 r = []
865
865
866 for top, bottom in pairs:
866 for top, bottom in pairs:
867 n, l, i = top, [], 0
867 n, l, i = top, [], 0
868 f = 1
868 f = 1
869
869
870 while n != bottom:
870 while n != bottom:
871 p = self.changelog.parents(n)[0]
871 p = self.changelog.parents(n)[0]
872 if i == f:
872 if i == f:
873 l.append(n)
873 l.append(n)
874 f = f * 2
874 f = f * 2
875 n = p
875 n = p
876 i += 1
876 i += 1
877
877
878 r.append(l)
878 r.append(l)
879
879
880 return r
880 return r
881
881
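between() walks the first-parent chain from top towards bottom and keeps the nodes at exponentially growing distances (1, 2, 4, 8, ...), which findincoming() later uses as a skip list for its narrowing search. The toy below runs the same loop over a plain dict so the sampled positions are visible; the chain mapping stands in for changelog parents.

def between(chain, top, bottom):
    n, l, i, f = top, [], 0, 1
    while n != bottom:
        if i == f:
            l.append(n)
            f *= 2
        n = chain[n]      # chain maps each node to its first parent
        i += 1
    return l

chain = {k: k - 1 for k in range(1, 11)}   # 10 -> 9 -> ... -> 0
print(between(chain, 10, 0))               # [9, 8, 6, 2]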
882 def findincoming(self, remote, base=None, heads=None, force=False):
882 def findincoming(self, remote, base=None, heads=None, force=False):
883 m = self.changelog.nodemap
883 m = self.changelog.nodemap
884 search = []
884 search = []
885 fetch = {}
885 fetch = {}
886 seen = {}
886 seen = {}
887 seenbranch = {}
887 seenbranch = {}
888 if base == None:
888 if base == None:
889 base = {}
889 base = {}
890
890
891 if not heads:
891 if not heads:
892 heads = remote.heads()
892 heads = remote.heads()
893
893
894 if self.changelog.tip() == nullid:
894 if self.changelog.tip() == nullid:
895 if heads != [nullid]:
895 if heads != [nullid]:
896 return [nullid]
896 return [nullid]
897 return []
897 return []
898
898
899 # assume we're closer to the tip than the root
899 # assume we're closer to the tip than the root
900 # and start by examining the heads
900 # and start by examining the heads
901 self.ui.status(_("searching for changes\n"))
901 self.ui.status(_("searching for changes\n"))
902
902
903 unknown = []
903 unknown = []
904 for h in heads:
904 for h in heads:
905 if h not in m:
905 if h not in m:
906 unknown.append(h)
906 unknown.append(h)
907 else:
907 else:
908 base[h] = 1
908 base[h] = 1
909
909
910 if not unknown:
910 if not unknown:
911 return []
911 return []
912
912
913 rep = {}
913 rep = {}
914 reqcnt = 0
914 reqcnt = 0
915
915
916 # search through remote branches
916 # search through remote branches
917 # a 'branch' here is a linear segment of history, with four parts:
917 # a 'branch' here is a linear segment of history, with four parts:
918 # head, root, first parent, second parent
918 # head, root, first parent, second parent
919 # (a branch always has two parents (or none) by definition)
919 # (a branch always has two parents (or none) by definition)
920 unknown = remote.branches(unknown)
920 unknown = remote.branches(unknown)
921 while unknown:
921 while unknown:
922 r = []
922 r = []
923 while unknown:
923 while unknown:
924 n = unknown.pop(0)
924 n = unknown.pop(0)
925 if n[0] in seen:
925 if n[0] in seen:
926 continue
926 continue
927
927
928 self.ui.debug(_("examining %s:%s\n")
928 self.ui.debug(_("examining %s:%s\n")
929 % (short(n[0]), short(n[1])))
929 % (short(n[0]), short(n[1])))
930 if n[0] == nullid:
930 if n[0] == nullid:
931 break
931 break
932 if n in seenbranch:
932 if n in seenbranch:
933 self.ui.debug(_("branch already found\n"))
933 self.ui.debug(_("branch already found\n"))
934 continue
934 continue
935 if n[1] and n[1] in m: # do we know the base?
935 if n[1] and n[1] in m: # do we know the base?
936 self.ui.debug(_("found incomplete branch %s:%s\n")
936 self.ui.debug(_("found incomplete branch %s:%s\n")
937 % (short(n[0]), short(n[1])))
937 % (short(n[0]), short(n[1])))
938 search.append(n) # schedule branch range for scanning
938 search.append(n) # schedule branch range for scanning
939 seenbranch[n] = 1
939 seenbranch[n] = 1
940 else:
940 else:
941 if n[1] not in seen and n[1] not in fetch:
941 if n[1] not in seen and n[1] not in fetch:
942 if n[2] in m and n[3] in m:
942 if n[2] in m and n[3] in m:
943 self.ui.debug(_("found new changeset %s\n") %
943 self.ui.debug(_("found new changeset %s\n") %
944 short(n[1]))
944 short(n[1]))
945 fetch[n[1]] = 1 # earliest unknown
945 fetch[n[1]] = 1 # earliest unknown
946 base[n[2]] = 1 # latest known
946 base[n[2]] = 1 # latest known
947 continue
947 continue
948
948
949 for a in n[2:4]:
949 for a in n[2:4]:
950 if a not in rep:
950 if a not in rep:
951 r.append(a)
951 r.append(a)
952 rep[a] = 1
952 rep[a] = 1
953
953
954 seen[n[0]] = 1
954 seen[n[0]] = 1
955
955
956 if r:
956 if r:
957 reqcnt += 1
957 reqcnt += 1
958 self.ui.debug(_("request %d: %s\n") %
958 self.ui.debug(_("request %d: %s\n") %
959 (reqcnt, " ".join(map(short, r))))
959 (reqcnt, " ".join(map(short, r))))
960 for p in range(0, len(r), 10):
960 for p in range(0, len(r), 10):
961 for b in remote.branches(r[p:p+10]):
961 for b in remote.branches(r[p:p+10]):
962 self.ui.debug(_("received %s:%s\n") %
962 self.ui.debug(_("received %s:%s\n") %
963 (short(b[0]), short(b[1])))
963 (short(b[0]), short(b[1])))
964 if b[0] in m:
964 if b[0] in m:
965 self.ui.debug(_("found base node %s\n")
965 self.ui.debug(_("found base node %s\n")
966 % short(b[0]))
966 % short(b[0]))
967 base[b[0]] = 1
967 base[b[0]] = 1
968 elif b[0] not in seen:
968 elif b[0] not in seen:
969 unknown.append(b)
969 unknown.append(b)
970
970
971 # do binary search on the branches we found
971 # do binary search on the branches we found
972 while search:
972 while search:
973 n = search.pop(0)
973 n = search.pop(0)
974 reqcnt += 1
974 reqcnt += 1
975 l = remote.between([(n[0], n[1])])[0]
975 l = remote.between([(n[0], n[1])])[0]
976 l.append(n[1])
976 l.append(n[1])
977 p = n[0]
977 p = n[0]
978 f = 1
978 f = 1
979 for i in l:
979 for i in l:
980 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
980 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
981 if i in m:
981 if i in m:
982 if f <= 2:
982 if f <= 2:
983 self.ui.debug(_("found new branch changeset %s\n") %
983 self.ui.debug(_("found new branch changeset %s\n") %
984 short(p))
984 short(p))
985 fetch[p] = 1
985 fetch[p] = 1
986 base[i] = 1
986 base[i] = 1
987 else:
987 else:
988 self.ui.debug(_("narrowed branch search to %s:%s\n")
988 self.ui.debug(_("narrowed branch search to %s:%s\n")
989 % (short(p), short(i)))
989 % (short(p), short(i)))
990 search.append((p, i))
990 search.append((p, i))
991 break
991 break
992 p, f = i, f * 2
992 p, f = i, f * 2
993
993
994 # sanity check our fetch list
994 # sanity check our fetch list
995 for f in fetch.keys():
995 for f in fetch.keys():
996 if f in m:
996 if f in m:
997 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
997 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
998
998
999 if base.keys() == [nullid]:
999 if base.keys() == [nullid]:
1000 if force:
1000 if force:
1001 self.ui.warn(_("warning: repository is unrelated\n"))
1001 self.ui.warn(_("warning: repository is unrelated\n"))
1002 else:
1002 else:
1003 raise util.Abort(_("repository is unrelated"))
1003 raise util.Abort(_("repository is unrelated"))
1004
1004
1005 self.ui.note(_("found new changesets starting at ") +
1005 self.ui.note(_("found new changesets starting at ") +
1006 " ".join([short(f) for f in fetch]) + "\n")
1006 " ".join([short(f) for f in fetch]) + "\n")
1007
1007
1008 self.ui.debug(_("%d total queries\n") % reqcnt)
1008 self.ui.debug(_("%d total queries\n") % reqcnt)
1009
1009
1010 return fetch.keys()
1010 return fetch.keys()
1011
1011
1012 def findoutgoing(self, remote, base=None, heads=None, force=False):
1012 def findoutgoing(self, remote, base=None, heads=None, force=False):
1013 """Return list of nodes that are roots of subsets not in remote
1013 """Return list of nodes that are roots of subsets not in remote
1014
1014
1015 If base dict is specified, assume that these nodes and their parents
1015 If base dict is specified, assume that these nodes and their parents
1016 exist on the remote side.
1016 exist on the remote side.
1017 If a list of heads is specified, return only nodes which are heads
1017 If a list of heads is specified, return only nodes which are heads
1018 or ancestors of these heads, and return a second element which
1018 or ancestors of these heads, and return a second element which
1019 contains all remote heads which get new children.
1019 contains all remote heads which get new children.
1020 """
1020 """
1021 if base == None:
1021 if base == None:
1022 base = {}
1022 base = {}
1023 self.findincoming(remote, base, heads, force=force)
1023 self.findincoming(remote, base, heads, force=force)
1024
1024
1025 self.ui.debug(_("common changesets up to ")
1025 self.ui.debug(_("common changesets up to ")
1026 + " ".join(map(short, base.keys())) + "\n")
1026 + " ".join(map(short, base.keys())) + "\n")
1027
1027
1028 remain = dict.fromkeys(self.changelog.nodemap)
1028 remain = dict.fromkeys(self.changelog.nodemap)
1029
1029
1030 # prune everything remote has from the tree
1030 # prune everything remote has from the tree
1031 del remain[nullid]
1031 del remain[nullid]
1032 remove = base.keys()
1032 remove = base.keys()
1033 while remove:
1033 while remove:
1034 n = remove.pop(0)
1034 n = remove.pop(0)
1035 if n in remain:
1035 if n in remain:
1036 del remain[n]
1036 del remain[n]
1037 for p in self.changelog.parents(n):
1037 for p in self.changelog.parents(n):
1038 remove.append(p)
1038 remove.append(p)
1039
1039
1040 # find every node whose parents have been pruned
1040 # find every node whose parents have been pruned
1041 subset = []
1041 subset = []
1042 # find every remote head that will get new children
1042 # find every remote head that will get new children
1043 updated_heads = {}
1043 updated_heads = {}
1044 for n in remain:
1044 for n in remain:
1045 p1, p2 = self.changelog.parents(n)
1045 p1, p2 = self.changelog.parents(n)
1046 if p1 not in remain and p2 not in remain:
1046 if p1 not in remain and p2 not in remain:
1047 subset.append(n)
1047 subset.append(n)
1048 if heads:
1048 if heads:
1049 if p1 in heads:
1049 if p1 in heads:
1050 updated_heads[p1] = True
1050 updated_heads[p1] = True
1051 if p2 in heads:
1051 if p2 in heads:
1052 updated_heads[p2] = True
1052 updated_heads[p2] = True
1053
1053
1054 # this is the set of all roots we have to push
1054 # this is the set of all roots we have to push
1055 if heads:
1055 if heads:
1056 return subset, updated_heads.keys()
1056 return subset, updated_heads.keys()
1057 else:
1057 else:
1058 return subset
1058 return subset
1059
1059
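findoutgoing() prunes everything the remote already has (the base nodes and all of their ancestors) from the local node set and then keeps the nodes whose parents were all pruned; those are the roots of what must be pushed. A miniature version on a hand-written parent map (the function name and the example graph are invented):

def outgoing_roots(parents, base):
    remain = set(parents)                 # parents: node -> tuple of parents
    remove = list(base)
    while remove:
        n = remove.pop()
        if n in remain:
            remain.discard(n)
            remove.extend(parents[n])
    return [n for n in remain
            if not any(p in remain for p in parents[n])]

parents = {"a": (), "b": ("a",), "c": ("b",), "d": ("c",), "e": ("c",)}
print(sorted(outgoing_roots(parents, base=["b"])))   # ['c']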
1060 def pull(self, remote, heads=None, force=False):
1060 def pull(self, remote, heads=None, force=False):
1061 l = self.lock()
1061 l = self.lock()
1062
1062
1063 fetch = self.findincoming(remote, force=force)
1063 fetch = self.findincoming(remote, force=force)
1064 if fetch == [nullid]:
1064 if fetch == [nullid]:
1065 self.ui.status(_("requesting all changes\n"))
1065 self.ui.status(_("requesting all changes\n"))
1066
1066
1067 if not fetch:
1067 if not fetch:
1068 self.ui.status(_("no changes found\n"))
1068 self.ui.status(_("no changes found\n"))
1069 return 0
1069 return 0
1070
1070
1071 if heads is None:
1071 if heads is None:
1072 cg = remote.changegroup(fetch, 'pull')
1072 cg = remote.changegroup(fetch, 'pull')
1073 else:
1073 else:
1074 cg = remote.changegroupsubset(fetch, heads, 'pull')
1074 cg = remote.changegroupsubset(fetch, heads, 'pull')
1075 return self.addchangegroup(cg)
1075 return self.addchangegroup(cg)
1076
1076
1077 def push(self, remote, force=False, revs=None):
1077 def push(self, remote, force=False, revs=None):
1078 lock = remote.lock()
1078 lock = remote.lock()
1079
1079
1080 base = {}
1080 base = {}
1081 remote_heads = remote.heads()
1081 remote_heads = remote.heads()
1082 inc = self.findincoming(remote, base, remote_heads, force=force)
1082 inc = self.findincoming(remote, base, remote_heads, force=force)
1083 if not force and inc:
1083 if not force and inc:
1084 self.ui.warn(_("abort: unsynced remote changes!\n"))
1084 self.ui.warn(_("abort: unsynced remote changes!\n"))
1085 self.ui.status(_("(did you forget to sync?"
1085 self.ui.status(_("(did you forget to sync?"
1086 " use push -f to force)\n"))
1086 " use push -f to force)\n"))
1087 return 1
1087 return 1
1088
1088
1089 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1089 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1090 if revs is not None:
1090 if revs is not None:
1091 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1091 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1092 else:
1092 else:
1093 bases, heads = update, self.changelog.heads()
1093 bases, heads = update, self.changelog.heads()
1094
1094
1095 if not bases:
1095 if not bases:
1096 self.ui.status(_("no changes found\n"))
1096 self.ui.status(_("no changes found\n"))
1097 return 1
1097 return 1
1098 elif not force:
1098 elif not force:
1099 # FIXME we don't properly detect creation of new heads
1099 # FIXME we don't properly detect creation of new heads
1100 # in the push -r case, assume the user knows what he's doing
1100 # in the push -r case, assume the user knows what he's doing
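# The check below is only a head-count heuristic: if the push would leave
# the remote with more heads than it currently has (and the remote is not
# empty, i.e. not just nullid), we assume a new remote branch head would
# be created and abort unless -f is given.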
1101 if not revs and len(remote_heads) < len(heads) \
1101 if not revs and len(remote_heads) < len(heads) \
1102 and remote_heads != [nullid]:
1102 and remote_heads != [nullid]:
1103 self.ui.warn(_("abort: push creates new remote branches!\n"))
1103 self.ui.warn(_("abort: push creates new remote branches!\n"))
1104 self.ui.status(_("(did you forget to merge?"
1104 self.ui.status(_("(did you forget to merge?"
1105 " use push -f to force)\n"))
1105 " use push -f to force)\n"))
1106 return 1
1106 return 1
1107
1107
1108 if revs is None:
1108 if revs is None:
1109 cg = self.changegroup(update, 'push')
1109 cg = self.changegroup(update, 'push')
1110 else:
1110 else:
1111 cg = self.changegroupsubset(update, revs, 'push')
1111 cg = self.changegroupsubset(update, revs, 'push')
1112 return remote.addchangegroup(cg)
1112 return remote.addchangegroup(cg)
1113
1113
1114 def changegroupsubset(self, bases, heads, source):
1114 def changegroupsubset(self, bases, heads, source):
1115 """This function generates a changegroup consisting of all the nodes
1115 """This function generates a changegroup consisting of all the nodes
1116 that are descendants of any of the bases, and ancestors of any of
1116 that are descendants of any of the bases, and ancestors of any of
1117 the heads.
1117 the heads.
1118
1118
1119 It is fairly complex as determining which filenodes and which
1119 It is fairly complex as determining which filenodes and which
1120 manifest nodes need to be included for the changeset to be complete
1120 manifest nodes need to be included for the changeset to be complete
1121 is non-trivial.
1121 is non-trivial.
1122
1122
1123 Another wrinkle is doing the reverse, figuring out which changeset in
1123 Another wrinkle is doing the reverse, figuring out which changeset in
1124 the changegroup a particular filenode or manifestnode belongs to."""
1124 the changegroup a particular filenode or manifestnode belongs to."""
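# Roughly, gengroup() below emits the changegroup stream in this order:
# 1. the missing changelog entries,
# 2. the missing manifest entries,
# 3. for each changed file, a chunk naming the file followed by its
#    missing filenodes,
# 4. a closing chunk marking the end of the stream.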
1125
1125
1126 self.hook('preoutgoing', throw=True, source=source)
1126 self.hook('preoutgoing', throw=True, source=source)
1127
1127
1128 # Set up some initial variables
1128 # Set up some initial variables
1129 # Make it easy to refer to self.changelog
1129 # Make it easy to refer to self.changelog
1130 cl = self.changelog
1130 cl = self.changelog
1131 # msng is short for missing - compute the list of changesets in this
1131 # msng is short for missing - compute the list of changesets in this
1132 # changegroup.
1132 # changegroup.
1133 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1133 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1134 # Some bases may turn out to be superfluous, and some heads may be
1134 # Some bases may turn out to be superfluous, and some heads may be
1135 # too. nodesbetween will return the minimal set of bases and heads
1135 # too. nodesbetween will return the minimal set of bases and heads
1136 # necessary to re-create the changegroup.
1136 # necessary to re-create the changegroup.
1137
1137
1138 # Known heads are the list of heads that it is assumed the recipient
1138 # Known heads are the list of heads that it is assumed the recipient
1139 # of this changegroup will know about.
1139 # of this changegroup will know about.
1140 knownheads = {}
1140 knownheads = {}
1141 # We assume that all parents of bases are known heads.
1141 # We assume that all parents of bases are known heads.
1142 for n in bases:
1142 for n in bases:
1143 for p in cl.parents(n):
1143 for p in cl.parents(n):
1144 if p != nullid:
1144 if p != nullid:
1145 knownheads[p] = 1
1145 knownheads[p] = 1
1146 knownheads = knownheads.keys()
1146 knownheads = knownheads.keys()
1147 if knownheads:
1147 if knownheads:
1148 # Now that we know what heads are known, we can compute which
1148 # Now that we know what heads are known, we can compute which
1149 # changesets are known. The recipient must know about all
1149 # changesets are known. The recipient must know about all
1150 # changesets required to reach the known heads from the null
1150 # changesets required to reach the known heads from the null
1151 # changeset.
1151 # changeset.
1152 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1152 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1153 junk = None
1153 junk = None
1154 # Transform the list into an ersatz set.
1154 # Transform the list into an ersatz set.
1155 has_cl_set = dict.fromkeys(has_cl_set)
1155 has_cl_set = dict.fromkeys(has_cl_set)
1156 else:
1156 else:
1157 # If there were no known heads, the recipient cannot be assumed to
1157 # If there were no known heads, the recipient cannot be assumed to
1158 # know about any changesets.
1158 # know about any changesets.
1159 has_cl_set = {}
1159 has_cl_set = {}
1160
1160
1161 # Make it easy to refer to self.manifest
1161 # Make it easy to refer to self.manifest
1162 mnfst = self.manifest
1162 mnfst = self.manifest
1163 # We don't know which manifests are missing yet
1163 # We don't know which manifests are missing yet
1164 msng_mnfst_set = {}
1164 msng_mnfst_set = {}
1165 # Nor do we know which filenodes are missing.
1165 # Nor do we know which filenodes are missing.
1166 msng_filenode_set = {}
1166 msng_filenode_set = {}
1167
1167
1168 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1168 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1169 junk = None
1169 junk = None
1170
1170
1171 # A changeset always belongs to itself, so the changenode lookup
1171 # A changeset always belongs to itself, so the changenode lookup
1172 # function for a changenode is identity.
1172 # function for a changenode is identity.
1173 def identity(x):
1173 def identity(x):
1174 return x
1174 return x
1175
1175
1176 # A function generating function. Sets up an environment for the
1176 # A function generating function. Sets up an environment for the
1177 # inner function.
1177 # inner function.
1178 def cmp_by_rev_func(revlog):
1178 def cmp_by_rev_func(revlog):
1179 # Compare two nodes by their revision number in the environment's
1179 # Compare two nodes by their revision number in the environment's
1180 # revision history. Since the revision number both represents the
1180 # revision history. Since the revision number both represents the
1181 # most efficient order to read the nodes in, and represents a
1181 # most efficient order to read the nodes in, and represents a
1182 # topological sorting of the nodes, this function is often useful.
1182 # topological sorting of the nodes, this function is often useful.
1183 def cmp_by_rev(a, b):
1183 def cmp_by_rev(a, b):
1184 return cmp(revlog.rev(a), revlog.rev(b))
1184 return cmp(revlog.rev(a), revlog.rev(b))
1185 return cmp_by_rev
1185 return cmp_by_rev
1186
1186
1187 # If we determine that a particular file or manifest node must be a
1187 # If we determine that a particular file or manifest node must be a
1188 # node that the recipient of the changegroup will already have, we can
1188 # node that the recipient of the changegroup will already have, we can
1189 # also assume the recipient will have all the parents. This function
1189 # also assume the recipient will have all the parents. This function
1190 # prunes them from the set of missing nodes.
1190 # prunes them from the set of missing nodes.
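# For example, if the recipient is known to have a particular manifest,
# it must also have that manifest's parents, grandparents, and so on,
# so none of those ancestors need to be sent.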
1191 def prune_parents(revlog, hasset, msngset):
1191 def prune_parents(revlog, hasset, msngset):
1192 haslst = hasset.keys()
1192 haslst = hasset.keys()
1193 haslst.sort(cmp_by_rev_func(revlog))
1193 haslst.sort(cmp_by_rev_func(revlog))
1194 for node in haslst:
1194 for node in haslst:
1195 parentlst = [p for p in revlog.parents(node) if p != nullid]
1195 parentlst = [p for p in revlog.parents(node) if p != nullid]
1196 while parentlst:
1196 while parentlst:
1197 n = parentlst.pop()
1197 n = parentlst.pop()
1198 if n not in hasset:
1198 if n not in hasset:
1199 hasset[n] = 1
1199 hasset[n] = 1
1200 p = [p for p in revlog.parents(n) if p != nullid]
1200 p = [p for p in revlog.parents(n) if p != nullid]
1201 parentlst.extend(p)
1201 parentlst.extend(p)
1202 for n in hasset:
1202 for n in hasset:
1203 msngset.pop(n, None)
1203 msngset.pop(n, None)
1204
1204
1205 # This is a function generating function used to set up an environment
1205 # This is a function generating function used to set up an environment
1206 # for the inner function to execute in.
1206 # for the inner function to execute in.
1207 def manifest_and_file_collector(changedfileset):
1207 def manifest_and_file_collector(changedfileset):
1208 # This is an information gathering function that gathers
1208 # This is an information gathering function that gathers
1209 # information from each changeset node that goes out as part of
1209 # information from each changeset node that goes out as part of
1210 # the changegroup. The information gathered is a list of which
1210 # the changegroup. The information gathered is a list of which
1211 # manifest nodes are potentially required (the recipient may
1211 # manifest nodes are potentially required (the recipient may
1212 # already have them) and the total list of all files which were
1212 # already have them) and the total list of all files which were
1213 # changed in any changeset in the changegroup.
1213 # changed in any changeset in the changegroup.
1214 #
1214 #
1215 # We also remember the first changenode we saw any manifest
1215 # We also remember the first changenode we saw any manifest
1216 # referenced by so we can later determine which changenode 'owns'
1216 # referenced by so we can later determine which changenode 'owns'
1217 # the manifest.
1217 # the manifest.
1218 def collect_manifests_and_files(clnode):
1218 def collect_manifests_and_files(clnode):
1219 c = cl.read(clnode)
1219 c = cl.read(clnode)
1220 for f in c[3]:
1220 for f in c[3]:
1221 # This is to make sure we only have one instance of each
1221 # This is to make sure we only have one instance of each
1222 # filename string for each filename.
1222 # filename string for each filename.
1223 changedfileset.setdefault(f, f)
1223 changedfileset.setdefault(f, f)
1224 msng_mnfst_set.setdefault(c[0], clnode)
1224 msng_mnfst_set.setdefault(c[0], clnode)
1225 return collect_manifests_and_files
1225 return collect_manifests_and_files
1226
1226
1227 # Figure out which manifest nodes (of the ones we think might be part
1227 # Figure out which manifest nodes (of the ones we think might be part
1228 # of the changegroup) the recipient must know about and remove them
1228 # of the changegroup) the recipient must know about and remove them
1229 # from the changegroup.
1229 # from the changegroup.
1230 def prune_manifests():
1230 def prune_manifests():
1231 has_mnfst_set = {}
1231 has_mnfst_set = {}
1232 for n in msng_mnfst_set:
1232 for n in msng_mnfst_set:
1233 # If a 'missing' manifest thinks it belongs to a changenode
1233 # If a 'missing' manifest thinks it belongs to a changenode
1234 # the recipient is assumed to have, obviously the recipient
1234 # the recipient is assumed to have, obviously the recipient
1235 # must have that manifest.
1235 # must have that manifest.
1236 linknode = cl.node(mnfst.linkrev(n))
1236 linknode = cl.node(mnfst.linkrev(n))
1237 if linknode in has_cl_set:
1237 if linknode in has_cl_set:
1238 has_mnfst_set[n] = 1
1238 has_mnfst_set[n] = 1
1239 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1239 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1240
1240
1241 # Use the information collected in collect_manifests_and_files to say
1241 # Use the information collected in collect_manifests_and_files to say
1242 # which changenode any manifestnode belongs to.
1242 # which changenode any manifestnode belongs to.
1243 def lookup_manifest_link(mnfstnode):
1243 def lookup_manifest_link(mnfstnode):
1244 return msng_mnfst_set[mnfstnode]
1244 return msng_mnfst_set[mnfstnode]
1245
1245
1246 # A function generating function that sets up the initial environment
1246 # A function generating function that sets up the initial environment
1247 # for the inner function.
1247 # for the inner function.
1248 def filenode_collector(changedfiles):
1248 def filenode_collector(changedfiles):
1249 next_rev = [0]
1249 next_rev = [0]
1250 # This gathers information from each manifestnode included in the
1250 # This gathers information from each manifestnode included in the
1251 # changegroup about which filenodes the manifest node references
1251 # changegroup about which filenodes the manifest node references
1252 # so we can include those in the changegroup too.
1252 # so we can include those in the changegroup too.
1253 #
1253 #
1254 # It also remembers which changenode each filenode belongs to. It
1254 # It also remembers which changenode each filenode belongs to. It
1255 # does this by assuming that a filenode belongs to the changenode
1255 # does this by assuming that a filenode belongs to the changenode
1256 # that the first manifest referencing it belongs to.
1256 # that the first manifest referencing it belongs to.
1257 def collect_msng_filenodes(mnfstnode):
1257 def collect_msng_filenodes(mnfstnode):
1258 r = mnfst.rev(mnfstnode)
1258 r = mnfst.rev(mnfstnode)
1259 if r == next_rev[0]:
1259 if r == next_rev[0]:
1260 # If this rev directly follows the last rev we looked at,
1260 # If this rev directly follows the last rev we looked at,
1261 # we only need to read its delta.
1261 # we only need to read its delta.
1262 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1262 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1263 # For each line in the delta
1263 # For each line in the delta
1264 for dline in delta.splitlines():
1264 for dline in delta.splitlines():
1265 # get the filename and filenode for that line
1265 # get the filename and filenode for that line
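# (each manifest delta line looks like "<filename>\0<40 hex chars>",
# possibly with flag characters after the hash, hence the [:40] slice)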
1266 f, fnode = dline.split('\0')
1266 f, fnode = dline.split('\0')
1267 fnode = bin(fnode[:40])
1267 fnode = bin(fnode[:40])
1268 f = changedfiles.get(f, None)
1268 f = changedfiles.get(f, None)
1269 # And if the file is in the list of files we care
1269 # And if the file is in the list of files we care
1270 # about.
1270 # about.
1271 if f is not None:
1271 if f is not None:
1272 # Get the changenode this manifest belongs to
1272 # Get the changenode this manifest belongs to
1273 clnode = msng_mnfst_set[mnfstnode]
1273 clnode = msng_mnfst_set[mnfstnode]
1274 # Create the set of filenodes for the file if
1274 # Create the set of filenodes for the file if
1275 # there isn't one already.
1275 # there isn't one already.
1276 ndset = msng_filenode_set.setdefault(f, {})
1276 ndset = msng_filenode_set.setdefault(f, {})
1277 # And set the filenode's changelog node to the
1277 # And set the filenode's changelog node to the
1278 # manifest's if it hasn't been set already.
1278 # manifest's if it hasn't been set already.
1279 ndset.setdefault(fnode, clnode)
1279 ndset.setdefault(fnode, clnode)
1280 else:
1280 else:
1281 # Otherwise we need a full manifest.
1281 # Otherwise we need a full manifest.
1282 m = mnfst.read(mnfstnode)
1282 m = mnfst.read(mnfstnode)
1283 # For every file we care about.
1283 # For every file we care about.
1284 for f in changedfiles:
1284 for f in changedfiles:
1285 fnode = m.get(f, None)
1285 fnode = m.get(f, None)
1286 # If it's in the manifest
1286 # If it's in the manifest
1287 if fnode is not None:
1287 if fnode is not None:
1288 # See comments above.
1288 # See comments above.
1289 clnode = msng_mnfst_set[mnfstnode]
1289 clnode = msng_mnfst_set[mnfstnode]
1290 ndset = msng_filenode_set.setdefault(f, {})
1290 ndset = msng_filenode_set.setdefault(f, {})
1291 ndset.setdefault(fnode, clnode)
1291 ndset.setdefault(fnode, clnode)
1292 # Remember the revision we hope to see next.
1292 # Remember the revision we hope to see next.
1293 next_rev[0] = r + 1
1293 next_rev[0] = r + 1
1294 return collect_msng_filenodes
1294 return collect_msng_filenodes
1295
1295
1296 # We have a list of filenodes we think we need for a file; let's remove
1296 # We have a list of filenodes we think we need for a file; let's remove
1297 # all those we know the recipient must have.
1297 # all those we know the recipient must have.
1298 def prune_filenodes(f, filerevlog):
1298 def prune_filenodes(f, filerevlog):
1299 msngset = msng_filenode_set[f]
1299 msngset = msng_filenode_set[f]
1300 hasset = {}
1300 hasset = {}
1301 # If a 'missing' filenode thinks it belongs to a changenode we
1301 # If a 'missing' filenode thinks it belongs to a changenode we
1302 # assume the recipient must have, then the recipient must have
1302 # assume the recipient must have, then the recipient must have
1303 # that filenode.
1303 # that filenode.
1304 for n in msngset:
1304 for n in msngset:
1305 clnode = cl.node(filerevlog.linkrev(n))
1305 clnode = cl.node(filerevlog.linkrev(n))
1306 if clnode in has_cl_set:
1306 if clnode in has_cl_set:
1307 hasset[n] = 1
1307 hasset[n] = 1
1308 prune_parents(filerevlog, hasset, msngset)
1308 prune_parents(filerevlog, hasset, msngset)
1309
1309
1310 # A function generating function that sets up a context for the
1310 # A function generating function that sets up a context for the
1311 # inner function.
1311 # inner function.
1312 def lookup_filenode_link_func(fname):
1312 def lookup_filenode_link_func(fname):
1313 msngset = msng_filenode_set[fname]
1313 msngset = msng_filenode_set[fname]
1314 # Lookup the changenode the filenode belongs to.
1314 # Lookup the changenode the filenode belongs to.
1315 def lookup_filenode_link(fnode):
1315 def lookup_filenode_link(fnode):
1316 return msngset[fnode]
1316 return msngset[fnode]
1317 return lookup_filenode_link
1317 return lookup_filenode_link
1318
1318
1319 # Now that we have all these utility functions to help out and
1319 # Now that we have all these utility functions to help out and
1320 # logically divide up the task, generate the group.
1320 # logically divide up the task, generate the group.
1321 def gengroup():
1321 def gengroup():
1322 # The set of changed files starts empty.
1322 # The set of changed files starts empty.
1323 changedfiles = {}
1323 changedfiles = {}
1324 # Create a changenode group generator that will call our functions
1324 # Create a changenode group generator that will call our functions
1325 # back to lookup the owning changenode and collect information.
1325 # back to lookup the owning changenode and collect information.
1326 group = cl.group(msng_cl_lst, identity,
1326 group = cl.group(msng_cl_lst, identity,
1327 manifest_and_file_collector(changedfiles))
1327 manifest_and_file_collector(changedfiles))
1328 for chnk in group:
1328 for chnk in group:
1329 yield chnk
1329 yield chnk
1330
1330
1331 # The list of manifests has been collected by the generator
1331 # The list of manifests has been collected by the generator
1332 # calling our functions back.
1332 # calling our functions back.
1333 prune_manifests()
1333 prune_manifests()
1334 msng_mnfst_lst = msng_mnfst_set.keys()
1334 msng_mnfst_lst = msng_mnfst_set.keys()
1335 # Sort the manifestnodes by revision number.
1335 # Sort the manifestnodes by revision number.
1336 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1336 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1337 # Create a generator for the manifestnodes that calls our lookup
1337 # Create a generator for the manifestnodes that calls our lookup
1338 # and data collection functions back.
1338 # and data collection functions back.
1339 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1339 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1340 filenode_collector(changedfiles))
1340 filenode_collector(changedfiles))
1341 for chnk in group:
1341 for chnk in group:
1342 yield chnk
1342 yield chnk
1343
1343
1344 # These are no longer needed, dereference and toss the memory for
1344 # These are no longer needed, dereference and toss the memory for
1345 # them.
1345 # them.
1346 msng_mnfst_lst = None
1346 msng_mnfst_lst = None
1347 msng_mnfst_set.clear()
1347 msng_mnfst_set.clear()
1348
1348
1349 changedfiles = changedfiles.keys()
1349 changedfiles = changedfiles.keys()
1350 changedfiles.sort()
1350 changedfiles.sort()
1351 # Go through all our files in order sorted by name.
1351 # Go through all our files in order sorted by name.
1352 for fname in changedfiles:
1352 for fname in changedfiles:
1353 filerevlog = self.file(fname)
1353 filerevlog = self.file(fname)
1354 # Toss out the filenodes that the recipient isn't really
1354 # Toss out the filenodes that the recipient isn't really
1355 # missing.
1355 # missing.
1356 if msng_filenode_set.has_key(fname):
1356 if msng_filenode_set.has_key(fname):
1357 prune_filenodes(fname, filerevlog)
1357 prune_filenodes(fname, filerevlog)
1358 msng_filenode_lst = msng_filenode_set[fname].keys()
1358 msng_filenode_lst = msng_filenode_set[fname].keys()
1359 else:
1359 else:
1360 msng_filenode_lst = []
1360 msng_filenode_lst = []
1361 # If any filenodes are left, generate the group for them,
1361 # If any filenodes are left, generate the group for them,
1362 # otherwise don't bother.
1362 # otherwise don't bother.
1363 if len(msng_filenode_lst) > 0:
1363 if len(msng_filenode_lst) > 0:
1364 yield changegroup.genchunk(fname)
1364 yield changegroup.genchunk(fname)
1365 # Sort the filenodes by their revision #
1365 # Sort the filenodes by their revision #
1366 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1366 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1367 # Create a group generator and only pass in a changenode
1367 # Create a group generator and only pass in a changenode
1368 # lookup function as we need to collect no information
1368 # lookup function as we need to collect no information
1369 # from filenodes.
1369 # from filenodes.
1370 group = filerevlog.group(msng_filenode_lst,
1370 group = filerevlog.group(msng_filenode_lst,
1371 lookup_filenode_link_func(fname))
1371 lookup_filenode_link_func(fname))
1372 for chnk in group:
1372 for chnk in group:
1373 yield chnk
1373 yield chnk
1374 if msng_filenode_set.has_key(fname):
1374 if msng_filenode_set.has_key(fname):
1375 # Don't need this anymore, toss it to free memory.
1375 # Don't need this anymore, toss it to free memory.
1376 del msng_filenode_set[fname]
1376 del msng_filenode_set[fname]
1377 # Signal that no more groups are left.
1377 # Signal that no more groups are left.
1378 yield changegroup.closechunk()
1378 yield changegroup.closechunk()
1379
1379
1380 if msng_cl_lst:
1380 if msng_cl_lst:
1381 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1381 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1382
1382
1383 return util.chunkbuffer(gengroup())
1383 return util.chunkbuffer(gengroup())
1384
1384
1385 def changegroup(self, basenodes, source):
1385 def changegroup(self, basenodes, source):
1386 """Generate a changegroup of all nodes that we have that a recipient
1386 """Generate a changegroup of all nodes that we have that a recipient
1387 doesn't.
1387 doesn't.
1388
1388
1389 This is much easier than the previous function as we can assume that
1389 This is much easier than the previous function as we can assume that
1390 the recipient has any changenode we aren't sending them."""
1390 the recipient has any changenode we aren't sending them."""
1391
1391
1392 self.hook('preoutgoing', throw=True, source=source)
1392 self.hook('preoutgoing', throw=True, source=source)
1393
1393
1394 cl = self.changelog
1394 cl = self.changelog
1395 nodes = cl.nodesbetween(basenodes, None)[0]
1395 nodes = cl.nodesbetween(basenodes, None)[0]
1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1396 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1397
1397
1398 def identity(x):
1398 def identity(x):
1399 return x
1399 return x
1400
1400
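# Yield the nodes of the given revlog, in revision order, whose linkrev
# points at one of the changesets being sent.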
1401 def gennodelst(revlog):
1401 def gennodelst(revlog):
1402 for r in xrange(0, revlog.count()):
1402 for r in xrange(0, revlog.count()):
1403 n = revlog.node(r)
1403 n = revlog.node(r)
1404 if revlog.linkrev(n) in revset:
1404 if revlog.linkrev(n) in revset:
1405 yield n
1405 yield n
1406
1406
1407 def changed_file_collector(changedfileset):
1407 def changed_file_collector(changedfileset):
1408 def collect_changed_files(clnode):
1408 def collect_changed_files(clnode):
1409 c = cl.read(clnode)
1409 c = cl.read(clnode)
1410 for fname in c[3]:
1410 for fname in c[3]:
1411 changedfileset[fname] = 1
1411 changedfileset[fname] = 1
1412 return collect_changed_files
1412 return collect_changed_files
1413
1413
1414 def lookuprevlink_func(revlog):
1414 def lookuprevlink_func(revlog):
1415 def lookuprevlink(n):
1415 def lookuprevlink(n):
1416 return cl.node(revlog.linkrev(n))
1416 return cl.node(revlog.linkrev(n))
1417 return lookuprevlink
1417 return lookuprevlink
1418
1418
1419 def gengroup():
1419 def gengroup():
1420 # construct a list of all changed files
1420 # construct a list of all changed files
1421 changedfiles = {}
1421 changedfiles = {}
1422
1422
1423 for chnk in cl.group(nodes, identity,
1423 for chnk in cl.group(nodes, identity,
1424 changed_file_collector(changedfiles)):
1424 changed_file_collector(changedfiles)):
1425 yield chnk
1425 yield chnk
1426 changedfiles = changedfiles.keys()
1426 changedfiles = changedfiles.keys()
1427 changedfiles.sort()
1427 changedfiles.sort()
1428
1428
1429 mnfst = self.manifest
1429 mnfst = self.manifest
1430 nodeiter = gennodelst(mnfst)
1430 nodeiter = gennodelst(mnfst)
1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1431 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1432 yield chnk
1432 yield chnk
1433
1433
1434 for fname in changedfiles:
1434 for fname in changedfiles:
1435 filerevlog = self.file(fname)
1435 filerevlog = self.file(fname)
1436 nodeiter = gennodelst(filerevlog)
1436 nodeiter = gennodelst(filerevlog)
1437 nodeiter = list(nodeiter)
1437 nodeiter = list(nodeiter)
1438 if nodeiter:
1438 if nodeiter:
1439 yield changegroup.genchunk(fname)
1439 yield changegroup.genchunk(fname)
1440 lookup = lookuprevlink_func(filerevlog)
1440 lookup = lookuprevlink_func(filerevlog)
1441 for chnk in filerevlog.group(nodeiter, lookup):
1441 for chnk in filerevlog.group(nodeiter, lookup):
1442 yield chnk
1442 yield chnk
1443
1443
1444 yield changegroup.closechunk()
1444 yield changegroup.closechunk()
1445
1445
1446 if nodes:
1446 if nodes:
1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1447 self.hook('outgoing', node=hex(nodes[0]), source=source)
1448
1448
1449 return util.chunkbuffer(gengroup())
1449 return util.chunkbuffer(gengroup())
1450
1450
1451 def addchangegroup(self, source):
1451 def addchangegroup(self, source):
1452 """add changegroup to repo.
1452 """add changegroup to repo.
1453 returns number of heads modified or added + 1."""
1453 returns number of heads modified or added + 1."""
1454
1454
1455 def csmap(x):
1455 def csmap(x):
1456 self.ui.debug(_("add changeset %s\n") % short(x))
1456 self.ui.debug(_("add changeset %s\n") % short(x))
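# each incoming changeset links to itself, so its linkrev is its own
# (future) revision number, i.e. the current changelog length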
1457 return cl.count()
1457 return cl.count()
1458
1458
1459 def revmap(x):
1459 def revmap(x):
1460 return cl.rev(x)
1460 return cl.rev(x)
1461
1461
1462 if not source:
1462 if not source:
1463 return 0
1463 return 0
1464
1464
1465 self.hook('prechangegroup', throw=True)
1465 self.hook('prechangegroup', throw=True)
1466
1466
1467 changesets = files = revisions = 0
1467 changesets = files = revisions = 0
1468
1468
1469 tr = self.transaction()
1469 tr = self.transaction()
1470
1470
1471 # write changelog and manifest data to temp files so
1471 # write changelog and manifest data to temp files so
1472 # concurrent readers will not see inconsistent view
1472 # concurrent readers will not see inconsistent view
1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1473 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1474
1474
1475 oldheads = len(cl.heads())
1475 oldheads = len(cl.heads())
1476
1476
1477 # pull off the changeset group
1477 # pull off the changeset group
1478 self.ui.status(_("adding changesets\n"))
1478 self.ui.status(_("adding changesets\n"))
1479 co = cl.tip()
1479 co = cl.tip()
1480 chunkiter = changegroup.chunkiter(source)
1480 chunkiter = changegroup.chunkiter(source)
1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1481 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1482 cnr, cor = map(cl.rev, (cn, co))
1482 cnr, cor = map(cl.rev, (cn, co))
1483 if cn == nullid:
1483 if cn == nullid:
1484 cnr = cor
1484 cnr = cor
1485 changesets = cnr - cor
1485 changesets = cnr - cor
1486
1486
1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1487 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1488
1488
1489 # pull off the manifest group
1489 # pull off the manifest group
1490 self.ui.status(_("adding manifests\n"))
1490 self.ui.status(_("adding manifests\n"))
1491 mm = mf.tip()
1491 mm = mf.tip()
1492 chunkiter = changegroup.chunkiter(source)
1492 chunkiter = changegroup.chunkiter(source)
1493 mo = mf.addgroup(chunkiter, revmap, tr)
1493 mo = mf.addgroup(chunkiter, revmap, tr)
1494
1494
1495 # process the files
1495 # process the files
1496 self.ui.status(_("adding file changes\n"))
1496 self.ui.status(_("adding file changes\n"))
1497 while 1:
1497 while 1:
1498 f = changegroup.getchunk(source)
1498 f = changegroup.getchunk(source)
1499 if not f:
1499 if not f:
1500 break
1500 break
1501 self.ui.debug(_("adding %s revisions\n") % f)
1501 self.ui.debug(_("adding %s revisions\n") % f)
1502 fl = self.file(f)
1502 fl = self.file(f)
1503 o = fl.count()
1503 o = fl.count()
1504 chunkiter = changegroup.chunkiter(source)
1504 chunkiter = changegroup.chunkiter(source)
1505 n = fl.addgroup(chunkiter, revmap, tr)
1505 n = fl.addgroup(chunkiter, revmap, tr)
1506 revisions += fl.count() - o
1506 revisions += fl.count() - o
1507 files += 1
1507 files += 1
1508
1508
1509 # write order here is important so concurrent readers will see
1509 # write order here is important so concurrent readers will see
1510 # consistent view of repo
1510 # consistent view of repo
1511 mf.writedata()
1511 mf.writedata()
1512 cl.writedata()
1512 cl.writedata()
1513
1513
1514 # make changelog and manifest see real files again
1514 # make changelog and manifest see real files again
1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1515 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1516 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1517 self.changelog.checkinlinesize(tr)
1517 self.changelog.checkinlinesize(tr)
1518 self.manifest.checkinlinesize(tr)
1518 self.manifest.checkinlinesize(tr)
1519
1519
1520 newheads = len(self.changelog.heads())
1520 newheads = len(self.changelog.heads())
1521 heads = ""
1521 heads = ""
1522 if oldheads and newheads > oldheads:
1522 if oldheads and newheads > oldheads:
1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1523 heads = _(" (+%d heads)") % (newheads - oldheads)
1524
1524
1525 self.ui.status(_("added %d changesets"
1525 self.ui.status(_("added %d changesets"
1526 " with %d changes to %d files%s\n")
1526 " with %d changes to %d files%s\n")
1527 % (changesets, revisions, files, heads))
1527 % (changesets, revisions, files, heads))
1528
1528
1529 self.hook('pretxnchangegroup', throw=True,
1529 self.hook('pretxnchangegroup', throw=True,
1530 node=hex(self.changelog.node(cor+1)))
1530 node=hex(self.changelog.node(cor+1)))
1531
1531
1532 tr.close()
1532 tr.close()
1533
1533
1534 if changesets > 0:
1534 if changesets > 0:
1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1535 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1536
1536
1537 for i in range(cor + 1, cnr + 1):
1537 for i in range(cor + 1, cnr + 1):
1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1538 self.hook("incoming", node=hex(self.changelog.node(i)))
1539
1539
1540 return newheads - oldheads + 1
1540 return newheads - oldheads + 1
1541
1541
1542 def update(self, node, allow=False, force=False, choose=None,
1542 def update(self, node, allow=False, force=False, choose=None,
1543 moddirstate=True, forcemerge=False, wlock=None):
1543 moddirstate=True, forcemerge=False, wlock=None):
1544 pl = self.dirstate.parents()
1544 pl = self.dirstate.parents()
1545 if not force and pl[1] != nullid:
1545 if not force and pl[1] != nullid:
1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1546 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1547 return 1
1547 return 1
1548
1548
1549 err = False
1549 err = False
1550
1550
1551 p1, p2 = pl[0], node
1551 p1, p2 = pl[0], node
1552 pa = self.changelog.ancestor(p1, p2)
1552 pa = self.changelog.ancestor(p1, p2)
1553 m1n = self.changelog.read(p1)[0]
1553 m1n = self.changelog.read(p1)[0]
1554 m2n = self.changelog.read(p2)[0]
1554 m2n = self.changelog.read(p2)[0]
1555 man = self.manifest.ancestor(m1n, m2n)
1555 man = self.manifest.ancestor(m1n, m2n)
1556 m1 = self.manifest.read(m1n)
1556 m1 = self.manifest.read(m1n)
1557 mf1 = self.manifest.readflags(m1n)
1557 mf1 = self.manifest.readflags(m1n)
1558 m2 = self.manifest.read(m2n).copy()
1558 m2 = self.manifest.read(m2n).copy()
1559 mf2 = self.manifest.readflags(m2n)
1559 mf2 = self.manifest.readflags(m2n)
1560 ma = self.manifest.read(man)
1560 ma = self.manifest.read(man)
1561 mfa = self.manifest.readflags(man)
1561 mfa = self.manifest.readflags(man)
1562
1562
1563 modified, added, removed, deleted, unknown = self.changes()
1563 modified, added, removed, deleted, unknown = self.changes()
1564
1564
1565 # is this a jump, or a merge? i.e. is there a linear path
1565 # is this a jump, or a merge? i.e. is there a linear path
1566 # from p1 to p2?
1566 # from p1 to p2?
1567 linear_path = (pa == p1 or pa == p2)
1567 linear_path = (pa == p1 or pa == p2)
1568
1568
1569 if allow and linear_path:
1569 if allow and linear_path:
1570 raise util.Abort(_("there is nothing to merge, "
1570 raise util.Abort(_("there is nothing to merge, "
1571 "just use 'hg update'"))
1571 "just use 'hg update'"))
1572 if allow and not forcemerge:
1572 if allow and not forcemerge:
1573 if modified or added or removed:
1573 if modified or added or removed:
1574 raise util.Abort(_("outstanding uncommitted changes"))
1574 raise util.Abort(_("outstanding uncommitted changes"))
1575 if not forcemerge and not force:
1575 if not forcemerge and not force:
1576 for f in unknown:
1576 for f in unknown:
1577 if f in m2:
1577 if f in m2:
1578 t1 = self.wread(f)
1578 t1 = self.wread(f)
1579 t2 = self.file(f).read(m2[f])
1579 t2 = self.file(f).read(m2[f])
1580 if cmp(t1, t2) != 0:
1580 if cmp(t1, t2) != 0:
1581 raise util.Abort(_("'%s' already exists in the working"
1581 raise util.Abort(_("'%s' already exists in the working"
1582 " dir and differs from remote") % f)
1582 " dir and differs from remote") % f)
1583
1583
1584 # resolve the manifest to determine which files
1584 # resolve the manifest to determine which files
1585 # we care about merging
1585 # we care about merging
1586 self.ui.note(_("resolving manifests\n"))
1586 self.ui.note(_("resolving manifests\n"))
1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1587 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1588 (force, allow, moddirstate, linear_path))
1588 (force, allow, moddirstate, linear_path))
1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1589 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1590 (short(man), short(m1n), short(m2n)))
1590 (short(man), short(m1n), short(m2n)))
1591
1591
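# merge:  files needing a three-way merge, mapped to
#         (local filenode, remote filenode, merged exec flag)
# get:    files to take verbatim from the target revision, mapped to
#         their remote filenode
# remove: files to delete from the working directory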
1592 merge = {}
1592 merge = {}
1593 get = {}
1593 get = {}
1594 remove = []
1594 remove = []
1595
1595
1596 # construct a working dir manifest
1596 # construct a working dir manifest
1597 mw = m1.copy()
1597 mw = m1.copy()
1598 mfw = mf1.copy()
1598 mfw = mf1.copy()
1599 umap = dict.fromkeys(unknown)
1599 umap = dict.fromkeys(unknown)
1600
1600
1601 for f in added + modified + unknown:
1601 for f in added + modified + unknown:
1602 mw[f] = ""
1602 mw[f] = ""
1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1603 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1604
1604
1605 if moddirstate and not wlock:
1605 if moddirstate and not wlock:
1606 wlock = self.wlock()
1606 wlock = self.wlock()
1607
1607
1608 for f in deleted + removed:
1608 for f in deleted + removed:
1609 if f in mw:
1609 if f in mw:
1610 del mw[f]
1610 del mw[f]
1611
1611
1612 # If we're jumping between revisions (as opposed to merging),
1612 # If we're jumping between revisions (as opposed to merging),
1613 # and if neither the working directory nor the target rev has
1613 # and if neither the working directory nor the target rev has
1614 # the file, then we need to remove it from the dirstate, to
1614 # the file, then we need to remove it from the dirstate, to
1615 # prevent the dirstate from listing the file when it is no
1615 # prevent the dirstate from listing the file when it is no
1616 # longer in the manifest.
1616 # longer in the manifest.
1617 if moddirstate and linear_path and f not in m2:
1617 if moddirstate and linear_path and f not in m2:
1618 self.dirstate.forget((f,))
1618 self.dirstate.forget((f,))
1619
1619
1620 # Compare manifests
1620 # Compare manifests
1621 for f, n in mw.iteritems():
1621 for f, n in mw.iteritems():
1622 if choose and not choose(f):
1622 if choose and not choose(f):
1623 continue
1623 continue
1624 if f in m2:
1624 if f in m2:
1625 s = 0
1625 s = 0
1626
1626
1627 # is the wfile new since m1, and match m2?
1627 # is the wfile new since m1, and match m2?
1628 if f not in m1:
1628 if f not in m1:
1629 t1 = self.wread(f)
1629 t1 = self.wread(f)
1630 t2 = self.file(f).read(m2[f])
1630 t2 = self.file(f).read(m2[f])
1631 if cmp(t1, t2) == 0:
1631 if cmp(t1, t2) == 0:
1632 n = m2[f]
1632 n = m2[f]
1633 del t1, t2
1633 del t1, t2
1634
1634
1635 # are files different?
1635 # are files different?
1636 if n != m2[f]:
1636 if n != m2[f]:
1637 a = ma.get(f, nullid)
1637 a = ma.get(f, nullid)
1638 # are both different from the ancestor?
1638 # are both different from the ancestor?
1639 if n != a and m2[f] != a:
1639 if n != a and m2[f] != a:
1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1640 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1641 # merge executable bits
1641 # merge executable bits
1642 # "if we changed or they changed, change in merge"
1642 # "if we changed or they changed, change in merge"
1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1643 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1644 mode = ((a^b) | (a^c)) ^ a
1644 mode = ((a^b) | (a^c)) ^ a
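# (the expression keeps the ancestor's exec bit unless either side
#  changed it; e.g. a=0, b=1, c=0 gives ((0^1)|(0^0))^0 = 1)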
1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1645 merge[f] = (m1.get(f, nullid), m2[f], mode)
1646 s = 1
1646 s = 1
1647 # are we clobbering?
1647 # are we clobbering?
1648 # is remote's version newer?
1648 # is remote's version newer?
1649 # or are we going back in time?
1649 # or are we going back in time?
1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1650 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1651 self.ui.debug(_(" remote %s is newer, get\n") % f)
1652 get[f] = m2[f]
1652 get[f] = m2[f]
1653 s = 1
1653 s = 1
1654 elif f in umap or f in added:
1654 elif f in umap or f in added:
1655 # this unknown file is the same as the checkout
1655 # this unknown file is the same as the checkout
1656 # we need to reset the dirstate if the file was added
1656 # we need to reset the dirstate if the file was added
1657 get[f] = m2[f]
1657 get[f] = m2[f]
1658
1658
1659 if not s and mfw[f] != mf2[f]:
1659 if not s and mfw[f] != mf2[f]:
1660 if force:
1660 if force:
1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1661 self.ui.debug(_(" updating permissions for %s\n") % f)
1662 util.set_exec(self.wjoin(f), mf2[f])
1662 util.set_exec(self.wjoin(f), mf2[f])
1663 else:
1663 else:
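# same three-way exec-bit merge as above: a bit changed by either
# side relative to the ancestor wins over the ancestor's value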
1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1664 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1665 mode = ((a^b) | (a^c)) ^ a
1665 mode = ((a^b) | (a^c)) ^ a
1666 if mode != b:
1666 if mode != b:
1667 self.ui.debug(_(" updating permissions for %s\n")
1667 self.ui.debug(_(" updating permissions for %s\n")
1668 % f)
1668 % f)
1669 util.set_exec(self.wjoin(f), mode)
1669 util.set_exec(self.wjoin(f), mode)
1670 del m2[f]
1670 del m2[f]
1671 elif f in ma:
1671 elif f in ma:
1672 if n != ma[f]:
1672 if n != ma[f]:
1673 r = _("d")
1673 r = _("d")
1674 if not force and (linear_path or allow):
1674 if not force and (linear_path or allow):
1675 r = self.ui.prompt(
1675 r = self.ui.prompt(
1676 (_(" local changed %s which remote deleted\n") % f) +
1676 (_(" local changed %s which remote deleted\n") % f) +
1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1677 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1678 if r == _("d"):
1678 if r == _("d"):
1679 remove.append(f)
1679 remove.append(f)
1680 else:
1680 else:
1681 self.ui.debug(_("other deleted %s\n") % f)
1681 self.ui.debug(_("other deleted %s\n") % f)
1682 remove.append(f) # other deleted it
1682 remove.append(f) # other deleted it
1683 else:
1683 else:
1684 # file is created on branch or in working directory
1684 # file is created on branch or in working directory
1685 if force and f not in umap:
1685 if force and f not in umap:
1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1686 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1687 remove.append(f)
1687 remove.append(f)
1688 elif n == m1.get(f, nullid): # same as parent
1688 elif n == m1.get(f, nullid): # same as parent
1689 if p2 == pa: # going backwards?
1689 if p2 == pa: # going backwards?
1690 self.ui.debug(_("remote deleted %s\n") % f)
1690 self.ui.debug(_("remote deleted %s\n") % f)
1691 remove.append(f)
1691 remove.append(f)
1692 else:
1692 else:
1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1693 self.ui.debug(_("local modified %s, keeping\n") % f)
1694 else:
1694 else:
1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1695 self.ui.debug(_("working dir created %s, keeping\n") % f)
1696
1696
1697 for f, n in m2.iteritems():
1697 for f, n in m2.iteritems():
1698 if choose and not choose(f):
1698 if choose and not choose(f):
1699 continue
1699 continue
1700 if f[0] == "/":
1700 if f[0] == "/":
1701 continue
1701 continue
1702 if f in ma and n != ma[f]:
1702 if f in ma and n != ma[f]:
1703 r = _("k")
1703 r = _("k")
1704 if not force and (linear_path or allow):
1704 if not force and (linear_path or allow):
1705 r = self.ui.prompt(
1705 r = self.ui.prompt(
1706 (_("remote changed %s which local deleted\n") % f) +
1706 (_("remote changed %s which local deleted\n") % f) +
1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1707 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1708 if r == _("k"):
1708 if r == _("k"):
1709 get[f] = n
1709 get[f] = n
1710 elif f not in ma:
1710 elif f not in ma:
1711 self.ui.debug(_("remote created %s\n") % f)
1711 self.ui.debug(_("remote created %s\n") % f)
1712 get[f] = n
1712 get[f] = n
1713 else:
1713 else:
1714 if force or p2 == pa: # going backwards?
1714 if force or p2 == pa: # going backwards?
1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1715 self.ui.debug(_("local deleted %s, recreating\n") % f)
1716 get[f] = n
1716 get[f] = n
1717 else:
1717 else:
1718 self.ui.debug(_("local deleted %s\n") % f)
1718 self.ui.debug(_("local deleted %s\n") % f)
1719
1719
1720 del mw, m1, m2, ma
1720 del mw, m1, m2, ma
1721
1721
1722 if force:
1722 if force:
1723 for f in merge:
1723 for f in merge:
1724 get[f] = merge[f][1]
1724 get[f] = merge[f][1]
1725 merge = {}
1725 merge = {}
1726
1726
1727 if linear_path or force:
1727 if linear_path or force:
1728 # we don't need to do any magic, just jump to the new rev
1728 # we don't need to do any magic, just jump to the new rev
1729 branch_merge = False
1729 branch_merge = False
1730 p1, p2 = p2, nullid
1730 p1, p2 = p2, nullid
1731 else:
1731 else:
1732 if not allow:
1732 if not allow:
1733 self.ui.status(_("this update spans a branch"
1733 self.ui.status(_("this update spans a branch"
1734 " affecting the following files:\n"))
1734 " affecting the following files:\n"))
1735 fl = merge.keys() + get.keys()
1735 fl = merge.keys() + get.keys()
1736 fl.sort()
1736 fl.sort()
1737 for f in fl:
1737 for f in fl:
1738 cf = ""
1738 cf = ""
1739 if f in merge:
1739 if f in merge:
1740 cf = _(" (resolve)")
1740 cf = _(" (resolve)")
1741 self.ui.status(" %s%s\n" % (f, cf))
1741 self.ui.status(" %s%s\n" % (f, cf))
1742 self.ui.warn(_("aborting update spanning branches!\n"))
1742 self.ui.warn(_("aborting update spanning branches!\n"))
1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1743 self.ui.status(_("(use 'hg merge' to merge across branches"
1744 " or 'hg update -C' to lose changes)\n"))
1744 " or 'hg update -C' to lose changes)\n"))
1745 return 1
1745 return 1
1746 branch_merge = True
1746 branch_merge = True
1747
1747
1748 # get the files we don't need to change
1748 # get the files we don't need to change
1749 files = get.keys()
1749 files = get.keys()
1750 files.sort()
1750 files.sort()
1751 for f in files:
1751 for f in files:
1752 if f[0] == "/":
1752 if f[0] == "/":
1753 continue
1753 continue
1754 self.ui.note(_("getting %s\n") % f)
1754 self.ui.note(_("getting %s\n") % f)
1755 t = self.file(f).read(get[f])
1755 t = self.file(f).read(get[f])
1756 self.wwrite(f, t)
1756 self.wwrite(f, t)
1757 util.set_exec(self.wjoin(f), mf2[f])
1757 util.set_exec(self.wjoin(f), mf2[f])
1758 if moddirstate:
1758 if moddirstate:
1759 if branch_merge:
1759 if branch_merge:
1760 self.dirstate.update([f], 'n', st_mtime=-1)
1760 self.dirstate.update([f], 'n', st_mtime=-1)
1761 else:
1761 else:
1762 self.dirstate.update([f], 'n')
1762 self.dirstate.update([f], 'n')
1763
1763
1764 # merge the tricky bits
1764 # merge the tricky bits
1765 failedmerge = []
1765 failedmerge = []
1766 files = merge.keys()
1766 files = merge.keys()
1767 files.sort()
1767 files.sort()
1768 xp1 = hex(p1)
1768 xp1 = hex(p1)
1769 xp2 = hex(p2)
1769 xp2 = hex(p2)
1770 for f in files:
1770 for f in files:
1771 self.ui.status(_("merging %s\n") % f)
1771 self.ui.status(_("merging %s\n") % f)
1772 my, other, flag = merge[f]
1772 my, other, flag = merge[f]
1773 ret = self.merge3(f, my, other, xp1, xp2)
1773 ret = self.merge3(f, my, other, xp1, xp2)
1774 if ret:
1774 if ret:
1775 err = True
1775 err = True
1776 failedmerge.append(f)
1776 failedmerge.append(f)
1777 util.set_exec(self.wjoin(f), flag)
1777 util.set_exec(self.wjoin(f), flag)
1778 if moddirstate:
1778 if moddirstate:
1779 if branch_merge:
1779 if branch_merge:
1780 # We've done a branch merge, mark this file as merged
1780 # We've done a branch merge, mark this file as merged
1781 # so that we properly record the merger later
1781 # so that we properly record the merger later
1782 self.dirstate.update([f], 'm')
1782 self.dirstate.update([f], 'm')
1783 else:
1783 else:
1784 # We've update-merged a locally modified file, so
1784 # We've update-merged a locally modified file, so
1785 # we set the dirstate to emulate a normal checkout
1785 # we set the dirstate to emulate a normal checkout
1786 # of that file some time in the past. Thus our
1786 # of that file some time in the past. Thus our
1787 # merge will appear as a normal local file
1787 # merge will appear as a normal local file
1788 # modification.
1788 # modification.
1789 f_len = len(self.file(f).read(other))
1789 f_len = len(self.file(f).read(other))
1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1790 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1791
1791
1792 remove.sort()
1792 remove.sort()
1793 for f in remove:
1793 for f in remove:
1794 self.ui.note(_("removing %s\n") % f)
1794 self.ui.note(_("removing %s\n") % f)
1795 util.audit_path(f)
1795 util.audit_path(f)
1796 try:
1796 try:
1797 util.unlink(self.wjoin(f))
1797 util.unlink(self.wjoin(f))
1798 except OSError, inst:
1798 except OSError, inst:
1799 if inst.errno != errno.ENOENT:
1799 if inst.errno != errno.ENOENT:
1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1800 self.ui.warn(_("update failed to remove %s: %s!\n") %
1801 (f, inst.strerror))
1801 (f, inst.strerror))
1802 if moddirstate:
1802 if moddirstate:
1803 if branch_merge:
1803 if branch_merge:
1804 self.dirstate.update(remove, 'r')
1804 self.dirstate.update(remove, 'r')
1805 else:
1805 else:
1806 self.dirstate.forget(remove)
1806 self.dirstate.forget(remove)
1807
1807
1808 if moddirstate:
1808 if moddirstate:
1809 self.dirstate.setparents(p1, p2)
1809 self.dirstate.setparents(p1, p2)
1810
1810
1811 stat = ((len(get), _("updated")),
1811 stat = ((len(get), _("updated")),
1812 (len(merge) - len(failedmerge), _("merged")),
1812 (len(merge) - len(failedmerge), _("merged")),
1813 (len(remove), _("removed")),
1813 (len(remove), _("removed")),
1814 (len(failedmerge), _("unresolved")))
1814 (len(failedmerge), _("unresolved")))
1815 note = ", ".join([_("%d files %s") % s for s in stat])
1815 note = ", ".join([_("%d files %s") % s for s in stat])
1816 self.ui.note("%s\n" % note)
1816 self.ui.note("%s\n" % note)
1817 if moddirstate and branch_merge:
1817 if moddirstate and branch_merge:
1818 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1818 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1819
1819
1820 return err
1820 return err
1821
1821
1822 def merge3(self, fn, my, other, p1, p2):
1822 def merge3(self, fn, my, other, p1, p2):
1823 """perform a 3-way merge in the working directory"""
1823 """perform a 3-way merge in the working directory"""
1824
1824
1825 def temp(prefix, node):
1825 def temp(prefix, node):
1826 pre = "%s~%s." % (os.path.basename(fn), prefix)
1826 pre = "%s~%s." % (os.path.basename(fn), prefix)
1827 (fd, name) = tempfile.mkstemp("", pre)
1827 (fd, name) = tempfile.mkstemp(prefix=pre)
1828 f = os.fdopen(fd, "wb")
1828 f = os.fdopen(fd, "wb")
1829 self.wwrite(fn, fl.read(node), f)
1829 self.wwrite(fn, fl.read(node), f)
1830 f.close()
1830 f.close()
1831 return name
1831 return name
1832
1832
1833 fl = self.file(fn)
1833 fl = self.file(fn)
1834 base = fl.ancestor(my, other)
1834 base = fl.ancestor(my, other)
1835 a = self.wjoin(fn)
1835 a = self.wjoin(fn)
1836 b = temp("base", base)
1836 b = temp("base", base)
1837 c = temp("other", other)
1837 c = temp("other", other)
1838
1838
1839 self.ui.note(_("resolving %s\n") % fn)
1839 self.ui.note(_("resolving %s\n") % fn)
1840 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1840 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1841 (fn, short(my), short(other), short(base)))
1841 (fn, short(my), short(other), short(base)))
1842
1842
1843 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1843 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1844 or "hgmerge")
1844 or "hgmerge")
1845 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1845 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1846 environ={'HG_FILE': fn,
1846 environ={'HG_FILE': fn,
1847 'HG_MY_NODE': p1,
1847 'HG_MY_NODE': p1,
1848 'HG_OTHER_NODE': p2,
1848 'HG_OTHER_NODE': p2,
1849 'HG_FILE_MY_NODE': hex(my),
1849 'HG_FILE_MY_NODE': hex(my),
1850 'HG_FILE_OTHER_NODE': hex(other),
1850 'HG_FILE_OTHER_NODE': hex(other),
1851 'HG_FILE_BASE_NODE': hex(base)})
1851 'HG_FILE_BASE_NODE': hex(base)})
1852 if r:
1852 if r:
1853 self.ui.warn(_("merging %s failed!\n") % fn)
1853 self.ui.warn(_("merging %s failed!\n") % fn)
1854
1854
1855 os.unlink(b)
1855 os.unlink(b)
1856 os.unlink(c)
1856 os.unlink(c)
1857 return r
1857 return r
1858
1858
1859 def verify(self):
1859 def verify(self):
1860 filelinkrevs = {}
1860 filelinkrevs = {}
1861 filenodes = {}
1861 filenodes = {}
1862 changesets = revisions = files = 0
1862 changesets = revisions = files = 0
1863 errors = [0]
1863 errors = [0]
1864 warnings = [0]
1864 warnings = [0]
1865 neededmanifests = {}
1865 neededmanifests = {}
1866
1866
1867 def err(msg):
1867 def err(msg):
1868 self.ui.warn(msg + "\n")
1868 self.ui.warn(msg + "\n")
1869 errors[0] += 1
1869 errors[0] += 1
1870
1870
1871 def warn(msg):
1871 def warn(msg):
1872 self.ui.warn(msg + "\n")
1872 self.ui.warn(msg + "\n")
1873 warnings[0] += 1
1873 warnings[0] += 1
1874
1874
1875 def checksize(obj, name):
1875 def checksize(obj, name):
1876 d = obj.checksize()
1876 d = obj.checksize()
1877 if d[0]:
1877 if d[0]:
1878 err(_("%s data length off by %d bytes") % (name, d[0]))
1878 err(_("%s data length off by %d bytes") % (name, d[0]))
1879 if d[1]:
1879 if d[1]:
1880 err(_("%s index contains %d extra bytes") % (name, d[1]))
1880 err(_("%s index contains %d extra bytes") % (name, d[1]))
1881
1881
1882 def checkversion(obj, name):
1882 def checkversion(obj, name):
1883 if obj.version != revlog.REVLOGV0:
1883 if obj.version != revlog.REVLOGV0:
1884 if not revlogv1:
1884 if not revlogv1:
1885 warn(_("warning: `%s' uses revlog format 1") % name)
1885 warn(_("warning: `%s' uses revlog format 1") % name)
1886 elif revlogv1:
1886 elif revlogv1:
1887 warn(_("warning: `%s' uses revlog format 0") % name)
1887 warn(_("warning: `%s' uses revlog format 0") % name)
1888
1888
1889 revlogv1 = self.revlogversion != revlog.REVLOGV0
1889 revlogv1 = self.revlogversion != revlog.REVLOGV0
1890 if self.ui.verbose or revlogv1 != self.revlogv1:
1890 if self.ui.verbose or revlogv1 != self.revlogv1:
1891 self.ui.status(_("repository uses revlog format %d\n") %
1891 self.ui.status(_("repository uses revlog format %d\n") %
1892 (revlogv1 and 1 or 0))
1892 (revlogv1 and 1 or 0))
1893
1893
1894 seen = {}
1894 seen = {}
1895 self.ui.status(_("checking changesets\n"))
1895 self.ui.status(_("checking changesets\n"))
1896 checksize(self.changelog, "changelog")
1896 checksize(self.changelog, "changelog")
1897
1897
1898 for i in range(self.changelog.count()):
1898 for i in range(self.changelog.count()):
1899 changesets += 1
1899 changesets += 1
1900 n = self.changelog.node(i)
1900 n = self.changelog.node(i)
1901 l = self.changelog.linkrev(n)
1901 l = self.changelog.linkrev(n)
1902 if l != i:
1902 if l != i:
1903 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1903 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1904 if n in seen:
1904 if n in seen:
1905 err(_("duplicate changeset at revision %d") % i)
1905 err(_("duplicate changeset at revision %d") % i)
1906 seen[n] = 1
1906 seen[n] = 1
1907
1907
1908 for p in self.changelog.parents(n):
1908 for p in self.changelog.parents(n):
1909 if p not in self.changelog.nodemap:
1909 if p not in self.changelog.nodemap:
1910 err(_("changeset %s has unknown parent %s") %
1910 err(_("changeset %s has unknown parent %s") %
1911 (short(n), short(p)))
1911 (short(n), short(p)))
1912 try:
1912 try:
1913 changes = self.changelog.read(n)
1913 changes = self.changelog.read(n)
1914 except KeyboardInterrupt:
1914 except KeyboardInterrupt:
1915 self.ui.warn(_("interrupted"))
1915 self.ui.warn(_("interrupted"))
1916 raise
1916 raise
1917 except Exception, inst:
1917 except Exception, inst:
1918 err(_("unpacking changeset %s: %s") % (short(n), inst))
1918 err(_("unpacking changeset %s: %s") % (short(n), inst))
1919 continue
1919 continue
1920
1920
1921 neededmanifests[changes[0]] = n
1921 neededmanifests[changes[0]] = n
1922
1922
1923 for f in changes[3]:
1923 for f in changes[3]:
1924 filelinkrevs.setdefault(f, []).append(i)
1924 filelinkrevs.setdefault(f, []).append(i)
1925
1925
1926 seen = {}
1926 seen = {}
1927 self.ui.status(_("checking manifests\n"))
1927 self.ui.status(_("checking manifests\n"))
1928 checkversion(self.manifest, "manifest")
1928 checkversion(self.manifest, "manifest")
1929 checksize(self.manifest, "manifest")
1929 checksize(self.manifest, "manifest")
1930
1930
1931 for i in range(self.manifest.count()):
1931 for i in range(self.manifest.count()):
1932 n = self.manifest.node(i)
1932 n = self.manifest.node(i)
1933 l = self.manifest.linkrev(n)
1933 l = self.manifest.linkrev(n)
1934
1934
1935 if l < 0 or l >= self.changelog.count():
1935 if l < 0 or l >= self.changelog.count():
1936 err(_("bad manifest link (%d) at revision %d") % (l, i))
1936 err(_("bad manifest link (%d) at revision %d") % (l, i))
1937
1937
1938 if n in neededmanifests:
1938 if n in neededmanifests:
1939 del neededmanifests[n]
1939 del neededmanifests[n]
1940
1940
1941 if n in seen:
1941 if n in seen:
1942 err(_("duplicate manifest at revision %d") % i)
1942 err(_("duplicate manifest at revision %d") % i)
1943
1943
1944 seen[n] = 1
1944 seen[n] = 1
1945
1945
1946 for p in self.manifest.parents(n):
1946 for p in self.manifest.parents(n):
1947 if p not in self.manifest.nodemap:
1947 if p not in self.manifest.nodemap:
1948 err(_("manifest %s has unknown parent %s") %
1948 err(_("manifest %s has unknown parent %s") %
1949 (short(n), short(p)))
1949 (short(n), short(p)))
1950
1950
1951 try:
1951 try:
1952 delta = mdiff.patchtext(self.manifest.delta(n))
1952 delta = mdiff.patchtext(self.manifest.delta(n))
1953 except KeyboardInterrupt:
1953 except KeyboardInterrupt:
1954 self.ui.warn(_("interrupted"))
1954 self.ui.warn(_("interrupted"))
1955 raise
1955 raise
1956 except Exception, inst:
1956 except Exception, inst:
1957 err(_("unpacking manifest %s: %s") % (short(n), inst))
1957 err(_("unpacking manifest %s: %s") % (short(n), inst))
1958 continue
1958 continue
1959
1959
1960 try:
1960 try:
1961 ff = [ l.split('\0') for l in delta.splitlines() ]
1961 ff = [ l.split('\0') for l in delta.splitlines() ]
1962 for f, fn in ff:
1962 for f, fn in ff:
1963 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1963 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1964 except (ValueError, TypeError), inst:
1964 except (ValueError, TypeError), inst:
1965 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1965 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1966
1966
1967 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1967 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1968
1968
1969 for m, c in neededmanifests.items():
1969 for m, c in neededmanifests.items():
1970 err(_("Changeset %s refers to unknown manifest %s") %
1970 err(_("Changeset %s refers to unknown manifest %s") %
1971 (short(m), short(c)))
1971 (short(m), short(c)))
1972 del neededmanifests
1972 del neededmanifests
1973
1973
1974 for f in filenodes:
1974 for f in filenodes:
1975 if f not in filelinkrevs:
1975 if f not in filelinkrevs:
1976 err(_("file %s in manifest but not in changesets") % f)
1976 err(_("file %s in manifest but not in changesets") % f)
1977
1977
1978 for f in filelinkrevs:
1978 for f in filelinkrevs:
1979 if f not in filenodes:
1979 if f not in filenodes:
1980 err(_("file %s in changeset but not in manifest") % f)
1980 err(_("file %s in changeset but not in manifest") % f)
1981
1981
1982 self.ui.status(_("checking files\n"))
1982 self.ui.status(_("checking files\n"))
1983 ff = filenodes.keys()
1983 ff = filenodes.keys()
1984 ff.sort()
1984 ff.sort()
1985 for f in ff:
1985 for f in ff:
1986 if f == "/dev/null":
1986 if f == "/dev/null":
1987 continue
1987 continue
1988 files += 1
1988 files += 1
1989 if not f:
1989 if not f:
1990 err(_("file without name in manifest %s") % short(n))
1990 err(_("file without name in manifest %s") % short(n))
1991 continue
1991 continue
1992 fl = self.file(f)
1992 fl = self.file(f)
1993 checkversion(fl, f)
1993 checkversion(fl, f)
1994 checksize(fl, f)
1994 checksize(fl, f)
1995
1995
1996 nodes = {nullid: 1}
1996 nodes = {nullid: 1}
1997 seen = {}
1997 seen = {}
1998 for i in range(fl.count()):
1998 for i in range(fl.count()):
1999 revisions += 1
1999 revisions += 1
2000 n = fl.node(i)
2000 n = fl.node(i)
2001
2001
2002 if n in seen:
2002 if n in seen:
2003 err(_("%s: duplicate revision %d") % (f, i))
2003 err(_("%s: duplicate revision %d") % (f, i))
2004 if n not in filenodes[f]:
2004 if n not in filenodes[f]:
2005 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2005 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
2006 else:
2006 else:
2007 del filenodes[f][n]
2007 del filenodes[f][n]
2008
2008
2009 flr = fl.linkrev(n)
2009 flr = fl.linkrev(n)
2010 if flr not in filelinkrevs.get(f, []):
2010 if flr not in filelinkrevs.get(f, []):
2011 err(_("%s:%s points to unexpected changeset %d")
2011 err(_("%s:%s points to unexpected changeset %d")
2012 % (f, short(n), flr))
2012 % (f, short(n), flr))
2013 else:
2013 else:
2014 filelinkrevs[f].remove(flr)
2014 filelinkrevs[f].remove(flr)
2015
2015
2016 # verify contents
2016 # verify contents
2017 try:
2017 try:
2018 t = fl.read(n)
2018 t = fl.read(n)
2019 except KeyboardInterrupt:
2019 except KeyboardInterrupt:
2020 self.ui.warn(_("interrupted"))
2020 self.ui.warn(_("interrupted"))
2021 raise
2021 raise
2022 except Exception, inst:
2022 except Exception, inst:
2023 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2023 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
2024
2024
2025 # verify parents
2025 # verify parents
2026 (p1, p2) = fl.parents(n)
2026 (p1, p2) = fl.parents(n)
2027 if p1 not in nodes:
2027 if p1 not in nodes:
2028 err(_("file %s:%s unknown parent 1 %s") %
2028 err(_("file %s:%s unknown parent 1 %s") %
2029 (f, short(n), short(p1)))
2029 (f, short(n), short(p1)))
2030 if p2 not in nodes:
2030 if p2 not in nodes:
2031 err(_("file %s:%s unknown parent 2 %s") %
2031 err(_("file %s:%s unknown parent 2 %s") %
2032 (f, short(n), short(p2)))
2032 (f, short(n), short(p2)))
2033 nodes[n] = 1
2033 nodes[n] = 1
2034
2034
2035 # cross-check
2035 # cross-check
2036 for node in filenodes[f]:
2036 for node in filenodes[f]:
2037 err(_("node %s in manifests not in %s") % (hex(node), f))
2037 err(_("node %s in manifests not in %s") % (hex(node), f))
2038
2038
2039 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2039 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
2040 (files, changesets, revisions))
2040 (files, changesets, revisions))
2041
2041
2042 if warnings[0]:
2042 if warnings[0]:
2043 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2043 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
2044 if errors[0]:
2044 if errors[0]:
2045 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2045 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
2046 return 1
2046 return 1
2047
2047
2048 # used to avoid circular references so destructors work
2048 # used to avoid circular references so destructors work
2049 def aftertrans(base):
2049 def aftertrans(base):
2050 p = base
2050 p = base
2051 def a():
2051 def a():
2052 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2052 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
2053 util.rename(os.path.join(p, "journal.dirstate"),
2053 util.rename(os.path.join(p, "journal.dirstate"),
2054 os.path.join(p, "undo.dirstate"))
2054 os.path.join(p, "undo.dirstate"))
2055 return a
2055 return a
2056
2056
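The closure returned by aftertrans is typically handed to the transaction machinery and invoked once the transaction finishes. A minimal sketch of that call, under the assumption that the journal files already exist beneath the hypothetical path used here:

    # '/path/to/repo/.hg' is a hypothetical store directory that still holds
    # 'journal' and 'journal.dirstate' from a completed transaction
    finish = aftertrans('/path/to/repo/.hg')
    finish()   # renames journal -> undo and journal.dirstate -> undo.dirstate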
@@ -1,848 +1,848
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8
8
9 This contains helper routines that are independent of the SCM core and hide
9 This contains helper routines that are independent of the SCM core and hide
10 platform-specific details from the core.
10 platform-specific details from the core.
11 """
11 """
12
12
13 import os, errno
13 import os, errno
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import *
15 from demandload import *
16 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
16 demandload(globals(), "cStringIO errno popen2 re shutil sys tempfile")
17 demandload(globals(), "threading time")
17 demandload(globals(), "threading time")
18
18
19 class SignalInterrupt(Exception):
19 class SignalInterrupt(Exception):
20 """Exception raised on SIGTERM and SIGHUP."""
20 """Exception raised on SIGTERM and SIGHUP."""
21
21
22 def pipefilter(s, cmd):
22 def pipefilter(s, cmd):
23 '''filter string S through command CMD, returning its output'''
23 '''filter string S through command CMD, returning its output'''
24 (pout, pin) = popen2.popen2(cmd, -1, 'b')
24 (pout, pin) = popen2.popen2(cmd, -1, 'b')
25 def writer():
25 def writer():
26 try:
26 try:
27 pin.write(s)
27 pin.write(s)
28 pin.close()
28 pin.close()
29 except IOError, inst:
29 except IOError, inst:
30 if inst.errno != errno.EPIPE:
30 if inst.errno != errno.EPIPE:
31 raise
31 raise
32
32
33 # we should use select instead on UNIX, but this will work on most
33 # we should use select instead on UNIX, but this will work on most
34 # systems, including Windows
34 # systems, including Windows
35 w = threading.Thread(target=writer)
35 w = threading.Thread(target=writer)
36 w.start()
36 w.start()
37 f = pout.read()
37 f = pout.read()
38 pout.close()
38 pout.close()
39 w.join()
39 w.join()
40 return f
40 return f
41
41
42 def tempfilter(s, cmd):
42 def tempfilter(s, cmd):
43 '''filter string S through a pair of temporary files with CMD.
43 '''filter string S through a pair of temporary files with CMD.
44 CMD is used as a template to create the real command to be run,
44 CMD is used as a template to create the real command to be run,
45 with the strings INFILE and OUTFILE replaced by the real names of
45 with the strings INFILE and OUTFILE replaced by the real names of
46 the temporary files generated.'''
46 the temporary files generated.'''
47 inname, outname = None, None
47 inname, outname = None, None
48 try:
48 try:
49 infd, inname = tempfile.mkstemp(prefix='hgfin')
49 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
50 fp = os.fdopen(infd, 'wb')
50 fp = os.fdopen(infd, 'wb')
51 fp.write(s)
51 fp.write(s)
52 fp.close()
52 fp.close()
53 outfd, outname = tempfile.mkstemp(prefix='hgfout')
53 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
54 os.close(outfd)
54 os.close(outfd)
55 cmd = cmd.replace('INFILE', inname)
55 cmd = cmd.replace('INFILE', inname)
56 cmd = cmd.replace('OUTFILE', outname)
56 cmd = cmd.replace('OUTFILE', outname)
57 code = os.system(cmd)
57 code = os.system(cmd)
58 if code: raise Abort(_("command '%s' failed: %s") %
58 if code: raise Abort(_("command '%s' failed: %s") %
59 (cmd, explain_exit(code)))
59 (cmd, explain_exit(code)))
60 return open(outname, 'rb').read()
60 return open(outname, 'rb').read()
61 finally:
61 finally:
62 try:
62 try:
63 if inname: os.unlink(inname)
63 if inname: os.unlink(inname)
64 except: pass
64 except: pass
65 try:
65 try:
66 if outname: os.unlink(outname)
66 if outname: os.unlink(outname)
67 except: pass
67 except: pass
68
68
69 filtertable = {
69 filtertable = {
70 'tempfile:': tempfilter,
70 'tempfile:': tempfilter,
71 'pipe:': pipefilter,
71 'pipe:': pipefilter,
72 }
72 }
73
73
74 def filter(s, cmd):
74 def filter(s, cmd):
75 "filter a string through a command that transforms its input to its output"
75 "filter a string through a command that transforms its input to its output"
76 for name, fn in filtertable.iteritems():
76 for name, fn in filtertable.iteritems():
77 if cmd.startswith(name):
77 if cmd.startswith(name):
78 return fn(s, cmd[len(name):].lstrip())
78 return fn(s, cmd[len(name):].lstrip())
79 return pipefilter(s, cmd)
79 return pipefilter(s, cmd)
80
80
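A hedged usage sketch of the dispatch above, assuming a Python 2 interpreter where this module is importable as mercurial.util and a POSIX shell with tr and sort available:

    from mercurial import util

    # 'pipe:' (also the default with no prefix) streams the data through the
    # command's stdin/stdout
    util.filter('hello\n', 'pipe:tr a-z A-Z')                # 'HELLO\n'

    # 'tempfile:' writes INFILE, runs the command, then reads OUTFILE back
    util.filter('b\na\n', 'tempfile:sort INFILE > OUTFILE')  # 'a\nb\n'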
81 def find_in_path(name, path, default=None):
81 def find_in_path(name, path, default=None):
82 '''find name in search path. path can be string (will be split
82 '''find name in search path. path can be string (will be split
83 with os.pathsep), or iterable thing that returns strings. if name
83 with os.pathsep), or iterable thing that returns strings. if name
84 found, return path to name. else return default.'''
84 found, return path to name. else return default.'''
85 if isinstance(path, str):
85 if isinstance(path, str):
86 path = path.split(os.pathsep)
86 path = path.split(os.pathsep)
87 for p in path:
87 for p in path:
88 p_name = os.path.join(p, name)
88 p_name = os.path.join(p, name)
89 if os.path.exists(p_name):
89 if os.path.exists(p_name):
90 return p_name
90 return p_name
91 return default
91 return default
92
92
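For example, the patch() helper below uses this to prefer GNU gpatch over plain patch; a small sketch, assuming a POSIX-style PATH:

    import os
    from mercurial import util

    # first existing <dir>/sh on PATH, or the fallback if none is found
    util.find_in_path('sh', os.environ.get('PATH', ''), default='/bin/sh')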
93 def patch(strip, patchname, ui):
93 def patch(strip, patchname, ui):
94 """apply the patch <patchname> to the working directory.
94 """apply the patch <patchname> to the working directory.
95 a list of patched files is returned"""
95 a list of patched files is returned"""
96 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
96 patcher = find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
97 fp = os.popen('"%s" -p%d < "%s"' % (patcher, strip, patchname))
97 fp = os.popen('"%s" -p%d < "%s"' % (patcher, strip, patchname))
98 files = {}
98 files = {}
99 for line in fp:
99 for line in fp:
100 line = line.rstrip()
100 line = line.rstrip()
101 ui.status("%s\n" % line)
101 ui.status("%s\n" % line)
102 if line.startswith('patching file '):
102 if line.startswith('patching file '):
103 pf = parse_patch_output(line)
103 pf = parse_patch_output(line)
104 files.setdefault(pf, 1)
104 files.setdefault(pf, 1)
105 code = fp.close()
105 code = fp.close()
106 if code:
106 if code:
107 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
107 raise Abort(_("patch command failed: %s") % explain_exit(code)[0])
108 return files.keys()
108 return files.keys()
109
109
110 def binary(s):
110 def binary(s):
111 """return true if a string is binary data using diff's heuristic"""
111 """return true if a string is binary data using diff's heuristic"""
112 if s and '\0' in s[:4096]:
112 if s and '\0' in s[:4096]:
113 return True
113 return True
114 return False
114 return False
115
115
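A quick illustration of the heuristic, which only looks for a NUL byte within the first 4096 bytes:

    from mercurial import util

    util.binary('hello world\n')           # False: no NUL byte
    util.binary('\x89PNG\r\n\x1a\n\x00')   # True: NUL within the first 4 KB
    util.binary('')                        # False: empty data is not binary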
116 def unique(g):
116 def unique(g):
117 """return the uniq elements of iterable g"""
117 """return the uniq elements of iterable g"""
118 seen = {}
118 seen = {}
119 for f in g:
119 for f in g:
120 if f not in seen:
120 if f not in seen:
121 seen[f] = 1
121 seen[f] = 1
122 yield f
122 yield f
123
123
124 class Abort(Exception):
124 class Abort(Exception):
125 """Raised if a command needs to print an error and exit."""
125 """Raised if a command needs to print an error and exit."""
126
126
127 def always(fn): return True
127 def always(fn): return True
128 def never(fn): return False
128 def never(fn): return False
129
129
130 def patkind(name, dflt_pat='glob'):
130 def patkind(name, dflt_pat='glob'):
131 """Split a string into an optional pattern kind prefix and the
131 """Split a string into an optional pattern kind prefix and the
132 actual pattern."""
132 actual pattern."""
133 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
133 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
134 if name.startswith(prefix + ':'): return name.split(':', 1)
134 if name.startswith(prefix + ':'): return name.split(':', 1)
135 return dflt_pat, name
135 return dflt_pat, name
136
136
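For instance (a sketch; the fallback kind is whatever dflt_pat is passed):

    from mercurial import util

    util.patkind(r're:\.py$')         # ('re', '\\.py$')
    util.patkind('path:src/module')   # ('path', 'src/module')
    util.patkind('*.py')              # ('glob', '*.py')  -- falls back to dflt_pat
    util.patkind('*.py', 'relpath')   # ('relpath', '*.py')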
137 def globre(pat, head='^', tail='$'):
137 def globre(pat, head='^', tail='$'):
138 "convert a glob pattern into a regexp"
138 "convert a glob pattern into a regexp"
139 i, n = 0, len(pat)
139 i, n = 0, len(pat)
140 res = ''
140 res = ''
141 group = False
141 group = False
142 def peek(): return i < n and pat[i]
142 def peek(): return i < n and pat[i]
143 while i < n:
143 while i < n:
144 c = pat[i]
144 c = pat[i]
145 i = i+1
145 i = i+1
146 if c == '*':
146 if c == '*':
147 if peek() == '*':
147 if peek() == '*':
148 i += 1
148 i += 1
149 res += '.*'
149 res += '.*'
150 else:
150 else:
151 res += '[^/]*'
151 res += '[^/]*'
152 elif c == '?':
152 elif c == '?':
153 res += '.'
153 res += '.'
154 elif c == '[':
154 elif c == '[':
155 j = i
155 j = i
156 if j < n and pat[j] in '!]':
156 if j < n and pat[j] in '!]':
157 j += 1
157 j += 1
158 while j < n and pat[j] != ']':
158 while j < n and pat[j] != ']':
159 j += 1
159 j += 1
160 if j >= n:
160 if j >= n:
161 res += '\\['
161 res += '\\['
162 else:
162 else:
163 stuff = pat[i:j].replace('\\','\\\\')
163 stuff = pat[i:j].replace('\\','\\\\')
164 i = j + 1
164 i = j + 1
165 if stuff[0] == '!':
165 if stuff[0] == '!':
166 stuff = '^' + stuff[1:]
166 stuff = '^' + stuff[1:]
167 elif stuff[0] == '^':
167 elif stuff[0] == '^':
168 stuff = '\\' + stuff
168 stuff = '\\' + stuff
169 res = '%s[%s]' % (res, stuff)
169 res = '%s[%s]' % (res, stuff)
170 elif c == '{':
170 elif c == '{':
171 group = True
171 group = True
172 res += '(?:'
172 res += '(?:'
173 elif c == '}' and group:
173 elif c == '}' and group:
174 res += ')'
174 res += ')'
175 group = False
175 group = False
176 elif c == ',' and group:
176 elif c == ',' and group:
177 res += '|'
177 res += '|'
178 elif c == '\\':
178 elif c == '\\':
179 p = peek()
179 p = peek()
180 if p:
180 if p:
181 i += 1
181 i += 1
182 res += re.escape(p)
182 res += re.escape(p)
183 else:
183 else:
184 res += re.escape(c)
184 res += re.escape(c)
185 else:
185 else:
186 res += re.escape(c)
186 res += re.escape(c)
187 return head + res + tail
187 return head + res + tail
188
188
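A short sketch of the translation, assuming the module is importable as mercurial.util: a single '*' stops at path separators, while '**' crosses them.

    import re
    from mercurial import util

    pat = util.globre('*.py')                   # '^[^/]*\.py$'
    bool(re.match(pat, 'setup.py'))             # True
    bool(re.match(pat, 'mercurial/util.py'))    # False: '*' does not cross '/'

    deep = util.globre('**/*.txt')              # '**' becomes '.*'
    bool(re.match(deep, 'docs/notes/a.txt'))    # True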
189 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
189 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
190
190
191 def pathto(n1, n2):
191 def pathto(n1, n2):
192 '''return the relative path from one place to another.
192 '''return the relative path from one place to another.
193 this returns a path in the form used by the local filesystem, not hg.'''
193 this returns a path in the form used by the local filesystem, not hg.'''
194 if not n1: return localpath(n2)
194 if not n1: return localpath(n2)
195 a, b = n1.split('/'), n2.split('/')
195 a, b = n1.split('/'), n2.split('/')
196 a.reverse()
196 a.reverse()
197 b.reverse()
197 b.reverse()
198 while a and b and a[-1] == b[-1]:
198 while a and b and a[-1] == b[-1]:
199 a.pop()
199 a.pop()
200 b.pop()
200 b.pop()
201 b.reverse()
201 b.reverse()
202 return os.sep.join((['..'] * len(a)) + b)
202 return os.sep.join((['..'] * len(a)) + b)
203
203
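For example, on a POSIX filesystem (os.sep == '/'):

    from mercurial import util

    util.pathto('foo/bar', 'baz/quux')   # '../../baz/quux'
    util.pathto('', 'foo/bar')           # 'foo/bar' -- empty n1 is just localpath(n2)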
204 def canonpath(root, cwd, myname):
204 def canonpath(root, cwd, myname):
205 """return the canonical path of myname, given cwd and root"""
205 """return the canonical path of myname, given cwd and root"""
206 if root == os.sep:
206 if root == os.sep:
207 rootsep = os.sep
207 rootsep = os.sep
208 else:
208 else:
209 rootsep = root + os.sep
209 rootsep = root + os.sep
210 name = myname
210 name = myname
211 if not os.path.isabs(name):
211 if not os.path.isabs(name):
212 name = os.path.join(root, cwd, name)
212 name = os.path.join(root, cwd, name)
213 name = os.path.normpath(name)
213 name = os.path.normpath(name)
214 if name.startswith(rootsep):
214 if name.startswith(rootsep):
215 name = name[len(rootsep):]
215 name = name[len(rootsep):]
216 audit_path(name)
216 audit_path(name)
217 return pconvert(name)
217 return pconvert(name)
218 elif name == root:
218 elif name == root:
219 return ''
219 return ''
220 else:
220 else:
221 # Determine whether `name' is in the hierarchy at or beneath `root',
221 # Determine whether `name' is in the hierarchy at or beneath `root',
222 # by iterating name=dirname(name) until that causes no change (can't
222 # by iterating name=dirname(name) until that causes no change (can't
223 # check name == '/', because that doesn't work on windows). For each
223 # check name == '/', because that doesn't work on windows). For each
224 # `name', compare dev/inode numbers. If they match, the list `rel'
224 # `name', compare dev/inode numbers. If they match, the list `rel'
225 # holds the reversed list of components making up the relative file
225 # holds the reversed list of components making up the relative file
226 # name we want.
226 # name we want.
227 root_st = os.stat(root)
227 root_st = os.stat(root)
228 rel = []
228 rel = []
229 while True:
229 while True:
230 try:
230 try:
231 name_st = os.stat(name)
231 name_st = os.stat(name)
232 except OSError:
232 except OSError:
233 break
233 break
234 if os.path.samestat(name_st, root_st):
234 if os.path.samestat(name_st, root_st):
235 rel.reverse()
235 rel.reverse()
236 name = os.path.join(*rel)
236 name = os.path.join(*rel)
237 audit_path(name)
237 audit_path(name)
238 return pconvert(name)
238 return pconvert(name)
239 dirname, basename = os.path.split(name)
239 dirname, basename = os.path.split(name)
240 rel.append(basename)
240 rel.append(basename)
241 if dirname == name:
241 if dirname == name:
242 break
242 break
243 name = dirname
243 name = dirname
244
244
245 raise Abort('%s not under root' % myname)
245 raise Abort('%s not under root' % myname)
246
246
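A sketch of the common cases, using a hypothetical repository root; when myname resolves to a path below root, no filesystem access is needed:

    from mercurial import util

    util.canonpath('/repo', 'src', 'module.py')   # 'src/module.py'
    util.canonpath('/repo', '', '/repo/README')   # 'README'
    # a name outside the root falls through to the stat()-based walk above
    # and ends in util.Abort (that path requires the root to actually exist)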
247 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
247 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
248 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
248 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
249
249
250 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
250 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
251 if os.name == 'nt':
251 if os.name == 'nt':
252 dflt_pat = 'glob'
252 dflt_pat = 'glob'
253 else:
253 else:
254 dflt_pat = 'relpath'
254 dflt_pat = 'relpath'
255 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
255 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
256
256
257 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
257 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
258 """build a function to match a set of file patterns
258 """build a function to match a set of file patterns
259
259
260 arguments:
260 arguments:
261 canonroot - the canonical root of the tree you're matching against
261 canonroot - the canonical root of the tree you're matching against
262 cwd - the current working directory, if relevant
262 cwd - the current working directory, if relevant
263 names - patterns to find
263 names - patterns to find
264 inc - patterns to include
264 inc - patterns to include
265 exc - patterns to exclude
265 exc - patterns to exclude
266 head - a regex to prepend to patterns to control whether a match is rooted
266 head - a regex to prepend to patterns to control whether a match is rooted
267
267
268 a pattern is one of:
268 a pattern is one of:
269 'glob:<rooted glob>'
269 'glob:<rooted glob>'
270 're:<rooted regexp>'
270 're:<rooted regexp>'
271 'path:<rooted path>'
271 'path:<rooted path>'
272 'relglob:<relative glob>'
272 'relglob:<relative glob>'
273 'relpath:<relative path>'
273 'relpath:<relative path>'
274 'relre:<relative regexp>'
274 'relre:<relative regexp>'
275 '<rooted path or regexp>'
275 '<rooted path or regexp>'
276
276
277 returns:
277 returns:
278 a 3-tuple containing
278 a 3-tuple containing
279 - list of explicit non-pattern names passed in
279 - list of explicit non-pattern names passed in
280 - a bool match(filename) function
280 - a bool match(filename) function
281 - a bool indicating if any patterns were passed in
281 - a bool indicating if any patterns were passed in
282
282
283 todo:
283 todo:
284 make head regex a rooted bool
284 make head regex a rooted bool
285 """
285 """
286
286
287 def contains_glob(name):
287 def contains_glob(name):
288 for c in name:
288 for c in name:
289 if c in _globchars: return True
289 if c in _globchars: return True
290 return False
290 return False
291
291
292 def regex(kind, name, tail):
292 def regex(kind, name, tail):
293 '''convert a pattern into a regular expression'''
293 '''convert a pattern into a regular expression'''
294 if kind == 're':
294 if kind == 're':
295 return name
295 return name
296 elif kind == 'path':
296 elif kind == 'path':
297 return '^' + re.escape(name) + '(?:/|$)'
297 return '^' + re.escape(name) + '(?:/|$)'
298 elif kind == 'relglob':
298 elif kind == 'relglob':
299 return head + globre(name, '(?:|.*/)', tail)
299 return head + globre(name, '(?:|.*/)', tail)
300 elif kind == 'relpath':
300 elif kind == 'relpath':
301 return head + re.escape(name) + tail
301 return head + re.escape(name) + tail
302 elif kind == 'relre':
302 elif kind == 'relre':
303 if name.startswith('^'):
303 if name.startswith('^'):
304 return name
304 return name
305 return '.*' + name
305 return '.*' + name
306 return head + globre(name, '', tail)
306 return head + globre(name, '', tail)
307
307
308 def matchfn(pats, tail):
308 def matchfn(pats, tail):
309 """build a matching function from a set of patterns"""
309 """build a matching function from a set of patterns"""
310 if not pats:
310 if not pats:
311 return
311 return
312 matches = []
312 matches = []
313 for k, p in pats:
313 for k, p in pats:
314 try:
314 try:
315 pat = '(?:%s)' % regex(k, p, tail)
315 pat = '(?:%s)' % regex(k, p, tail)
316 matches.append(re.compile(pat).match)
316 matches.append(re.compile(pat).match)
317 except re.error:
317 except re.error:
318 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
318 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
319 else: raise Abort("invalid pattern (%s): %s" % (k, p))
319 else: raise Abort("invalid pattern (%s): %s" % (k, p))
320
320
321 def buildfn(text):
321 def buildfn(text):
322 for m in matches:
322 for m in matches:
323 r = m(text)
323 r = m(text)
324 if r:
324 if r:
325 return r
325 return r
326
326
327 return buildfn
327 return buildfn
328
328
329 def globprefix(pat):
329 def globprefix(pat):
330 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
330 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
331 root = []
331 root = []
332 for p in pat.split(os.sep):
332 for p in pat.split(os.sep):
333 if contains_glob(p): break
333 if contains_glob(p): break
334 root.append(p)
334 root.append(p)
335 return '/'.join(root)
335 return '/'.join(root)
336
336
337 pats = []
337 pats = []
338 files = []
338 files = []
339 roots = []
339 roots = []
340 for kind, name in [patkind(p, dflt_pat) for p in names]:
340 for kind, name in [patkind(p, dflt_pat) for p in names]:
341 if kind in ('glob', 'relpath'):
341 if kind in ('glob', 'relpath'):
342 name = canonpath(canonroot, cwd, name)
342 name = canonpath(canonroot, cwd, name)
343 if name == '':
343 if name == '':
344 kind, name = 'glob', '**'
344 kind, name = 'glob', '**'
345 if kind in ('glob', 'path', 're'):
345 if kind in ('glob', 'path', 're'):
346 pats.append((kind, name))
346 pats.append((kind, name))
347 if kind == 'glob':
347 if kind == 'glob':
348 root = globprefix(name)
348 root = globprefix(name)
349 if root: roots.append(root)
349 if root: roots.append(root)
350 elif kind == 'relpath':
350 elif kind == 'relpath':
351 files.append((kind, name))
351 files.append((kind, name))
352 roots.append(name)
352 roots.append(name)
353
353
354 patmatch = matchfn(pats, '$') or always
354 patmatch = matchfn(pats, '$') or always
355 filematch = matchfn(files, '(?:/|$)') or always
355 filematch = matchfn(files, '(?:/|$)') or always
356 incmatch = always
356 incmatch = always
357 if inc:
357 if inc:
358 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
358 incmatch = matchfn(map(patkind, inc), '(?:/|$)')
359 excmatch = lambda fn: False
359 excmatch = lambda fn: False
360 if exc:
360 if exc:
361 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
361 excmatch = matchfn(map(patkind, exc), '(?:/|$)')
362
362
363 return (roots,
363 return (roots,
364 lambda fn: (incmatch(fn) and not excmatch(fn) and
364 lambda fn: (incmatch(fn) and not excmatch(fn) and
365 (fn.endswith('/') or
365 (fn.endswith('/') or
366 (not pats and not files) or
366 (not pats and not files) or
367 (pats and patmatch(fn)) or
367 (pats and patmatch(fn)) or
368 (files and filematch(fn)))),
368 (files and filematch(fn)))),
369 (inc or exc or (pats and pats != [('glob', '**')])) and True)
369 (inc or exc or (pats and pats != [('glob', '**')])) and True)
370
370
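A hedged sketch of driving the matcher built above (POSIX paths, module importable as mercurial.util; '/repo' is a hypothetical canonical root):

    from mercurial import util

    roots, match, anypats = util.matcher('/repo', cwd='', names=['glob:*.py'])
    bool(match('setup.py'))       # True: matched by the rooted glob
    bool(match('contrib/hgk'))    # False: '*' does not cross '/'
    anypats                       # True: a real pattern was supplied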
371 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
371 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
372 '''enhanced shell command execution.
372 '''enhanced shell command execution.
373 run with environment maybe modified, maybe in different dir.
373 run with environment maybe modified, maybe in different dir.
374
374
375 if command fails and onerr is None, return status. if ui object,
375 if command fails and onerr is None, return status. if ui object,
376 print error message and return status, else raise onerr object as
376 print error message and return status, else raise onerr object as
377 exception.'''
377 exception.'''
378 oldenv = {}
378 oldenv = {}
379 for k in environ:
379 for k in environ:
380 oldenv[k] = os.environ.get(k)
380 oldenv[k] = os.environ.get(k)
381 if cwd is not None:
381 if cwd is not None:
382 oldcwd = os.getcwd()
382 oldcwd = os.getcwd()
383 try:
383 try:
384 for k, v in environ.iteritems():
384 for k, v in environ.iteritems():
385 os.environ[k] = str(v)
385 os.environ[k] = str(v)
386 if cwd is not None and oldcwd != cwd:
386 if cwd is not None and oldcwd != cwd:
387 os.chdir(cwd)
387 os.chdir(cwd)
388 rc = os.system(cmd)
388 rc = os.system(cmd)
389 if rc and onerr:
389 if rc and onerr:
390 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
390 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
391 explain_exit(rc)[0])
391 explain_exit(rc)[0])
392 if errprefix:
392 if errprefix:
393 errmsg = '%s: %s' % (errprefix, errmsg)
393 errmsg = '%s: %s' % (errprefix, errmsg)
394 try:
394 try:
395 onerr.warn(errmsg + '\n')
395 onerr.warn(errmsg + '\n')
396 except AttributeError:
396 except AttributeError:
397 raise onerr(errmsg)
397 raise onerr(errmsg)
398 return rc
398 return rc
399 finally:
399 finally:
400 for k, v in oldenv.iteritems():
400 for k, v in oldenv.iteritems():
401 if v is None:
401 if v is None:
402 del os.environ[k]
402 del os.environ[k]
403 else:
403 else:
404 os.environ[k] = v
404 os.environ[k] = v
405 if cwd is not None and oldcwd != cwd:
405 if cwd is not None and oldcwd != cwd:
406 os.chdir(oldcwd)
406 os.chdir(oldcwd)
407
407
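A small sketch of the environment/cwd handling, assuming a POSIX shell; both are restored afterwards, and onerr=util.Abort turns a non-zero exit into an exception:

    from mercurial import util

    rc = util.system('echo "$GREETING" > /dev/null',
                     environ={'GREETING': 'hello'}, cwd='/tmp',
                     onerr=util.Abort, errprefix='greet')
    # rc == 0; on failure util.Abort('greet: echo exited with status N') is raised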
408 def rename(src, dst):
408 def rename(src, dst):
409 """forcibly rename a file"""
409 """forcibly rename a file"""
410 try:
410 try:
411 os.rename(src, dst)
411 os.rename(src, dst)
412 except:
412 except:
413 os.unlink(dst)
413 os.unlink(dst)
414 os.rename(src, dst)
414 os.rename(src, dst)
415
415
416 def unlink(f):
416 def unlink(f):
417 """unlink and remove the directory if it is empty"""
417 """unlink and remove the directory if it is empty"""
418 os.unlink(f)
418 os.unlink(f)
419 # try removing directories that might now be empty
419 # try removing directories that might now be empty
420 try:
420 try:
421 os.removedirs(os.path.dirname(f))
421 os.removedirs(os.path.dirname(f))
422 except OSError:
422 except OSError:
423 pass
423 pass
424
424
425 def copyfiles(src, dst, hardlink=None):
425 def copyfiles(src, dst, hardlink=None):
426 """Copy a directory tree using hardlinks if possible"""
426 """Copy a directory tree using hardlinks if possible"""
427
427
428 if hardlink is None:
428 if hardlink is None:
429 hardlink = (os.stat(src).st_dev ==
429 hardlink = (os.stat(src).st_dev ==
430 os.stat(os.path.dirname(dst)).st_dev)
430 os.stat(os.path.dirname(dst)).st_dev)
431
431
432 if os.path.isdir(src):
432 if os.path.isdir(src):
433 os.mkdir(dst)
433 os.mkdir(dst)
434 for name in os.listdir(src):
434 for name in os.listdir(src):
435 srcname = os.path.join(src, name)
435 srcname = os.path.join(src, name)
436 dstname = os.path.join(dst, name)
436 dstname = os.path.join(dst, name)
437 copyfiles(srcname, dstname, hardlink)
437 copyfiles(srcname, dstname, hardlink)
438 else:
438 else:
439 if hardlink:
439 if hardlink:
440 try:
440 try:
441 os_link(src, dst)
441 os_link(src, dst)
442 except (IOError, OSError):
442 except (IOError, OSError):
443 hardlink = False
443 hardlink = False
444 shutil.copy(src, dst)
444 shutil.copy(src, dst)
445 else:
445 else:
446 shutil.copy(src, dst)
446 shutil.copy(src, dst)
447
447
448 def audit_path(path):
448 def audit_path(path):
449 """Abort if path contains dangerous components"""
449 """Abort if path contains dangerous components"""
450 parts = os.path.normcase(path).split(os.sep)
450 parts = os.path.normcase(path).split(os.sep)
451 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
451 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
452 or os.pardir in parts):
452 or os.pardir in parts):
453 raise Abort(_("path contains illegal component: %s\n") % path)
453 raise Abort(_("path contains illegal component: %s\n") % path)
454
454
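A few examples of what the audit accepts and rejects (POSIX separators assumed):

    from mercurial import util

    util.audit_path('src/module.py')   # fine, returns None
    # each of the following raises util.Abort:
    #   util.audit_path('../escape')     contains an os.pardir component
    #   util.audit_path('.hg/hgrc')      reaches into the repository metadata
    #   util.audit_path('/etc/passwd')   absolute path (empty first component)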
455 def opener(base, audit=True):
455 def opener(base, audit=True):
456 """
456 """
457 return a function that opens files relative to base
457 return a function that opens files relative to base
458
458
459 this function is used to hide the details of COW semantics and
459 this function is used to hide the details of COW semantics and
460 remote file access from higher level code.
460 remote file access from higher level code.
461 """
461 """
462 p = base
462 p = base
463 audit_p = audit
463 audit_p = audit
464
464
465 def mktempcopy(name):
465 def mktempcopy(name):
466 d, fn = os.path.split(name)
466 d, fn = os.path.split(name)
467 fd, temp = tempfile.mkstemp(prefix=fn, dir=d)
467 fd, temp = tempfile.mkstemp(prefix=".%s-" % fn, dir=d)
468 fp = os.fdopen(fd, "wb")
468 fp = os.fdopen(fd, "wb")
469 try:
469 try:
470 fp.write(file(name, "rb").read())
470 fp.write(file(name, "rb").read())
471 except:
471 except:
472 try: os.unlink(temp)
472 try: os.unlink(temp)
473 except: pass
473 except: pass
474 raise
474 raise
475 fp.close()
475 fp.close()
476 st = os.lstat(name)
476 st = os.lstat(name)
477 os.chmod(temp, st.st_mode)
477 os.chmod(temp, st.st_mode)
478 return temp
478 return temp
479
479
480 class atomictempfile(file):
480 class atomictempfile(file):
481 """the file will only be copied when rename is called"""
481 """the file will only be copied when rename is called"""
482 def __init__(self, name, mode):
482 def __init__(self, name, mode):
483 self.__name = name
483 self.__name = name
484 self.temp = mktempcopy(name)
484 self.temp = mktempcopy(name)
485 file.__init__(self, self.temp, mode)
485 file.__init__(self, self.temp, mode)
486 def rename(self):
486 def rename(self):
487 if not self.closed:
487 if not self.closed:
488 file.close(self)
488 file.close(self)
489 rename(self.temp, self.__name)
489 rename(self.temp, self.__name)
490 def __del__(self):
490 def __del__(self):
491 if not self.closed:
491 if not self.closed:
492 try:
492 try:
493 os.unlink(self.temp)
493 os.unlink(self.temp)
494 except: pass
494 except: pass
495 file.close(self)
495 file.close(self)
496
496
497 class atomicfile(atomictempfile):
497 class atomicfile(atomictempfile):
498 """the file will only be copied on close"""
498 """the file will only be copied on close"""
499 def __init__(self, name, mode):
499 def __init__(self, name, mode):
500 atomictempfile.__init__(self, name, mode)
500 atomictempfile.__init__(self, name, mode)
501 def close(self):
501 def close(self):
502 self.rename()
502 self.rename()
503 def __del__(self):
503 def __del__(self):
504 self.rename()
504 self.rename()
505
505
506 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
506 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
507 if audit_p:
507 if audit_p:
508 audit_path(path)
508 audit_path(path)
509 f = os.path.join(p, path)
509 f = os.path.join(p, path)
510
510
511 if not text:
511 if not text:
512 mode += "b" # for that other OS
512 mode += "b" # for that other OS
513
513
514 if mode[0] != "r":
514 if mode[0] != "r":
515 try:
515 try:
516 nlink = nlinks(f)
516 nlink = nlinks(f)
517 except OSError:
517 except OSError:
518 d = os.path.dirname(f)
518 d = os.path.dirname(f)
519 if not os.path.isdir(d):
519 if not os.path.isdir(d):
520 os.makedirs(d)
520 os.makedirs(d)
521 else:
521 else:
522 if atomic:
522 if atomic:
523 return atomicfile(f, mode)
523 return atomicfile(f, mode)
524 elif atomictemp:
524 elif atomictemp:
525 return atomictempfile(f, mode)
525 return atomictempfile(f, mode)
526 if nlink > 1:
526 if nlink > 1:
527 rename(mktempcopy(f), f)
527 rename(mktempcopy(f), f)
528 return file(f, mode)
528 return file(f, mode)
529
529
530 return o
530 return o
531
531
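A hedged sketch of the returned opener, assuming a hypothetical store directory /repo/.hg that already exists; paths are audited and missing parent directories are created on demand:

    from mercurial import util

    o = util.opener('/repo/.hg')
    f = o('data/notes.txt.i', 'w')       # creates /repo/.hg/data/ if missing
    f.write('some revlog data')
    f.close()

    # atomictemp works on a private copy of the file and only replaces the
    # original when rename() is called
    f = o('data/notes.txt.i', 'a', atomictemp=True)
    f.write('more data')
    f.rename()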
532 def _makelock_file(info, pathname):
532 def _makelock_file(info, pathname):
533 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
533 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
534 os.write(ld, info)
534 os.write(ld, info)
535 os.close(ld)
535 os.close(ld)
536
536
537 def _readlock_file(pathname):
537 def _readlock_file(pathname):
538 return file(pathname).read()
538 return file(pathname).read()
539
539
540 def nlinks(pathname):
540 def nlinks(pathname):
541 """Return number of hardlinks for the given file."""
541 """Return number of hardlinks for the given file."""
542 return os.stat(pathname).st_nlink
542 return os.stat(pathname).st_nlink
543
543
544 if hasattr(os, 'link'):
544 if hasattr(os, 'link'):
545 os_link = os.link
545 os_link = os.link
546 else:
546 else:
547 def os_link(src, dst):
547 def os_link(src, dst):
548 raise OSError(0, _("Hardlinks not supported"))
548 raise OSError(0, _("Hardlinks not supported"))
549
549
550 # Platform specific variants
550 # Platform specific variants
551 if os.name == 'nt':
551 if os.name == 'nt':
552 demandload(globals(), "msvcrt")
552 demandload(globals(), "msvcrt")
553 nulldev = 'NUL:'
553 nulldev = 'NUL:'
554
554
555 class winstdout:
555 class winstdout:
556 '''stdout on windows misbehaves if sent through a pipe'''
556 '''stdout on windows misbehaves if sent through a pipe'''
557
557
558 def __init__(self, fp):
558 def __init__(self, fp):
559 self.fp = fp
559 self.fp = fp
560
560
561 def __getattr__(self, key):
561 def __getattr__(self, key):
562 return getattr(self.fp, key)
562 return getattr(self.fp, key)
563
563
564 def close(self):
564 def close(self):
565 try:
565 try:
566 self.fp.close()
566 self.fp.close()
567 except: pass
567 except: pass
568
568
569 def write(self, s):
569 def write(self, s):
570 try:
570 try:
571 return self.fp.write(s)
571 return self.fp.write(s)
572 except IOError, inst:
572 except IOError, inst:
573 if inst.errno != 0: raise
573 if inst.errno != 0: raise
574 self.close()
574 self.close()
575 raise IOError(errno.EPIPE, 'Broken pipe')
575 raise IOError(errno.EPIPE, 'Broken pipe')
576
576
577 sys.stdout = winstdout(sys.stdout)
577 sys.stdout = winstdout(sys.stdout)
578
578
579 def system_rcpath():
579 def system_rcpath():
580 try:
580 try:
581 return system_rcpath_win32()
581 return system_rcpath_win32()
582 except:
582 except:
583 return [r'c:\mercurial\mercurial.ini']
583 return [r'c:\mercurial\mercurial.ini']
584
584
585 def os_rcpath():
585 def os_rcpath():
586 '''return default os-specific hgrc search path'''
586 '''return default os-specific hgrc search path'''
587 return system_rcpath() + [os.path.join(os.path.expanduser('~'),
587 return system_rcpath() + [os.path.join(os.path.expanduser('~'),
588 'mercurial.ini')]
588 'mercurial.ini')]
589
589
590 def parse_patch_output(output_line):
590 def parse_patch_output(output_line):
591 """parses the output produced by patch and returns the file name"""
591 """parses the output produced by patch and returns the file name"""
592 pf = output_line[14:]
592 pf = output_line[14:]
593 if pf[0] == '`':
593 if pf[0] == '`':
594 pf = pf[1:-1] # Remove the quotes
594 pf = pf[1:-1] # Remove the quotes
595 return pf
595 return pf
596
596
597 def testpid(pid):
597 def testpid(pid):
598 '''return False if pid dead, True if running or not known'''
598 '''return False if pid dead, True if running or not known'''
599 return True
599 return True
600
600
601 def is_exec(f, last):
601 def is_exec(f, last):
602 return last
602 return last
603
603
604 def set_exec(f, mode):
604 def set_exec(f, mode):
605 pass
605 pass
606
606
607 def set_binary(fd):
607 def set_binary(fd):
608 msvcrt.setmode(fd.fileno(), os.O_BINARY)
608 msvcrt.setmode(fd.fileno(), os.O_BINARY)
609
609
610 def pconvert(path):
610 def pconvert(path):
611 return path.replace("\\", "/")
611 return path.replace("\\", "/")
612
612
613 def localpath(path):
613 def localpath(path):
614 return path.replace('/', '\\')
614 return path.replace('/', '\\')
615
615
616 def normpath(path):
616 def normpath(path):
617 return pconvert(os.path.normpath(path))
617 return pconvert(os.path.normpath(path))
618
618
619 makelock = _makelock_file
619 makelock = _makelock_file
620 readlock = _readlock_file
620 readlock = _readlock_file
621
621
622 def explain_exit(code):
622 def explain_exit(code):
623 return _("exited with status %d") % code, code
623 return _("exited with status %d") % code, code
624
624
625 try:
625 try:
626 # override functions with win32 versions if possible
626 # override functions with win32 versions if possible
627 from util_win32 import *
627 from util_win32 import *
628 except ImportError:
628 except ImportError:
629 pass
629 pass
630
630
631 else:
631 else:
632 nulldev = '/dev/null'
632 nulldev = '/dev/null'
633
633
634 def rcfiles(path):
634 def rcfiles(path):
635 rcs = [os.path.join(path, 'hgrc')]
635 rcs = [os.path.join(path, 'hgrc')]
636 rcdir = os.path.join(path, 'hgrc.d')
636 rcdir = os.path.join(path, 'hgrc.d')
637 try:
637 try:
638 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
638 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
639 if f.endswith(".rc")])
639 if f.endswith(".rc")])
640 except OSError, inst: pass
640 except OSError, inst: pass
641 return rcs
641 return rcs
642
642
643 def os_rcpath():
643 def os_rcpath():
644 '''return default os-specific hgrc search path'''
644 '''return default os-specific hgrc search path'''
645 path = []
645 path = []
646 if len(sys.argv) > 0:
646 if len(sys.argv) > 0:
647 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
647 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
648 '/../etc/mercurial'))
648 '/../etc/mercurial'))
649 path.extend(rcfiles('/etc/mercurial'))
649 path.extend(rcfiles('/etc/mercurial'))
650 path.append(os.path.expanduser('~/.hgrc'))
650 path.append(os.path.expanduser('~/.hgrc'))
651 path = [os.path.normpath(f) for f in path]
651 path = [os.path.normpath(f) for f in path]
652 return path
652 return path
653
653
654 def parse_patch_output(output_line):
654 def parse_patch_output(output_line):
655 """parses the output produced by patch and returns the file name"""
655 """parses the output produced by patch and returns the file name"""
656 pf = output_line[14:]
656 pf = output_line[14:]
657 if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
657 if pf.startswith("'") and pf.endswith("'") and pf.find(" ") >= 0:
658 pf = pf[1:-1] # Remove the quotes
658 pf = pf[1:-1] # Remove the quotes
659 return pf
659 return pf
660
660
661 def is_exec(f, last):
661 def is_exec(f, last):
662 """check whether a file is executable"""
662 """check whether a file is executable"""
663 return (os.stat(f).st_mode & 0100 != 0)
663 return (os.stat(f).st_mode & 0100 != 0)
664
664
665 def set_exec(f, mode):
665 def set_exec(f, mode):
666 s = os.stat(f).st_mode
666 s = os.stat(f).st_mode
667 if (s & 0100 != 0) == mode:
667 if (s & 0100 != 0) == mode:
668 return
668 return
669 if mode:
669 if mode:
670 # Turn on +x for every +r bit when making a file executable
670 # Turn on +x for every +r bit when making a file executable
671 # and obey umask.
671 # and obey umask.
672 umask = os.umask(0)
672 umask = os.umask(0)
673 os.umask(umask)
673 os.umask(umask)
674 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
674 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
675 else:
675 else:
676 os.chmod(f, s & 0666)
676 os.chmod(f, s & 0666)
677
677
678 def set_binary(fd):
678 def set_binary(fd):
679 pass
679 pass
680
680
681 def pconvert(path):
681 def pconvert(path):
682 return path
682 return path
683
683
684 def localpath(path):
684 def localpath(path):
685 return path
685 return path
686
686
687 normpath = os.path.normpath
687 normpath = os.path.normpath
688
688
689 def makelock(info, pathname):
689 def makelock(info, pathname):
690 try:
690 try:
691 os.symlink(info, pathname)
691 os.symlink(info, pathname)
692 except OSError, why:
692 except OSError, why:
693 if why.errno == errno.EEXIST:
693 if why.errno == errno.EEXIST:
694 raise
694 raise
695 else:
695 else:
696 _makelock_file(info, pathname)
696 _makelock_file(info, pathname)
697
697
698 def readlock(pathname):
698 def readlock(pathname):
699 try:
699 try:
700 return os.readlink(pathname)
700 return os.readlink(pathname)
701 except OSError, why:
701 except OSError, why:
702 if why.errno == errno.EINVAL:
702 if why.errno == errno.EINVAL:
703 return _readlock_file(pathname)
703 return _readlock_file(pathname)
704 else:
704 else:
705 raise
705 raise
706
706
707 def testpid(pid):
707 def testpid(pid):
708 '''return False if pid dead, True if running or not sure'''
708 '''return False if pid dead, True if running or not sure'''
709 try:
709 try:
710 os.kill(pid, 0)
710 os.kill(pid, 0)
711 return True
711 return True
712 except OSError, inst:
712 except OSError, inst:
713 return inst.errno != errno.ESRCH
713 return inst.errno != errno.ESRCH
714
714
715 def explain_exit(code):
715 def explain_exit(code):
716 """return a 2-tuple (desc, code) describing a process's status"""
716 """return a 2-tuple (desc, code) describing a process's status"""
717 if os.WIFEXITED(code):
717 if os.WIFEXITED(code):
718 val = os.WEXITSTATUS(code)
718 val = os.WEXITSTATUS(code)
719 return _("exited with status %d") % val, val
719 return _("exited with status %d") % val, val
720 elif os.WIFSIGNALED(code):
720 elif os.WIFSIGNALED(code):
721 val = os.WTERMSIG(code)
721 val = os.WTERMSIG(code)
722 return _("killed by signal %d") % val, val
722 return _("killed by signal %d") % val, val
723 elif os.WIFSTOPPED(code):
723 elif os.WIFSTOPPED(code):
724 val = os.WSTOPSIG(code)
724 val = os.WSTOPSIG(code)
725 return _("stopped by signal %d") % val, val
725 return _("stopped by signal %d") % val, val
726 raise ValueError(_("invalid exit code"))
726 raise ValueError(_("invalid exit code"))
727
727
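os.system() returns a raw wait status, which this helper decodes; for example, on POSIX with a C/English locale:

    import os
    from mercurial import util

    util.explain_exit(0)                     # ('exited with status 0', 0)
    util.explain_exit(os.system('exit 3'))   # ('exited with status 3', 3)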
728 class chunkbuffer(object):
728 class chunkbuffer(object):
729 """Allow arbitrary sized chunks of data to be efficiently read from an
729 """Allow arbitrary sized chunks of data to be efficiently read from an
730 iterator over chunks of arbitrary size."""
730 iterator over chunks of arbitrary size."""
731
731
732 def __init__(self, in_iter, targetsize = 2**16):
732 def __init__(self, in_iter, targetsize = 2**16):
733 """in_iter is the iterator that's iterating over the input chunks.
733 """in_iter is the iterator that's iterating over the input chunks.
734 targetsize is how big a buffer to try to maintain."""
734 targetsize is how big a buffer to try to maintain."""
735 self.in_iter = iter(in_iter)
735 self.in_iter = iter(in_iter)
736 self.buf = ''
736 self.buf = ''
737 self.targetsize = int(targetsize)
737 self.targetsize = int(targetsize)
738 if self.targetsize <= 0:
738 if self.targetsize <= 0:
739 raise ValueError(_("targetsize must be greater than 0, was %d") %
739 raise ValueError(_("targetsize must be greater than 0, was %d") %
740 targetsize)
740 targetsize)
741 self.iterempty = False
741 self.iterempty = False
742
742
743 def fillbuf(self):
743 def fillbuf(self):
744 """Ignore target size; read every chunk from iterator until empty."""
744 """Ignore target size; read every chunk from iterator until empty."""
745 if not self.iterempty:
745 if not self.iterempty:
746 collector = cStringIO.StringIO()
746 collector = cStringIO.StringIO()
747 collector.write(self.buf)
747 collector.write(self.buf)
748 for ch in self.in_iter:
748 for ch in self.in_iter:
749 collector.write(ch)
749 collector.write(ch)
750 self.buf = collector.getvalue()
750 self.buf = collector.getvalue()
751 self.iterempty = True
751 self.iterempty = True
752
752
753 def read(self, l):
753 def read(self, l):
754 """Read L bytes of data from the iterator of chunks of data.
754 """Read L bytes of data from the iterator of chunks of data.
755 Returns less than L bytes if the iterator runs dry."""
755 Returns less than L bytes if the iterator runs dry."""
756 if l > len(self.buf) and not self.iterempty:
756 if l > len(self.buf) and not self.iterempty:
757 # Clamp to a multiple of self.targetsize
757 # Clamp to a multiple of self.targetsize
758 targetsize = self.targetsize * ((l // self.targetsize) + 1)
758 targetsize = self.targetsize * ((l // self.targetsize) + 1)
759 collector = cStringIO.StringIO()
759 collector = cStringIO.StringIO()
760 collector.write(self.buf)
760 collector.write(self.buf)
761 collected = len(self.buf)
761 collected = len(self.buf)
762 for chunk in self.in_iter:
762 for chunk in self.in_iter:
763 collector.write(chunk)
763 collector.write(chunk)
764 collected += len(chunk)
764 collected += len(chunk)
765 if collected >= targetsize:
765 if collected >= targetsize:
766 break
766 break
767 if collected < targetsize:
767 if collected < targetsize:
768 self.iterempty = True
768 self.iterempty = True
769 self.buf = collector.getvalue()
769 self.buf = collector.getvalue()
770 s, self.buf = self.buf[:l], buffer(self.buf, l)
770 s, self.buf = self.buf[:l], buffer(self.buf, l)
771 return s
771 return s
772
772
773 def filechunkiter(f, size = 65536):
773 def filechunkiter(f, size = 65536):
774 """Create a generator that produces all the data in the file size
774 """Create a generator that produces all the data in the file size
775 (default 65536) bytes at a time. Chunks may be less than size
775 (default 65536) bytes at a time. Chunks may be less than size
776 bytes if the chunk is the last chunk in the file, or the file is a
776 bytes if the chunk is the last chunk in the file, or the file is a
777 socket or some other type of file that sometimes reads less data
777 socket or some other type of file that sometimes reads less data
778 than is requested."""
778 than is requested."""
779 s = f.read(size)
779 s = f.read(size)
780 while len(s) > 0:
780 while len(s) > 0:
781 yield s
781 yield s
782 s = f.read(size)
782 s = f.read(size)
783
783
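A small Python 2 sketch combining the two: filechunkiter re-reads a file in fixed-size pieces, and chunkbuffer lets callers consume those pieces at whatever granularity they like.

    from mercurial import util

    cb = util.chunkbuffer(['abc', 'def', 'ghi'])   # any iterable of strings
    cb.read(4)     # 'abcd'
    cb.read(100)   # 'efghi' -- short read once the iterator runs dry

    # re-chunk an arbitrary file (hypothetical name) into 4 KB pieces
    f = open('some-bundle-file', 'rb')
    buffered = util.chunkbuffer(util.filechunkiter(f, size=4096))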
784 def makedate():
784 def makedate():
785 lt = time.localtime()
785 lt = time.localtime()
786 if lt[8] == 1 and time.daylight:
786 if lt[8] == 1 and time.daylight:
787 tz = time.altzone
787 tz = time.altzone
788 else:
788 else:
789 tz = time.timezone
789 tz = time.timezone
790 return time.mktime(lt), tz
790 return time.mktime(lt), tz
791
791
792 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
792 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
793 """represent a (unixtime, offset) tuple as a localized time.
793 """represent a (unixtime, offset) tuple as a localized time.
794 unixtime is seconds since the epoch, and offset is the time zone's
794 unixtime is seconds since the epoch, and offset is the time zone's
795 number of seconds away from UTC. if timezone is false, do not
795 number of seconds away from UTC. if timezone is false, do not
796 append time zone to string."""
796 append time zone to string."""
797 t, tz = date or makedate()
797 t, tz = date or makedate()
798 s = time.strftime(format, time.gmtime(float(t) - tz))
798 s = time.strftime(format, time.gmtime(float(t) - tz))
799 if timezone:
799 if timezone:
800 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
800 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
801 return s
801 return s
802
802
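The (unixtime, offset) pairs normally come from makedate(); a couple of fixed examples (negative offsets are east of UTC):

    from mercurial import util

    util.datestr((0, 0))          # 'Thu Jan 01 00:00:00 1970 +0000'
    util.datestr((0, -3600))      # 'Thu Jan 01 01:00:00 1970 +0100'
    util.datestr(timezone=False)  # current local time without the offset suffix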
803 def shortuser(user):
803 def shortuser(user):
804 """Return a short representation of a user name or email address."""
804 """Return a short representation of a user name or email address."""
805 f = user.find('@')
805 f = user.find('@')
806 if f >= 0:
806 if f >= 0:
807 user = user[:f]
807 user = user[:f]
808 f = user.find('<')
808 f = user.find('<')
809 if f >= 0:
809 if f >= 0:
810 user = user[f+1:]
810 user = user[f+1:]
811 return user
811 return user
812
812
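For example:

    from mercurial import util

    util.shortuser('John Doe <john.doe@example.com>')   # 'john.doe'
    util.shortuser('jane@example.org')                   # 'jane'
    util.shortuser('plainlogin')                         # 'plainlogin'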
813 def walkrepos(path):
813 def walkrepos(path):
814 '''yield every hg repository under path, recursively.'''
814 '''yield every hg repository under path, recursively.'''
815 def errhandler(err):
815 def errhandler(err):
816 if err.filename == path:
816 if err.filename == path:
817 raise err
817 raise err
818
818
819 for root, dirs, files in os.walk(path, onerror=errhandler):
819 for root, dirs, files in os.walk(path, onerror=errhandler):
820 for d in dirs:
820 for d in dirs:
821 if d == '.hg':
821 if d == '.hg':
822 yield root
822 yield root
823 dirs[:] = []
823 dirs[:] = []
824 break
824 break
825
825
826 _rcpath = None
826 _rcpath = None
827
827
828 def rcpath():
828 def rcpath():
829 '''return hgrc search path. if env var HGRCPATH is set, use it.
829 '''return hgrc search path. if env var HGRCPATH is set, use it.
830 for each item in path, if directory, use files ending in .rc,
830 for each item in path, if directory, use files ending in .rc,
831 else use item.
831 else use item.
832 make HGRCPATH empty to only look in .hg/hgrc of current repo.
832 make HGRCPATH empty to only look in .hg/hgrc of current repo.
833 if no HGRCPATH, use default os-specific path.'''
833 if no HGRCPATH, use default os-specific path.'''
834 global _rcpath
834 global _rcpath
835 if _rcpath is None:
835 if _rcpath is None:
836 if 'HGRCPATH' in os.environ:
836 if 'HGRCPATH' in os.environ:
837 _rcpath = []
837 _rcpath = []
838 for p in os.environ['HGRCPATH'].split(os.pathsep):
838 for p in os.environ['HGRCPATH'].split(os.pathsep):
839 if not p: continue
839 if not p: continue
840 if os.path.isdir(p):
840 if os.path.isdir(p):
841 for f in os.listdir(p):
841 for f in os.listdir(p):
842 if f.endswith('.rc'):
842 if f.endswith('.rc'):
843 _rcpath.append(os.path.join(p, f))
843 _rcpath.append(os.path.join(p, f))
844 else:
844 else:
845 _rcpath.append(p)
845 _rcpath.append(p)
846 else:
846 else:
847 _rcpath = os_rcpath()
847 _rcpath = os_rcpath()
848 return _rcpath
848 return _rcpath
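A hedged sketch of overriding the search path; the result is cached in _rcpath, so HGRCPATH must be set before the first call (the paths below are hypothetical):

    import os
    from mercurial import util

    os.environ['HGRCPATH'] = os.pathsep.join([
        '/etc/mercurial',     # a directory: every *.rc file inside is used
        '/home/user/.hgrc',   # a plain file: used as-is
    ])
    util.rcpath()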