compat: use open() instead of file() everywhere
Alejandro Santos
r9031:3b76321a default
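The file() builtin is a Python 2-only way to open files and no longer exists in Python 3; open() does the same job on both, which is why this changeset replaces it everywhere. A minimal sketch of the pattern being applied (mirroring the CVS/Root handling in the diff below; the helper name is illustrative only):

    import os

    def read_cvs_root(cvsdir):
        # old spelling (Python 2 only):
        #     file(os.path.join(cvsdir, "Root")).read()[:-1]
        # new spelling, equivalent here and portable to Python 3;
        # [:-1] strips the trailing newline, as in the original code
        return open(os.path.join(cvsdir, "Root")).read()[:-1]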
@@ -1,370 +1,370 b''
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import os, locale, re, socket, errno
8 import os, locale, re, socket, errno
9 from cStringIO import StringIO
9 from cStringIO import StringIO
10 from mercurial import util
10 from mercurial import util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 from common import NoRepo, commit, converter_source, checktool
13 from common import NoRepo, commit, converter_source, checktool
14 import cvsps
14 import cvsps
15
15
16 class convert_cvs(converter_source):
16 class convert_cvs(converter_source):
17 def __init__(self, ui, path, rev=None):
17 def __init__(self, ui, path, rev=None):
18 super(convert_cvs, self).__init__(ui, path, rev=rev)
18 super(convert_cvs, self).__init__(ui, path, rev=rev)
19
19
20 cvs = os.path.join(path, "CVS")
20 cvs = os.path.join(path, "CVS")
21 if not os.path.exists(cvs):
21 if not os.path.exists(cvs):
22 raise NoRepo("%s does not look like a CVS checkout" % path)
22 raise NoRepo("%s does not look like a CVS checkout" % path)
23
23
24 checktool('cvs')
24 checktool('cvs')
25 self.cmd = ui.config('convert', 'cvsps', 'builtin')
25 self.cmd = ui.config('convert', 'cvsps', 'builtin')
26 cvspsexe = self.cmd.split(None, 1)[0]
26 cvspsexe = self.cmd.split(None, 1)[0]
27 self.builtin = cvspsexe == 'builtin'
27 self.builtin = cvspsexe == 'builtin'
28 if not self.builtin:
28 if not self.builtin:
29 ui.warn(_('warning: support for external cvsps is deprecated and '
29 ui.warn(_('warning: support for external cvsps is deprecated and '
30 'will be removed in Mercurial 1.4\n'))
30 'will be removed in Mercurial 1.4\n'))
31
31
32 if not self.builtin:
32 if not self.builtin:
33 checktool(cvspsexe)
33 checktool(cvspsexe)
34
34
35 self.changeset = None
35 self.changeset = None
36 self.files = {}
36 self.files = {}
37 self.tags = {}
37 self.tags = {}
38 self.lastbranch = {}
38 self.lastbranch = {}
39 self.parent = {}
39 self.parent = {}
40 self.socket = None
40 self.socket = None
41 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
41 self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
42 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
42 self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
43 self.encoding = locale.getpreferredencoding()
43 self.encoding = locale.getpreferredencoding()
44
44
45 self._connect()
45 self._connect()
46
46
47 def _parse(self):
47 def _parse(self):
48 if self.changeset is not None:
48 if self.changeset is not None:
49 return
49 return
50 self.changeset = {}
50 self.changeset = {}
51
51
52 maxrev = 0
52 maxrev = 0
53 cmd = self.cmd
53 cmd = self.cmd
54 if self.rev:
54 if self.rev:
55 # TODO: handle tags
55 # TODO: handle tags
56 try:
56 try:
57 # patchset number?
57 # patchset number?
58 maxrev = int(self.rev)
58 maxrev = int(self.rev)
59 except ValueError:
59 except ValueError:
60 try:
60 try:
61 # date
61 # date
62 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
62 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
63 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
63 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
64 except util.Abort:
64 except util.Abort:
65 raise util.Abort(_('revision %s is not a patchset number or date') % self.rev)
65 raise util.Abort(_('revision %s is not a patchset number or date') % self.rev)
66
66
67 d = os.getcwd()
67 d = os.getcwd()
68 try:
68 try:
69 os.chdir(self.path)
69 os.chdir(self.path)
70 id = None
70 id = None
71 state = 0
71 state = 0
72 filerevids = {}
72 filerevids = {}
73
73
74 if self.builtin:
74 if self.builtin:
75 # builtin cvsps code
75 # builtin cvsps code
76 self.ui.status(_('using builtin cvsps\n'))
76 self.ui.status(_('using builtin cvsps\n'))
77
77
78 cache = 'update'
78 cache = 'update'
79 if not self.ui.configbool('convert', 'cvsps.cache', True):
79 if not self.ui.configbool('convert', 'cvsps.cache', True):
80 cache = None
80 cache = None
81 db = cvsps.createlog(self.ui, cache=cache)
81 db = cvsps.createlog(self.ui, cache=cache)
82 db = cvsps.createchangeset(self.ui, db,
82 db = cvsps.createchangeset(self.ui, db,
83 fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
83 fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
84 mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
84 mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
85 mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
85 mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
86
86
87 for cs in db:
87 for cs in db:
88 if maxrev and cs.id>maxrev:
88 if maxrev and cs.id>maxrev:
89 break
89 break
90 id = str(cs.id)
90 id = str(cs.id)
91 cs.author = self.recode(cs.author)
91 cs.author = self.recode(cs.author)
92 self.lastbranch[cs.branch] = id
92 self.lastbranch[cs.branch] = id
93 cs.comment = self.recode(cs.comment)
93 cs.comment = self.recode(cs.comment)
94 date = util.datestr(cs.date)
94 date = util.datestr(cs.date)
95 self.tags.update(dict.fromkeys(cs.tags, id))
95 self.tags.update(dict.fromkeys(cs.tags, id))
96
96
97 files = {}
97 files = {}
98 for f in cs.entries:
98 for f in cs.entries:
99 files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
99 files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
100 ['', '(DEAD)'][f.dead])
100 ['', '(DEAD)'][f.dead])
101
101
102 # add current commit to set
102 # add current commit to set
103 c = commit(author=cs.author, date=date,
103 c = commit(author=cs.author, date=date,
104 parents=[str(p.id) for p in cs.parents],
104 parents=[str(p.id) for p in cs.parents],
105 desc=cs.comment, branch=cs.branch or '')
105 desc=cs.comment, branch=cs.branch or '')
106 self.changeset[id] = c
106 self.changeset[id] = c
107 self.files[id] = files
107 self.files[id] = files
108 else:
108 else:
109 # external cvsps
109 # external cvsps
110 for l in util.popen(cmd):
110 for l in util.popen(cmd):
111 if state == 0: # header
111 if state == 0: # header
112 if l.startswith("PatchSet"):
112 if l.startswith("PatchSet"):
113 id = l[9:-2]
113 id = l[9:-2]
114 if maxrev and int(id) > maxrev:
114 if maxrev and int(id) > maxrev:
115 # ignore everything
115 # ignore everything
116 state = 3
116 state = 3
117 elif l.startswith("Date:"):
117 elif l.startswith("Date:"):
118 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
118 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
119 date = util.datestr(date)
119 date = util.datestr(date)
120 elif l.startswith("Branch:"):
120 elif l.startswith("Branch:"):
121 branch = l[8:-1]
121 branch = l[8:-1]
122 self.parent[id] = self.lastbranch.get(branch, 'bad')
122 self.parent[id] = self.lastbranch.get(branch, 'bad')
123 self.lastbranch[branch] = id
123 self.lastbranch[branch] = id
124 elif l.startswith("Ancestor branch:"):
124 elif l.startswith("Ancestor branch:"):
125 ancestor = l[17:-1]
125 ancestor = l[17:-1]
126 # figure out the parent later
126 # figure out the parent later
127 self.parent[id] = self.lastbranch[ancestor]
127 self.parent[id] = self.lastbranch[ancestor]
128 elif l.startswith("Author:"):
128 elif l.startswith("Author:"):
129 author = self.recode(l[8:-1])
129 author = self.recode(l[8:-1])
130 elif l.startswith("Tag:") or l.startswith("Tags:"):
130 elif l.startswith("Tag:") or l.startswith("Tags:"):
131 t = l[l.index(':')+1:]
131 t = l[l.index(':')+1:]
132 t = [ut.strip() for ut in t.split(',')]
132 t = [ut.strip() for ut in t.split(',')]
133 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
133 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
134 self.tags.update(dict.fromkeys(t, id))
134 self.tags.update(dict.fromkeys(t, id))
135 elif l.startswith("Log:"):
135 elif l.startswith("Log:"):
136 # switch to gathering log
136 # switch to gathering log
137 state = 1
137 state = 1
138 log = ""
138 log = ""
139 elif state == 1: # log
139 elif state == 1: # log
140 if l == "Members: \n":
140 if l == "Members: \n":
141 # switch to gathering members
141 # switch to gathering members
142 files = {}
142 files = {}
143 oldrevs = []
143 oldrevs = []
144 log = self.recode(log[:-1])
144 log = self.recode(log[:-1])
145 state = 2
145 state = 2
146 else:
146 else:
147 # gather log
147 # gather log
148 log += l
148 log += l
149 elif state == 2: # members
149 elif state == 2: # members
150 if l == "\n": # start of next entry
150 if l == "\n": # start of next entry
151 state = 0
151 state = 0
152 p = [self.parent[id]]
152 p = [self.parent[id]]
153 if id == "1":
153 if id == "1":
154 p = []
154 p = []
155 if branch == "HEAD":
155 if branch == "HEAD":
156 branch = ""
156 branch = ""
157 if branch:
157 if branch:
158 latest = 0
158 latest = 0
159 # the last changeset that contains a base
159 # the last changeset that contains a base
160 # file is our parent
160 # file is our parent
161 for r in oldrevs:
161 for r in oldrevs:
162 latest = max(filerevids.get(r, 0), latest)
162 latest = max(filerevids.get(r, 0), latest)
163 if latest:
163 if latest:
164 p = [latest]
164 p = [latest]
165
165
166 # add current commit to set
166 # add current commit to set
167 c = commit(author=author, date=date, parents=p,
167 c = commit(author=author, date=date, parents=p,
168 desc=log, branch=branch)
168 desc=log, branch=branch)
169 self.changeset[id] = c
169 self.changeset[id] = c
170 self.files[id] = files
170 self.files[id] = files
171 else:
171 else:
172 colon = l.rfind(':')
172 colon = l.rfind(':')
173 file = l[1:colon]
173 file = l[1:colon]
174 rev = l[colon+1:-2]
174 rev = l[colon+1:-2]
175 oldrev, rev = rev.split("->")
175 oldrev, rev = rev.split("->")
176 files[file] = rev
176 files[file] = rev
177
177
178 # save some information for identifying branch points
178 # save some information for identifying branch points
179 oldrevs.append("%s:%s" % (oldrev, file))
179 oldrevs.append("%s:%s" % (oldrev, file))
180 filerevids["%s:%s" % (rev, file)] = id
180 filerevids["%s:%s" % (rev, file)] = id
181 elif state == 3:
181 elif state == 3:
182 # swallow all input
182 # swallow all input
183 continue
183 continue
184
184
185 self.heads = self.lastbranch.values()
185 self.heads = self.lastbranch.values()
186 finally:
186 finally:
187 os.chdir(d)
187 os.chdir(d)
188
188
189 def _connect(self):
189 def _connect(self):
190 root = self.cvsroot
190 root = self.cvsroot
191 conntype = None
191 conntype = None
192 user, host = None, None
192 user, host = None, None
193 cmd = ['cvs', 'server']
193 cmd = ['cvs', 'server']
194
194
195 self.ui.status(_("connecting to %s\n") % root)
195 self.ui.status(_("connecting to %s\n") % root)
196
196
197 if root.startswith(":pserver:"):
197 if root.startswith(":pserver:"):
198 root = root[9:]
198 root = root[9:]
199 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
199 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
200 root)
200 root)
201 if m:
201 if m:
202 conntype = "pserver"
202 conntype = "pserver"
203 user, passw, serv, port, root = m.groups()
203 user, passw, serv, port, root = m.groups()
204 if not user:
204 if not user:
205 user = "anonymous"
205 user = "anonymous"
206 if not port:
206 if not port:
207 port = 2401
207 port = 2401
208 else:
208 else:
209 port = int(port)
209 port = int(port)
210 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
210 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
211 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
211 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
212
212
213 if not passw:
213 if not passw:
214 passw = "A"
214 passw = "A"
215 cvspass = os.path.expanduser("~/.cvspass")
215 cvspass = os.path.expanduser("~/.cvspass")
216 try:
216 try:
217 pf = open(cvspass)
217 pf = open(cvspass)
218 for line in pf.read().splitlines():
218 for line in pf.read().splitlines():
219 part1, part2 = line.split(' ', 1)
219 part1, part2 = line.split(' ', 1)
220 if part1 == '/1':
220 if part1 == '/1':
221 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
221 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
222 part1, part2 = part2.split(' ', 1)
222 part1, part2 = part2.split(' ', 1)
223 format = format1
223 format = format1
224 else:
224 else:
225 # :pserver:user@example.com:/cvsroot/foo Ah<Z
225 # :pserver:user@example.com:/cvsroot/foo Ah<Z
226 format = format0
226 format = format0
227 if part1 == format:
227 if part1 == format:
228 passw = part2
228 passw = part2
229 break
229 break
230 pf.close()
230 pf.close()
231 except IOError, inst:
231 except IOError, inst:
232 if inst.errno != errno.ENOENT:
232 if inst.errno != errno.ENOENT:
233 if not getattr(inst, 'filename', None):
233 if not getattr(inst, 'filename', None):
234 inst.filename = cvspass
234 inst.filename = cvspass
235 raise
235 raise
236
236
237 sck = socket.socket()
237 sck = socket.socket()
238 sck.connect((serv, port))
238 sck.connect((serv, port))
239 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
239 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
240 "END AUTH REQUEST", ""]))
240 "END AUTH REQUEST", ""]))
241 if sck.recv(128) != "I LOVE YOU\n":
241 if sck.recv(128) != "I LOVE YOU\n":
242 raise util.Abort(_("CVS pserver authentication failed"))
242 raise util.Abort(_("CVS pserver authentication failed"))
243
243
244 self.writep = self.readp = sck.makefile('r+')
244 self.writep = self.readp = sck.makefile('r+')
245
245
246 if not conntype and root.startswith(":local:"):
246 if not conntype and root.startswith(":local:"):
247 conntype = "local"
247 conntype = "local"
248 root = root[7:]
248 root = root[7:]
249
249
250 if not conntype:
250 if not conntype:
251 # :ext:user@host/home/user/path/to/cvsroot
251 # :ext:user@host/home/user/path/to/cvsroot
252 if root.startswith(":ext:"):
252 if root.startswith(":ext:"):
253 root = root[5:]
253 root = root[5:]
254 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
254 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
255 # Do not take Windows path "c:\foo\bar" for a connection string
255 # Do not take Windows path "c:\foo\bar" for a connection string
256 if os.path.isdir(root) or not m:
256 if os.path.isdir(root) or not m:
257 conntype = "local"
257 conntype = "local"
258 else:
258 else:
259 conntype = "rsh"
259 conntype = "rsh"
260 user, host, root = m.group(1), m.group(2), m.group(3)
260 user, host, root = m.group(1), m.group(2), m.group(3)
261
261
262 if conntype != "pserver":
262 if conntype != "pserver":
263 if conntype == "rsh":
263 if conntype == "rsh":
264 rsh = os.environ.get("CVS_RSH") or "ssh"
264 rsh = os.environ.get("CVS_RSH") or "ssh"
265 if user:
265 if user:
266 cmd = [rsh, '-l', user, host] + cmd
266 cmd = [rsh, '-l', user, host] + cmd
267 else:
267 else:
268 cmd = [rsh, host] + cmd
268 cmd = [rsh, host] + cmd
269
269
270 # popen2 does not support argument lists under Windows
270 # popen2 does not support argument lists under Windows
271 cmd = [util.shellquote(arg) for arg in cmd]
271 cmd = [util.shellquote(arg) for arg in cmd]
272 cmd = util.quotecommand(' '.join(cmd))
272 cmd = util.quotecommand(' '.join(cmd))
273 self.writep, self.readp = util.popen2(cmd)
273 self.writep, self.readp = util.popen2(cmd)
274
274
275 self.realroot = root
275 self.realroot = root
276
276
277 self.writep.write("Root %s\n" % root)
277 self.writep.write("Root %s\n" % root)
278 self.writep.write("Valid-responses ok error Valid-requests Mode"
278 self.writep.write("Valid-responses ok error Valid-requests Mode"
279 " M Mbinary E Checked-in Created Updated"
279 " M Mbinary E Checked-in Created Updated"
280 " Merged Removed\n")
280 " Merged Removed\n")
281 self.writep.write("valid-requests\n")
281 self.writep.write("valid-requests\n")
282 self.writep.flush()
282 self.writep.flush()
283 r = self.readp.readline()
283 r = self.readp.readline()
284 if not r.startswith("Valid-requests"):
284 if not r.startswith("Valid-requests"):
285 raise util.Abort(_("server sucks"))
285 raise util.Abort(_("server sucks"))
286 if "UseUnchanged" in r:
286 if "UseUnchanged" in r:
287 self.writep.write("UseUnchanged\n")
287 self.writep.write("UseUnchanged\n")
288 self.writep.flush()
288 self.writep.flush()
289 r = self.readp.readline()
289 r = self.readp.readline()
290
290
291 def getheads(self):
291 def getheads(self):
292 self._parse()
292 self._parse()
293 return self.heads
293 return self.heads
294
294
295 def _getfile(self, name, rev):
295 def _getfile(self, name, rev):
296
296
297 def chunkedread(fp, count):
297 def chunkedread(fp, count):
298 # file-objects returned by socket.makefile() do not handle
298 # file-objects returned by socket.makefile() do not handle
299 # large read() requests very well.
299 # large read() requests very well.
300 chunksize = 65536
300 chunksize = 65536
301 output = StringIO()
301 output = StringIO()
302 while count > 0:
302 while count > 0:
303 data = fp.read(min(count, chunksize))
303 data = fp.read(min(count, chunksize))
304 if not data:
304 if not data:
305 raise util.Abort(_("%d bytes missing from remote file") % count)
305 raise util.Abort(_("%d bytes missing from remote file") % count)
306 count -= len(data)
306 count -= len(data)
307 output.write(data)
307 output.write(data)
308 return output.getvalue()
308 return output.getvalue()
309
309
310 if rev.endswith("(DEAD)"):
310 if rev.endswith("(DEAD)"):
311 raise IOError
311 raise IOError
312
312
313 args = ("-N -P -kk -r %s --" % rev).split()
313 args = ("-N -P -kk -r %s --" % rev).split()
314 args.append(self.cvsrepo + '/' + name)
314 args.append(self.cvsrepo + '/' + name)
315 for x in args:
315 for x in args:
316 self.writep.write("Argument %s\n" % x)
316 self.writep.write("Argument %s\n" % x)
317 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
317 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
318 self.writep.flush()
318 self.writep.flush()
319
319
320 data = ""
320 data = ""
321 while 1:
321 while 1:
322 line = self.readp.readline()
322 line = self.readp.readline()
323 if line.startswith("Created ") or line.startswith("Updated "):
323 if line.startswith("Created ") or line.startswith("Updated "):
324 self.readp.readline() # path
324 self.readp.readline() # path
325 self.readp.readline() # entries
325 self.readp.readline() # entries
326 mode = self.readp.readline()[:-1]
326 mode = self.readp.readline()[:-1]
327 count = int(self.readp.readline()[:-1])
327 count = int(self.readp.readline()[:-1])
328 data = chunkedread(self.readp, count)
328 data = chunkedread(self.readp, count)
329 elif line.startswith(" "):
329 elif line.startswith(" "):
330 data += line[1:]
330 data += line[1:]
331 elif line.startswith("M "):
331 elif line.startswith("M "):
332 pass
332 pass
333 elif line.startswith("Mbinary "):
333 elif line.startswith("Mbinary "):
334 count = int(self.readp.readline()[:-1])
334 count = int(self.readp.readline()[:-1])
335 data = chunkedread(self.readp, count)
335 data = chunkedread(self.readp, count)
336 else:
336 else:
337 if line == "ok\n":
337 if line == "ok\n":
338 return (data, "x" in mode and "x" or "")
338 return (data, "x" in mode and "x" or "")
339 elif line.startswith("E "):
339 elif line.startswith("E "):
340 self.ui.warn(_("cvs server: %s\n") % line[2:])
340 self.ui.warn(_("cvs server: %s\n") % line[2:])
341 elif line.startswith("Remove"):
341 elif line.startswith("Remove"):
342 self.readp.readline()
342 self.readp.readline()
343 else:
343 else:
344 raise util.Abort(_("unknown CVS response: %s") % line)
344 raise util.Abort(_("unknown CVS response: %s") % line)
345
345
346 def getfile(self, file, rev):
346 def getfile(self, file, rev):
347 self._parse()
347 self._parse()
348 data, mode = self._getfile(file, rev)
348 data, mode = self._getfile(file, rev)
349 self.modecache[(file, rev)] = mode
349 self.modecache[(file, rev)] = mode
350 return data
350 return data
351
351
352 def getmode(self, file, rev):
352 def getmode(self, file, rev):
353 return self.modecache[(file, rev)]
353 return self.modecache[(file, rev)]
354
354
355 def getchanges(self, rev):
355 def getchanges(self, rev):
356 self._parse()
356 self._parse()
357 self.modecache = {}
357 self.modecache = {}
358 return sorted(self.files[rev].iteritems()), {}
358 return sorted(self.files[rev].iteritems()), {}
359
359
360 def getcommit(self, rev):
360 def getcommit(self, rev):
361 self._parse()
361 self._parse()
362 return self.changeset[rev]
362 return self.changeset[rev]
363
363
364 def gettags(self):
364 def gettags(self):
365 self._parse()
365 self._parse()
366 return self.tags
366 return self.tags
367
367
368 def getchangedfiles(self, rev, i):
368 def getchangedfiles(self, rev, i):
369 self._parse()
369 self._parse()
370 return sorted(self.files[rev])
370 return sorted(self.files[rev])
@@ -1,838 +1,838 b''
1 #
1 #
2 # Mercurial built-in replacement for cvsps.
2 # Mercurial built-in replacement for cvsps.
3 #
3 #
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import os
9 import os
10 import re
10 import re
11 import cPickle as pickle
11 import cPickle as pickle
12 from mercurial import util
12 from mercurial import util
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
15 def listsort(list, key):
15 def listsort(list, key):
16 "helper to sort by key in Python 2.3"
16 "helper to sort by key in Python 2.3"
17 try:
17 try:
18 list.sort(key=key)
18 list.sort(key=key)
19 except TypeError:
19 except TypeError:
20 list.sort(lambda l, r: cmp(key(l), key(r)))
20 list.sort(lambda l, r: cmp(key(l), key(r)))
21
21
22 class logentry(object):
22 class logentry(object):
23 '''Class logentry has the following attributes:
23 '''Class logentry has the following attributes:
24 .author - author name as CVS knows it
24 .author - author name as CVS knows it
25 .branch - name of branch this revision is on
25 .branch - name of branch this revision is on
26 .branches - revision tuple of branches starting at this revision
26 .branches - revision tuple of branches starting at this revision
27 .comment - commit message
27 .comment - commit message
28 .date - the commit date as a (time, tz) tuple
28 .date - the commit date as a (time, tz) tuple
29 .dead - true if file revision is dead
29 .dead - true if file revision is dead
30 .file - Name of file
30 .file - Name of file
31 .lines - a tuple (+lines, -lines) or None
31 .lines - a tuple (+lines, -lines) or None
32 .parent - Previous revision of this entry
32 .parent - Previous revision of this entry
33 .rcs - name of file as returned from CVS
33 .rcs - name of file as returned from CVS
34 .revision - revision number as tuple
34 .revision - revision number as tuple
35 .tags - list of tags on the file
35 .tags - list of tags on the file
36 .synthetic - is this a synthetic "file ... added on ..." revision?
36 .synthetic - is this a synthetic "file ... added on ..." revision?
37 .mergepoint- the branch that has been merged from
37 .mergepoint- the branch that has been merged from
38 (if present in rlog output)
38 (if present in rlog output)
39 .branchpoints- the branches that start at the current entry
39 .branchpoints- the branches that start at the current entry
40 '''
40 '''
41 def __init__(self, **entries):
41 def __init__(self, **entries):
42 self.__dict__.update(entries)
42 self.__dict__.update(entries)
43
43
44 def __repr__(self):
44 def __repr__(self):
45 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
45 return "<%s at 0x%x: %s %s>" % (self.__class__.__name__,
46 id(self),
46 id(self),
47 self.file,
47 self.file,
48 ".".join(map(str, self.revision)))
48 ".".join(map(str, self.revision)))
49
49
50 class logerror(Exception):
50 class logerror(Exception):
51 pass
51 pass
52
52
53 def getrepopath(cvspath):
53 def getrepopath(cvspath):
54 """Return the repository path from a CVS path.
54 """Return the repository path from a CVS path.
55
55
56 >>> getrepopath('/foo/bar')
56 >>> getrepopath('/foo/bar')
57 '/foo/bar'
57 '/foo/bar'
58 >>> getrepopath('c:/foo/bar')
58 >>> getrepopath('c:/foo/bar')
59 'c:/foo/bar'
59 'c:/foo/bar'
60 >>> getrepopath(':pserver:10/foo/bar')
60 >>> getrepopath(':pserver:10/foo/bar')
61 '/foo/bar'
61 '/foo/bar'
62 >>> getrepopath(':pserver:10c:/foo/bar')
62 >>> getrepopath(':pserver:10c:/foo/bar')
63 '/foo/bar'
63 '/foo/bar'
64 >>> getrepopath(':pserver:/foo/bar')
64 >>> getrepopath(':pserver:/foo/bar')
65 '/foo/bar'
65 '/foo/bar'
66 >>> getrepopath(':pserver:c:/foo/bar')
66 >>> getrepopath(':pserver:c:/foo/bar')
67 'c:/foo/bar'
67 'c:/foo/bar'
68 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
68 >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
69 '/foo/bar'
69 '/foo/bar'
70 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
70 >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
71 'c:/foo/bar'
71 'c:/foo/bar'
72 """
72 """
73 # According to CVS manual, CVS paths are expressed like:
73 # According to CVS manual, CVS paths are expressed like:
74 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
74 # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
75 #
75 #
76 # Unfortunately, Windows absolute paths start with a drive letter
76 # Unfortunately, Windows absolute paths start with a drive letter
77 # like 'c:' making it harder to parse. Here we assume that drive
77 # like 'c:' making it harder to parse. Here we assume that drive
78 # letters are only one character long and any CVS component before
78 # letters are only one character long and any CVS component before
79 # the repository path is at least 2 characters long, and use this
79 # the repository path is at least 2 characters long, and use this
80 # to disambiguate.
80 # to disambiguate.
81 parts = cvspath.split(':')
81 parts = cvspath.split(':')
82 if len(parts) == 1:
82 if len(parts) == 1:
83 return parts[0]
83 return parts[0]
84 # Here there is an ambiguous case if we have a port number
84 # Here there is an ambiguous case if we have a port number
85 # immediately followed by a Windows drive letter. We assume this
85 # immediately followed by a Windows drive letter. We assume this
86 # never happens and decide it must be a CVS path component,
86 # never happens and decide it must be a CVS path component,
87 # therefore ignoring it.
87 # therefore ignoring it.
88 if len(parts[-2]) > 1:
88 if len(parts[-2]) > 1:
89 return parts[-1].lstrip('0123456789')
89 return parts[-1].lstrip('0123456789')
90 return parts[-2] + ':' + parts[-1]
90 return parts[-2] + ':' + parts[-1]
91
91
92 def createlog(ui, directory=None, root="", rlog=True, cache=None):
92 def createlog(ui, directory=None, root="", rlog=True, cache=None):
93 '''Collect the CVS rlog'''
93 '''Collect the CVS rlog'''
94
94
95 # Because we store many duplicate commit log messages, reusing strings
95 # Because we store many duplicate commit log messages, reusing strings
96 # saves a lot of memory and pickle storage space.
96 # saves a lot of memory and pickle storage space.
97 _scache = {}
97 _scache = {}
98 def scache(s):
98 def scache(s):
99 "return a shared version of a string"
99 "return a shared version of a string"
100 return _scache.setdefault(s, s)
100 return _scache.setdefault(s, s)
101
101
102 ui.status(_('collecting CVS rlog\n'))
102 ui.status(_('collecting CVS rlog\n'))
103
103
104 log = [] # list of logentry objects containing the CVS state
104 log = [] # list of logentry objects containing the CVS state
105
105
106 # patterns to match in CVS (r)log output, by state of use
106 # patterns to match in CVS (r)log output, by state of use
107 re_00 = re.compile('RCS file: (.+)$')
107 re_00 = re.compile('RCS file: (.+)$')
108 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
108 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
109 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
109 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
110 re_03 = re.compile("(Cannot access.+CVSROOT)|"
110 re_03 = re.compile("(Cannot access.+CVSROOT)|"
111 "(can't create temporary directory.+)$")
111 "(can't create temporary directory.+)$")
112 re_10 = re.compile('Working file: (.+)$')
112 re_10 = re.compile('Working file: (.+)$')
113 re_20 = re.compile('symbolic names:')
113 re_20 = re.compile('symbolic names:')
114 re_30 = re.compile('\t(.+): ([\\d.]+)$')
114 re_30 = re.compile('\t(.+): ([\\d.]+)$')
115 re_31 = re.compile('----------------------------$')
115 re_31 = re.compile('----------------------------$')
116 re_32 = re.compile('======================================='
116 re_32 = re.compile('======================================='
117 '======================================$')
117 '======================================$')
118 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
118 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
119 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
119 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
120 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
120 r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
121 r'(.*mergepoint:\s+([^;]+);)?')
121 r'(.*mergepoint:\s+([^;]+);)?')
122 re_70 = re.compile('branches: (.+);$')
122 re_70 = re.compile('branches: (.+);$')
123
123
124 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
124 file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')
125
125
126 prefix = '' # leading path to strip off what we get from CVS
126 prefix = '' # leading path to strip off what we get from CVS
127
127
128 if directory is None:
128 if directory is None:
129 # Current working directory
129 # Current working directory
130
130
131 # Get the real directory in the repository
131 # Get the real directory in the repository
132 try:
132 try:
133 prefix = file(os.path.join('CVS','Repository')).read().strip()
133 prefix = open(os.path.join('CVS','Repository')).read().strip()
134 if prefix == ".":
134 if prefix == ".":
135 prefix = ""
135 prefix = ""
136 directory = prefix
136 directory = prefix
137 except IOError:
137 except IOError:
138 raise logerror('Not a CVS sandbox')
138 raise logerror('Not a CVS sandbox')
139
139
140 if prefix and not prefix.endswith(os.sep):
140 if prefix and not prefix.endswith(os.sep):
141 prefix += os.sep
141 prefix += os.sep
142
142
143 # Use the Root file in the sandbox, if it exists
143 # Use the Root file in the sandbox, if it exists
144 try:
144 try:
145 root = file(os.path.join('CVS','Root')).read().strip()
145 root = open(os.path.join('CVS','Root')).read().strip()
146 except IOError:
146 except IOError:
147 pass
147 pass
148
148
149 if not root:
149 if not root:
150 root = os.environ.get('CVSROOT', '')
150 root = os.environ.get('CVSROOT', '')
151
151
152 # read log cache if one exists
152 # read log cache if one exists
153 oldlog = []
153 oldlog = []
154 date = None
154 date = None
155
155
156 if cache:
156 if cache:
157 cachedir = os.path.expanduser('~/.hg.cvsps')
157 cachedir = os.path.expanduser('~/.hg.cvsps')
158 if not os.path.exists(cachedir):
158 if not os.path.exists(cachedir):
159 os.mkdir(cachedir)
159 os.mkdir(cachedir)
160
160
161 # The cvsps cache pickle needs a uniquified name, based on the
161 # The cvsps cache pickle needs a uniquified name, based on the
162 # repository location. The address may have all sorts of nasties
162 # repository location. The address may have all sorts of nasties
163 # in it, slashes, colons and such. So here we take just the
163 # in it, slashes, colons and such. So here we take just the
164 # alphanumerics, concatenated in a way that does not mix up the
164 # alphanumerics, concatenated in a way that does not mix up the
165 # various components, so that
165 # various components, so that
166 # :pserver:user@server:/path
166 # :pserver:user@server:/path
167 # and
167 # and
168 # /pserver/user/server/path
168 # /pserver/user/server/path
169 # are mapped to different cache file names.
169 # are mapped to different cache file names.
170 cachefile = root.split(":") + [directory, "cache"]
170 cachefile = root.split(":") + [directory, "cache"]
171 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
171 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
172 cachefile = os.path.join(cachedir,
172 cachefile = os.path.join(cachedir,
173 '.'.join([s for s in cachefile if s]))
173 '.'.join([s for s in cachefile if s]))
174
174
175 if cache == 'update':
175 if cache == 'update':
176 try:
176 try:
177 ui.note(_('reading cvs log cache %s\n') % cachefile)
177 ui.note(_('reading cvs log cache %s\n') % cachefile)
178 oldlog = pickle.load(file(cachefile))
178 oldlog = pickle.load(open(cachefile))
179 ui.note(_('cache has %d log entries\n') % len(oldlog))
179 ui.note(_('cache has %d log entries\n') % len(oldlog))
180 except Exception, e:
180 except Exception, e:
181 ui.note(_('error reading cache: %r\n') % e)
181 ui.note(_('error reading cache: %r\n') % e)
182
182
183 if oldlog:
183 if oldlog:
184 date = oldlog[-1].date # last commit date as a (time,tz) tuple
184 date = oldlog[-1].date # last commit date as a (time,tz) tuple
185 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
185 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
186
186
187 # build the CVS commandline
187 # build the CVS commandline
188 cmd = ['cvs', '-q']
188 cmd = ['cvs', '-q']
189 if root:
189 if root:
190 cmd.append('-d%s' % root)
190 cmd.append('-d%s' % root)
191 p = util.normpath(getrepopath(root))
191 p = util.normpath(getrepopath(root))
192 if not p.endswith('/'):
192 if not p.endswith('/'):
193 p += '/'
193 p += '/'
194 prefix = p + util.normpath(prefix)
194 prefix = p + util.normpath(prefix)
195 cmd.append(['log', 'rlog'][rlog])
195 cmd.append(['log', 'rlog'][rlog])
196 if date:
196 if date:
197 # no space between option and date string
197 # no space between option and date string
198 cmd.append('-d>%s' % date)
198 cmd.append('-d>%s' % date)
199 cmd.append(directory)
199 cmd.append(directory)
200
200
201 # state machine begins here
201 # state machine begins here
202 tags = {} # dictionary of revisions on current file with their tags
202 tags = {} # dictionary of revisions on current file with their tags
203 branchmap = {} # mapping between branch names and revision numbers
203 branchmap = {} # mapping between branch names and revision numbers
204 state = 0
204 state = 0
205 store = False # set when a new record can be appended
205 store = False # set when a new record can be appended
206
206
207 cmd = [util.shellquote(arg) for arg in cmd]
207 cmd = [util.shellquote(arg) for arg in cmd]
208 ui.note(_("running %s\n") % (' '.join(cmd)))
208 ui.note(_("running %s\n") % (' '.join(cmd)))
209 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
209 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
210
210
211 pfp = util.popen(' '.join(cmd))
211 pfp = util.popen(' '.join(cmd))
212 peek = pfp.readline()
212 peek = pfp.readline()
213 while True:
213 while True:
214 line = peek
214 line = peek
215 if line == '':
215 if line == '':
216 break
216 break
217 peek = pfp.readline()
217 peek = pfp.readline()
218 if line.endswith('\n'):
218 if line.endswith('\n'):
219 line = line[:-1]
219 line = line[:-1]
220 #ui.debug('state=%d line=%r\n' % (state, line))
220 #ui.debug('state=%d line=%r\n' % (state, line))
221
221
222 if state == 0:
222 if state == 0:
223 # initial state, consume input until we see 'RCS file'
223 # initial state, consume input until we see 'RCS file'
224 match = re_00.match(line)
224 match = re_00.match(line)
225 if match:
225 if match:
226 rcs = match.group(1)
226 rcs = match.group(1)
227 tags = {}
227 tags = {}
228 if rlog:
228 if rlog:
229 filename = util.normpath(rcs[:-2])
229 filename = util.normpath(rcs[:-2])
230 if filename.startswith(prefix):
230 if filename.startswith(prefix):
231 filename = filename[len(prefix):]
231 filename = filename[len(prefix):]
232 if filename.startswith('/'):
232 if filename.startswith('/'):
233 filename = filename[1:]
233 filename = filename[1:]
234 if filename.startswith('Attic/'):
234 if filename.startswith('Attic/'):
235 filename = filename[6:]
235 filename = filename[6:]
236 else:
236 else:
237 filename = filename.replace('/Attic/', '/')
237 filename = filename.replace('/Attic/', '/')
238 state = 2
238 state = 2
239 continue
239 continue
240 state = 1
240 state = 1
241 continue
241 continue
242 match = re_01.match(line)
242 match = re_01.match(line)
243 if match:
243 if match:
244 raise Exception(match.group(1))
244 raise Exception(match.group(1))
245 match = re_02.match(line)
245 match = re_02.match(line)
246 if match:
246 if match:
247 raise Exception(match.group(2))
247 raise Exception(match.group(2))
248 if re_03.match(line):
248 if re_03.match(line):
249 raise Exception(line)
249 raise Exception(line)
250
250
251 elif state == 1:
251 elif state == 1:
252 # expect 'Working file' (only when using log instead of rlog)
252 # expect 'Working file' (only when using log instead of rlog)
253 match = re_10.match(line)
253 match = re_10.match(line)
254 assert match, _('RCS file must be followed by working file')
254 assert match, _('RCS file must be followed by working file')
255 filename = util.normpath(match.group(1))
255 filename = util.normpath(match.group(1))
256 state = 2
256 state = 2
257
257
258 elif state == 2:
258 elif state == 2:
259 # expect 'symbolic names'
259 # expect 'symbolic names'
260 if re_20.match(line):
260 if re_20.match(line):
261 branchmap = {}
261 branchmap = {}
262 state = 3
262 state = 3
263
263
264 elif state == 3:
264 elif state == 3:
265 # read the symbolic names and store as tags
265 # read the symbolic names and store as tags
266 match = re_30.match(line)
266 match = re_30.match(line)
267 if match:
267 if match:
268 rev = [int(x) for x in match.group(2).split('.')]
268 rev = [int(x) for x in match.group(2).split('.')]
269
269
270 # Convert magic branch number to an odd-numbered one
270 # Convert magic branch number to an odd-numbered one
271 revn = len(rev)
271 revn = len(rev)
272 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
272 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
273 rev = rev[:-2] + rev[-1:]
273 rev = rev[:-2] + rev[-1:]
274 rev = tuple(rev)
274 rev = tuple(rev)
275
275
276 if rev not in tags:
276 if rev not in tags:
277 tags[rev] = []
277 tags[rev] = []
278 tags[rev].append(match.group(1))
278 tags[rev].append(match.group(1))
279 branchmap[match.group(1)] = match.group(2)
279 branchmap[match.group(1)] = match.group(2)
280
280
281 elif re_31.match(line):
281 elif re_31.match(line):
282 state = 5
282 state = 5
283 elif re_32.match(line):
283 elif re_32.match(line):
284 state = 0
284 state = 0
285
285
286 elif state == 4:
286 elif state == 4:
287 # expecting '------' separator before first revision
287 # expecting '------' separator before first revision
288 if re_31.match(line):
288 if re_31.match(line):
289 state = 5
289 state = 5
290 else:
290 else:
291 assert not re_32.match(line), _('must have at least '
291 assert not re_32.match(line), _('must have at least '
292 'some revisions')
292 'some revisions')
293
293
294 elif state == 5:
294 elif state == 5:
295 # expecting revision number and possibly (ignored) lock indication
295 # expecting revision number and possibly (ignored) lock indication
296 # we create the logentry here from values stored in states 0 to 4,
296 # we create the logentry here from values stored in states 0 to 4,
297 # as this state is re-entered for subsequent revisions of a file.
297 # as this state is re-entered for subsequent revisions of a file.
298 match = re_50.match(line)
298 match = re_50.match(line)
299 assert match, _('expected revision number')
299 assert match, _('expected revision number')
300 e = logentry(rcs=scache(rcs), file=scache(filename),
300 e = logentry(rcs=scache(rcs), file=scache(filename),
301 revision=tuple([int(x) for x in match.group(1).split('.')]),
301 revision=tuple([int(x) for x in match.group(1).split('.')]),
302 branches=[], parent=None,
302 branches=[], parent=None,
303 synthetic=False)
303 synthetic=False)
304 state = 6
304 state = 6
305
305
306 elif state == 6:
306 elif state == 6:
307 # expecting date, author, state, lines changed
307 # expecting date, author, state, lines changed
308 match = re_60.match(line)
308 match = re_60.match(line)
309 assert match, _('revision must be followed by date line')
309 assert match, _('revision must be followed by date line')
310 d = match.group(1)
310 d = match.group(1)
311 if d[2] == '/':
311 if d[2] == '/':
312 # Y2K
312 # Y2K
313 d = '19' + d
313 d = '19' + d
314
314
315 if len(d.split()) != 3:
315 if len(d.split()) != 3:
316 # cvs log dates always in GMT
316 # cvs log dates always in GMT
317 d = d + ' UTC'
317 d = d + ' UTC'
318 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
318 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
319 '%Y/%m/%d %H:%M:%S',
319 '%Y/%m/%d %H:%M:%S',
320 '%Y-%m-%d %H:%M:%S'])
320 '%Y-%m-%d %H:%M:%S'])
321 e.author = scache(match.group(2))
321 e.author = scache(match.group(2))
322 e.dead = match.group(3).lower() == 'dead'
322 e.dead = match.group(3).lower() == 'dead'
323
323
324 if match.group(5):
324 if match.group(5):
325 if match.group(6):
325 if match.group(6):
326 e.lines = (int(match.group(5)), int(match.group(6)))
326 e.lines = (int(match.group(5)), int(match.group(6)))
327 else:
327 else:
328 e.lines = (int(match.group(5)), 0)
328 e.lines = (int(match.group(5)), 0)
329 elif match.group(6):
329 elif match.group(6):
330 e.lines = (0, int(match.group(6)))
330 e.lines = (0, int(match.group(6)))
331 else:
331 else:
332 e.lines = None
332 e.lines = None
333
333
334 if match.group(7): # cvsnt mergepoint
334 if match.group(7): # cvsnt mergepoint
335 myrev = match.group(8).split('.')
335 myrev = match.group(8).split('.')
336 if len(myrev) == 2: # head
336 if len(myrev) == 2: # head
337 e.mergepoint = 'HEAD'
337 e.mergepoint = 'HEAD'
338 else:
338 else:
339 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
339 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
340 branches = [b for b in branchmap if branchmap[b] == myrev]
340 branches = [b for b in branchmap if branchmap[b] == myrev]
341 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
341 assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
342 e.mergepoint = branches[0]
342 e.mergepoint = branches[0]
343 else:
343 else:
344 e.mergepoint = None
344 e.mergepoint = None
345 e.comment = []
345 e.comment = []
346 state = 7
346 state = 7
347
347
348 elif state == 7:
348 elif state == 7:
349 # read the revision numbers of branches that start at this revision
349 # read the revision numbers of branches that start at this revision
350 # or store the commit log message otherwise
350 # or store the commit log message otherwise
351 m = re_70.match(line)
351 m = re_70.match(line)
352 if m:
352 if m:
353 e.branches = [tuple([int(y) for y in x.strip().split('.')])
353 e.branches = [tuple([int(y) for y in x.strip().split('.')])
354 for x in m.group(1).split(';')]
354 for x in m.group(1).split(';')]
355 state = 8
355 state = 8
356 elif re_31.match(line) and re_50.match(peek):
356 elif re_31.match(line) and re_50.match(peek):
357 state = 5
357 state = 5
358 store = True
358 store = True
359 elif re_32.match(line):
359 elif re_32.match(line):
360 state = 0
360 state = 0
361 store = True
361 store = True
362 else:
362 else:
363 e.comment.append(line)
363 e.comment.append(line)
364
364
365 elif state == 8:
365 elif state == 8:
366 # store commit log message
366 # store commit log message
367 if re_31.match(line):
367 if re_31.match(line):
368 state = 5
368 state = 5
369 store = True
369 store = True
370 elif re_32.match(line):
370 elif re_32.match(line):
371 state = 0
371 state = 0
372 store = True
372 store = True
373 else:
373 else:
374 e.comment.append(line)
374 e.comment.append(line)
375
375
376 # When a file is added on a branch B1, CVS creates a synthetic
376 # When a file is added on a branch B1, CVS creates a synthetic
377 # dead trunk revision 1.1 so that the branch has a root.
377 # dead trunk revision 1.1 so that the branch has a root.
378 # Likewise, if you merge such a file to a later branch B2 (one
378 # Likewise, if you merge such a file to a later branch B2 (one
379 # that already existed when the file was added on B1), CVS
379 # that already existed when the file was added on B1), CVS
380 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
380 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
381 # these revisions now, but mark them synthetic so
381 # these revisions now, but mark them synthetic so
382 # createchangeset() can take care of them.
382 # createchangeset() can take care of them.
383 if (store and
383 if (store and
384 e.dead and
384 e.dead and
385 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
385 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
386 len(e.comment) == 1 and
386 len(e.comment) == 1 and
387 file_added_re.match(e.comment[0])):
387 file_added_re.match(e.comment[0])):
388 ui.debug(_('found synthetic revision in %s: %r\n')
388 ui.debug(_('found synthetic revision in %s: %r\n')
389 % (e.rcs, e.comment[0]))
389 % (e.rcs, e.comment[0]))
390 e.synthetic = True
390 e.synthetic = True
391
391
392 if store:
392 if store:
393 # clean up the results and save in the log.
393 # clean up the results and save in the log.
394 store = False
394 store = False
395 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
395 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
396 e.comment = scache('\n'.join(e.comment))
396 e.comment = scache('\n'.join(e.comment))
397
397
398 revn = len(e.revision)
398 revn = len(e.revision)
399 if revn > 3 and (revn % 2) == 0:
399 if revn > 3 and (revn % 2) == 0:
400 e.branch = tags.get(e.revision[:-1], [None])[0]
400 e.branch = tags.get(e.revision[:-1], [None])[0]
401 else:
401 else:
402 e.branch = None
402 e.branch = None
403
403
404 # find the branches starting from this revision
404 # find the branches starting from this revision
405 branchpoints = set()
405 branchpoints = set()
406 for branch, revision in branchmap.iteritems():
406 for branch, revision in branchmap.iteritems():
407 revparts = tuple([int(i) for i in revision.split('.')])
407 revparts = tuple([int(i) for i in revision.split('.')])
408 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
408 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
409 # normal branch
409 # normal branch
410 if revparts[:-2] == e.revision:
410 if revparts[:-2] == e.revision:
411 branchpoints.add(branch)
411 branchpoints.add(branch)
412 elif revparts == (1,1,1): # vendor branch
412 elif revparts == (1,1,1): # vendor branch
413 if revparts in e.branches:
413 if revparts in e.branches:
414 branchpoints.add(branch)
414 branchpoints.add(branch)
415 e.branchpoints = branchpoints
415 e.branchpoints = branchpoints
416
416
417 log.append(e)
417 log.append(e)
418
418
419 if len(log) % 100 == 0:
419 if len(log) % 100 == 0:
420 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
420 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
421
421
422 listsort(log, key=lambda x:(x.rcs, x.revision))
422 listsort(log, key=lambda x:(x.rcs, x.revision))
423
423
424 # find parent revisions of individual files
424 # find parent revisions of individual files
425 versions = {}
425 versions = {}
426 for e in log:
426 for e in log:
427 branch = e.revision[:-1]
427 branch = e.revision[:-1]
428 p = versions.get((e.rcs, branch), None)
428 p = versions.get((e.rcs, branch), None)
429 if p is None:
429 if p is None:
430 p = e.revision[:-2]
430 p = e.revision[:-2]
431 e.parent = p
431 e.parent = p
432 versions[(e.rcs, branch)] = e.revision
432 versions[(e.rcs, branch)] = e.revision
433
433
434 # update the log cache
434 # update the log cache
435 if cache:
435 if cache:
436 if log:
436 if log:
437 # join up the old and new logs
437 # join up the old and new logs
438 listsort(log, key=lambda x:x.date)
438 listsort(log, key=lambda x:x.date)
439
439
440 if oldlog and oldlog[-1].date >= log[0].date:
440 if oldlog and oldlog[-1].date >= log[0].date:
441 raise logerror('Log cache overlaps with new log entries,'
441 raise logerror('Log cache overlaps with new log entries,'
442 ' re-run without cache.')
442 ' re-run without cache.')
443
443
444 log = oldlog + log
444 log = oldlog + log
445
445
446 # write the new cachefile
446 # write the new cachefile
447 ui.note(_('writing cvs log cache %s\n') % cachefile)
447 ui.note(_('writing cvs log cache %s\n') % cachefile)
448 pickle.dump(log, file(cachefile, 'w'))
448 pickle.dump(log, open(cachefile, 'w'))
449 else:
449 else:
450 log = oldlog
450 log = oldlog
451
451
452 ui.status(_('%d log entries\n') % len(log))
452 ui.status(_('%d log entries\n') % len(log))
453
453
454 return log
454 return log
455
455
456
456
457 class changeset(object):
457 class changeset(object):
458 '''Class changeset has the following attributes:
458 '''Class changeset has the following attributes:
459 .id - integer identifying this changeset (list index)
459 .id - integer identifying this changeset (list index)
460 .author - author name as CVS knows it
460 .author - author name as CVS knows it
461 .branch - name of branch this changeset is on, or None
461 .branch - name of branch this changeset is on, or None
462 .comment - commit message
462 .comment - commit message
463 .date - the commit date as a (time,tz) tuple
463 .date - the commit date as a (time,tz) tuple
464 .entries - list of logentry objects in this changeset
464 .entries - list of logentry objects in this changeset
465 .parents - list of one or two parent changesets
465 .parents - list of one or two parent changesets
466 .tags - list of tags on this changeset
466 .tags - list of tags on this changeset
467 .synthetic - from synthetic revision "file ... added on branch ..."
467 .synthetic - from synthetic revision "file ... added on branch ..."
468 .mergepoint- the branch that has been merged from
468 .mergepoint- the branch that has been merged from
469 (if present in rlog output)
469 (if present in rlog output)
470 .branchpoints- the branches that start at the current entry
470 .branchpoints- the branches that start at the current entry
471 '''
471 '''
472 def __init__(self, **entries):
472 def __init__(self, **entries):
473 self.__dict__.update(entries)
473 self.__dict__.update(entries)
474
474
475 def __repr__(self):
475 def __repr__(self):
476 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
476 return "<%s at 0x%x: %s>" % (self.__class__.__name__,
477 id(self),
477 id(self),
478 getattr(self, 'id', "(no id)"))
478 getattr(self, 'id', "(no id)"))
479
479
480 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
480 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
481 '''Convert log into changesets.'''
481 '''Convert log into changesets.'''
482
482
483 ui.status(_('creating changesets\n'))
483 ui.status(_('creating changesets\n'))
484
484
485 # Merge changesets
485 # Merge changesets
486
486
487 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
487 listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))
488
488
489 changesets = []
489 changesets = []
490 files = set()
490 files = set()
491 c = None
491 c = None
492 for i, e in enumerate(log):
492 for i, e in enumerate(log):
493
493
494 # Check if log entry belongs to the current changeset or not.
494 # Check if log entry belongs to the current changeset or not.
495
495
496 # Since CVS is file centric, two different file revisions with
496 # Since CVS is file centric, two different file revisions with
497 # different branchpoints should be treated as belonging to two
497 # different branchpoints should be treated as belonging to two
498 # different changesets (and the ordering is important and not
498 # different changesets (and the ordering is important and not
499 # honoured by cvsps at this point).
499 # honoured by cvsps at this point).
500 #
500 #
501 # Consider the following case:
501 # Consider the following case:
502 # foo 1.1 branchpoints: [MYBRANCH]
502 # foo 1.1 branchpoints: [MYBRANCH]
503 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
503 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
504 #
504 #
505 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
505 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
506 # later version of foo may be in MYBRANCH2, so foo should be the
506 # later version of foo may be in MYBRANCH2, so foo should be the
507 # first changeset and bar the next and MYBRANCH and MYBRANCH2
507 # first changeset and bar the next and MYBRANCH and MYBRANCH2
508 # should both start off of the bar changeset. No provisions are
508 # should both start off of the bar changeset. No provisions are
509 # made to ensure that this is, in fact, what happens.
509 # made to ensure that this is, in fact, what happens.
510 if not (c and
510 if not (c and
511 e.comment == c.comment and
511 e.comment == c.comment and
512 e.author == c.author and
512 e.author == c.author and
513 e.branch == c.branch and
513 e.branch == c.branch and
514 (not hasattr(e, 'branchpoints') or
514 (not hasattr(e, 'branchpoints') or
515 not hasattr (c, 'branchpoints') or
515 not hasattr (c, 'branchpoints') or
516 e.branchpoints == c.branchpoints) and
516 e.branchpoints == c.branchpoints) and
517 ((c.date[0] + c.date[1]) <=
517 ((c.date[0] + c.date[1]) <=
518 (e.date[0] + e.date[1]) <=
518 (e.date[0] + e.date[1]) <=
519 (c.date[0] + c.date[1]) + fuzz) and
519 (c.date[0] + c.date[1]) + fuzz) and
520 e.file not in files):
520 e.file not in files):
521 c = changeset(comment=e.comment, author=e.author,
521 c = changeset(comment=e.comment, author=e.author,
522 branch=e.branch, date=e.date, entries=[],
522 branch=e.branch, date=e.date, entries=[],
523 mergepoint=getattr(e, 'mergepoint', None),
523 mergepoint=getattr(e, 'mergepoint', None),
524 branchpoints=getattr(e, 'branchpoints', set()))
524 branchpoints=getattr(e, 'branchpoints', set()))
525 changesets.append(c)
525 changesets.append(c)
526 files = set()
526 files = set()
527 if len(changesets) % 100 == 0:
527 if len(changesets) % 100 == 0:
528 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
528 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
529 ui.status(util.ellipsis(t, 80) + '\n')
529 ui.status(util.ellipsis(t, 80) + '\n')
530
530
531 c.entries.append(e)
531 c.entries.append(e)
532 files.add(e.file)
532 files.add(e.file)
533 c.date = e.date # changeset date is date of latest commit in it
533 c.date = e.date # changeset date is date of latest commit in it
534
534
535 # Mark synthetic changesets
535 # Mark synthetic changesets
536
536
537 for c in changesets:
537 for c in changesets:
538 # Synthetic revisions always get their own changeset, because
538 # Synthetic revisions always get their own changeset, because
539 # the log message includes the filename. E.g. if you add file3
539 # the log message includes the filename. E.g. if you add file3
540 # and file4 on a branch, you get four log entries and three
540 # and file4 on a branch, you get four log entries and three
541 # changesets:
541 # changesets:
542 # "File file3 was added on branch ..." (synthetic, 1 entry)
542 # "File file3 was added on branch ..." (synthetic, 1 entry)
543 # "File file4 was added on branch ..." (synthetic, 1 entry)
543 # "File file4 was added on branch ..." (synthetic, 1 entry)
544 # "Add file3 and file4 to fix ..." (real, 2 entries)
544 # "Add file3 and file4 to fix ..." (real, 2 entries)
545 # Hence the check for 1 entry here.
545 # Hence the check for 1 entry here.
546 synth = getattr(c.entries[0], 'synthetic', None)
546 synth = getattr(c.entries[0], 'synthetic', None)
547 c.synthetic = (len(c.entries) == 1 and synth)
547 c.synthetic = (len(c.entries) == 1 and synth)
548
548
549 # Sort files in each changeset
549 # Sort files in each changeset
550
550
551 for c in changesets:
551 for c in changesets:
552 def pathcompare(l, r):
552 def pathcompare(l, r):
553 'Mimic cvsps sorting order'
553 'Mimic cvsps sorting order'
554 l = l.split('/')
554 l = l.split('/')
555 r = r.split('/')
555 r = r.split('/')
556 nl = len(l)
556 nl = len(l)
557 nr = len(r)
557 nr = len(r)
558 n = min(nl, nr)
558 n = min(nl, nr)
559 for i in range(n):
559 for i in range(n):
560 if i + 1 == nl and nl < nr:
560 if i + 1 == nl and nl < nr:
561 return -1
561 return -1
562 elif i + 1 == nr and nl > nr:
562 elif i + 1 == nr and nl > nr:
563 return +1
563 return +1
564 elif l[i] < r[i]:
564 elif l[i] < r[i]:
565 return -1
565 return -1
566 elif l[i] > r[i]:
566 elif l[i] > r[i]:
567 return +1
567 return +1
568 return 0
568 return 0
569 def entitycompare(l, r):
569 def entitycompare(l, r):
570 return pathcompare(l.file, r.file)
570 return pathcompare(l.file, r.file)
571
571
572 c.entries.sort(entitycompare)
572 c.entries.sort(entitycompare)
573
573
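pathcompare above reproduces a cvsps quirk: within a directory, plain files sort ahead of anything inside sibling subdirectories, even where a byte-wise comparison would disagree. A standalone copy of the same logic, applied to a few invented paths, shows the difference:

    def cvspathcmp(l, r):
        # same component-wise rules as pathcompare above
        l, r = l.split('/'), r.split('/')
        for i in range(min(len(l), len(r))):
            if i + 1 == len(l) and len(l) < len(r):
                return -1                  # l ran out of components first: it wins
            elif i + 1 == len(r) and len(l) > len(r):
                return +1
            elif l[i] != r[i]:
                return cmp(l[i], r[i])
        return 0

    paths = ['src/a/deep.c', 'src/zlib.c', 'Makefile']
    assert sorted(paths, cmp=cvspathcmp) == ['Makefile', 'src/zlib.c', 'src/a/deep.c']
    assert sorted(paths) == ['Makefile', 'src/a/deep.c', 'src/zlib.c']
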
574 # Sort changesets by date
574 # Sort changesets by date
575
575
576 def cscmp(l, r):
576 def cscmp(l, r):
577 d = sum(l.date) - sum(r.date)
577 d = sum(l.date) - sum(r.date)
578 if d:
578 if d:
579 return d
579 return d
580
580
581 # detect vendor branches and initial commits on a branch
581 # detect vendor branches and initial commits on a branch
582 le = {}
582 le = {}
583 for e in l.entries:
583 for e in l.entries:
584 le[e.rcs] = e.revision
584 le[e.rcs] = e.revision
585 re = {}
585 re = {}
586 for e in r.entries:
586 for e in r.entries:
587 re[e.rcs] = e.revision
587 re[e.rcs] = e.revision
588
588
589 d = 0
589 d = 0
590 for e in l.entries:
590 for e in l.entries:
591 if re.get(e.rcs, None) == e.parent:
591 if re.get(e.rcs, None) == e.parent:
592 assert not d
592 assert not d
593 d = 1
593 d = 1
594 break
594 break
595
595
596 for e in r.entries:
596 for e in r.entries:
597 if le.get(e.rcs, None) == e.parent:
597 if le.get(e.rcs, None) == e.parent:
598 assert not d
598 assert not d
599 d = -1
599 d = -1
600 break
600 break
601
601
602 return d
602 return d
603
603
604 changesets.sort(cscmp)
604 changesets.sort(cscmp)
605
605
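When two changesets carry the same timestamp, cscmp falls back to file parentage: the changeset holding the parent revision of a file in the other one has to sort first. A rough check of that tie-break, reusing cscmp as defined above with invented stand-in objects:

    class fakeentry(object):
        def __init__(self, rcs, revision, parent):
            self.rcs, self.revision, self.parent = rcs, revision, parent
    class fakecs(object):
        def __init__(self, date, entries):
            self.date, self.entries = date, entries

    older = fakecs((1243000000, 0), [fakeentry('foo.c,v', (1, 1), None)])
    newer = fakecs((1243000000, 0), [fakeentry('foo.c,v', (1, 2), (1, 1))])
    # identical date sums, but newer holds revision 1.2 whose parent 1.1
    # lives in older, so the parent check decides the order
    assert cscmp(older, newer) == -1
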
606 # Collect tags
606 # Collect tags
607
607
608 globaltags = {}
608 globaltags = {}
609 for c in changesets:
609 for c in changesets:
610 for e in c.entries:
610 for e in c.entries:
611 for tag in e.tags:
611 for tag in e.tags:
612 # remember which is the latest changeset to have this tag
612 # remember which is the latest changeset to have this tag
613 globaltags[tag] = c
613 globaltags[tag] = c
614
614
615 for c in changesets:
615 for c in changesets:
616 tags = set()
616 tags = set()
617 for e in c.entries:
617 for e in c.entries:
618 tags.update(e.tags)
618 tags.update(e.tags)
619 # remember tags only if this is the latest changeset to have it
619 # remember tags only if this is the latest changeset to have it
620 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
620 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
621
621
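In other words, a tag that shows up on file revisions in several changesets is reported only on the last of them, since later changesets overwrite earlier ones in globaltags. A tiny illustration with invented objects:

    class stub(object):
        pass
    early, late = stub(), stub()
    latest = {}                                      # mirrors globaltags above
    for c in (early, late):
        latest['RELEASE_1_0'] = c                    # 'late' wins the overwrite
    for c in (early, late):
        c.tags = [t for t in ['RELEASE_1_0'] if latest[t] is c]
    assert early.tags == [] and late.tags == ['RELEASE_1_0']
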
622 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
622 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
623 # by inserting dummy changesets with two parents, and handle
623 # by inserting dummy changesets with two parents, and handle
624 # {{mergefrombranch BRANCHNAME}} by setting two parents.
624 # {{mergefrombranch BRANCHNAME}} by setting two parents.
625
625
626 if mergeto is None:
626 if mergeto is None:
627 mergeto = r'{{mergetobranch ([-\w]+)}}'
627 mergeto = r'{{mergetobranch ([-\w]+)}}'
628 if mergeto:
628 if mergeto:
629 mergeto = re.compile(mergeto)
629 mergeto = re.compile(mergeto)
630
630
631 if mergefrom is None:
631 if mergefrom is None:
632 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
632 mergefrom = r'{{mergefrombranch ([-\w]+)}}'
633 if mergefrom:
633 if mergefrom:
634 mergefrom = re.compile(mergefrom)
634 mergefrom = re.compile(mergefrom)
635
635
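The two markers are plain regular expressions matched against commit messages, so a message can steer the converter explicitly. A small, self-contained example with an invented commit message and the default marker:

    import re
    mergeto = re.compile(r'{{mergetobranch ([-\w]+)}}')
    msg = 'fix buffer overflow\n\n{{mergetobranch HEAD}}'
    m = mergeto.search(msg)
    assert m and m.group(1) == 'HEAD'    # the code below maps 'HEAD' back to the trunk
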
636 versions = {} # changeset index where we saw any particular file version
636 versions = {} # changeset index where we saw any particular file version
637 branches = {} # changeset index where we saw a branch
637 branches = {} # changeset index where we saw a branch
638 n = len(changesets)
638 n = len(changesets)
639 i = 0
639 i = 0
640 while i<n:
640 while i<n:
641 c = changesets[i]
641 c = changesets[i]
642
642
643 for f in c.entries:
643 for f in c.entries:
644 versions[(f.rcs, f.revision)] = i
644 versions[(f.rcs, f.revision)] = i
645
645
646 p = None
646 p = None
647 if c.branch in branches:
647 if c.branch in branches:
648 p = branches[c.branch]
648 p = branches[c.branch]
649 else:
649 else:
650 # first changeset on a new branch
650 # first changeset on a new branch
651 # the parent is a changeset with the branch in its
651 # the parent is a changeset with the branch in its
652 # branchpoints such that it is the latest possible
652 # branchpoints such that it is the latest possible
653 # commit without any intervening, unrelated commits.
653 # commit without any intervening, unrelated commits.
654
654
655 for candidate in xrange(i):
655 for candidate in xrange(i):
656 if c.branch not in changesets[candidate].branchpoints:
656 if c.branch not in changesets[candidate].branchpoints:
657 if p is not None:
657 if p is not None:
658 break
658 break
659 continue
659 continue
660 p = candidate
660 p = candidate
661
661
662 c.parents = []
662 c.parents = []
663 if p is not None:
663 if p is not None:
664 p = changesets[p]
664 p = changesets[p]
665
665
666 # Ensure no changeset has a synthetic changeset as a parent.
666 # Ensure no changeset has a synthetic changeset as a parent.
667 while p.synthetic:
667 while p.synthetic:
668 assert len(p.parents) <= 1, \
668 assert len(p.parents) <= 1, \
669 _('synthetic changeset cannot have multiple parents')
669 _('synthetic changeset cannot have multiple parents')
670 if p.parents:
670 if p.parents:
671 p = p.parents[0]
671 p = p.parents[0]
672 else:
672 else:
673 p = None
673 p = None
674 break
674 break
675
675
676 if p is not None:
676 if p is not None:
677 c.parents.append(p)
677 c.parents.append(p)
678
678
679 if c.mergepoint:
679 if c.mergepoint:
680 if c.mergepoint == 'HEAD':
680 if c.mergepoint == 'HEAD':
681 c.mergepoint = None
681 c.mergepoint = None
682 c.parents.append(changesets[branches[c.mergepoint]])
682 c.parents.append(changesets[branches[c.mergepoint]])
683
683
684 if mergefrom:
684 if mergefrom:
685 m = mergefrom.search(c.comment)
685 m = mergefrom.search(c.comment)
686 if m:
686 if m:
687 m = m.group(1)
687 m = m.group(1)
688 if m == 'HEAD':
688 if m == 'HEAD':
689 m = None
689 m = None
690 try:
690 try:
691 candidate = changesets[branches[m]]
691 candidate = changesets[branches[m]]
692 except KeyError:
692 except KeyError:
693 ui.warn(_("warning: CVS commit message references "
693 ui.warn(_("warning: CVS commit message references "
694 "non-existent branch %r:\n%s\n")
694 "non-existent branch %r:\n%s\n")
695 % (m, c.comment))
695 % (m, c.comment))
696 if m in branches and c.branch != m and not candidate.synthetic:
696 if m in branches and c.branch != m and not candidate.synthetic:
697 c.parents.append(candidate)
697 c.parents.append(candidate)
698
698
699 if mergeto:
699 if mergeto:
700 m = mergeto.search(c.comment)
700 m = mergeto.search(c.comment)
701 if m:
701 if m:
702 try:
702 try:
703 m = m.group(1)
703 m = m.group(1)
704 if m == 'HEAD':
704 if m == 'HEAD':
705 m = None
705 m = None
706 except:
706 except:
707 m = None # if no group found then merge to HEAD
707 m = None # if no group found then merge to HEAD
708 if m in branches and c.branch != m:
708 if m in branches and c.branch != m:
709 # insert empty changeset for merge
709 # insert empty changeset for merge
710 cc = changeset(author=c.author, branch=m, date=c.date,
710 cc = changeset(author=c.author, branch=m, date=c.date,
711 comment='convert-repo: CVS merge from branch %s' % c.branch,
711 comment='convert-repo: CVS merge from branch %s' % c.branch,
712 entries=[], tags=[], parents=[changesets[branches[m]], c])
712 entries=[], tags=[], parents=[changesets[branches[m]], c])
713 changesets.insert(i + 1, cc)
713 changesets.insert(i + 1, cc)
714 branches[m] = i + 1
714 branches[m] = i + 1
715
715
716 # adjust our loop counters now we have inserted a new entry
716 # adjust our loop counters now we have inserted a new entry
717 n += 1
717 n += 1
718 i += 2
718 i += 2
719 continue
719 continue
720
720
721 branches[c.branch] = i
721 branches[c.branch] = i
722 i += 1
722 i += 1
723
723
724 # Drop synthetic changesets (safe now that we have ensured no other
724 # Drop synthetic changesets (safe now that we have ensured no other
725 # changesets can have them as parents).
725 # changesets can have them as parents).
726 i = 0
726 i = 0
727 while i < len(changesets):
727 while i < len(changesets):
728 if changesets[i].synthetic:
728 if changesets[i].synthetic:
729 del changesets[i]
729 del changesets[i]
730 else:
730 else:
731 i += 1
731 i += 1
732
732
733 # Number changesets
733 # Number changesets
734
734
735 for i, c in enumerate(changesets):
735 for i, c in enumerate(changesets):
736 c.id = i + 1
736 c.id = i + 1
737
737
738 ui.status(_('%d changeset entries\n') % len(changesets))
738 ui.status(_('%d changeset entries\n') % len(changesets))
739
739
740 return changesets
740 return changesets
741
741
742
742
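For orientation, a minimal sketch of how this function is normally driven, mirroring what debugcvsps below does; the module paths are assumptions based on where this file lives, and real use needs a CVS checkout in the working directory plus proper error handling:

    from mercurial import ui as uimod
    from hgext.convert import cvsps

    u = uimod.ui()
    log = cvsps.createlog(u, '.', root='', cache=None)   # parse the CVS rlog
    changesets = cvsps.createchangeset(u, log, fuzz=60)  # group entries as above
    for cs in changesets:
        print cs.id, cs.branch or 'HEAD', len(cs.entries)
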
743 def debugcvsps(ui, *args, **opts):
743 def debugcvsps(ui, *args, **opts):
744 '''Read CVS rlog for current directory or named path in
744 '''Read CVS rlog for current directory or named path in
745 repository, and convert the log to changesets based on matching
745 repository, and convert the log to changesets based on matching
746 commit log entries and dates.
746 commit log entries and dates.
747 '''
747 '''
748 if opts["new_cache"]:
748 if opts["new_cache"]:
749 cache = "write"
749 cache = "write"
750 elif opts["update_cache"]:
750 elif opts["update_cache"]:
751 cache = "update"
751 cache = "update"
752 else:
752 else:
753 cache = None
753 cache = None
754
754
755 revisions = opts["revisions"]
755 revisions = opts["revisions"]
756
756
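Roughly, these options surface as command line flags of the debugcvsps command; an illustrative invocation (flag spellings assumed from the opts keys read here, and the path is made up) might look like:

    $ hg debugcvsps --update-cache --fuzz 120 --branches MYBRANCH src/module
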
757 try:
757 try:
758 if args:
758 if args:
759 log = []
759 log = []
760 for d in args:
760 for d in args:
761 log += createlog(ui, d, root=opts["root"], cache=cache)
761 log += createlog(ui, d, root=opts["root"], cache=cache)
762 else:
762 else:
763 log = createlog(ui, root=opts["root"], cache=cache)
763 log = createlog(ui, root=opts["root"], cache=cache)
764 except logerror, e:
764 except logerror, e:
765 ui.write("%r\n"%e)
765 ui.write("%r\n"%e)
766 return
766 return
767
767
768 changesets = createchangeset(ui, log, opts["fuzz"])
768 changesets = createchangeset(ui, log, opts["fuzz"])
769 del log
769 del log
770
770
771 # Print changesets (optionally filtered)
771 # Print changesets (optionally filtered)
772
772
773 off = len(revisions)
773 off = len(revisions)
774 branches = {} # latest version number in each branch
774 branches = {} # latest version number in each branch
775 ancestors = {} # parent branch
775 ancestors = {} # parent branch
776 for cs in changesets:
776 for cs in changesets:
777
777
778 if opts["ancestors"]:
778 if opts["ancestors"]:
779 if cs.branch not in branches and cs.parents and cs.parents[0].id:
779 if cs.branch not in branches and cs.parents and cs.parents[0].id:
780 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
780 ancestors[cs.branch] = (changesets[cs.parents[0].id-1].branch,
781 cs.parents[0].id)
781 cs.parents[0].id)
782 branches[cs.branch] = cs.id
782 branches[cs.branch] = cs.id
783
783
784 # limit by branches
784 # limit by branches
785 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
785 if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
786 continue
786 continue
787
787
788 if not off:
788 if not off:
789 # Note: trailing spaces on several lines here are needed to have
789 # Note: trailing spaces on several lines here are needed to have
790 # bug-for-bug compatibility with cvsps.
790 # bug-for-bug compatibility with cvsps.
791 ui.write('---------------------\n')
791 ui.write('---------------------\n')
792 ui.write('PatchSet %d \n' % cs.id)
792 ui.write('PatchSet %d \n' % cs.id)
793 ui.write('Date: %s\n' % util.datestr(cs.date,
793 ui.write('Date: %s\n' % util.datestr(cs.date,
794 '%Y/%m/%d %H:%M:%S %1%2'))
794 '%Y/%m/%d %H:%M:%S %1%2'))
795 ui.write('Author: %s\n' % cs.author)
795 ui.write('Author: %s\n' % cs.author)
796 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
796 ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
797 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
797 ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags)>1],
798 ','.join(cs.tags) or '(none)'))
798 ','.join(cs.tags) or '(none)'))
799 branchpoints = getattr(cs, 'branchpoints', None)
799 branchpoints = getattr(cs, 'branchpoints', None)
800 if branchpoints:
800 if branchpoints:
801 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
801 ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
802 if opts["parents"] and cs.parents:
802 if opts["parents"] and cs.parents:
803 if len(cs.parents)>1:
803 if len(cs.parents)>1:
804 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
804 ui.write('Parents: %s\n' % (','.join([str(p.id) for p in cs.parents])))
805 else:
805 else:
806 ui.write('Parent: %d\n' % cs.parents[0].id)
806 ui.write('Parent: %d\n' % cs.parents[0].id)
807
807
808 if opts["ancestors"]:
808 if opts["ancestors"]:
809 b = cs.branch
809 b = cs.branch
810 r = []
810 r = []
811 while b:
811 while b:
812 b, c = ancestors[b]
812 b, c = ancestors[b]
813 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
813 r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
814 if r:
814 if r:
815 ui.write('Ancestors: %s\n' % (','.join(r)))
815 ui.write('Ancestors: %s\n' % (','.join(r)))
816
816
817 ui.write('Log:\n')
817 ui.write('Log:\n')
818 ui.write('%s\n\n' % cs.comment)
818 ui.write('%s\n\n' % cs.comment)
819 ui.write('Members: \n')
819 ui.write('Members: \n')
820 for f in cs.entries:
820 for f in cs.entries:
821 fn = f.file
821 fn = f.file
822 if fn.startswith(opts["prefix"]):
822 if fn.startswith(opts["prefix"]):
823 fn = fn[len(opts["prefix"]):]
823 fn = fn[len(opts["prefix"]):]
824 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
824 ui.write('\t%s:%s->%s%s \n' % (fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
825 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
825 '.'.join([str(x) for x in f.revision]), ['', '(DEAD)'][f.dead]))
826 ui.write('\n')
826 ui.write('\n')
827
827
828 # have we seen the start tag?
828 # have we seen the start tag?
829 if revisions and off:
829 if revisions and off:
830 if revisions[0] == str(cs.id) or \
830 if revisions[0] == str(cs.id) or \
831 revisions[0] in cs.tags:
831 revisions[0] in cs.tags:
832 off = False
832 off = False
833
833
834 # see if we reached the end tag
834 # see if we reached the end tag
835 if len(revisions)>1 and not off:
835 if len(revisions)>1 and not off:
836 if revisions[1] == str(cs.id) or \
836 if revisions[1] == str(cs.id) or \
837 revisions[1] in cs.tags:
837 revisions[1] in cs.tags:
838 break
838 break
@@ -1,3558 +1,3558 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, subprocess, difflib, time
11 import os, re, sys, subprocess, difflib, time
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import patch, help, mdiff, tempfile, url, encoding
13 import patch, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 from hgweb import server
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
24 repository.
24 repository.
25
25
26 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
27 undo an add before that, see hg forget.
27 undo an add before that, see hg forget.
28
28
29 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
30 """
30 """
31
31
32 bad = []
32 bad = []
33 exacts = {}
33 exacts = {}
34 names = []
34 names = []
35 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
36 oldbad = m.bad
36 oldbad = m.bad
37 m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
37 m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
38
38
39 for f in repo.walk(m):
39 for f in repo.walk(m):
40 exact = m.exact(f)
40 exact = m.exact(f)
41 if exact or f not in repo.dirstate:
41 if exact or f not in repo.dirstate:
42 names.append(f)
42 names.append(f)
43 if ui.verbose or not exact:
43 if ui.verbose or not exact:
44 ui.status(_('adding %s\n') % m.rel(f))
44 ui.status(_('adding %s\n') % m.rel(f))
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 bad += [f for f in repo.add(names) if f in m.files()]
46 bad += [f for f in repo.add(names) if f in m.files()]
47 return bad and 1 or 0
47 return bad and 1 or 0
48
48
49 def addremove(ui, repo, *pats, **opts):
49 def addremove(ui, repo, *pats, **opts):
50 """add all new files, delete all missing files
50 """add all new files, delete all missing files
51
51
52 Add all new files and remove all missing files from the
52 Add all new files and remove all missing files from the
53 repository.
53 repository.
54
54
55 New files are ignored if they match any of the patterns in
55 New files are ignored if they match any of the patterns in
56 .hgignore. As with add, these changes take effect at the next
56 .hgignore. As with add, these changes take effect at the next
57 commit.
57 commit.
58
58
59 Use the -s/--similarity option to detect renamed files. With a
59 Use the -s/--similarity option to detect renamed files. With a
60 parameter > 0, this compares every removed file with every added
60 parameter > 0, this compares every removed file with every added
61 file and records those similar enough as renames. This option
61 file and records those similar enough as renames. This option
62 takes a percentage between 0 (disabled) and 100 (files must be
62 takes a percentage between 0 (disabled) and 100 (files must be
63 identical) as its parameter. Detecting renamed files this way can
63 identical) as its parameter. Detecting renamed files this way can
64 be expensive.
64 be expensive.
65 """
65 """
66 try:
66 try:
67 sim = float(opts.get('similarity') or 0)
67 sim = float(opts.get('similarity') or 0)
68 except ValueError:
68 except ValueError:
69 raise util.Abort(_('similarity must be a number'))
69 raise util.Abort(_('similarity must be a number'))
70 if sim < 0 or sim > 100:
70 if sim < 0 or sim > 100:
71 raise util.Abort(_('similarity must be between 0 and 100'))
71 raise util.Abort(_('similarity must be between 0 and 100'))
72 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
72 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
73
73
74 def annotate(ui, repo, *pats, **opts):
74 def annotate(ui, repo, *pats, **opts):
75 """show changeset information by line for each file
75 """show changeset information by line for each file
76
76
77 List changes in files, showing the revision id responsible for
77 List changes in files, showing the revision id responsible for
78 each line.
78 each line.
79
79
80 This command is useful for discovering when a change was made and
80 This command is useful for discovering when a change was made and
81 by whom.
81 by whom.
82
82
83 Without the -a/--text option, annotate will avoid processing files
83 Without the -a/--text option, annotate will avoid processing files
84 it detects as binary. With -a, annotate will annotate the file
84 it detects as binary. With -a, annotate will annotate the file
85 anyway, although the results will probably be neither useful
85 anyway, although the results will probably be neither useful
86 nor desirable.
86 nor desirable.
87 """
87 """
88 datefunc = ui.quiet and util.shortdate or util.datestr
88 datefunc = ui.quiet and util.shortdate or util.datestr
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
89 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
90
90
91 if not pats:
91 if not pats:
92 raise util.Abort(_('at least one filename or pattern is required'))
92 raise util.Abort(_('at least one filename or pattern is required'))
93
93
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
94 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
95 ('number', lambda x: str(x[0].rev())),
95 ('number', lambda x: str(x[0].rev())),
96 ('changeset', lambda x: short(x[0].node())),
96 ('changeset', lambda x: short(x[0].node())),
97 ('date', getdate),
97 ('date', getdate),
98 ('follow', lambda x: x[0].path()),
98 ('follow', lambda x: x[0].path()),
99 ]
99 ]
100
100
101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
101 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
102 and not opts.get('follow')):
102 and not opts.get('follow')):
103 opts['number'] = 1
103 opts['number'] = 1
104
104
105 linenumber = opts.get('line_number') is not None
105 linenumber = opts.get('line_number') is not None
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
106 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
107 raise util.Abort(_('at least one of -n/-c is required for -l'))
108
108
109 funcmap = [func for op, func in opmap if opts.get(op)]
109 funcmap = [func for op, func in opmap if opts.get(op)]
110 if linenumber:
110 if linenumber:
111 lastfunc = funcmap[-1]
111 lastfunc = funcmap[-1]
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
112 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
113
113
114 ctx = repo[opts.get('rev')]
114 ctx = repo[opts.get('rev')]
115
115
116 m = cmdutil.match(repo, pats, opts)
116 m = cmdutil.match(repo, pats, opts)
117 for abs in ctx.walk(m):
117 for abs in ctx.walk(m):
118 fctx = ctx[abs]
118 fctx = ctx[abs]
119 if not opts.get('text') and util.binary(fctx.data()):
119 if not opts.get('text') and util.binary(fctx.data()):
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
120 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
121 continue
121 continue
122
122
123 lines = fctx.annotate(follow=opts.get('follow'),
123 lines = fctx.annotate(follow=opts.get('follow'),
124 linenumber=linenumber)
124 linenumber=linenumber)
125 pieces = []
125 pieces = []
126
126
127 for f in funcmap:
127 for f in funcmap:
128 l = [f(n) for n, dummy in lines]
128 l = [f(n) for n, dummy in lines]
129 if l:
129 if l:
130 ml = max(map(len, l))
130 ml = max(map(len, l))
131 pieces.append(["%*s" % (ml, x) for x in l])
131 pieces.append(["%*s" % (ml, x) for x in l])
132
132
133 if pieces:
133 if pieces:
134 for p, l in zip(zip(*pieces), lines):
134 for p, l in zip(zip(*pieces), lines):
135 ui.write("%s: %s" % (" ".join(p), l[1]))
135 ui.write("%s: %s" % (" ".join(p), l[1]))
136
136
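The column handling above pads every requested field to its widest value and then zips the columns back together per line. A standalone sketch of the same layout with invented annotation data:

    annotations = [('mpm', '1234'), ('bos', '7')]      # (user, rev) per line, invented
    text = ['first line\n', 'second line\n']
    pieces = []
    for col in zip(*annotations):                      # one tuple per field
        width = max([len(x) for x in col])
        pieces.append(['%*s' % (width, x) for x in col])
    for p, line in zip(zip(*pieces), text):
        print '%s: %s' % (' '.join(p), line),          # aligned columns per output line
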
137 def archive(ui, repo, dest, **opts):
137 def archive(ui, repo, dest, **opts):
138 '''create an unversioned archive of a repository revision
138 '''create an unversioned archive of a repository revision
139
139
140 By default, the revision used is the parent of the working
140 By default, the revision used is the parent of the working
141 directory; use -r/--rev to specify a different revision.
141 directory; use -r/--rev to specify a different revision.
142
142
143 To specify the type of archive to create, use -t/--type. Valid
143 To specify the type of archive to create, use -t/--type. Valid
144 types are:
144 types are:
145
145
146 "files" (default): a directory full of files
146 "files" (default): a directory full of files
147 "tar": tar archive, uncompressed
147 "tar": tar archive, uncompressed
148 "tbz2": tar archive, compressed using bzip2
148 "tbz2": tar archive, compressed using bzip2
149 "tgz": tar archive, compressed using gzip
149 "tgz": tar archive, compressed using gzip
150 "uzip": zip archive, uncompressed
150 "uzip": zip archive, uncompressed
151 "zip": zip archive, compressed using deflate
151 "zip": zip archive, compressed using deflate
152
152
153 The exact name of the destination archive or directory is given
153 The exact name of the destination archive or directory is given
154 using a format string; see 'hg help export' for details.
154 using a format string; see 'hg help export' for details.
155
155
156 Each member added to an archive file has a directory prefix
156 Each member added to an archive file has a directory prefix
157 prepended. Use -p/--prefix to specify a format string for the
157 prepended. Use -p/--prefix to specify a format string for the
158 prefix. The default is the basename of the archive, with suffixes
158 prefix. The default is the basename of the archive, with suffixes
159 removed.
159 removed.
160 '''
160 '''
161
161
162 ctx = repo[opts.get('rev')]
162 ctx = repo[opts.get('rev')]
163 if not ctx:
163 if not ctx:
164 raise util.Abort(_('no working directory: please specify a revision'))
164 raise util.Abort(_('no working directory: please specify a revision'))
165 node = ctx.node()
165 node = ctx.node()
166 dest = cmdutil.make_filename(repo, dest, node)
166 dest = cmdutil.make_filename(repo, dest, node)
167 if os.path.realpath(dest) == repo.root:
167 if os.path.realpath(dest) == repo.root:
168 raise util.Abort(_('repository root cannot be destination'))
168 raise util.Abort(_('repository root cannot be destination'))
169 matchfn = cmdutil.match(repo, [], opts)
169 matchfn = cmdutil.match(repo, [], opts)
170 kind = opts.get('type') or 'files'
170 kind = opts.get('type') or 'files'
171 prefix = opts.get('prefix')
171 prefix = opts.get('prefix')
172 if dest == '-':
172 if dest == '-':
173 if kind == 'files':
173 if kind == 'files':
174 raise util.Abort(_('cannot archive plain files to stdout'))
174 raise util.Abort(_('cannot archive plain files to stdout'))
175 dest = sys.stdout
175 dest = sys.stdout
176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
176 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
177 prefix = cmdutil.make_filename(repo, prefix, node)
177 prefix = cmdutil.make_filename(repo, prefix, node)
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
178 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
179 matchfn, prefix)
179 matchfn, prefix)
180
180
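A typical invocation of the options described in the docstring (the revision, type and prefix here are all invented):

    $ hg archive -r 1.0 -t tgz -p project-1.0 ../project-1.0.tgz
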
181 def backout(ui, repo, node=None, rev=None, **opts):
181 def backout(ui, repo, node=None, rev=None, **opts):
182 '''reverse effect of earlier changeset
182 '''reverse effect of earlier changeset
183
183
184 Commit the backed out changes as a new changeset. The new
184 Commit the backed out changes as a new changeset. The new
185 changeset is a child of the backed out changeset.
185 changeset is a child of the backed out changeset.
186
186
187 If you backout a changeset other than the tip, a new head is
187 If you backout a changeset other than the tip, a new head is
188 created. This head will be the new tip and you should merge this
188 created. This head will be the new tip and you should merge this
189 backout changeset with another head.
189 backout changeset with another head.
190
190
191 The --merge option remembers the parent of the working directory
191 The --merge option remembers the parent of the working directory
192 before starting the backout, then merges the new head with that
192 before starting the backout, then merges the new head with that
193 changeset afterwards. This saves you from doing the merge by hand.
193 changeset afterwards. This saves you from doing the merge by hand.
194 The result of this merge is not committed, as with a normal merge.
194 The result of this merge is not committed, as with a normal merge.
195
195
196 See 'hg help dates' for a list of formats valid for -d/--date.
196 See 'hg help dates' for a list of formats valid for -d/--date.
197 '''
197 '''
198 if rev and node:
198 if rev and node:
199 raise util.Abort(_("please specify just one revision"))
199 raise util.Abort(_("please specify just one revision"))
200
200
201 if not rev:
201 if not rev:
202 rev = node
202 rev = node
203
203
204 if not rev:
204 if not rev:
205 raise util.Abort(_("please specify a revision to backout"))
205 raise util.Abort(_("please specify a revision to backout"))
206
206
207 date = opts.get('date')
207 date = opts.get('date')
208 if date:
208 if date:
209 opts['date'] = util.parsedate(date)
209 opts['date'] = util.parsedate(date)
210
210
211 cmdutil.bail_if_changed(repo)
211 cmdutil.bail_if_changed(repo)
212 node = repo.lookup(rev)
212 node = repo.lookup(rev)
213
213
214 op1, op2 = repo.dirstate.parents()
214 op1, op2 = repo.dirstate.parents()
215 a = repo.changelog.ancestor(op1, node)
215 a = repo.changelog.ancestor(op1, node)
216 if a != node:
216 if a != node:
217 raise util.Abort(_('cannot backout change on a different branch'))
217 raise util.Abort(_('cannot backout change on a different branch'))
218
218
219 p1, p2 = repo.changelog.parents(node)
219 p1, p2 = repo.changelog.parents(node)
220 if p1 == nullid:
220 if p1 == nullid:
221 raise util.Abort(_('cannot backout a change with no parents'))
221 raise util.Abort(_('cannot backout a change with no parents'))
222 if p2 != nullid:
222 if p2 != nullid:
223 if not opts.get('parent'):
223 if not opts.get('parent'):
224 raise util.Abort(_('cannot backout a merge changeset without '
224 raise util.Abort(_('cannot backout a merge changeset without '
225 '--parent'))
225 '--parent'))
226 p = repo.lookup(opts['parent'])
226 p = repo.lookup(opts['parent'])
227 if p not in (p1, p2):
227 if p not in (p1, p2):
228 raise util.Abort(_('%s is not a parent of %s') %
228 raise util.Abort(_('%s is not a parent of %s') %
229 (short(p), short(node)))
229 (short(p), short(node)))
230 parent = p
230 parent = p
231 else:
231 else:
232 if opts.get('parent'):
232 if opts.get('parent'):
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
233 raise util.Abort(_('cannot use --parent on non-merge changeset'))
234 parent = p1
234 parent = p1
235
235
236 # the backout should appear on the same branch
236 # the backout should appear on the same branch
237 branch = repo.dirstate.branch()
237 branch = repo.dirstate.branch()
238 hg.clean(repo, node, show_stats=False)
238 hg.clean(repo, node, show_stats=False)
239 repo.dirstate.setbranch(branch)
239 repo.dirstate.setbranch(branch)
240 revert_opts = opts.copy()
240 revert_opts = opts.copy()
241 revert_opts['date'] = None
241 revert_opts['date'] = None
242 revert_opts['all'] = True
242 revert_opts['all'] = True
243 revert_opts['rev'] = hex(parent)
243 revert_opts['rev'] = hex(parent)
244 revert_opts['no_backup'] = None
244 revert_opts['no_backup'] = None
245 revert(ui, repo, **revert_opts)
245 revert(ui, repo, **revert_opts)
246 commit_opts = opts.copy()
246 commit_opts = opts.copy()
247 commit_opts['addremove'] = False
247 commit_opts['addremove'] = False
248 if not commit_opts['message'] and not commit_opts['logfile']:
248 if not commit_opts['message'] and not commit_opts['logfile']:
249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
249 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
250 commit_opts['force_editor'] = True
250 commit_opts['force_editor'] = True
251 commit(ui, repo, **commit_opts)
251 commit(ui, repo, **commit_opts)
252 def nice(node):
252 def nice(node):
253 return '%d:%s' % (repo.changelog.rev(node), short(node))
253 return '%d:%s' % (repo.changelog.rev(node), short(node))
254 ui.status(_('changeset %s backs out changeset %s\n') %
254 ui.status(_('changeset %s backs out changeset %s\n') %
255 (nice(repo.changelog.tip()), nice(node)))
255 (nice(repo.changelog.tip()), nice(node)))
256 if op1 != node:
256 if op1 != node:
257 hg.clean(repo, op1, show_stats=False)
257 hg.clean(repo, op1, show_stats=False)
258 if opts.get('merge'):
258 if opts.get('merge'):
259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
259 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
260 hg.merge(repo, hex(repo.changelog.tip()))
260 hg.merge(repo, hex(repo.changelog.tip()))
261 else:
261 else:
262 ui.status(_('the backout changeset is a new head - '
262 ui.status(_('the backout changeset is a new head - '
263 'do not forget to merge\n'))
263 'do not forget to merge\n'))
264 ui.status(_('(use "backout --merge" '
264 ui.status(_('(use "backout --merge" '
265 'if you want to auto-merge)\n'))
265 'if you want to auto-merge)\n'))
266
266
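For example (the revision number is invented), backing out an old changeset and then committing the uncommitted merge that --merge leaves behind could look like:

    $ hg backout --merge -r 1029
    $ hg commit -m 'merge backout of changeset 1029'
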
267 def bisect(ui, repo, rev=None, extra=None, command=None,
267 def bisect(ui, repo, rev=None, extra=None, command=None,
268 reset=None, good=None, bad=None, skip=None, noupdate=None):
268 reset=None, good=None, bad=None, skip=None, noupdate=None):
269 """subdivision search of changesets
269 """subdivision search of changesets
270
270
271 This command helps to find changesets which introduce problems. To
271 This command helps to find changesets which introduce problems. To
272 use, mark the earliest changeset you know exhibits the problem as
272 use, mark the earliest changeset you know exhibits the problem as
273 bad, then mark the latest changeset which is free from the problem
273 bad, then mark the latest changeset which is free from the problem
274 as good. Bisect will update your working directory to a revision
274 as good. Bisect will update your working directory to a revision
275 for testing (unless the -U/--noupdate option is specified). Once
275 for testing (unless the -U/--noupdate option is specified). Once
276 you have performed tests, mark the working directory as good or
276 you have performed tests, mark the working directory as good or
277 bad, and bisect will either update to another candidate changeset
277 bad, and bisect will either update to another candidate changeset
278 or announce that it has found the bad revision.
278 or announce that it has found the bad revision.
279
279
280 As a shortcut, you can also use the revision argument to mark a
280 As a shortcut, you can also use the revision argument to mark a
281 revision as good or bad without checking it out first.
281 revision as good or bad without checking it out first.
282
282
283 If you supply a command, it will be used for automatic bisection.
283 If you supply a command, it will be used for automatic bisection.
284 Its exit status will be used to mark revisions as good or bad:
284 Its exit status will be used to mark revisions as good or bad:
285 status 0 means good, 125 means to skip the revision, 127
285 status 0 means good, 125 means to skip the revision, 127
286 (command not found) will abort the bisection, and any other
286 (command not found) will abort the bisection, and any other
287 non-zero exit status means the revision is bad.
287 non-zero exit status means the revision is bad.
288 """
288 """
289 def print_result(nodes, good):
289 def print_result(nodes, good):
290 displayer = cmdutil.show_changeset(ui, repo, {})
290 displayer = cmdutil.show_changeset(ui, repo, {})
291 if len(nodes) == 1:
291 if len(nodes) == 1:
292 # narrowed it down to a single revision
292 # narrowed it down to a single revision
293 if good:
293 if good:
294 ui.write(_("The first good revision is:\n"))
294 ui.write(_("The first good revision is:\n"))
295 else:
295 else:
296 ui.write(_("The first bad revision is:\n"))
296 ui.write(_("The first bad revision is:\n"))
297 displayer.show(repo[nodes[0]])
297 displayer.show(repo[nodes[0]])
298 else:
298 else:
299 # multiple possible revisions
299 # multiple possible revisions
300 if good:
300 if good:
301 ui.write(_("Due to skipped revisions, the first "
301 ui.write(_("Due to skipped revisions, the first "
302 "good revision could be any of:\n"))
302 "good revision could be any of:\n"))
303 else:
303 else:
304 ui.write(_("Due to skipped revisions, the first "
304 ui.write(_("Due to skipped revisions, the first "
305 "bad revision could be any of:\n"))
305 "bad revision could be any of:\n"))
306 for n in nodes:
306 for n in nodes:
307 displayer.show(repo[n])
307 displayer.show(repo[n])
308
308
309 def check_state(state, interactive=True):
309 def check_state(state, interactive=True):
310 if not state['good'] or not state['bad']:
310 if not state['good'] or not state['bad']:
311 if (good or bad or skip or reset) and interactive:
311 if (good or bad or skip or reset) and interactive:
312 return
312 return
313 if not state['good']:
313 if not state['good']:
314 raise util.Abort(_('cannot bisect (no known good revisions)'))
314 raise util.Abort(_('cannot bisect (no known good revisions)'))
315 else:
315 else:
316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
316 raise util.Abort(_('cannot bisect (no known bad revisions)'))
317 return True
317 return True
318
318
319 # backward compatibility
319 # backward compatibility
320 if rev in "good bad reset init".split():
320 if rev in "good bad reset init".split():
321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
321 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
322 cmd, rev, extra = rev, extra, None
322 cmd, rev, extra = rev, extra, None
323 if cmd == "good":
323 if cmd == "good":
324 good = True
324 good = True
325 elif cmd == "bad":
325 elif cmd == "bad":
326 bad = True
326 bad = True
327 else:
327 else:
328 reset = True
328 reset = True
329 elif extra or good + bad + skip + reset + bool(command) > 1:
329 elif extra or good + bad + skip + reset + bool(command) > 1:
330 raise util.Abort(_('incompatible arguments'))
330 raise util.Abort(_('incompatible arguments'))
331
331
332 if reset:
332 if reset:
333 p = repo.join("bisect.state")
333 p = repo.join("bisect.state")
334 if os.path.exists(p):
334 if os.path.exists(p):
335 os.unlink(p)
335 os.unlink(p)
336 return
336 return
337
337
338 state = hbisect.load_state(repo)
338 state = hbisect.load_state(repo)
339
339
340 if command:
340 if command:
341 commandpath = util.find_exe(command)
341 commandpath = util.find_exe(command)
342 if commandpath is None:
342 if commandpath is None:
343 raise util.Abort(_("cannot find executable: %s") % command)
343 raise util.Abort(_("cannot find executable: %s") % command)
344 changesets = 1
344 changesets = 1
345 try:
345 try:
346 while changesets:
346 while changesets:
347 # update state
347 # update state
348 status = subprocess.call([commandpath])
348 status = subprocess.call([commandpath])
349 if status == 125:
349 if status == 125:
350 transition = "skip"
350 transition = "skip"
351 elif status == 0:
351 elif status == 0:
352 transition = "good"
352 transition = "good"
353 # status < 0 means process was killed
353 # status < 0 means process was killed
354 elif status == 127:
354 elif status == 127:
355 raise util.Abort(_("failed to execute %s") % command)
355 raise util.Abort(_("failed to execute %s") % command)
356 elif status < 0:
356 elif status < 0:
357 raise util.Abort(_("%s killed") % command)
357 raise util.Abort(_("%s killed") % command)
358 else:
358 else:
359 transition = "bad"
359 transition = "bad"
360 ctx = repo[rev or '.']
360 ctx = repo[rev or '.']
361 state[transition].append(ctx.node())
361 state[transition].append(ctx.node())
362 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
362 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
363 check_state(state, interactive=False)
363 check_state(state, interactive=False)
364 # bisect
364 # bisect
365 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
365 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
366 # update to next check
366 # update to next check
367 cmdutil.bail_if_changed(repo)
367 cmdutil.bail_if_changed(repo)
368 hg.clean(repo, nodes[0], show_stats=False)
368 hg.clean(repo, nodes[0], show_stats=False)
369 finally:
369 finally:
370 hbisect.save_state(repo, state)
370 hbisect.save_state(repo, state)
371 return print_result(nodes, not status)
371 return print_result(nodes, not status)
372
372
373 # update state
373 # update state
374 node = repo.lookup(rev or '.')
374 node = repo.lookup(rev or '.')
375 if good:
375 if good:
376 state['good'].append(node)
376 state['good'].append(node)
377 elif bad:
377 elif bad:
378 state['bad'].append(node)
378 state['bad'].append(node)
379 elif skip:
379 elif skip:
380 state['skip'].append(node)
380 state['skip'].append(node)
381
381
382 hbisect.save_state(repo, state)
382 hbisect.save_state(repo, state)
383
383
384 if not check_state(state):
384 if not check_state(state):
385 return
385 return
386
386
387 # actually bisect
387 # actually bisect
388 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
388 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
389 if changesets == 0:
389 if changesets == 0:
390 print_result(nodes, good)
390 print_result(nodes, good)
391 else:
391 else:
392 assert len(nodes) == 1 # only a single node can be tested next
392 assert len(nodes) == 1 # only a single node can be tested next
393 node = nodes[0]
393 node = nodes[0]
394 # compute the approximate number of remaining tests
394 # compute the approximate number of remaining tests
395 tests, size = 0, 2
395 tests, size = 0, 2
396 while size <= changesets:
396 while size <= changesets:
397 tests, size = tests + 1, size * 2
397 tests, size = tests + 1, size * 2
398 rev = repo.changelog.rev(node)
398 rev = repo.changelog.rev(node)
399 ui.write(_("Testing changeset %d:%s "
399 ui.write(_("Testing changeset %d:%s "
400 "(%d changesets remaining, ~%d tests)\n")
400 "(%d changesets remaining, ~%d tests)\n")
401 % (rev, short(node), changesets, tests))
401 % (rev, short(node), changesets, tests))
402 if not noupdate:
402 if not noupdate:
403 cmdutil.bail_if_changed(repo)
403 cmdutil.bail_if_changed(repo)
404 return hg.clean(repo, node)
404 return hg.clean(repo, node)
405
405
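The automatic mode described in the docstring only needs a test program that follows the exit-status convention. A hedged example: the script below (check.py, an invented name, as are the build and test commands) exits 125 when the tree cannot be built, 0 when the bug is absent and 1 when it is present; the session afterwards shows one way to drive it, assuming a known-good 1.0 tag:

    #!/usr/bin/env python
    # check.py - exit status tells 'hg bisect --command' how to mark a revision
    import subprocess, sys

    if subprocess.call(['make', '-s']) != 0:
        sys.exit(125)                            # cannot test here: skip this revision
    if subprocess.call(['./run-regression-test']) == 0:
        sys.exit(0)                              # good
    sys.exit(1)                                  # bad

    $ hg bisect --reset
    $ hg bisect --bad tip
    $ hg bisect --good 1.0
    $ hg bisect --command 'python check.py'
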
406 def branch(ui, repo, label=None, **opts):
406 def branch(ui, repo, label=None, **opts):
407 """set or show the current branch name
407 """set or show the current branch name
408
408
409 With no argument, show the current branch name. With one argument,
409 With no argument, show the current branch name. With one argument,
410 set the working directory branch name (the branch will not exist
410 set the working directory branch name (the branch will not exist
411 in the repository until the next commit). Standard practice
411 in the repository until the next commit). Standard practice
412 recommends that primary development take place on the 'default'
412 recommends that primary development take place on the 'default'
413 branch.
413 branch.
414
414
415 Unless -f/--force is specified, branch will not let you set a
415 Unless -f/--force is specified, branch will not let you set a
416 branch name that already exists, even if it's inactive.
416 branch name that already exists, even if it's inactive.
417
417
418 Use -C/--clean to reset the working directory branch to that of
418 Use -C/--clean to reset the working directory branch to that of
419 the parent of the working directory, negating a previous branch
419 the parent of the working directory, negating a previous branch
420 change.
420 change.
421
421
422 Use the command 'hg update' to switch to an existing branch. Use
422 Use the command 'hg update' to switch to an existing branch. Use
423 'hg commit --close-branch' to mark this branch as closed.
423 'hg commit --close-branch' to mark this branch as closed.
424 """
424 """
425
425
426 if opts.get('clean'):
426 if opts.get('clean'):
427 label = repo[None].parents()[0].branch()
427 label = repo[None].parents()[0].branch()
428 repo.dirstate.setbranch(label)
428 repo.dirstate.setbranch(label)
429 ui.status(_('reset working directory to branch %s\n') % label)
429 ui.status(_('reset working directory to branch %s\n') % label)
430 elif label:
430 elif label:
431 if not opts.get('force') and label in repo.branchtags():
431 if not opts.get('force') and label in repo.branchtags():
432 if label not in [p.branch() for p in repo.parents()]:
432 if label not in [p.branch() for p in repo.parents()]:
433 raise util.Abort(_('a branch of the same name already exists'
433 raise util.Abort(_('a branch of the same name already exists'
434 ' (use --force to override)'))
434 ' (use --force to override)'))
435 repo.dirstate.setbranch(encoding.fromlocal(label))
435 repo.dirstate.setbranch(encoding.fromlocal(label))
436 ui.status(_('marked working directory as branch %s\n') % label)
436 ui.status(_('marked working directory as branch %s\n') % label)
437 else:
437 else:
438 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
438 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
439
439
440 def branches(ui, repo, active=False, closed=False):
440 def branches(ui, repo, active=False, closed=False):
441 """list repository named branches
441 """list repository named branches
442
442
443 List the repository's named branches, indicating which ones are
443 List the repository's named branches, indicating which ones are
444 inactive. If -c/--closed is specified, also list branches which have
444 inactive. If -c/--closed is specified, also list branches which have
445 been marked closed (see hg commit --close-branch).
445 been marked closed (see hg commit --close-branch).
446
446
447 If -a/--active is specified, only show active branches. A branch
447 If -a/--active is specified, only show active branches. A branch
448 is considered active if it contains repository heads.
448 is considered active if it contains repository heads.
449
449
450 Use the command 'hg update' to switch to an existing branch.
450 Use the command 'hg update' to switch to an existing branch.
451 """
451 """
452
452
453 hexfunc = ui.debugflag and hex or short
453 hexfunc = ui.debugflag and hex or short
454 activebranches = [encoding.tolocal(repo[n].branch())
454 activebranches = [encoding.tolocal(repo[n].branch())
455 for n in repo.heads()]
455 for n in repo.heads()]
456 def testactive(tag, node):
456 def testactive(tag, node):
457 realhead = tag in activebranches
457 realhead = tag in activebranches
458 open = node in repo.branchheads(tag, closed=False)
458 open = node in repo.branchheads(tag, closed=False)
459 return realhead and open
459 return realhead and open
460 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
460 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
461 for tag, node in repo.branchtags().items()],
461 for tag, node in repo.branchtags().items()],
462 reverse=True)
462 reverse=True)
463
463
464 for isactive, node, tag in branches:
464 for isactive, node, tag in branches:
465 if (not active) or isactive:
465 if (not active) or isactive:
466 if ui.quiet:
466 if ui.quiet:
467 ui.write("%s\n" % tag)
467 ui.write("%s\n" % tag)
468 else:
468 else:
469 hn = repo.lookup(node)
469 hn = repo.lookup(node)
470 if isactive:
470 if isactive:
471 notice = ''
471 notice = ''
472 elif hn not in repo.branchheads(tag, closed=False):
472 elif hn not in repo.branchheads(tag, closed=False):
473 if not closed:
473 if not closed:
474 continue
474 continue
475 notice = ' (closed)'
475 notice = ' (closed)'
476 else:
476 else:
477 notice = ' (inactive)'
477 notice = ' (inactive)'
478 rev = str(node).rjust(31 - encoding.colwidth(tag))
478 rev = str(node).rjust(31 - encoding.colwidth(tag))
479 data = tag, rev, hexfunc(hn), notice
479 data = tag, rev, hexfunc(hn), notice
480 ui.write("%s %s:%s%s\n" % data)
480 ui.write("%s %s:%s%s\n" % data)
481
481
482 def bundle(ui, repo, fname, dest=None, **opts):
482 def bundle(ui, repo, fname, dest=None, **opts):
483 """create a changegroup file
483 """create a changegroup file
484
484
485 Generate a compressed changegroup file collecting changesets not
485 Generate a compressed changegroup file collecting changesets not
486 known to be in another repository.
486 known to be in another repository.
487
487
488 If no destination repository is specified the destination is
488 If no destination repository is specified the destination is
489 assumed to have all the nodes specified by one or more --base
489 assumed to have all the nodes specified by one or more --base
490 parameters. To create a bundle containing all changesets, use
490 parameters. To create a bundle containing all changesets, use
491 -a/--all (or --base null).
491 -a/--all (or --base null).
492
492
493 You can change compression method with the -t/--type option.
493 You can change compression method with the -t/--type option.
494 The available compression methods are: none, bzip2, and
494 The available compression methods are: none, bzip2, and
495 gzip (by default, bundles are compressed using bzip2).
495 gzip (by default, bundles are compressed using bzip2).
496
496
497 The bundle file can then be transferred using conventional means
497 The bundle file can then be transferred using conventional means
498 and applied to another repository with the unbundle or pull
498 and applied to another repository with the unbundle or pull
499 command. This is useful when direct push and pull are not
499 command. This is useful when direct push and pull are not
500 available or when exporting an entire repository is undesirable.
500 available or when exporting an entire repository is undesirable.
501
501
502 Applying bundles preserves all changeset contents including
502 Applying bundles preserves all changeset contents including
503 permissions, copy/rename information, and revision history.
503 permissions, copy/rename information, and revision history.
504 """
504 """
505 revs = opts.get('rev') or None
505 revs = opts.get('rev') or None
506 if revs:
506 if revs:
507 revs = [repo.lookup(rev) for rev in revs]
507 revs = [repo.lookup(rev) for rev in revs]
508 if opts.get('all'):
508 if opts.get('all'):
509 base = ['null']
509 base = ['null']
510 else:
510 else:
511 base = opts.get('base')
511 base = opts.get('base')
512 if base:
512 if base:
513 if dest:
513 if dest:
514 raise util.Abort(_("--base is incompatible with specifying "
514 raise util.Abort(_("--base is incompatible with specifying "
515 "a destination"))
515 "a destination"))
516 base = [repo.lookup(rev) for rev in base]
516 base = [repo.lookup(rev) for rev in base]
517 # create the right base
517 # create the right base
518 # XXX: nodesbetween / changegroup* should be "fixed" instead
518 # XXX: nodesbetween / changegroup* should be "fixed" instead
519 o = []
519 o = []
520 has = set((nullid,))
520 has = set((nullid,))
521 for n in base:
521 for n in base:
522 has.update(repo.changelog.reachable(n))
522 has.update(repo.changelog.reachable(n))
523 if revs:
523 if revs:
524 visit = list(revs)
524 visit = list(revs)
525 else:
525 else:
526 visit = repo.changelog.heads()
526 visit = repo.changelog.heads()
527 seen = {}
527 seen = {}
528 while visit:
528 while visit:
529 n = visit.pop(0)
529 n = visit.pop(0)
530 parents = [p for p in repo.changelog.parents(n) if p not in has]
530 parents = [p for p in repo.changelog.parents(n) if p not in has]
531 if len(parents) == 0:
531 if len(parents) == 0:
532 o.insert(0, n)
532 o.insert(0, n)
533 else:
533 else:
534 for p in parents:
534 for p in parents:
535 if p not in seen:
535 if p not in seen:
536 seen[p] = 1
536 seen[p] = 1
537 visit.append(p)
537 visit.append(p)
538 else:
538 else:
539 dest, revs, checkout = hg.parseurl(
539 dest, revs, checkout = hg.parseurl(
540 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
540 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
541 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
541 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
542 o = repo.findoutgoing(other, force=opts.get('force'))
542 o = repo.findoutgoing(other, force=opts.get('force'))
543
543
544 if revs:
544 if revs:
545 cg = repo.changegroupsubset(o, revs, 'bundle')
545 cg = repo.changegroupsubset(o, revs, 'bundle')
546 else:
546 else:
547 cg = repo.changegroup(o, 'bundle')
547 cg = repo.changegroup(o, 'bundle')
548
548
549 bundletype = opts.get('type', 'bzip2').lower()
549 bundletype = opts.get('type', 'bzip2').lower()
550 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
550 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
551 bundletype = btypes.get(bundletype)
551 bundletype = btypes.get(bundletype)
552 if bundletype not in changegroup.bundletypes:
552 if bundletype not in changegroup.bundletypes:
553 raise util.Abort(_('unknown bundle type specified with --type'))
553 raise util.Abort(_('unknown bundle type specified with --type'))
554
554
555 changegroup.writebundle(cg, fname, bundletype)
555 changegroup.writebundle(cg, fname, bundletype)
556
556
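Two illustrative invocations matching the docstring (the file names and the base revision are made up): the first packs the whole history into one file, the second only what a repository already holding the given base revision is missing, and the result is applied on the other side with unbundle:

    $ hg bundle --all --type gzip everything.hg
    $ hg bundle --base 1.0 missing-since-1.0.hg
    $ hg unbundle everything.hg              # on the receiving repository
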
557 def cat(ui, repo, file1, *pats, **opts):
557 def cat(ui, repo, file1, *pats, **opts):
558 """output the current or given revision of files
558 """output the current or given revision of files
559
559
560 Print the specified files as they were at the given revision. If
560 Print the specified files as they were at the given revision. If
561 no revision is given, the parent of the working directory is used,
561 no revision is given, the parent of the working directory is used,
562 or tip if no revision is checked out.
562 or tip if no revision is checked out.
563
563
564 Output may be to a file, in which case the name of the file is
564 Output may be to a file, in which case the name of the file is
565 given using a format string. The formatting rules are the same as
565 given using a format string. The formatting rules are the same as
566 for the export command, with the following additions:
566 for the export command, with the following additions:
567
567
568 %s basename of file being printed
568 %s basename of file being printed
569 %d dirname of file being printed, or '.' if in repository root
569 %d dirname of file being printed, or '.' if in repository root
570 %p root-relative path name of file being printed
570 %p root-relative path name of file being printed
571 """
571 """
572 ctx = repo[opts.get('rev')]
572 ctx = repo[opts.get('rev')]
573 err = 1
573 err = 1
574 m = cmdutil.match(repo, (file1,) + pats, opts)
574 m = cmdutil.match(repo, (file1,) + pats, opts)
575 for abs in ctx.walk(m):
575 for abs in ctx.walk(m):
576 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
576 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
577 data = ctx[abs].data()
577 data = ctx[abs].data()
578 if opts.get('decode'):
578 if opts.get('decode'):
579 data = repo.wwritedata(abs, data)
579 data = repo.wwritedata(abs, data)
580 fp.write(data)
580 fp.write(data)
581 err = 0
581 err = 0
582 return err
582 return err
583
583
584 def clone(ui, source, dest=None, **opts):
584 def clone(ui, source, dest=None, **opts):
585 """make a copy of an existing repository
585 """make a copy of an existing repository
586
586
587 Create a copy of an existing repository in a new directory.
587 Create a copy of an existing repository in a new directory.
588
588
589 If no destination directory name is specified, it defaults to the
589 If no destination directory name is specified, it defaults to the
590 basename of the source.
590 basename of the source.
591
591
592 The location of the source is added to the new repository's
592 The location of the source is added to the new repository's
593 .hg/hgrc file, as the default to be used for future pulls.
593 .hg/hgrc file, as the default to be used for future pulls.
594
594
595 If you use the -r/--rev option to clone up to a specific revision,
595 If you use the -r/--rev option to clone up to a specific revision,
596 no subsequent revisions (including subsequent tags) will be
596 no subsequent revisions (including subsequent tags) will be
597 present in the cloned repository. This option implies --pull, even
597 present in the cloned repository. This option implies --pull, even
598 on local repositories.
598 on local repositories.
599
599
600 By default, clone will check out the head of the 'default' branch.
600 By default, clone will check out the head of the 'default' branch.
601 If the -U/--noupdate option is used, the new clone will contain
601 If the -U/--noupdate option is used, the new clone will contain
602 only a repository (.hg) and no working copy (the working copy
602 only a repository (.hg) and no working copy (the working copy
603 parent is the null revision).
603 parent is the null revision).
604
604
605 See 'hg help urls' for valid source format details.
605 See 'hg help urls' for valid source format details.
606
606
607 It is possible to specify an ssh:// URL as the destination, but no
607 It is possible to specify an ssh:// URL as the destination, but no
608 .hg/hgrc and working directory will be created on the remote side.
608 .hg/hgrc and working directory will be created on the remote side.
609 Please see 'hg help urls' for important details about ssh:// URLs.
609 Please see 'hg help urls' for important details about ssh:// URLs.
610
610
611 For efficiency, hardlinks are used for cloning whenever the source
611 For efficiency, hardlinks are used for cloning whenever the source
612 and destination are on the same filesystem (note this applies only
612 and destination are on the same filesystem (note this applies only
613 to the repository data, not to the checked out files). Some
613 to the repository data, not to the checked out files). Some
614 filesystems, such as AFS, implement hardlinking incorrectly, but
614 filesystems, such as AFS, implement hardlinking incorrectly, but
615 do not report errors. In these cases, use the --pull option to
615 do not report errors. In these cases, use the --pull option to
616 avoid hardlinking.
616 avoid hardlinking.
617
617
618 In some cases, you can clone repositories and checked-out files
618 In some cases, you can clone repositories and checked-out files
619 using full hardlinks with
619 using full hardlinks with
620
620
621 $ cp -al REPO REPOCLONE
621 $ cp -al REPO REPOCLONE
622
622
623 This is the fastest way to clone, but it is not always safe. The
623 This is the fastest way to clone, but it is not always safe. The
624 operation is not atomic (making sure REPO is not modified during
624 operation is not atomic (making sure REPO is not modified during
625 the operation is up to you) and you have to make sure your editor
625 the operation is up to you) and you have to make sure your editor
626 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
626 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
627 this is not compatible with certain extensions that place their
627 this is not compatible with certain extensions that place their
628 metadata under the .hg directory, such as mq.
628 metadata under the .hg directory, such as mq.
629
629
630 """
630 """
631 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
631 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
632 pull=opts.get('pull'),
632 pull=opts.get('pull'),
633 stream=opts.get('uncompressed'),
633 stream=opts.get('uncompressed'),
634 rev=opts.get('rev'),
634 rev=opts.get('rev'),
635 update=not opts.get('noupdate'))
635 update=not opts.get('noupdate'))
636
636
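The hardlink optimisation described in the clone help text above only applies when source and destination live on the same filesystem. A minimal sketch of one way to test that precondition (a simple st_dev heuristic; not the check Mercurial itself performs):

import os

def same_filesystem(path_a, path_b):
    # Two paths can only be hardlinked to each other if they are on the
    # same device; comparing st_dev is a rough but easy way to check that.
    return os.stat(path_a).st_dev == os.stat(path_b).st_dev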
637 def commit(ui, repo, *pats, **opts):
637 def commit(ui, repo, *pats, **opts):
638 """commit the specified files or all outstanding changes
638 """commit the specified files or all outstanding changes
639
639
640 Commit changes to the given files into the repository. Unlike a
640 Commit changes to the given files into the repository. Unlike a
641 centralized RCS, this is a local operation. See hg push
641 centralized RCS, this is a local operation. See hg push
642 for a way to actively distribute your changes.
642 for a way to actively distribute your changes.
643
643
644 If a list of files is omitted, all changes reported by "hg status"
644 If a list of files is omitted, all changes reported by "hg status"
645 will be committed.
645 will be committed.
646
646
647 If you are committing the result of a merge, do not provide any
647 If you are committing the result of a merge, do not provide any
648 filenames or -I/-X filters.
648 filenames or -I/-X filters.
649
649
650 If no commit message is specified, the configured editor is
650 If no commit message is specified, the configured editor is
651 started to prompt you for a message.
651 started to prompt you for a message.
652
652
653 See 'hg help dates' for a list of formats valid for -d/--date.
653 See 'hg help dates' for a list of formats valid for -d/--date.
654 """
654 """
655 extra = {}
655 extra = {}
656 if opts.get('close_branch'):
656 if opts.get('close_branch'):
657 extra['close'] = 1
657 extra['close'] = 1
658 e = cmdutil.commiteditor
658 e = cmdutil.commiteditor
659 if opts.get('force_editor'):
659 if opts.get('force_editor'):
660 e = cmdutil.commitforceeditor
660 e = cmdutil.commitforceeditor
661
661
662 def commitfunc(ui, repo, message, match, opts):
662 def commitfunc(ui, repo, message, match, opts):
663 return repo.commit(message, opts.get('user'), opts.get('date'), match,
663 return repo.commit(message, opts.get('user'), opts.get('date'), match,
664 editor=e, extra=extra)
664 editor=e, extra=extra)
665
665
666 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
666 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
667 if not node:
667 if not node:
668 ui.status(_("nothing changed\n"))
668 ui.status(_("nothing changed\n"))
669 return
669 return
670 cl = repo.changelog
670 cl = repo.changelog
671 rev = cl.rev(node)
671 rev = cl.rev(node)
672 parents = cl.parentrevs(rev)
672 parents = cl.parentrevs(rev)
673 if rev - 1 in parents:
673 if rev - 1 in parents:
674 # one of the parents was the old tip
674 # one of the parents was the old tip
675 pass
675 pass
676 elif (parents == (nullrev, nullrev) or
676 elif (parents == (nullrev, nullrev) or
677 len(cl.heads(cl.node(parents[0]))) > 1 and
677 len(cl.heads(cl.node(parents[0]))) > 1 and
678 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
678 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
679 ui.status(_('created new head\n'))
679 ui.status(_('created new head\n'))
680
680
681 if ui.debugflag:
681 if ui.debugflag:
682 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
682 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
683 elif ui.verbose:
683 elif ui.verbose:
684 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
684 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
685
685
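A sketch restating the "created new head" test in the commit function above as a standalone predicate. nheads stands in for len(cl.heads(cl.node(rev))) and is passed as a callable here purely for illustration.

def created_new_head(rev, parents, nullrev, nheads):
    if rev - 1 in parents:
        # one of the parents was the old tip: the head count did not grow
        return False
    return (parents == (nullrev, nullrev) or
            (nheads(parents[0]) > 1 and
             (parents[1] == nullrev or nheads(parents[1]) > 1)))

# e.g. committing on top of a revision whose branch already had another head:
assert created_new_head(5, (3, -1), -1, lambda r: 2)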
686 def copy(ui, repo, *pats, **opts):
686 def copy(ui, repo, *pats, **opts):
687 """mark files as copied for the next commit
687 """mark files as copied for the next commit
688
688
689 Mark dest as having copies of source files. If dest is a
689 Mark dest as having copies of source files. If dest is a
690 directory, copies are put in that directory. If dest is a file,
690 directory, copies are put in that directory. If dest is a file,
691 the source must be a single file.
691 the source must be a single file.
692
692
693 By default, this command copies the contents of files as they
693 By default, this command copies the contents of files as they
694 exist in the working directory. If invoked with -A/--after, the
694 exist in the working directory. If invoked with -A/--after, the
695 operation is recorded, but no copying is performed.
695 operation is recorded, but no copying is performed.
696
696
697 This command takes effect with the next commit. To undo a copy
697 This command takes effect with the next commit. To undo a copy
698 before that, see hg revert.
698 before that, see hg revert.
699 """
699 """
700 wlock = repo.wlock(False)
700 wlock = repo.wlock(False)
701 try:
701 try:
702 return cmdutil.copy(ui, repo, pats, opts)
702 return cmdutil.copy(ui, repo, pats, opts)
703 finally:
703 finally:
704 wlock.release()
704 wlock.release()
705
705
706 def debugancestor(ui, repo, *args):
706 def debugancestor(ui, repo, *args):
707 """find the ancestor revision of two revisions in a given index"""
707 """find the ancestor revision of two revisions in a given index"""
708 if len(args) == 3:
708 if len(args) == 3:
709 index, rev1, rev2 = args
709 index, rev1, rev2 = args
710 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
710 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
711 lookup = r.lookup
711 lookup = r.lookup
712 elif len(args) == 2:
712 elif len(args) == 2:
713 if not repo:
713 if not repo:
714 raise util.Abort(_("There is no Mercurial repository here "
714 raise util.Abort(_("There is no Mercurial repository here "
715 "(.hg not found)"))
715 "(.hg not found)"))
716 rev1, rev2 = args
716 rev1, rev2 = args
717 r = repo.changelog
717 r = repo.changelog
718 lookup = repo.lookup
718 lookup = repo.lookup
719 else:
719 else:
720 raise util.Abort(_('either two or three arguments required'))
720 raise util.Abort(_('either two or three arguments required'))
721 a = r.ancestor(lookup(rev1), lookup(rev2))
721 a = r.ancestor(lookup(rev1), lookup(rev2))
722 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
722 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
723
723
724 def debugcommands(ui, cmd='', *args):
724 def debugcommands(ui, cmd='', *args):
725 for cmd, vals in sorted(table.iteritems()):
725 for cmd, vals in sorted(table.iteritems()):
726 cmd = cmd.split('|')[0].strip('^')
726 cmd = cmd.split('|')[0].strip('^')
727 opts = ', '.join([i[1] for i in vals[1]])
727 opts = ', '.join([i[1] for i in vals[1]])
728 ui.write('%s: %s\n' % (cmd, opts))
728 ui.write('%s: %s\n' % (cmd, opts))
729
729
730 def debugcomplete(ui, cmd='', **opts):
730 def debugcomplete(ui, cmd='', **opts):
731 """returns the completion list associated with the given command"""
731 """returns the completion list associated with the given command"""
732
732
733 if opts.get('options'):
733 if opts.get('options'):
734 options = []
734 options = []
735 otables = [globalopts]
735 otables = [globalopts]
736 if cmd:
736 if cmd:
737 aliases, entry = cmdutil.findcmd(cmd, table, False)
737 aliases, entry = cmdutil.findcmd(cmd, table, False)
738 otables.append(entry[1])
738 otables.append(entry[1])
739 for t in otables:
739 for t in otables:
740 for o in t:
740 for o in t:
741 if o[0]:
741 if o[0]:
742 options.append('-%s' % o[0])
742 options.append('-%s' % o[0])
743 options.append('--%s' % o[1])
743 options.append('--%s' % o[1])
744 ui.write("%s\n" % "\n".join(options))
744 ui.write("%s\n" % "\n".join(options))
745 return
745 return
746
746
747 cmdlist = cmdutil.findpossible(cmd, table)
747 cmdlist = cmdutil.findpossible(cmd, table)
748 if ui.verbose:
748 if ui.verbose:
749 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
749 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
750 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
750 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
751
751
752 def debugfsinfo(ui, path = "."):
752 def debugfsinfo(ui, path = "."):
753 file('.debugfsinfo', 'w').write('')
753 open('.debugfsinfo', 'w').write('')
754 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
754 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
755 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
755 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
756 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
756 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
757 and 'yes' or 'no'))
757 and 'yes' or 'no'))
758 os.unlink('.debugfsinfo')
758 os.unlink('.debugfsinfo')
759
759
760 def debugrebuildstate(ui, repo, rev="tip"):
760 def debugrebuildstate(ui, repo, rev="tip"):
761 """rebuild the dirstate as it would look like for the given revision"""
761 """rebuild the dirstate as it would look like for the given revision"""
762 ctx = repo[rev]
762 ctx = repo[rev]
763 wlock = repo.wlock()
763 wlock = repo.wlock()
764 try:
764 try:
765 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
765 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
766 finally:
766 finally:
767 wlock.release()
767 wlock.release()
768
768
769 def debugcheckstate(ui, repo):
769 def debugcheckstate(ui, repo):
770 """validate the correctness of the current dirstate"""
770 """validate the correctness of the current dirstate"""
771 parent1, parent2 = repo.dirstate.parents()
771 parent1, parent2 = repo.dirstate.parents()
772 m1 = repo[parent1].manifest()
772 m1 = repo[parent1].manifest()
773 m2 = repo[parent2].manifest()
773 m2 = repo[parent2].manifest()
774 errors = 0
774 errors = 0
775 for f in repo.dirstate:
775 for f in repo.dirstate:
776 state = repo.dirstate[f]
776 state = repo.dirstate[f]
777 if state in "nr" and f not in m1:
777 if state in "nr" and f not in m1:
778 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
778 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
779 errors += 1
779 errors += 1
780 if state in "a" and f in m1:
780 if state in "a" and f in m1:
781 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
781 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
782 errors += 1
782 errors += 1
783 if state in "m" and f not in m1 and f not in m2:
783 if state in "m" and f not in m1 and f not in m2:
784 ui.warn(_("%s in state %s, but not in either manifest\n") %
784 ui.warn(_("%s in state %s, but not in either manifest\n") %
785 (f, state))
785 (f, state))
786 errors += 1
786 errors += 1
787 for f in m1:
787 for f in m1:
788 state = repo.dirstate[f]
788 state = repo.dirstate[f]
789 if state not in "nrm":
789 if state not in "nrm":
790 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
790 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
791 errors += 1
791 errors += 1
792 if errors:
792 if errors:
793 error = _(".hg/dirstate inconsistent with current parent's manifest")
793 error = _(".hg/dirstate inconsistent with current parent's manifest")
794 raise util.Abort(error)
794 raise util.Abort(error)
795
795
796 def showconfig(ui, repo, *values, **opts):
796 def showconfig(ui, repo, *values, **opts):
797 """show combined config settings from all hgrc files
797 """show combined config settings from all hgrc files
798
798
799 With no arguments, print names and values of all config items.
799 With no arguments, print names and values of all config items.
800
800
801 With one argument of the form section.name, print just the value
801 With one argument of the form section.name, print just the value
802 of that config item.
802 of that config item.
803
803
804 With multiple arguments, print names and values of all config
804 With multiple arguments, print names and values of all config
805 items with matching section names.
805 items with matching section names.
806
806
807 With --debug, the source (filename and line number) is printed
807 With --debug, the source (filename and line number) is printed
808 for each config item.
808 for each config item.
809 """
809 """
810
810
811 untrusted = bool(opts.get('untrusted'))
811 untrusted = bool(opts.get('untrusted'))
812 if values:
812 if values:
813 if len([v for v in values if '.' in v]) > 1:
813 if len([v for v in values if '.' in v]) > 1:
814 raise util.Abort(_('only one config item permitted'))
814 raise util.Abort(_('only one config item permitted'))
815 for section, name, value in ui.walkconfig(untrusted=untrusted):
815 for section, name, value in ui.walkconfig(untrusted=untrusted):
816 sectname = section + '.' + name
816 sectname = section + '.' + name
817 if values:
817 if values:
818 for v in values:
818 for v in values:
819 if v == section:
819 if v == section:
820 ui.debug('%s: ' %
820 ui.debug('%s: ' %
821 ui.configsource(section, name, untrusted))
821 ui.configsource(section, name, untrusted))
822 ui.write('%s=%s\n' % (sectname, value))
822 ui.write('%s=%s\n' % (sectname, value))
823 elif v == sectname:
823 elif v == sectname:
824 ui.debug('%s: ' %
824 ui.debug('%s: ' %
825 ui.configsource(section, name, untrusted))
825 ui.configsource(section, name, untrusted))
826 ui.write(value, '\n')
826 ui.write(value, '\n')
827 else:
827 else:
828 ui.debug('%s: ' %
828 ui.debug('%s: ' %
829 ui.configsource(section, name, untrusted))
829 ui.configsource(section, name, untrusted))
830 ui.write('%s=%s\n' % (sectname, value))
830 ui.write('%s=%s\n' % (sectname, value))
831
831
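A small self-contained sketch of the selection rules described in the showconfig help text above, applied to made-up configuration items (names and values are placeholders, and this bypasses ui.walkconfig entirely):

items = [('ui', 'username', 'Alice'), ('ui', 'editor', 'vi'),
         ('web', 'port', '8000')]

def show(values):
    out = []
    for section, name, value in items:
        sectname = section + '.' + name
        if not values:
            out.append('%s=%s' % (sectname, value))
            continue
        for v in values:
            if v == section:                 # whole section: name=value pairs
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:              # exact item: value only
                out.append(value)
    return out

assert show([]) == ['ui.username=Alice', 'ui.editor=vi', 'web.port=8000']
assert show(['ui.editor']) == ['vi']
assert show(['ui']) == ['ui.username=Alice', 'ui.editor=vi']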
832 def debugsetparents(ui, repo, rev1, rev2=None):
832 def debugsetparents(ui, repo, rev1, rev2=None):
833 """manually set the parents of the current working directory
833 """manually set the parents of the current working directory
834
834
835 This is useful for writing repository conversion tools, but should
835 This is useful for writing repository conversion tools, but should
836 be used with care.
836 be used with care.
837 """
837 """
838
838
839 if not rev2:
839 if not rev2:
840 rev2 = hex(nullid)
840 rev2 = hex(nullid)
841
841
842 wlock = repo.wlock()
842 wlock = repo.wlock()
843 try:
843 try:
844 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
844 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
845 finally:
845 finally:
846 wlock.release()
846 wlock.release()
847
847
848 def debugstate(ui, repo, nodates=None):
848 def debugstate(ui, repo, nodates=None):
849 """show the contents of the current dirstate"""
849 """show the contents of the current dirstate"""
850 timestr = ""
850 timestr = ""
851 showdate = not nodates
851 showdate = not nodates
852 for file_, ent in sorted(repo.dirstate._map.iteritems()):
852 for file_, ent in sorted(repo.dirstate._map.iteritems()):
853 if showdate:
853 if showdate:
854 if ent[3] == -1:
854 if ent[3] == -1:
855 # Pad or slice to locale representation
855 # Pad or slice to locale representation
856 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
856 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
857 timestr = 'unset'
857 timestr = 'unset'
858 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
858 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
859 else:
859 else:
860 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
860 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
861 if ent[1] & 020000:
861 if ent[1] & 020000:
862 mode = 'lnk'
862 mode = 'lnk'
863 else:
863 else:
864 mode = '%3o' % (ent[1] & 0777)
864 mode = '%3o' % (ent[1] & 0777)
865 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
865 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
866 for f in repo.dirstate.copies():
866 for f in repo.dirstate.copies():
867 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
867 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
868
868
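A short note on the 020000 test in debugstate above: it isolates the bit that distinguishes a symlink mode (S_IFLNK, 0120000) from a regular-file mode (S_IFREG, 0100000). A sketch of the same check written against the stat module:

import stat

LINK_BIT = stat.S_IFLNK & ~stat.S_IFREG      # == 020000

def is_link_mode(mode):
    return bool(mode & LINK_BIT)

assert is_link_mode(stat.S_IFLNK | 0777)
assert not is_link_mode(stat.S_IFREG | 0644)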
869 def debugsub(ui, repo, rev=None):
869 def debugsub(ui, repo, rev=None):
870 if rev == '':
870 if rev == '':
871 rev = None
871 rev = None
872 for k,v in sorted(repo[rev].substate.items()):
872 for k,v in sorted(repo[rev].substate.items()):
873 ui.write('path %s\n' % k)
873 ui.write('path %s\n' % k)
874 ui.write(' source %s\n' % v[0])
874 ui.write(' source %s\n' % v[0])
875 ui.write(' revision %s\n' % v[1])
875 ui.write(' revision %s\n' % v[1])
876
876
877 def debugdata(ui, file_, rev):
877 def debugdata(ui, file_, rev):
878 """dump the contents of a data file revision"""
878 """dump the contents of a data file revision"""
879 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
879 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
880 try:
880 try:
881 ui.write(r.revision(r.lookup(rev)))
881 ui.write(r.revision(r.lookup(rev)))
882 except KeyError:
882 except KeyError:
883 raise util.Abort(_('invalid revision identifier %s') % rev)
883 raise util.Abort(_('invalid revision identifier %s') % rev)
884
884
885 def debugdate(ui, date, range=None, **opts):
885 def debugdate(ui, date, range=None, **opts):
886 """parse and display a date"""
886 """parse and display a date"""
887 if opts["extended"]:
887 if opts["extended"]:
888 d = util.parsedate(date, util.extendeddateformats)
888 d = util.parsedate(date, util.extendeddateformats)
889 else:
889 else:
890 d = util.parsedate(date)
890 d = util.parsedate(date)
891 ui.write("internal: %s %s\n" % d)
891 ui.write("internal: %s %s\n" % d)
892 ui.write("standard: %s\n" % util.datestr(d))
892 ui.write("standard: %s\n" % util.datestr(d))
893 if range:
893 if range:
894 m = util.matchdate(range)
894 m = util.matchdate(range)
895 ui.write("match: %s\n" % m(d[0]))
895 ui.write("match: %s\n" % m(d[0]))
896
896
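The "internal" form printed by debugdate above is Mercurial's (unixtime, offset) date pair. A rough standalone stand-in for the simplest case, assuming a 'YYYY-MM-DD HH:MM:SS' string read as UTC (util.parsedate itself accepts many more formats and timezones):

import time, calendar

def parse_simple(datestr):
    st = time.strptime(datestr, '%Y-%m-%d %H:%M:%S')
    return calendar.timegm(st), 0            # (unixtime, offset from UTC)

assert parse_simple('2009-07-01 12:00:00') == (1246449600, 0)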
897 def debugindex(ui, file_):
897 def debugindex(ui, file_):
898 """dump the contents of an index file"""
898 """dump the contents of an index file"""
899 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
899 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
900 ui.write(" rev offset length base linkrev"
900 ui.write(" rev offset length base linkrev"
901 " nodeid p1 p2\n")
901 " nodeid p1 p2\n")
902 for i in r:
902 for i in r:
903 node = r.node(i)
903 node = r.node(i)
904 try:
904 try:
905 pp = r.parents(node)
905 pp = r.parents(node)
906 except:
906 except:
907 pp = [nullid, nullid]
907 pp = [nullid, nullid]
908 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
908 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
909 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
909 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
910 short(node), short(pp[0]), short(pp[1])))
910 short(node), short(pp[0]), short(pp[1])))
911
911
912 def debugindexdot(ui, file_):
912 def debugindexdot(ui, file_):
913 """dump an index DAG as a graphviz dot file"""
913 """dump an index DAG as a graphviz dot file"""
914 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
914 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
915 ui.write("digraph G {\n")
915 ui.write("digraph G {\n")
916 for i in r:
916 for i in r:
917 node = r.node(i)
917 node = r.node(i)
918 pp = r.parents(node)
918 pp = r.parents(node)
919 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
919 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
920 if pp[1] != nullid:
920 if pp[1] != nullid:
921 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
921 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
922 ui.write("}\n")
922 ui.write("}\n")
923
923
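A sketch of the dot text that debugindexdot above emits, built here from a toy rev -> (p1, p2) table instead of a real revlog (rev 2 merges revisions 0 and 1; -1 is the null revision):

parents = {0: (-1, -1), 1: (0, -1), 2: (0, 1)}

lines = ['digraph G {']
for rev in sorted(parents):
    p1, p2 = parents[rev]
    lines.append('\t%d -> %d' % (p1, rev))
    if p2 != -1:
        lines.append('\t%d -> %d' % (p2, rev))
lines.append('}')
dot = '\n'.join(lines)
# feed `dot` to graphviz (e.g. dot -Tpng) to draw the DAG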
924 def debuginstall(ui):
924 def debuginstall(ui):
925 '''test Mercurial installation'''
925 '''test Mercurial installation'''
926
926
927 def writetemp(contents):
927 def writetemp(contents):
928 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
928 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
929 f = os.fdopen(fd, "wb")
929 f = os.fdopen(fd, "wb")
930 f.write(contents)
930 f.write(contents)
931 f.close()
931 f.close()
932 return name
932 return name
933
933
934 problems = 0
934 problems = 0
935
935
936 # encoding
936 # encoding
937 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
937 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
938 try:
938 try:
939 encoding.fromlocal("test")
939 encoding.fromlocal("test")
940 except util.Abort, inst:
940 except util.Abort, inst:
941 ui.write(" %s\n" % inst)
941 ui.write(" %s\n" % inst)
942 ui.write(_(" (check that your locale is properly set)\n"))
942 ui.write(_(" (check that your locale is properly set)\n"))
943 problems += 1
943 problems += 1
944
944
945 # compiled modules
945 # compiled modules
946 ui.status(_("Checking extensions...\n"))
946 ui.status(_("Checking extensions...\n"))
947 try:
947 try:
948 import bdiff, mpatch, base85
948 import bdiff, mpatch, base85
949 except Exception, inst:
949 except Exception, inst:
950 ui.write(" %s\n" % inst)
950 ui.write(" %s\n" % inst)
951 ui.write(_(" One or more extensions could not be found"))
951 ui.write(_(" One or more extensions could not be found"))
952 ui.write(_(" (check that you compiled the extensions)\n"))
952 ui.write(_(" (check that you compiled the extensions)\n"))
953 problems += 1
953 problems += 1
954
954
955 # templates
955 # templates
956 ui.status(_("Checking templates...\n"))
956 ui.status(_("Checking templates...\n"))
957 try:
957 try:
958 import templater
958 import templater
959 templater.templater(templater.templatepath("map-cmdline.default"))
959 templater.templater(templater.templatepath("map-cmdline.default"))
960 except Exception, inst:
960 except Exception, inst:
961 ui.write(" %s\n" % inst)
961 ui.write(" %s\n" % inst)
962 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
962 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
963 problems += 1
963 problems += 1
964
964
965 # patch
965 # patch
966 ui.status(_("Checking patch...\n"))
966 ui.status(_("Checking patch...\n"))
967 patchproblems = 0
967 patchproblems = 0
968 a = "1\n2\n3\n4\n"
968 a = "1\n2\n3\n4\n"
969 b = "1\n2\n3\ninsert\n4\n"
969 b = "1\n2\n3\ninsert\n4\n"
970 fa = writetemp(a)
970 fa = writetemp(a)
971 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
971 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
972 os.path.basename(fa))
972 os.path.basename(fa))
973 fd = writetemp(d)
973 fd = writetemp(d)
974
974
975 files = {}
975 files = {}
976 try:
976 try:
977 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
977 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
978 except util.Abort, e:
978 except util.Abort, e:
979 ui.write(_(" patch call failed:\n"))
979 ui.write(_(" patch call failed:\n"))
980 ui.write(" " + str(e) + "\n")
980 ui.write(" " + str(e) + "\n")
981 patchproblems += 1
981 patchproblems += 1
982 else:
982 else:
983 if list(files) != [os.path.basename(fa)]:
983 if list(files) != [os.path.basename(fa)]:
984 ui.write(_(" unexpected patch output!\n"))
984 ui.write(_(" unexpected patch output!\n"))
985 patchproblems += 1
985 patchproblems += 1
986 a = file(fa).read()
986 a = open(fa).read()
987 if a != b:
987 if a != b:
988 ui.write(_(" patch test failed!\n"))
988 ui.write(_(" patch test failed!\n"))
989 patchproblems += 1
989 patchproblems += 1
990
990
991 if patchproblems:
991 if patchproblems:
992 if ui.config('ui', 'patch'):
992 if ui.config('ui', 'patch'):
993 ui.write(_(" (Current patch tool may be incompatible with patch,"
993 ui.write(_(" (Current patch tool may be incompatible with patch,"
994 " or misconfigured. Please check your .hgrc file)\n"))
994 " or misconfigured. Please check your .hgrc file)\n"))
995 else:
995 else:
996 ui.write(_(" Internal patcher failure, please report this error"
996 ui.write(_(" Internal patcher failure, please report this error"
997 " to http://mercurial.selenic.com/bts/\n"))
997 " to http://mercurial.selenic.com/bts/\n"))
998 problems += patchproblems
998 problems += patchproblems
999
999
1000 os.unlink(fa)
1000 os.unlink(fa)
1001 os.unlink(fd)
1001 os.unlink(fd)
1002
1002
1003 # editor
1003 # editor
1004 ui.status(_("Checking commit editor...\n"))
1004 ui.status(_("Checking commit editor...\n"))
1005 editor = ui.geteditor()
1005 editor = ui.geteditor()
1006 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1006 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1007 if not cmdpath:
1007 if not cmdpath:
1008 if editor == 'vi':
1008 if editor == 'vi':
1009 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1009 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1010 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1010 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1011 else:
1011 else:
1012 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1012 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1013 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1013 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1014 problems += 1
1014 problems += 1
1015
1015
1016 # check username
1016 # check username
1017 ui.status(_("Checking username...\n"))
1017 ui.status(_("Checking username...\n"))
1018 user = os.environ.get("HGUSER")
1018 user = os.environ.get("HGUSER")
1019 if user is None:
1019 if user is None:
1020 user = ui.config("ui", "username")
1020 user = ui.config("ui", "username")
1021 if user is None:
1021 if user is None:
1022 user = os.environ.get("EMAIL")
1022 user = os.environ.get("EMAIL")
1023 if not user:
1023 if not user:
1024 ui.warn(" ")
1024 ui.warn(" ")
1025 ui.username()
1025 ui.username()
1026 ui.write(_(" (specify a username in your .hgrc file)\n"))
1026 ui.write(_(" (specify a username in your .hgrc file)\n"))
1027
1027
1028 if not problems:
1028 if not problems:
1029 ui.status(_("No problems detected\n"))
1029 ui.status(_("No problems detected\n"))
1030 else:
1030 else:
1031 ui.write(_("%s problems detected,"
1031 ui.write(_("%s problems detected,"
1032 " please check your install!\n") % problems)
1032 " please check your install!\n") % problems)
1033
1033
1034 return problems
1034 return problems
1035
1035
1036 def debugrename(ui, repo, file1, *pats, **opts):
1036 def debugrename(ui, repo, file1, *pats, **opts):
1037 """dump rename information"""
1037 """dump rename information"""
1038
1038
1039 ctx = repo[opts.get('rev')]
1039 ctx = repo[opts.get('rev')]
1040 m = cmdutil.match(repo, (file1,) + pats, opts)
1040 m = cmdutil.match(repo, (file1,) + pats, opts)
1041 for abs in ctx.walk(m):
1041 for abs in ctx.walk(m):
1042 fctx = ctx[abs]
1042 fctx = ctx[abs]
1043 o = fctx.filelog().renamed(fctx.filenode())
1043 o = fctx.filelog().renamed(fctx.filenode())
1044 rel = m.rel(abs)
1044 rel = m.rel(abs)
1045 if o:
1045 if o:
1046 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1046 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1047 else:
1047 else:
1048 ui.write(_("%s not renamed\n") % rel)
1048 ui.write(_("%s not renamed\n") % rel)
1049
1049
1050 def debugwalk(ui, repo, *pats, **opts):
1050 def debugwalk(ui, repo, *pats, **opts):
1051 """show how files match on given patterns"""
1051 """show how files match on given patterns"""
1052 m = cmdutil.match(repo, pats, opts)
1052 m = cmdutil.match(repo, pats, opts)
1053 items = list(repo.walk(m))
1053 items = list(repo.walk(m))
1054 if not items:
1054 if not items:
1055 return
1055 return
1056 fmt = 'f %%-%ds %%-%ds %%s' % (
1056 fmt = 'f %%-%ds %%-%ds %%s' % (
1057 max([len(abs) for abs in items]),
1057 max([len(abs) for abs in items]),
1058 max([len(m.rel(abs)) for abs in items]))
1058 max([len(m.rel(abs)) for abs in items]))
1059 for abs in items:
1059 for abs in items:
1060 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1060 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1061 ui.write("%s\n" % line.rstrip())
1061 ui.write("%s\n" % line.rstrip())
1062
1062
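The doubled percent signs in debugwalk above build a second format string whose column widths depend on the longest paths seen; a tiny sketch of that two-stage formatting:

fmt = 'f %%-%ds %%-%ds %%s' % (10, 8)
assert fmt == 'f %-10s %-8s %s'
line = fmt % ('a/b.txt', 'b.txt', 'exact')
assert line.split() == ['f', 'a/b.txt', 'b.txt', 'exact']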
1063 def diff(ui, repo, *pats, **opts):
1063 def diff(ui, repo, *pats, **opts):
1064 """diff repository (or selected files)
1064 """diff repository (or selected files)
1065
1065
1066 Show differences between revisions for the specified files.
1066 Show differences between revisions for the specified files.
1067
1067
1068 Differences between files are shown using the unified diff format.
1068 Differences between files are shown using the unified diff format.
1069
1069
1070 NOTE: diff may generate unexpected results for merges, as it will
1070 NOTE: diff may generate unexpected results for merges, as it will
1071 default to comparing against the working directory's first parent
1071 default to comparing against the working directory's first parent
1072 changeset if no revisions are specified.
1072 changeset if no revisions are specified.
1073
1073
1074 When two revision arguments are given, then changes are shown
1074 When two revision arguments are given, then changes are shown
1075 between those revisions. If only one revision is specified then
1075 between those revisions. If only one revision is specified then
1076 that revision is compared to the working directory, and, when no
1076 that revision is compared to the working directory, and, when no
1077 revisions are specified, the working directory files are compared
1077 revisions are specified, the working directory files are compared
1078 to its parent.
1078 to its parent.
1079
1079
1080 Without the -a/--text option, diff will avoid generating diffs of
1080 Without the -a/--text option, diff will avoid generating diffs of
1081 files it detects as binary. With -a, diff will generate a diff
1081 files it detects as binary. With -a, diff will generate a diff
1082 anyway, probably with undesirable results.
1082 anyway, probably with undesirable results.
1083
1083
1084 Use the -g/--git option to generate diffs in the git extended diff
1084 Use the -g/--git option to generate diffs in the git extended diff
1085 format. For more information, read 'hg help diffs'.
1085 format. For more information, read 'hg help diffs'.
1086 """
1086 """
1087
1087
1088 revs = opts.get('rev')
1088 revs = opts.get('rev')
1089 change = opts.get('change')
1089 change = opts.get('change')
1090
1090
1091 if revs and change:
1091 if revs and change:
1092 msg = _('cannot specify --rev and --change at the same time')
1092 msg = _('cannot specify --rev and --change at the same time')
1093 raise util.Abort(msg)
1093 raise util.Abort(msg)
1094 elif change:
1094 elif change:
1095 node2 = repo.lookup(change)
1095 node2 = repo.lookup(change)
1096 node1 = repo[node2].parents()[0].node()
1096 node1 = repo[node2].parents()[0].node()
1097 else:
1097 else:
1098 node1, node2 = cmdutil.revpair(repo, revs)
1098 node1, node2 = cmdutil.revpair(repo, revs)
1099
1099
1100 m = cmdutil.match(repo, pats, opts)
1100 m = cmdutil.match(repo, pats, opts)
1101 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1101 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1102 for chunk in it:
1102 for chunk in it:
1103 ui.write(chunk)
1103 ui.write(chunk)
1104
1104
1105 def export(ui, repo, *changesets, **opts):
1105 def export(ui, repo, *changesets, **opts):
1106 """dump the header and diffs for one or more changesets
1106 """dump the header and diffs for one or more changesets
1107
1107
1108 Print the changeset header and diffs for one or more revisions.
1108 Print the changeset header and diffs for one or more revisions.
1109
1109
1110 The information shown in the changeset header is: author,
1110 The information shown in the changeset header is: author,
1111 changeset hash, parent(s) and commit comment.
1111 changeset hash, parent(s) and commit comment.
1112
1112
1113 NOTE: export may generate unexpected diff output for merge
1113 NOTE: export may generate unexpected diff output for merge
1114 changesets, as it will compare the merge changeset against its
1114 changesets, as it will compare the merge changeset against its
1115 first parent only.
1115 first parent only.
1116
1116
1117 Output may be to a file, in which case the name of the file is
1117 Output may be to a file, in which case the name of the file is
1118 given using a format string. The formatting rules are as follows:
1118 given using a format string. The formatting rules are as follows:
1119
1119
1120 %% literal "%" character
1120 %% literal "%" character
1121 %H changeset hash (40 bytes of hexadecimal)
1121 %H changeset hash (40 bytes of hexadecimal)
1122 %N number of patches being generated
1122 %N number of patches being generated
1123 %R changeset revision number
1123 %R changeset revision number
1124 %b basename of the exporting repository
1124 %b basename of the exporting repository
1125 %h short-form changeset hash (12 bytes of hexadecimal)
1125 %h short-form changeset hash (12 bytes of hexadecimal)
1126 %n zero-padded sequence number, starting at 1
1126 %n zero-padded sequence number, starting at 1
1127 %r zero-padded changeset revision number
1127 %r zero-padded changeset revision number
1128
1128
1129 Without the -a/--text option, export will avoid generating diffs
1129 Without the -a/--text option, export will avoid generating diffs
1130 of files it detects as binary. With -a, export will generate a
1130 of files it detects as binary. With -a, export will generate a
1131 diff anyway, probably with undesirable results.
1131 diff anyway, probably with undesirable results.
1132
1132
1133 Use the -g/--git option to generate diffs in the git extended diff
1133 Use the -g/--git option to generate diffs in the git extended diff
1134 format. See 'hg help diffs' for more information.
1134 format. See 'hg help diffs' for more information.
1135
1135
1136 With the --switch-parent option, the diff will be against the
1136 With the --switch-parent option, the diff will be against the
1137 second parent. This can be useful for reviewing a merge.
1137 second parent. This can be useful for reviewing a merge.
1138 """
1138 """
1139 if not changesets:
1139 if not changesets:
1140 raise util.Abort(_("export requires at least one changeset"))
1140 raise util.Abort(_("export requires at least one changeset"))
1141 revs = cmdutil.revrange(repo, changesets)
1141 revs = cmdutil.revrange(repo, changesets)
1142 if len(revs) > 1:
1142 if len(revs) > 1:
1143 ui.note(_('exporting patches:\n'))
1143 ui.note(_('exporting patches:\n'))
1144 else:
1144 else:
1145 ui.note(_('exporting patch:\n'))
1145 ui.note(_('exporting patch:\n'))
1146 patch.export(repo, revs, template=opts.get('output'),
1146 patch.export(repo, revs, template=opts.get('output'),
1147 switch_parent=opts.get('switch_parent'),
1147 switch_parent=opts.get('switch_parent'),
1148 opts=patch.diffopts(ui, opts))
1148 opts=patch.diffopts(ui, opts))
1149
1149
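A sketch of how an export --output pattern using the codes listed above might expand; every value below is a placeholder, and the real substitution happens inside patch.export, not here:

pattern = '%b-%R-%h.patch'
expanded = (pattern.replace('%b', 'hg')              # basename of the repository
                   .replace('%R', '1200')            # changeset revision number
                   .replace('%h', '7c2fd3b9020c'))   # short changeset hash
assert expanded == 'hg-1200-7c2fd3b9020c.patch'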
1150 def forget(ui, repo, *pats, **opts):
1150 def forget(ui, repo, *pats, **opts):
1151 """forget the specified files on the next commit
1151 """forget the specified files on the next commit
1152
1152
1153 Mark the specified files so they will no longer be tracked
1153 Mark the specified files so they will no longer be tracked
1154 after the next commit.
1154 after the next commit.
1155
1155
1156 This only removes files from the current branch, not from the
1156 This only removes files from the current branch, not from the
1157 entire project history, and it does not delete them from the
1157 entire project history, and it does not delete them from the
1158 working directory.
1158 working directory.
1159
1159
1160 To undo a forget before the next commit, see hg add.
1160 To undo a forget before the next commit, see hg add.
1161 """
1161 """
1162
1162
1163 if not pats:
1163 if not pats:
1164 raise util.Abort(_('no files specified'))
1164 raise util.Abort(_('no files specified'))
1165
1165
1166 m = cmdutil.match(repo, pats, opts)
1166 m = cmdutil.match(repo, pats, opts)
1167 s = repo.status(match=m, clean=True)
1167 s = repo.status(match=m, clean=True)
1168 forget = sorted(s[0] + s[1] + s[3] + s[6])
1168 forget = sorted(s[0] + s[1] + s[3] + s[6])
1169
1169
1170 for f in m.files():
1170 for f in m.files():
1171 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1171 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1172 ui.warn(_('not removing %s: file is already untracked\n')
1172 ui.warn(_('not removing %s: file is already untracked\n')
1173 % m.rel(f))
1173 % m.rel(f))
1174
1174
1175 for f in forget:
1175 for f in forget:
1176 if ui.verbose or not m.exact(f):
1176 if ui.verbose or not m.exact(f):
1177 ui.status(_('removing %s\n') % m.rel(f))
1177 ui.status(_('removing %s\n') % m.rel(f))
1178
1178
1179 repo.remove(forget, unlink=False)
1179 repo.remove(forget, unlink=False)
1180
1180
1181 def grep(ui, repo, pattern, *pats, **opts):
1181 def grep(ui, repo, pattern, *pats, **opts):
1182 """search for a pattern in specified files and revisions
1182 """search for a pattern in specified files and revisions
1183
1183
1184 Search revisions of files for a regular expression.
1184 Search revisions of files for a regular expression.
1185
1185
1186 This command behaves differently than Unix grep. It only accepts
1186 This command behaves differently than Unix grep. It only accepts
1187 Python/Perl regexps. It searches repository history, not the
1187 Python/Perl regexps. It searches repository history, not the
1188 working directory. It always prints the revision number in which a
1188 working directory. It always prints the revision number in which a
1189 match appears.
1189 match appears.
1190
1190
1191 By default, grep only prints output for the first revision of a
1191 By default, grep only prints output for the first revision of a
1192 file in which it finds a match. To get it to print every revision
1192 file in which it finds a match. To get it to print every revision
1193 that contains a change in match status ("-" for a match that
1193 that contains a change in match status ("-" for a match that
1194 becomes a non-match, or "+" for a non-match that becomes a match),
1194 becomes a non-match, or "+" for a non-match that becomes a match),
1195 use the --all flag.
1195 use the --all flag.
1196 """
1196 """
1197 reflags = 0
1197 reflags = 0
1198 if opts.get('ignore_case'):
1198 if opts.get('ignore_case'):
1199 reflags |= re.I
1199 reflags |= re.I
1200 try:
1200 try:
1201 regexp = re.compile(pattern, reflags)
1201 regexp = re.compile(pattern, reflags)
1202 except Exception, inst:
1202 except Exception, inst:
1203 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1203 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1204 return None
1204 return None
1205 sep, eol = ':', '\n'
1205 sep, eol = ':', '\n'
1206 if opts.get('print0'):
1206 if opts.get('print0'):
1207 sep = eol = '\0'
1207 sep = eol = '\0'
1208
1208
1209 fcache = {}
1209 fcache = {}
1210 forder = []
1210 forder = []
1211 def getfile(fn):
1211 def getfile(fn):
1212 if fn not in fcache:
1212 if fn not in fcache:
1213 if len(fcache) > 20:
1213 if len(fcache) > 20:
1214 del fcache[forder.pop(0)]
1214 del fcache[forder.pop(0)]
1215 fcache[fn] = repo.file(fn)
1215 fcache[fn] = repo.file(fn)
1216 else:
1216 else:
1217 forder.remove(fn)
1217 forder.remove(fn)
1218
1218
1219 forder.append(fn)
1219 forder.append(fn)
1220 return fcache[fn]
1220 return fcache[fn]
1221
1221
1222 def matchlines(body):
1222 def matchlines(body):
1223 begin = 0
1223 begin = 0
1224 linenum = 0
1224 linenum = 0
1225 while True:
1225 while True:
1226 match = regexp.search(body, begin)
1226 match = regexp.search(body, begin)
1227 if not match:
1227 if not match:
1228 break
1228 break
1229 mstart, mend = match.span()
1229 mstart, mend = match.span()
1230 linenum += body.count('\n', begin, mstart) + 1
1230 linenum += body.count('\n', begin, mstart) + 1
1231 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1231 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1232 begin = body.find('\n', mend) + 1 or len(body)
1232 begin = body.find('\n', mend) + 1 or len(body)
1233 lend = begin - 1
1233 lend = begin - 1
1234 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1234 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1235
1235
1236 class linestate(object):
1236 class linestate(object):
1237 def __init__(self, line, linenum, colstart, colend):
1237 def __init__(self, line, linenum, colstart, colend):
1238 self.line = line
1238 self.line = line
1239 self.linenum = linenum
1239 self.linenum = linenum
1240 self.colstart = colstart
1240 self.colstart = colstart
1241 self.colend = colend
1241 self.colend = colend
1242
1242
1243 def __hash__(self):
1243 def __hash__(self):
1244 return hash((self.linenum, self.line))
1244 return hash((self.linenum, self.line))
1245
1245
1246 def __eq__(self, other):
1246 def __eq__(self, other):
1247 return self.line == other.line
1247 return self.line == other.line
1248
1248
1249 matches = {}
1249 matches = {}
1250 copies = {}
1250 copies = {}
1251 def grepbody(fn, rev, body):
1251 def grepbody(fn, rev, body):
1252 matches[rev].setdefault(fn, [])
1252 matches[rev].setdefault(fn, [])
1253 m = matches[rev][fn]
1253 m = matches[rev][fn]
1254 for lnum, cstart, cend, line in matchlines(body):
1254 for lnum, cstart, cend, line in matchlines(body):
1255 s = linestate(line, lnum, cstart, cend)
1255 s = linestate(line, lnum, cstart, cend)
1256 m.append(s)
1256 m.append(s)
1257
1257
1258 def difflinestates(a, b):
1258 def difflinestates(a, b):
1259 sm = difflib.SequenceMatcher(None, a, b)
1259 sm = difflib.SequenceMatcher(None, a, b)
1260 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1260 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1261 if tag == 'insert':
1261 if tag == 'insert':
1262 for i in xrange(blo, bhi):
1262 for i in xrange(blo, bhi):
1263 yield ('+', b[i])
1263 yield ('+', b[i])
1264 elif tag == 'delete':
1264 elif tag == 'delete':
1265 for i in xrange(alo, ahi):
1265 for i in xrange(alo, ahi):
1266 yield ('-', a[i])
1266 yield ('-', a[i])
1267 elif tag == 'replace':
1267 elif tag == 'replace':
1268 for i in xrange(alo, ahi):
1268 for i in xrange(alo, ahi):
1269 yield ('-', a[i])
1269 yield ('-', a[i])
1270 for i in xrange(blo, bhi):
1270 for i in xrange(blo, bhi):
1271 yield ('+', b[i])
1271 yield ('+', b[i])
1272
1272
1273 def display(fn, r, pstates, states):
1273 def display(fn, r, pstates, states):
1274 datefunc = ui.quiet and util.shortdate or util.datestr
1274 datefunc = ui.quiet and util.shortdate or util.datestr
1275 found = False
1275 found = False
1276 filerevmatches = {}
1276 filerevmatches = {}
1277 if opts.get('all'):
1277 if opts.get('all'):
1278 iter = difflinestates(pstates, states)
1278 iter = difflinestates(pstates, states)
1279 else:
1279 else:
1280 iter = [('', l) for l in states]
1280 iter = [('', l) for l in states]
1281 for change, l in iter:
1281 for change, l in iter:
1282 cols = [fn, str(r)]
1282 cols = [fn, str(r)]
1283 if opts.get('line_number'):
1283 if opts.get('line_number'):
1284 cols.append(str(l.linenum))
1284 cols.append(str(l.linenum))
1285 if opts.get('all'):
1285 if opts.get('all'):
1286 cols.append(change)
1286 cols.append(change)
1287 if opts.get('user'):
1287 if opts.get('user'):
1288 cols.append(ui.shortuser(get(r)[1]))
1288 cols.append(ui.shortuser(get(r)[1]))
1289 if opts.get('date'):
1289 if opts.get('date'):
1290 cols.append(datefunc(get(r)[2]))
1290 cols.append(datefunc(get(r)[2]))
1291 if opts.get('files_with_matches'):
1291 if opts.get('files_with_matches'):
1292 c = (fn, r)
1292 c = (fn, r)
1293 if c in filerevmatches:
1293 if c in filerevmatches:
1294 continue
1294 continue
1295 filerevmatches[c] = 1
1295 filerevmatches[c] = 1
1296 else:
1296 else:
1297 cols.append(l.line)
1297 cols.append(l.line)
1298 ui.write(sep.join(cols), eol)
1298 ui.write(sep.join(cols), eol)
1299 found = True
1299 found = True
1300 return found
1300 return found
1301
1301
1302 skip = {}
1302 skip = {}
1303 revfiles = {}
1303 revfiles = {}
1304 get = util.cachefunc(lambda r: repo[r].changeset())
1304 get = util.cachefunc(lambda r: repo[r].changeset())
1305 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1305 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1306 found = False
1306 found = False
1307 follow = opts.get('follow')
1307 follow = opts.get('follow')
1308 for st, rev, fns in changeiter:
1308 for st, rev, fns in changeiter:
1309 if st == 'window':
1309 if st == 'window':
1310 matches.clear()
1310 matches.clear()
1311 revfiles.clear()
1311 revfiles.clear()
1312 elif st == 'add':
1312 elif st == 'add':
1313 ctx = repo[rev]
1313 ctx = repo[rev]
1314 pctx = ctx.parents()[0]
1314 pctx = ctx.parents()[0]
1315 parent = pctx.rev()
1315 parent = pctx.rev()
1316 matches.setdefault(rev, {})
1316 matches.setdefault(rev, {})
1317 matches.setdefault(parent, {})
1317 matches.setdefault(parent, {})
1318 files = revfiles.setdefault(rev, [])
1318 files = revfiles.setdefault(rev, [])
1319 for fn in fns:
1319 for fn in fns:
1320 flog = getfile(fn)
1320 flog = getfile(fn)
1321 try:
1321 try:
1322 fnode = ctx.filenode(fn)
1322 fnode = ctx.filenode(fn)
1323 except error.LookupError:
1323 except error.LookupError:
1324 continue
1324 continue
1325
1325
1326 copied = flog.renamed(fnode)
1326 copied = flog.renamed(fnode)
1327 copy = follow and copied and copied[0]
1327 copy = follow and copied and copied[0]
1328 if copy:
1328 if copy:
1329 copies.setdefault(rev, {})[fn] = copy
1329 copies.setdefault(rev, {})[fn] = copy
1330 if fn in skip:
1330 if fn in skip:
1331 if copy:
1331 if copy:
1332 skip[copy] = True
1332 skip[copy] = True
1333 continue
1333 continue
1334 files.append(fn)
1334 files.append(fn)
1335
1335
1336 if not matches[rev].has_key(fn):
1336 if not matches[rev].has_key(fn):
1337 grepbody(fn, rev, flog.read(fnode))
1337 grepbody(fn, rev, flog.read(fnode))
1338
1338
1339 pfn = copy or fn
1339 pfn = copy or fn
1340 if not matches[parent].has_key(pfn):
1340 if not matches[parent].has_key(pfn):
1341 try:
1341 try:
1342 fnode = pctx.filenode(pfn)
1342 fnode = pctx.filenode(pfn)
1343 grepbody(pfn, parent, flog.read(fnode))
1343 grepbody(pfn, parent, flog.read(fnode))
1344 except error.LookupError:
1344 except error.LookupError:
1345 pass
1345 pass
1346 elif st == 'iter':
1346 elif st == 'iter':
1347 parent = repo[rev].parents()[0].rev()
1347 parent = repo[rev].parents()[0].rev()
1348 for fn in sorted(revfiles.get(rev, [])):
1348 for fn in sorted(revfiles.get(rev, [])):
1349 states = matches[rev][fn]
1349 states = matches[rev][fn]
1350 copy = copies.get(rev, {}).get(fn)
1350 copy = copies.get(rev, {}).get(fn)
1351 if fn in skip:
1351 if fn in skip:
1352 if copy:
1352 if copy:
1353 skip[copy] = True
1353 skip[copy] = True
1354 continue
1354 continue
1355 pstates = matches.get(parent, {}).get(copy or fn, [])
1355 pstates = matches.get(parent, {}).get(copy or fn, [])
1356 if pstates or states:
1356 if pstates or states:
1357 r = display(fn, rev, pstates, states)
1357 r = display(fn, rev, pstates, states)
1358 found = found or r
1358 found = found or r
1359 if r and not opts.get('all'):
1359 if r and not opts.get('all'):
1360 skip[fn] = True
1360 skip[fn] = True
1361 if copy:
1361 if copy:
1362 skip[copy] = True
1362 skip[copy] = True
1363
1363
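A minimal sketch of the "+"/"-" match-status output described in the grep help text above: difflinestates() compares the matching lines of a file in two revisions with difflib, roughly like this (toy data, insert/delete cases only):

import difflib

old = ['needle in line 3']
new = ['needle in line 3', 'another needle']
changes = []
sm = difflib.SequenceMatcher(None, old, new)
for tag, alo, ahi, blo, bhi in sm.get_opcodes():
    if tag == 'insert':
        for i in range(blo, bhi):
            changes.append(('+', new[i]))
    elif tag == 'delete':
        for i in range(alo, ahi):
            changes.append(('-', old[i]))

assert changes == [('+', 'another needle')]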
1364 def heads(ui, repo, *branchrevs, **opts):
1364 def heads(ui, repo, *branchrevs, **opts):
1365 """show current repository heads or show branch heads
1365 """show current repository heads or show branch heads
1366
1366
1367 With no arguments, show all repository head changesets.
1367 With no arguments, show all repository head changesets.
1368
1368
1369 Repository "heads" are changesets that don't have child
1369 Repository "heads" are changesets that don't have child
1370 changesets. They are where development generally takes place and
1370 changesets. They are where development generally takes place and
1371 are the usual targets for update and merge operations.
1371 are the usual targets for update and merge operations.
1372
1372
1373 If one or more REV is given, the "branch heads" will be shown for
1373 If one or more REV is given, the "branch heads" will be shown for
1374 the named branch associated with that revision. The name of the
1374 the named branch associated with that revision. The name of the
1375 branch is called the revision's branch tag.
1375 branch is called the revision's branch tag.
1376
1376
1377 Branch heads are revisions on a given named branch that do not have
1377 Branch heads are revisions on a given named branch that do not have
1378 any descendants on the same branch. A branch head could be a true head
1378 any descendants on the same branch. A branch head could be a true head
1379 or it could be the last changeset on a branch before a new branch
1379 or it could be the last changeset on a branch before a new branch
1380 was created. If none of the branch heads are true heads, the branch
1380 was created. If none of the branch heads are true heads, the branch
1381 is considered inactive. If -c/--closed is specified, also show branch
1381 is considered inactive. If -c/--closed is specified, also show branch
1382 heads marked closed (see hg commit --close-branch).
1382 heads marked closed (see hg commit --close-branch).
1383
1383
1384 If STARTREV is specified only those heads (or branch heads) that
1384 If STARTREV is specified only those heads (or branch heads) that
1385 are descendants of STARTREV will be displayed.
1385 are descendants of STARTREV will be displayed.
1386 """
1386 """
1387 if opts.get('rev'):
1387 if opts.get('rev'):
1388 start = repo.lookup(opts['rev'])
1388 start = repo.lookup(opts['rev'])
1389 else:
1389 else:
1390 start = None
1390 start = None
1391 closed = opts.get('closed')
1391 closed = opts.get('closed')
1392 hideinactive, _heads = opts.get('active'), None
1392 hideinactive, _heads = opts.get('active'), None
1393 if not branchrevs:
1393 if not branchrevs:
1394 # Assume we're looking repo-wide heads if no revs were specified.
1394 # Assume we're looking repo-wide heads if no revs were specified.
1395 heads = repo.heads(start)
1395 heads = repo.heads(start)
1396 else:
1396 else:
1397 if hideinactive:
1397 if hideinactive:
1398 _heads = repo.heads(start)
1398 _heads = repo.heads(start)
1399 heads = []
1399 heads = []
1400 visitedset = set()
1400 visitedset = set()
1401 for branchrev in branchrevs:
1401 for branchrev in branchrevs:
1402 branch = repo[branchrev].branch()
1402 branch = repo[branchrev].branch()
1403 if branch in visitedset:
1403 if branch in visitedset:
1404 continue
1404 continue
1405 visitedset.add(branch)
1405 visitedset.add(branch)
1406 bheads = repo.branchheads(branch, start, closed=closed)
1406 bheads = repo.branchheads(branch, start, closed=closed)
1407 if not bheads:
1407 if not bheads:
1408 if not opts.get('rev'):
1408 if not opts.get('rev'):
1409 ui.warn(_("no open branch heads on branch %s\n") % branch)
1409 ui.warn(_("no open branch heads on branch %s\n") % branch)
1410 elif branch != branchrev:
1410 elif branch != branchrev:
1411 ui.warn(_("no changes on branch %s containing %s are "
1411 ui.warn(_("no changes on branch %s containing %s are "
1412 "reachable from %s\n")
1412 "reachable from %s\n")
1413 % (branch, branchrev, opts.get('rev')))
1413 % (branch, branchrev, opts.get('rev')))
1414 else:
1414 else:
1415 ui.warn(_("no changes on branch %s are reachable from %s\n")
1415 ui.warn(_("no changes on branch %s are reachable from %s\n")
1416 % (branch, opts.get('rev')))
1416 % (branch, opts.get('rev')))
1417 if hideinactive:
1417 if hideinactive:
1418 bheads = [bhead for bhead in bheads if bhead in _heads]
1418 bheads = [bhead for bhead in bheads if bhead in _heads]
1419 heads.extend(bheads)
1419 heads.extend(bheads)
1420 if not heads:
1420 if not heads:
1421 return 1
1421 return 1
1422 displayer = cmdutil.show_changeset(ui, repo, opts)
1422 displayer = cmdutil.show_changeset(ui, repo, opts)
1423 for n in heads:
1423 for n in heads:
1424 displayer.show(repo[n])
1424 displayer.show(repo[n])
1425
1425
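A toy sketch of the "branch head" notion described in the heads help text above: revisions of a branch with no descendant on the same branch (plain dictionaries stand in for the real changelog API):

revs = {                      # rev -> (branch, parent revs)
    0: ('default', ()),
    1: ('default', (0,)),
    2: ('stable',  (1,)),
    3: ('default', (1,)),
}

def branchheads(branch):
    heads = set(r for r in revs if revs[r][0] == branch)
    for r in revs:
        b, ps = revs[r]
        if b == branch:
            heads -= set(ps)      # a parent with a same-branch child is not a head
    return sorted(heads)

assert branchheads('default') == [3]
assert branchheads('stable') == [2]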
1426 def help_(ui, name=None, with_version=False):
1426 def help_(ui, name=None, with_version=False):
1427 """show help for a given topic or a help overview
1427 """show help for a given topic or a help overview
1428
1428
1429 With no arguments, print a list of commands with short help messages.
1429 With no arguments, print a list of commands with short help messages.
1430
1430
1431 Given a topic, extension, or command name, print help for that
1431 Given a topic, extension, or command name, print help for that
1432 topic."""
1432 topic."""
1433 option_lists = []
1433 option_lists = []
1434
1434
1435 def addglobalopts(aliases):
1435 def addglobalopts(aliases):
1436 if ui.verbose:
1436 if ui.verbose:
1437 option_lists.append((_("global options:"), globalopts))
1437 option_lists.append((_("global options:"), globalopts))
1438 if name == 'shortlist':
1438 if name == 'shortlist':
1439 option_lists.append((_('use "hg help" for the full list '
1439 option_lists.append((_('use "hg help" for the full list '
1440 'of commands'), ()))
1440 'of commands'), ()))
1441 else:
1441 else:
1442 if name == 'shortlist':
1442 if name == 'shortlist':
1443 msg = _('use "hg help" for the full list of commands '
1443 msg = _('use "hg help" for the full list of commands '
1444 'or "hg -v" for details')
1444 'or "hg -v" for details')
1445 elif aliases:
1445 elif aliases:
1446 msg = _('use "hg -v help%s" to show aliases and '
1446 msg = _('use "hg -v help%s" to show aliases and '
1447 'global options') % (name and " " + name or "")
1447 'global options') % (name and " " + name or "")
1448 else:
1448 else:
1449 msg = _('use "hg -v help %s" to show global options') % name
1449 msg = _('use "hg -v help %s" to show global options') % name
1450 option_lists.append((msg, ()))
1450 option_lists.append((msg, ()))
1451
1451
1452 def helpcmd(name):
1452 def helpcmd(name):
1453 if with_version:
1453 if with_version:
1454 version_(ui)
1454 version_(ui)
1455 ui.write('\n')
1455 ui.write('\n')
1456
1456
1457 try:
1457 try:
1458 aliases, i = cmdutil.findcmd(name, table, False)
1458 aliases, i = cmdutil.findcmd(name, table, False)
1459 except error.AmbiguousCommand, inst:
1459 except error.AmbiguousCommand, inst:
1460 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1460 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1461 helplist(_('list of commands:\n\n'), select)
1461 helplist(_('list of commands:\n\n'), select)
1462 return
1462 return
1463
1463
1464 # synopsis
1464 # synopsis
1465 if len(i) > 2:
1465 if len(i) > 2:
1466 if i[2].startswith('hg'):
1466 if i[2].startswith('hg'):
1467 ui.write("%s\n" % i[2])
1467 ui.write("%s\n" % i[2])
1468 else:
1468 else:
1469 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1469 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1470 else:
1470 else:
1471 ui.write('hg %s\n' % aliases[0])
1471 ui.write('hg %s\n' % aliases[0])
1472
1472
1473 # aliases
1473 # aliases
1474 if not ui.quiet and len(aliases) > 1:
1474 if not ui.quiet and len(aliases) > 1:
1475 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1475 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1476
1476
1477 # description
1477 # description
1478 doc = gettext(i[0].__doc__)
1478 doc = gettext(i[0].__doc__)
1479 if not doc:
1479 if not doc:
1480 doc = _("(no help text available)")
1480 doc = _("(no help text available)")
1481 if ui.quiet:
1481 if ui.quiet:
1482 doc = doc.splitlines(0)[0]
1482 doc = doc.splitlines(0)[0]
1483 ui.write("\n%s\n" % doc.rstrip())
1483 ui.write("\n%s\n" % doc.rstrip())
1484
1484
1485 if not ui.quiet:
1485 if not ui.quiet:
1486 # options
1486 # options
1487 if i[1]:
1487 if i[1]:
1488 option_lists.append((_("options:\n"), i[1]))
1488 option_lists.append((_("options:\n"), i[1]))
1489
1489
1490 addglobalopts(False)
1490 addglobalopts(False)
1491
1491
1492 def helplist(header, select=None):
1492 def helplist(header, select=None):
1493 h = {}
1493 h = {}
1494 cmds = {}
1494 cmds = {}
1495 for c, e in table.iteritems():
1495 for c, e in table.iteritems():
1496 f = c.split("|", 1)[0]
1496 f = c.split("|", 1)[0]
1497 if select and not select(f):
1497 if select and not select(f):
1498 continue
1498 continue
1499 if (not select and name != 'shortlist' and
1499 if (not select and name != 'shortlist' and
1500 e[0].__module__ != __name__):
1500 e[0].__module__ != __name__):
1501 continue
1501 continue
1502 if name == "shortlist" and not f.startswith("^"):
1502 if name == "shortlist" and not f.startswith("^"):
1503 continue
1503 continue
1504 f = f.lstrip("^")
1504 f = f.lstrip("^")
1505 if not ui.debugflag and f.startswith("debug"):
1505 if not ui.debugflag and f.startswith("debug"):
1506 continue
1506 continue
1507 doc = gettext(e[0].__doc__)
1507 doc = gettext(e[0].__doc__)
1508 if not doc:
1508 if not doc:
1509 doc = _("(no help text available)")
1509 doc = _("(no help text available)")
1510 h[f] = doc.splitlines(0)[0].rstrip()
1510 h[f] = doc.splitlines(0)[0].rstrip()
1511 cmds[f] = c.lstrip("^")
1511 cmds[f] = c.lstrip("^")
1512
1512
1513 if not h:
1513 if not h:
1514 ui.status(_('no commands defined\n'))
1514 ui.status(_('no commands defined\n'))
1515 return
1515 return
1516
1516
1517 ui.status(header)
1517 ui.status(header)
1518 fns = sorted(h)
1518 fns = sorted(h)
1519 m = max(map(len, fns))
1519 m = max(map(len, fns))
1520 for f in fns:
1520 for f in fns:
1521 if ui.verbose:
1521 if ui.verbose:
1522 commands = cmds[f].replace("|",", ")
1522 commands = cmds[f].replace("|",", ")
1523 ui.write(" %s:\n %s\n"%(commands, h[f]))
1523 ui.write(" %s:\n %s\n"%(commands, h[f]))
1524 else:
1524 else:
1525 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1525 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1526
1526
1527 if name != 'shortlist':
1527 if name != 'shortlist':
1528 exts, maxlength = extensions.enabled()
1528 exts, maxlength = extensions.enabled()
1529 ui.write(help.listexts(_('enabled extensions:'), exts, maxlength))
1529 ui.write(help.listexts(_('enabled extensions:'), exts, maxlength))
1530
1530
1531 if not ui.quiet:
1531 if not ui.quiet:
1532 addglobalopts(True)
1532 addglobalopts(True)
1533
1533
1534 def helptopic(name):
1534 def helptopic(name):
1535 for names, header, doc in help.helptable:
1535 for names, header, doc in help.helptable:
1536 if name in names:
1536 if name in names:
1537 break
1537 break
1538 else:
1538 else:
1539 raise error.UnknownCommand(name)
1539 raise error.UnknownCommand(name)
1540
1540
1541 # description
1541 # description
1542 if not doc:
1542 if not doc:
1543 doc = _("(no help text available)")
1543 doc = _("(no help text available)")
1544 if hasattr(doc, '__call__'):
1544 if hasattr(doc, '__call__'):
1545 doc = doc()
1545 doc = doc()
1546
1546
1547 ui.write("%s\n" % header)
1547 ui.write("%s\n" % header)
1548 ui.write("%s\n" % doc.rstrip())
1548 ui.write("%s\n" % doc.rstrip())
1549
1549
1550 def helpext(name):
1550 def helpext(name):
1551 try:
1551 try:
1552 mod = extensions.find(name)
1552 mod = extensions.find(name)
1553 except KeyError:
1553 except KeyError:
1554 raise error.UnknownCommand(name)
1554 raise error.UnknownCommand(name)
1555
1555
1556 doc = gettext(mod.__doc__) or _('no help text available')
1556 doc = gettext(mod.__doc__) or _('no help text available')
1557 doc = doc.splitlines(0)
1557 doc = doc.splitlines(0)
1558 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1558 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1559 for d in doc[1:]:
1559 for d in doc[1:]:
1560 ui.write(d, '\n')
1560 ui.write(d, '\n')
1561
1561
1562 ui.status('\n')
1562 ui.status('\n')
1563
1563
1564 try:
1564 try:
1565 ct = mod.cmdtable
1565 ct = mod.cmdtable
1566 except AttributeError:
1566 except AttributeError:
1567 ct = {}
1567 ct = {}
1568
1568
1569 modcmds = set([c.split('|', 1)[0] for c in ct])
1569 modcmds = set([c.split('|', 1)[0] for c in ct])
1570 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1570 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1571
1571
1572 if name and name != 'shortlist':
1572 if name and name != 'shortlist':
1573 i = None
1573 i = None
1574 for f in (helptopic, helpcmd, helpext):
1574 for f in (helptopic, helpcmd, helpext):
1575 try:
1575 try:
1576 f(name)
1576 f(name)
1577 i = None
1577 i = None
1578 break
1578 break
1579 except error.UnknownCommand, inst:
1579 except error.UnknownCommand, inst:
1580 i = inst
1580 i = inst
1581 if i:
1581 if i:
1582 raise i
1582 raise i
1583
1583
1584 else:
1584 else:
1585 # program name
1585 # program name
1586 if ui.verbose or with_version:
1586 if ui.verbose or with_version:
1587 version_(ui)
1587 version_(ui)
1588 else:
1588 else:
1589 ui.status(_("Mercurial Distributed SCM\n"))
1589 ui.status(_("Mercurial Distributed SCM\n"))
1590 ui.status('\n')
1590 ui.status('\n')
1591
1591
1592 # list of commands
1592 # list of commands
1593 if name == "shortlist":
1593 if name == "shortlist":
1594 header = _('basic commands:\n\n')
1594 header = _('basic commands:\n\n')
1595 else:
1595 else:
1596 header = _('list of commands:\n\n')
1596 header = _('list of commands:\n\n')
1597
1597
1598 helplist(header)
1598 helplist(header)
1599
1599
1600 # list all option lists
1600 # list all option lists
1601 opt_output = []
1601 opt_output = []
1602 for title, options in option_lists:
1602 for title, options in option_lists:
1603 opt_output.append(("\n%s" % title, None))
1603 opt_output.append(("\n%s" % title, None))
1604 for shortopt, longopt, default, desc in options:
1604 for shortopt, longopt, default, desc in options:
1605 if "DEPRECATED" in desc and not ui.verbose: continue
1605 if "DEPRECATED" in desc and not ui.verbose: continue
1606 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1606 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1607 longopt and " --%s" % longopt),
1607 longopt and " --%s" % longopt),
1608 "%s%s" % (desc,
1608 "%s%s" % (desc,
1609 default
1609 default
1610 and _(" (default: %s)") % default
1610 and _(" (default: %s)") % default
1611 or "")))
1611 or "")))
1612
1612
1613 if not name:
1613 if not name:
1614 ui.write(_("\nadditional help topics:\n\n"))
1614 ui.write(_("\nadditional help topics:\n\n"))
1615 topics = []
1615 topics = []
1616 for names, header, doc in help.helptable:
1616 for names, header, doc in help.helptable:
1617 names = [(-len(name), name) for name in names]
1617 names = [(-len(name), name) for name in names]
1618 names.sort()
1618 names.sort()
1619 topics.append((names[0][1], header))
1619 topics.append((names[0][1], header))
1620 topics_len = max([len(s[0]) for s in topics])
1620 topics_len = max([len(s[0]) for s in topics])
1621 for t, desc in topics:
1621 for t, desc in topics:
1622 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1622 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1623
1623
1624 if opt_output:
1624 if opt_output:
1625 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1625 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1626 for first, second in opt_output:
1626 for first, second in opt_output:
1627 if second:
1627 if second:
1628 second = util.wrap(second, opts_len + 3)
1628 second = util.wrap(second, opts_len + 3)
1629 ui.write(" %-*s %s\n" % (opts_len, first, second))
1629 ui.write(" %-*s %s\n" % (opts_len, first, second))
1630 else:
1630 else:
1631 ui.write("%s\n" % first)
1631 ui.write("%s\n" % first)
1632
1632
1633 def identify(ui, repo, source=None,
1633 def identify(ui, repo, source=None,
1634 rev=None, num=None, id=None, branch=None, tags=None):
1634 rev=None, num=None, id=None, branch=None, tags=None):
1635 """identify the working copy or specified revision
1635 """identify the working copy or specified revision
1636
1636
1637 With no revision, print a summary of the current state of the
1637 With no revision, print a summary of the current state of the
1638 repository.
1638 repository.
1639
1639
1640 Specifying a path to a repository root or Mercurial bundle will
1640 Specifying a path to a repository root or Mercurial bundle will
1641 cause lookup to operate on that repository/bundle.
1641 cause lookup to operate on that repository/bundle.
1642
1642
1643 This summary identifies the repository state using one or two
1643 This summary identifies the repository state using one or two
1644 parent hash identifiers, followed by a "+" if there are
1644 parent hash identifiers, followed by a "+" if there are
1645 uncommitted changes in the working directory, a list of tags for
1645 uncommitted changes in the working directory, a list of tags for
1646 this revision and a branch name for non-default branches.
1646 this revision and a branch name for non-default branches.
1647 """
1647 """
1648
1648
1649 if not repo and not source:
1649 if not repo and not source:
1650 raise util.Abort(_("There is no Mercurial repository here "
1650 raise util.Abort(_("There is no Mercurial repository here "
1651 "(.hg not found)"))
1651 "(.hg not found)"))
1652
1652
1653 hexfunc = ui.debugflag and hex or short
1653 hexfunc = ui.debugflag and hex or short
1654 default = not (num or id or branch or tags)
1654 default = not (num or id or branch or tags)
1655 output = []
1655 output = []
1656
1656
1657 revs = []
1657 revs = []
1658 if source:
1658 if source:
1659 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1659 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1660 repo = hg.repository(ui, source)
1660 repo = hg.repository(ui, source)
1661
1661
1662 if not repo.local():
1662 if not repo.local():
1663 if not rev and revs:
1663 if not rev and revs:
1664 rev = revs[0]
1664 rev = revs[0]
1665 if not rev:
1665 if not rev:
1666 rev = "tip"
1666 rev = "tip"
1667 if num or branch or tags:
1667 if num or branch or tags:
1668 raise util.Abort(
1668 raise util.Abort(
1669 "can't query remote revision number, branch, or tags")
1669 "can't query remote revision number, branch, or tags")
1670 output = [hexfunc(repo.lookup(rev))]
1670 output = [hexfunc(repo.lookup(rev))]
1671 elif not rev:
1671 elif not rev:
1672 ctx = repo[None]
1672 ctx = repo[None]
1673 parents = ctx.parents()
1673 parents = ctx.parents()
1674 changed = False
1674 changed = False
1675 if default or id or num:
1675 if default or id or num:
1676 changed = ctx.files() + ctx.deleted()
1676 changed = ctx.files() + ctx.deleted()
1677 if default or id:
1677 if default or id:
1678 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1678 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1679 (changed) and "+" or "")]
1679 (changed) and "+" or "")]
1680 if num:
1680 if num:
1681 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1681 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1682 (changed) and "+" or ""))
1682 (changed) and "+" or ""))
1683 else:
1683 else:
1684 ctx = repo[rev]
1684 ctx = repo[rev]
1685 if default or id:
1685 if default or id:
1686 output = [hexfunc(ctx.node())]
1686 output = [hexfunc(ctx.node())]
1687 if num:
1687 if num:
1688 output.append(str(ctx.rev()))
1688 output.append(str(ctx.rev()))
1689
1689
1690 if repo.local() and default and not ui.quiet:
1690 if repo.local() and default and not ui.quiet:
1691 b = encoding.tolocal(ctx.branch())
1691 b = encoding.tolocal(ctx.branch())
1692 if b != 'default':
1692 if b != 'default':
1693 output.append("(%s)" % b)
1693 output.append("(%s)" % b)
1694
1694
1695 # multiple tags for a single parent separated by '/'
1695 # multiple tags for a single parent separated by '/'
1696 t = "/".join(ctx.tags())
1696 t = "/".join(ctx.tags())
1697 if t:
1697 if t:
1698 output.append(t)
1698 output.append(t)
1699
1699
1700 if branch:
1700 if branch:
1701 output.append(encoding.tolocal(ctx.branch()))
1701 output.append(encoding.tolocal(ctx.branch()))
1702
1702
1703 if tags:
1703 if tags:
1704 output.extend(ctx.tags())
1704 output.extend(ctx.tags())
1705
1705
1706 ui.write("%s\n" % ' '.join(output))
1706 ui.write("%s\n" % ' '.join(output))
1707
1707
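# A short usage sketch for the identify command above; the tag and URL are
# illustrative, not taken from this changeset:
#   hg identify                    # hash of the working directory parent(s)
#   hg identify --num --branch     # local revision number plus branch name
#   hg identify -r 1.0 http://example.com/repo   # remote lookup returns the hash only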
1708 def import_(ui, repo, patch1, *patches, **opts):
1708 def import_(ui, repo, patch1, *patches, **opts):
1709 """import an ordered set of patches
1709 """import an ordered set of patches
1710
1710
1711 Import a list of patches and commit them individually.
1711 Import a list of patches and commit them individually.
1712
1712
1713 If there are outstanding changes in the working directory, import
1713 If there are outstanding changes in the working directory, import
1714 will abort unless given the -f/--force flag.
1714 will abort unless given the -f/--force flag.
1715
1715
1716 You can import a patch straight from a mail message. Even patches
1716 You can import a patch straight from a mail message. Even patches
1717 as attachments work (to use the body part, it must have type
1717 as attachments work (to use the body part, it must have type
1718 text/plain or text/x-patch). The From and Subject headers of the email
1718 text/plain or text/x-patch). The From and Subject headers of the email
1719 message are used as the default committer and commit message. All
1719 message are used as the default committer and commit message. All
1720 text/plain body parts before the first diff are added to the commit
1720 text/plain body parts before the first diff are added to the commit
1721 message.
1721 message.
1722
1722
1723 If the imported patch was generated by hg export, user and
1723 If the imported patch was generated by hg export, user and
1724 description from patch override values from message headers and
1724 description from patch override values from message headers and
1725 body. Values given on command line with -m/--message and -u/--user
1725 body. Values given on command line with -m/--message and -u/--user
1726 override these.
1726 override these.
1727
1727
1728 If --exact is specified, import will set the working directory to
1728 If --exact is specified, import will set the working directory to
1729 the parent of each patch before applying it, and will abort if the
1729 the parent of each patch before applying it, and will abort if the
1730 resulting changeset has a different ID than the one recorded in
1730 resulting changeset has a different ID than the one recorded in
1731 the patch. This may happen due to character set problems or other
1731 the patch. This may happen due to character set problems or other
1732 deficiencies in the text patch format.
1732 deficiencies in the text patch format.
1733
1733
1734 With -s/--similarity, hg will attempt to discover renames and
1734 With -s/--similarity, hg will attempt to discover renames and
1735 copies in the patch in the same way as 'addremove'.
1735 copies in the patch in the same way as 'addremove'.
1736
1736
1737 To read a patch from standard input, use "-" as the patch name. If
1737 To read a patch from standard input, use "-" as the patch name. If
1738 a URL is specified, the patch will be downloaded from it.
1738 a URL is specified, the patch will be downloaded from it.
1739 See 'hg help dates' for a list of formats valid for -d/--date.
1739 See 'hg help dates' for a list of formats valid for -d/--date.
1740 """
1740 """
1741 patches = (patch1,) + patches
1741 patches = (patch1,) + patches
1742
1742
1743 date = opts.get('date')
1743 date = opts.get('date')
1744 if date:
1744 if date:
1745 opts['date'] = util.parsedate(date)
1745 opts['date'] = util.parsedate(date)
1746
1746
1747 try:
1747 try:
1748 sim = float(opts.get('similarity') or 0)
1748 sim = float(opts.get('similarity') or 0)
1749 except ValueError:
1749 except ValueError:
1750 raise util.Abort(_('similarity must be a number'))
1750 raise util.Abort(_('similarity must be a number'))
1751 if sim < 0 or sim > 100:
1751 if sim < 0 or sim > 100:
1752 raise util.Abort(_('similarity must be between 0 and 100'))
1752 raise util.Abort(_('similarity must be between 0 and 100'))
1753
1753
1754 if opts.get('exact') or not opts.get('force'):
1754 if opts.get('exact') or not opts.get('force'):
1755 cmdutil.bail_if_changed(repo)
1755 cmdutil.bail_if_changed(repo)
1756
1756
1757 d = opts["base"]
1757 d = opts["base"]
1758 strip = opts["strip"]
1758 strip = opts["strip"]
1759 wlock = lock = None
1759 wlock = lock = None
1760 try:
1760 try:
1761 wlock = repo.wlock()
1761 wlock = repo.wlock()
1762 lock = repo.lock()
1762 lock = repo.lock()
1763 for p in patches:
1763 for p in patches:
1764 pf = os.path.join(d, p)
1764 pf = os.path.join(d, p)
1765
1765
1766 if pf == '-':
1766 if pf == '-':
1767 ui.status(_("applying patch from stdin\n"))
1767 ui.status(_("applying patch from stdin\n"))
1768 pf = sys.stdin
1768 pf = sys.stdin
1769 else:
1769 else:
1770 ui.status(_("applying %s\n") % p)
1770 ui.status(_("applying %s\n") % p)
1771 pf = url.open(ui, pf)
1771 pf = url.open(ui, pf)
1772 data = patch.extract(ui, pf)
1772 data = patch.extract(ui, pf)
1773 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1773 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1774
1774
1775 if tmpname is None:
1775 if tmpname is None:
1776 raise util.Abort(_('no diffs found'))
1776 raise util.Abort(_('no diffs found'))
1777
1777
1778 try:
1778 try:
1779 cmdline_message = cmdutil.logmessage(opts)
1779 cmdline_message = cmdutil.logmessage(opts)
1780 if cmdline_message:
1780 if cmdline_message:
1781 # pickup the cmdline msg
1781 # pickup the cmdline msg
1782 message = cmdline_message
1782 message = cmdline_message
1783 elif message:
1783 elif message:
1784 # pickup the patch msg
1784 # pickup the patch msg
1785 message = message.strip()
1785 message = message.strip()
1786 else:
1786 else:
1787 # launch the editor
1787 # launch the editor
1788 message = None
1788 message = None
1789 ui.debug(_('message:\n%s\n') % message)
1789 ui.debug(_('message:\n%s\n') % message)
1790
1790
1791 wp = repo.parents()
1791 wp = repo.parents()
1792 if opts.get('exact'):
1792 if opts.get('exact'):
1793 if not nodeid or not p1:
1793 if not nodeid or not p1:
1794 raise util.Abort(_('not a Mercurial patch'))
1794 raise util.Abort(_('not a Mercurial patch'))
1795 p1 = repo.lookup(p1)
1795 p1 = repo.lookup(p1)
1796 p2 = repo.lookup(p2 or hex(nullid))
1796 p2 = repo.lookup(p2 or hex(nullid))
1797
1797
1798 if p1 != wp[0].node():
1798 if p1 != wp[0].node():
1799 hg.clean(repo, p1)
1799 hg.clean(repo, p1)
1800 repo.dirstate.setparents(p1, p2)
1800 repo.dirstate.setparents(p1, p2)
1801 elif p2:
1801 elif p2:
1802 try:
1802 try:
1803 p1 = repo.lookup(p1)
1803 p1 = repo.lookup(p1)
1804 p2 = repo.lookup(p2)
1804 p2 = repo.lookup(p2)
1805 if p1 == wp[0].node():
1805 if p1 == wp[0].node():
1806 repo.dirstate.setparents(p1, p2)
1806 repo.dirstate.setparents(p1, p2)
1807 except error.RepoError:
1807 except error.RepoError:
1808 pass
1808 pass
1809 if opts.get('exact') or opts.get('import_branch'):
1809 if opts.get('exact') or opts.get('import_branch'):
1810 repo.dirstate.setbranch(branch or 'default')
1810 repo.dirstate.setbranch(branch or 'default')
1811
1811
1812 files = {}
1812 files = {}
1813 try:
1813 try:
1814 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1814 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1815 files=files, eolmode=None)
1815 files=files, eolmode=None)
1816 finally:
1816 finally:
1817 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1817 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1818 if not opts.get('no_commit'):
1818 if not opts.get('no_commit'):
1819 m = cmdutil.matchfiles(repo, files or [])
1819 m = cmdutil.matchfiles(repo, files or [])
1820 n = repo.commit(message, opts.get('user') or user,
1820 n = repo.commit(message, opts.get('user') or user,
1821 opts.get('date') or date, match=m,
1821 opts.get('date') or date, match=m,
1822 editor=cmdutil.commiteditor)
1822 editor=cmdutil.commiteditor)
1823 if opts.get('exact'):
1823 if opts.get('exact'):
1824 if hex(n) != nodeid:
1824 if hex(n) != nodeid:
1825 repo.rollback()
1825 repo.rollback()
1826 raise util.Abort(_('patch is damaged'
1826 raise util.Abort(_('patch is damaged'
1827 ' or loses information'))
1827 ' or loses information'))
1828 # Force a dirstate write so that the next transaction
1828 # Force a dirstate write so that the next transaction
1829 # backs up an up-to-date file.
1829 # backs up an up-to-date file.
1830 repo.dirstate.write()
1830 repo.dirstate.write()
1831 finally:
1831 finally:
1832 os.unlink(tmpname)
1832 os.unlink(tmpname)
1833 finally:
1833 finally:
1834 release(lock, wlock)
1834 release(lock, wlock)
1835
1835
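# A minimal sketch of the import workflows described in the docstring above;
# the patch file name is illustrative:
#   hg import ../fix-encoding.patch               # apply and commit the patch
#   hg import --no-commit ../fix-encoding.patch   # apply only, commit separately
#   hg import - < ../fix-encoding.patch           # read the patch from stdin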
1836 def incoming(ui, repo, source="default", **opts):
1836 def incoming(ui, repo, source="default", **opts):
1837 """show new changesets found in source
1837 """show new changesets found in source
1838
1838
1839 Show new changesets found in the specified path/URL or the default
1839 Show new changesets found in the specified path/URL or the default
1840 pull location. These are the changesets that would have been pulled
1840 pull location. These are the changesets that would have been pulled
1841 if a pull had been requested at the time you issued this command.
1841 if a pull had been requested at the time you issued this command.
1842
1842
1843 For a remote repository, using --bundle avoids downloading the
1843 For a remote repository, using --bundle avoids downloading the
1844 changesets twice if the incoming is followed by a pull.
1844 changesets twice if the incoming is followed by a pull.
1845
1845
1846 See pull for valid source format details.
1846 See pull for valid source format details.
1847 """
1847 """
1848 limit = cmdutil.loglimit(opts)
1848 limit = cmdutil.loglimit(opts)
1849 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1849 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1850 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1850 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1851 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1851 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1852 if revs:
1852 if revs:
1853 revs = [other.lookup(rev) for rev in revs]
1853 revs = [other.lookup(rev) for rev in revs]
1854 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1854 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1855 force=opts["force"])
1855 force=opts["force"])
1856 if not incoming:
1856 if not incoming:
1857 try:
1857 try:
1858 os.unlink(opts["bundle"])
1858 os.unlink(opts["bundle"])
1859 except:
1859 except:
1860 pass
1860 pass
1861 ui.status(_("no changes found\n"))
1861 ui.status(_("no changes found\n"))
1862 return 1
1862 return 1
1863
1863
1864 cleanup = None
1864 cleanup = None
1865 try:
1865 try:
1866 fname = opts["bundle"]
1866 fname = opts["bundle"]
1867 if fname or not other.local():
1867 if fname or not other.local():
1868 # create a bundle (uncompressed if other repo is not local)
1868 # create a bundle (uncompressed if other repo is not local)
1869
1869
1870 if revs is None and other.capable('changegroupsubset'):
1870 if revs is None and other.capable('changegroupsubset'):
1871 revs = rheads
1871 revs = rheads
1872
1872
1873 if revs is None:
1873 if revs is None:
1874 cg = other.changegroup(incoming, "incoming")
1874 cg = other.changegroup(incoming, "incoming")
1875 else:
1875 else:
1876 cg = other.changegroupsubset(incoming, revs, 'incoming')
1876 cg = other.changegroupsubset(incoming, revs, 'incoming')
1877 bundletype = other.local() and "HG10BZ" or "HG10UN"
1877 bundletype = other.local() and "HG10BZ" or "HG10UN"
1878 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1878 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1879 # keep written bundle?
1879 # keep written bundle?
1880 if opts["bundle"]:
1880 if opts["bundle"]:
1881 cleanup = None
1881 cleanup = None
1882 if not other.local():
1882 if not other.local():
1883 # use the created uncompressed bundlerepo
1883 # use the created uncompressed bundlerepo
1884 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1884 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1885
1885
1886 o = other.changelog.nodesbetween(incoming, revs)[0]
1886 o = other.changelog.nodesbetween(incoming, revs)[0]
1887 if opts.get('newest_first'):
1887 if opts.get('newest_first'):
1888 o.reverse()
1888 o.reverse()
1889 displayer = cmdutil.show_changeset(ui, other, opts)
1889 displayer = cmdutil.show_changeset(ui, other, opts)
1890 count = 0
1890 count = 0
1891 for n in o:
1891 for n in o:
1892 if count >= limit:
1892 if count >= limit:
1893 break
1893 break
1894 parents = [p for p in other.changelog.parents(n) if p != nullid]
1894 parents = [p for p in other.changelog.parents(n) if p != nullid]
1895 if opts.get('no_merges') and len(parents) == 2:
1895 if opts.get('no_merges') and len(parents) == 2:
1896 continue
1896 continue
1897 count += 1
1897 count += 1
1898 displayer.show(other[n])
1898 displayer.show(other[n])
1899 finally:
1899 finally:
1900 if hasattr(other, 'close'):
1900 if hasattr(other, 'close'):
1901 other.close()
1901 other.close()
1902 if cleanup:
1902 if cleanup:
1903 os.unlink(cleanup)
1903 os.unlink(cleanup)
1904
1904
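# One way to use the --bundle option mentioned in the docstring above, so the
# changesets are only downloaded once; the bundle file name is illustrative:
#   hg incoming --bundle incoming.hg    # preview and keep the changesets
#   hg pull incoming.hg                 # pull from the saved bundle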
1905 def init(ui, dest=".", **opts):
1905 def init(ui, dest=".", **opts):
1906 """create a new repository in the given directory
1906 """create a new repository in the given directory
1907
1907
1908 Initialize a new repository in the given directory. If the given
1908 Initialize a new repository in the given directory. If the given
1909 directory does not exist, it will be created.
1909 directory does not exist, it will be created.
1910
1910
1911 If no directory is given, the current directory is used.
1911 If no directory is given, the current directory is used.
1912
1912
1913 It is possible to specify an ssh:// URL as the destination.
1913 It is possible to specify an ssh:// URL as the destination.
1914 See 'hg help urls' for more information.
1914 See 'hg help urls' for more information.
1915 """
1915 """
1916 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1916 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1917
1917
1918 def locate(ui, repo, *pats, **opts):
1918 def locate(ui, repo, *pats, **opts):
1919 """locate files matching specific patterns
1919 """locate files matching specific patterns
1920
1920
1921 Print files under Mercurial control in the working directory whose
1921 Print files under Mercurial control in the working directory whose
1922 names match the given patterns.
1922 names match the given patterns.
1923
1923
1924 By default, this command searches all directories in the working
1924 By default, this command searches all directories in the working
1925 directory. To search just the current directory and its
1925 directory. To search just the current directory and its
1926 subdirectories, use "--include .".
1926 subdirectories, use "--include .".
1927
1927
1928 If no patterns are given to match, this command prints the names
1928 If no patterns are given to match, this command prints the names
1929 of all files under Mercurial control in the working directory.
1929 of all files under Mercurial control in the working directory.
1930
1930
1931 If you want to feed the output of this command into the "xargs"
1931 If you want to feed the output of this command into the "xargs"
1932 command, use the -0 option to both this command and "xargs". This
1932 command, use the -0 option to both this command and "xargs". This
1933 will avoid the problem of "xargs" treating single filenames that
1933 will avoid the problem of "xargs" treating single filenames that
1934 contain whitespace as multiple filenames.
1934 contain whitespace as multiple filenames.
1935 """
1935 """
1936 end = opts.get('print0') and '\0' or '\n'
1936 end = opts.get('print0') and '\0' or '\n'
1937 rev = opts.get('rev') or None
1937 rev = opts.get('rev') or None
1938
1938
1939 ret = 1
1939 ret = 1
1940 m = cmdutil.match(repo, pats, opts, default='relglob')
1940 m = cmdutil.match(repo, pats, opts, default='relglob')
1941 m.bad = lambda x,y: False
1941 m.bad = lambda x,y: False
1942 for abs in repo[rev].walk(m):
1942 for abs in repo[rev].walk(m):
1943 if not rev and abs not in repo.dirstate:
1943 if not rev and abs not in repo.dirstate:
1944 continue
1944 continue
1945 if opts.get('fullpath'):
1945 if opts.get('fullpath'):
1946 ui.write(repo.wjoin(abs), end)
1946 ui.write(repo.wjoin(abs), end)
1947 else:
1947 else:
1948 ui.write(((pats and m.rel(abs)) or abs), end)
1948 ui.write(((pats and m.rel(abs)) or abs), end)
1949 ret = 0
1949 ret = 0
1950
1950
1951 return ret
1951 return ret
1952
1952
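# The xargs pairing recommended in the docstring above, with an illustrative
# pattern and search string:
#   hg locate -0 '*.py' | xargs -0 grep -l "util.Abort"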
1953 def log(ui, repo, *pats, **opts):
1953 def log(ui, repo, *pats, **opts):
1954 """show revision history of entire repository or files
1954 """show revision history of entire repository or files
1955
1955
1956 Print the revision history of the specified files or the entire
1956 Print the revision history of the specified files or the entire
1957 project.
1957 project.
1958
1958
1959 File history is shown without following rename or copy history of
1959 File history is shown without following rename or copy history of
1960 files. Use -f/--follow with a filename to follow history across
1960 files. Use -f/--follow with a filename to follow history across
1961 renames and copies. --follow without a filename will only show
1961 renames and copies. --follow without a filename will only show
1962 ancestors or descendants of the starting revision. --follow-first
1962 ancestors or descendants of the starting revision. --follow-first
1963 only follows the first parent of merge revisions.
1963 only follows the first parent of merge revisions.
1964
1964
1965 If no revision range is specified, the default is tip:0 unless
1965 If no revision range is specified, the default is tip:0 unless
1966 --follow is set, in which case the working directory parent is
1966 --follow is set, in which case the working directory parent is
1967 used as the starting revision.
1967 used as the starting revision.
1968
1968
1969 See 'hg help dates' for a list of formats valid for -d/--date.
1969 See 'hg help dates' for a list of formats valid for -d/--date.
1970
1970
1971 By default this command prints revision number and changeset id,
1971 By default this command prints revision number and changeset id,
1972 tags, non-trivial parents, user, date and time, and a summary for
1972 tags, non-trivial parents, user, date and time, and a summary for
1973 each commit. When the -v/--verbose switch is used, the list of
1973 each commit. When the -v/--verbose switch is used, the list of
1974 changed files and full commit message are shown.
1974 changed files and full commit message are shown.
1975
1975
1976 NOTE: log -p/--patch may generate unexpected diff output for merge
1976 NOTE: log -p/--patch may generate unexpected diff output for merge
1977 changesets, as it will only compare the merge changeset against
1977 changesets, as it will only compare the merge changeset against
1978 its first parent. Also, only files different from BOTH parents
1978 its first parent. Also, only files different from BOTH parents
1979 will appear in files:.
1979 will appear in files:.
1980 """
1980 """
1981
1981
1982 get = util.cachefunc(lambda r: repo[r].changeset())
1982 get = util.cachefunc(lambda r: repo[r].changeset())
1983 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1983 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1984
1984
1985 limit = cmdutil.loglimit(opts)
1985 limit = cmdutil.loglimit(opts)
1986 count = 0
1986 count = 0
1987
1987
1988 if opts.get('copies') and opts.get('rev'):
1988 if opts.get('copies') and opts.get('rev'):
1989 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1989 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1990 else:
1990 else:
1991 endrev = len(repo)
1991 endrev = len(repo)
1992 rcache = {}
1992 rcache = {}
1993 ncache = {}
1993 ncache = {}
1994 def getrenamed(fn, rev):
1994 def getrenamed(fn, rev):
1995 '''looks up all renames for a file (up to endrev) the first
1995 '''looks up all renames for a file (up to endrev) the first
1996 time the file is given. It indexes on the changerev and only
1996 time the file is given. It indexes on the changerev and only
1997 parses the manifest if linkrev != changerev.
1997 parses the manifest if linkrev != changerev.
1998 Returns rename info for fn at changerev rev.'''
1998 Returns rename info for fn at changerev rev.'''
1999 if fn not in rcache:
1999 if fn not in rcache:
2000 rcache[fn] = {}
2000 rcache[fn] = {}
2001 ncache[fn] = {}
2001 ncache[fn] = {}
2002 fl = repo.file(fn)
2002 fl = repo.file(fn)
2003 for i in fl:
2003 for i in fl:
2004 node = fl.node(i)
2004 node = fl.node(i)
2005 lr = fl.linkrev(i)
2005 lr = fl.linkrev(i)
2006 renamed = fl.renamed(node)
2006 renamed = fl.renamed(node)
2007 rcache[fn][lr] = renamed
2007 rcache[fn][lr] = renamed
2008 if renamed:
2008 if renamed:
2009 ncache[fn][node] = renamed
2009 ncache[fn][node] = renamed
2010 if lr >= endrev:
2010 if lr >= endrev:
2011 break
2011 break
2012 if rev in rcache[fn]:
2012 if rev in rcache[fn]:
2013 return rcache[fn][rev]
2013 return rcache[fn][rev]
2014
2014
2015 # If linkrev != rev (i.e. rev not found in rcache) fallback to
2015 # If linkrev != rev (i.e. rev not found in rcache) fallback to
2016 # filectx logic.
2016 # filectx logic.
2017
2017
2018 try:
2018 try:
2019 return repo[rev][fn].renamed()
2019 return repo[rev][fn].renamed()
2020 except error.LookupError:
2020 except error.LookupError:
2021 pass
2021 pass
2022 return None
2022 return None
2023
2023
2024 df = False
2024 df = False
2025 if opts["date"]:
2025 if opts["date"]:
2026 df = util.matchdate(opts["date"])
2026 df = util.matchdate(opts["date"])
2027
2027
2028 only_branches = opts.get('only_branch')
2028 only_branches = opts.get('only_branch')
2029
2029
2030 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2030 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2031 for st, rev, fns in changeiter:
2031 for st, rev, fns in changeiter:
2032 if st == 'add':
2032 if st == 'add':
2033 parents = [p for p in repo.changelog.parentrevs(rev)
2033 parents = [p for p in repo.changelog.parentrevs(rev)
2034 if p != nullrev]
2034 if p != nullrev]
2035 if opts.get('no_merges') and len(parents) == 2:
2035 if opts.get('no_merges') and len(parents) == 2:
2036 continue
2036 continue
2037 if opts.get('only_merges') and len(parents) != 2:
2037 if opts.get('only_merges') and len(parents) != 2:
2038 continue
2038 continue
2039
2039
2040 if only_branches:
2040 if only_branches:
2041 revbranch = get(rev)[5]['branch']
2041 revbranch = get(rev)[5]['branch']
2042 if revbranch not in only_branches:
2042 if revbranch not in only_branches:
2043 continue
2043 continue
2044
2044
2045 if df:
2045 if df:
2046 changes = get(rev)
2046 changes = get(rev)
2047 if not df(changes[2][0]):
2047 if not df(changes[2][0]):
2048 continue
2048 continue
2049
2049
2050 if opts.get('keyword'):
2050 if opts.get('keyword'):
2051 changes = get(rev)
2051 changes = get(rev)
2052 miss = 0
2052 miss = 0
2053 for k in [kw.lower() for kw in opts['keyword']]:
2053 for k in [kw.lower() for kw in opts['keyword']]:
2054 if not (k in changes[1].lower() or
2054 if not (k in changes[1].lower() or
2055 k in changes[4].lower() or
2055 k in changes[4].lower() or
2056 k in " ".join(changes[3]).lower()):
2056 k in " ".join(changes[3]).lower()):
2057 miss = 1
2057 miss = 1
2058 break
2058 break
2059 if miss:
2059 if miss:
2060 continue
2060 continue
2061
2061
2062 if opts['user']:
2062 if opts['user']:
2063 changes = get(rev)
2063 changes = get(rev)
2064 if not [k for k in opts['user'] if k in changes[1]]:
2064 if not [k for k in opts['user'] if k in changes[1]]:
2065 continue
2065 continue
2066
2066
2067 copies = []
2067 copies = []
2068 if opts.get('copies') and rev:
2068 if opts.get('copies') and rev:
2069 for fn in get(rev)[3]:
2069 for fn in get(rev)[3]:
2070 rename = getrenamed(fn, rev)
2070 rename = getrenamed(fn, rev)
2071 if rename:
2071 if rename:
2072 copies.append((fn, rename[0]))
2072 copies.append((fn, rename[0]))
2073 displayer.show(context.changectx(repo, rev), copies=copies)
2073 displayer.show(context.changectx(repo, rev), copies=copies)
2074 elif st == 'iter':
2074 elif st == 'iter':
2075 if count == limit: break
2075 if count == limit: break
2076 if displayer.flush(rev):
2076 if displayer.flush(rev):
2077 count += 1
2077 count += 1
2078
2078
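# A few log invocations exercising the filters handled above (keyword, user,
# date, branch); all values are illustrative:
#   hg log -k encoding -u mpm --no-merges
#   hg log -d ">2009-05-01" --only-branch default -p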
2079 def manifest(ui, repo, node=None, rev=None):
2079 def manifest(ui, repo, node=None, rev=None):
2080 """output the current or given revision of the project manifest
2080 """output the current or given revision of the project manifest
2081
2081
2082 Print a list of version controlled files for the given revision.
2082 Print a list of version controlled files for the given revision.
2083 If no revision is given, the first parent of the working directory
2083 If no revision is given, the first parent of the working directory
2084 is used, or the null revision if no revision is checked out.
2084 is used, or the null revision if no revision is checked out.
2085
2085
2086 With -v, print file permissions, symlink and executable bits.
2086 With -v, print file permissions, symlink and executable bits.
2087 With --debug, print file revision hashes.
2087 With --debug, print file revision hashes.
2088 """
2088 """
2089
2089
2090 if rev and node:
2090 if rev and node:
2091 raise util.Abort(_("please specify just one revision"))
2091 raise util.Abort(_("please specify just one revision"))
2092
2092
2093 if not node:
2093 if not node:
2094 node = rev
2094 node = rev
2095
2095
2096 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2096 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2097 ctx = repo[node]
2097 ctx = repo[node]
2098 for f in ctx:
2098 for f in ctx:
2099 if ui.debugflag:
2099 if ui.debugflag:
2100 ui.write("%40s " % hex(ctx.manifest()[f]))
2100 ui.write("%40s " % hex(ctx.manifest()[f]))
2101 if ui.verbose:
2101 if ui.verbose:
2102 ui.write(decor[ctx.flags(f)])
2102 ui.write(decor[ctx.flags(f)])
2103 ui.write("%s\n" % f)
2103 ui.write("%s\n" % f)
2104
2104
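# The verbose and debug decorations handled above, for an illustrative tag:
#   hg manifest -v -r 1.2     # adds mode plus symlink/executable markers
#   hg manifest --debug       # adds file revision hashes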
2105 def merge(ui, repo, node=None, **opts):
2105 def merge(ui, repo, node=None, **opts):
2106 """merge working directory with another revision
2106 """merge working directory with another revision
2107
2107
2108 The current working directory is updated with all changes made in
2108 The current working directory is updated with all changes made in
2109 the requested revision since the last common predecessor revision.
2109 the requested revision since the last common predecessor revision.
2110
2110
2111 Files that changed between either parent are marked as changed for
2111 Files that changed between either parent are marked as changed for
2112 the next commit and a commit must be performed before any further
2112 the next commit and a commit must be performed before any further
2113 updates to the repository are allowed. The next commit will have
2113 updates to the repository are allowed. The next commit will have
2114 two parents.
2114 two parents.
2115
2115
2116 If no revision is specified, the working directory's parent is a
2116 If no revision is specified, the working directory's parent is a
2117 head revision, and the current branch contains exactly one other
2117 head revision, and the current branch contains exactly one other
2118 head, the other head is merged with by default. Otherwise, an
2118 head, the other head is merged with by default. Otherwise, an
2119 explicit revision with which to merge must be provided.
2119 explicit revision with which to merge must be provided.
2120 """
2120 """
2121
2121
2122 if opts.get('rev') and node:
2122 if opts.get('rev') and node:
2123 raise util.Abort(_("please specify just one revision"))
2123 raise util.Abort(_("please specify just one revision"))
2124 if not node:
2124 if not node:
2125 node = opts.get('rev')
2125 node = opts.get('rev')
2126
2126
2127 if not node:
2127 if not node:
2128 branch = repo.changectx(None).branch()
2128 branch = repo.changectx(None).branch()
2129 bheads = repo.branchheads(branch)
2129 bheads = repo.branchheads(branch)
2130 if len(bheads) > 2:
2130 if len(bheads) > 2:
2131 raise util.Abort(_("branch '%s' has %d heads - "
2131 raise util.Abort(_("branch '%s' has %d heads - "
2132 "please merge with an explicit rev") %
2132 "please merge with an explicit rev") %
2133 (branch, len(bheads)))
2133 (branch, len(bheads)))
2134
2134
2135 parent = repo.dirstate.parents()[0]
2135 parent = repo.dirstate.parents()[0]
2136 if len(bheads) == 1:
2136 if len(bheads) == 1:
2137 if len(repo.heads()) > 1:
2137 if len(repo.heads()) > 1:
2138 raise util.Abort(_("branch '%s' has one head - "
2138 raise util.Abort(_("branch '%s' has one head - "
2139 "please merge with an explicit rev") %
2139 "please merge with an explicit rev") %
2140 branch)
2140 branch)
2141 msg = _('there is nothing to merge')
2141 msg = _('there is nothing to merge')
2142 if parent != repo.lookup(repo[None].branch()):
2142 if parent != repo.lookup(repo[None].branch()):
2143 msg = _('%s - use "hg update" instead') % msg
2143 msg = _('%s - use "hg update" instead') % msg
2144 raise util.Abort(msg)
2144 raise util.Abort(msg)
2145
2145
2146 if parent not in bheads:
2146 if parent not in bheads:
2147 raise util.Abort(_('working dir not at a head rev - '
2147 raise util.Abort(_('working dir not at a head rev - '
2148 'use "hg update" or merge with an explicit rev'))
2148 'use "hg update" or merge with an explicit rev'))
2149 node = parent == bheads[0] and bheads[-1] or bheads[0]
2149 node = parent == bheads[0] and bheads[-1] or bheads[0]
2150
2150
2151 if opts.get('preview'):
2151 if opts.get('preview'):
2152 p1 = repo['.']
2152 p1 = repo['.']
2153 p2 = repo[node]
2153 p2 = repo[node]
2154 common = p1.ancestor(p2)
2154 common = p1.ancestor(p2)
2155 roots, heads = [common.node()], [p2.node()]
2155 roots, heads = [common.node()], [p2.node()]
2156 displayer = cmdutil.show_changeset(ui, repo, opts)
2156 displayer = cmdutil.show_changeset(ui, repo, opts)
2157 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2157 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2158 displayer.show(repo[node])
2158 displayer.show(repo[node])
2159 return 0
2159 return 0
2160
2160
2161 return hg.merge(repo, node, force=opts.get('force'))
2161 return hg.merge(repo, node, force=opts.get('force'))
2162
2162
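# The --preview branch above can be used to inspect a merge before running it;
# the revision is illustrative:
#   hg merge --preview 4735   # list the changesets the merge would bring in
#   hg merge 4735             # perform the merge itself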
2163 def outgoing(ui, repo, dest=None, **opts):
2163 def outgoing(ui, repo, dest=None, **opts):
2164 """show changesets not found in destination
2164 """show changesets not found in destination
2165
2165
2166 Show changesets not found in the specified destination repository
2166 Show changesets not found in the specified destination repository
2167 or the default push location. These are the changesets that would
2167 or the default push location. These are the changesets that would
2168 be pushed if a push was requested.
2168 be pushed if a push was requested.
2169
2169
2170 See pull for valid destination format details.
2170 See pull for valid destination format details.
2171 """
2171 """
2172 limit = cmdutil.loglimit(opts)
2172 limit = cmdutil.loglimit(opts)
2173 dest, revs, checkout = hg.parseurl(
2173 dest, revs, checkout = hg.parseurl(
2174 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2174 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2175 if revs:
2175 if revs:
2176 revs = [repo.lookup(rev) for rev in revs]
2176 revs = [repo.lookup(rev) for rev in revs]
2177
2177
2178 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2178 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2179 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2179 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2180 o = repo.findoutgoing(other, force=opts.get('force'))
2180 o = repo.findoutgoing(other, force=opts.get('force'))
2181 if not o:
2181 if not o:
2182 ui.status(_("no changes found\n"))
2182 ui.status(_("no changes found\n"))
2183 return 1
2183 return 1
2184 o = repo.changelog.nodesbetween(o, revs)[0]
2184 o = repo.changelog.nodesbetween(o, revs)[0]
2185 if opts.get('newest_first'):
2185 if opts.get('newest_first'):
2186 o.reverse()
2186 o.reverse()
2187 displayer = cmdutil.show_changeset(ui, repo, opts)
2187 displayer = cmdutil.show_changeset(ui, repo, opts)
2188 count = 0
2188 count = 0
2189 for n in o:
2189 for n in o:
2190 if count >= limit:
2190 if count >= limit:
2191 break
2191 break
2192 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2192 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2193 if opts.get('no_merges') and len(parents) == 2:
2193 if opts.get('no_merges') and len(parents) == 2:
2194 continue
2194 continue
2195 count += 1
2195 count += 1
2196 displayer.show(repo[n])
2196 displayer.show(repo[n])
2197
2197
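# A quick check of what a push would send, per the docstring above:
#   hg outgoing --no-merges --limit 10   # at most ten non-merge changesets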
2198 def parents(ui, repo, file_=None, **opts):
2198 def parents(ui, repo, file_=None, **opts):
2199 """show the parents of the working directory or revision
2199 """show the parents of the working directory or revision
2200
2200
2201 Print the working directory's parent revisions. If a revision is
2201 Print the working directory's parent revisions. If a revision is
2202 given via -r/--rev, the parent of that revision will be printed.
2202 given via -r/--rev, the parent of that revision will be printed.
2203 If a file argument is given, the revision in which the file was
2203 If a file argument is given, the revision in which the file was
2204 last changed (before the working directory revision or the
2204 last changed (before the working directory revision or the
2205 argument to --rev if given) is printed.
2205 argument to --rev if given) is printed.
2206 """
2206 """
2207 rev = opts.get('rev')
2207 rev = opts.get('rev')
2208 if rev:
2208 if rev:
2209 ctx = repo[rev]
2209 ctx = repo[rev]
2210 else:
2210 else:
2211 ctx = repo[None]
2211 ctx = repo[None]
2212
2212
2213 if file_:
2213 if file_:
2214 m = cmdutil.match(repo, (file_,), opts)
2214 m = cmdutil.match(repo, (file_,), opts)
2215 if m.anypats() or len(m.files()) != 1:
2215 if m.anypats() or len(m.files()) != 1:
2216 raise util.Abort(_('can only specify an explicit filename'))
2216 raise util.Abort(_('can only specify an explicit filename'))
2217 file_ = m.files()[0]
2217 file_ = m.files()[0]
2218 filenodes = []
2218 filenodes = []
2219 for cp in ctx.parents():
2219 for cp in ctx.parents():
2220 if not cp:
2220 if not cp:
2221 continue
2221 continue
2222 try:
2222 try:
2223 filenodes.append(cp.filenode(file_))
2223 filenodes.append(cp.filenode(file_))
2224 except error.LookupError:
2224 except error.LookupError:
2225 pass
2225 pass
2226 if not filenodes:
2226 if not filenodes:
2227 raise util.Abort(_("'%s' not found in manifest!") % file_)
2227 raise util.Abort(_("'%s' not found in manifest!") % file_)
2228 fl = repo.file(file_)
2228 fl = repo.file(file_)
2229 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2229 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2230 else:
2230 else:
2231 p = [cp.node() for cp in ctx.parents()]
2231 p = [cp.node() for cp in ctx.parents()]
2232
2232
2233 displayer = cmdutil.show_changeset(ui, repo, opts)
2233 displayer = cmdutil.show_changeset(ui, repo, opts)
2234 for n in p:
2234 for n in p:
2235 if n != nullid:
2235 if n != nullid:
2236 displayer.show(repo[n])
2236 displayer.show(repo[n])
2237
2237
2238 def paths(ui, repo, search=None):
2238 def paths(ui, repo, search=None):
2239 """show aliases for remote repositories
2239 """show aliases for remote repositories
2240
2240
2241 Show definition of symbolic path name NAME. If no name is given,
2241 Show definition of symbolic path name NAME. If no name is given,
2242 show definition of all available names.
2242 show definition of all available names.
2243
2243
2244 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2244 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2245 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2245 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2246
2246
2247 See 'hg help urls' for more information.
2247 See 'hg help urls' for more information.
2248 """
2248 """
2249 if search:
2249 if search:
2250 for name, path in ui.configitems("paths"):
2250 for name, path in ui.configitems("paths"):
2251 if name == search:
2251 if name == search:
2252 ui.write("%s\n" % url.hidepassword(path))
2252 ui.write("%s\n" % url.hidepassword(path))
2253 return
2253 return
2254 ui.warn(_("not found!\n"))
2254 ui.warn(_("not found!\n"))
2255 return 1
2255 return 1
2256 else:
2256 else:
2257 for name, path in ui.configitems("paths"):
2257 for name, path in ui.configitems("paths"):
2258 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2258 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2259
2259
2260 def postincoming(ui, repo, modheads, optupdate, checkout):
2260 def postincoming(ui, repo, modheads, optupdate, checkout):
2261 if modheads == 0:
2261 if modheads == 0:
2262 return
2262 return
2263 if optupdate:
2263 if optupdate:
2264 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2264 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2265 return hg.update(repo, checkout)
2265 return hg.update(repo, checkout)
2266 else:
2266 else:
2267 ui.status(_("not updating, since new heads added\n"))
2267 ui.status(_("not updating, since new heads added\n"))
2268 if modheads > 1:
2268 if modheads > 1:
2269 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2269 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2270 else:
2270 else:
2271 ui.status(_("(run 'hg update' to get a working copy)\n"))
2271 ui.status(_("(run 'hg update' to get a working copy)\n"))
2272
2272
2273 def pull(ui, repo, source="default", **opts):
2273 def pull(ui, repo, source="default", **opts):
2274 """pull changes from the specified source
2274 """pull changes from the specified source
2275
2275
2276 Pull changes from a remote repository to a local one.
2276 Pull changes from a remote repository to a local one.
2277
2277
2278 This finds all changes from the repository at the specified path
2278 This finds all changes from the repository at the specified path
2279 or URL and adds them to a local repository (the current one unless
2279 or URL and adds them to a local repository (the current one unless
2280 -R is specified). By default, this does not update the copy of the
2280 -R is specified). By default, this does not update the copy of the
2281 project in the working directory.
2281 project in the working directory.
2282
2282
2283 Use hg incoming if you want to see what would have been added by a
2283 Use hg incoming if you want to see what would have been added by a
2284 pull at the time you issued this command. If you then decide to
2284 pull at the time you issued this command. If you then decide to
2285 add those changes to the repository, you should use pull -r X
2285 add those changes to the repository, you should use pull -r X
2286 where X is the last changeset listed by hg incoming.
2286 where X is the last changeset listed by hg incoming.
2287
2287
2288 If SOURCE is omitted, the 'default' path will be used.
2288 If SOURCE is omitted, the 'default' path will be used.
2289 See 'hg help urls' for more information.
2289 See 'hg help urls' for more information.
2290 """
2290 """
2291 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2291 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2292 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2292 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2293 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2293 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2294 if revs:
2294 if revs:
2295 try:
2295 try:
2296 revs = [other.lookup(rev) for rev in revs]
2296 revs = [other.lookup(rev) for rev in revs]
2297 except error.CapabilityError:
2297 except error.CapabilityError:
2298 err = _("Other repository doesn't support revision lookup, "
2298 err = _("Other repository doesn't support revision lookup, "
2299 "so a rev cannot be specified.")
2299 "so a rev cannot be specified.")
2300 raise util.Abort(err)
2300 raise util.Abort(err)
2301
2301
2302 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2302 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2303 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2303 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2304
2304
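# The incoming/pull -r pairing suggested in the docstring above; the changeset
# id is an illustrative placeholder:
#   hg incoming                    # see what a pull would bring in
#   hg pull -r 0a3f1e2b9c4d -u     # pull up to that changeset and update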
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It moves changes from
    the current repository to a different one. If the destination is
    local this is identical to a pull in that directory from the
    current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    user forgot to pull and merge before pushing.

    If -r/--rev is used, the named revision and all its ancestors will
    be pushed to the remote repository.

    Please see 'hg help urls' for important details about ssh://
    URLs. If DESTINATION is omitted, a default path will be used.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    # push subrepos depth-first for coherent ordering
    c = repo['']
    subs = c.substate # only repos that are committed
    for s in sorted(subs):
        c.sub(s).push(opts.get('force'))

    r = repo.push(other, opts.get('force'), revs=revs)
    return r == 0

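# Illustrative sketch (not part of upstream commands.py): push() visits
# c.substate in sorted order, so an enclosing subrepo path sorts before any
# path nested under it and is pushed first.
def _example_subrepo_order(substate):
    # hypothetical helper: the order in which push() above visits subrepos
    return sorted(substate)

# e.g. _example_subrepo_order({'vendor/lib': None, 'vendor': None})
# -> ['vendor', 'vendor/lib']: the enclosing repo is pushed before the nested one.
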
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. -A/--after can be used to remove only
    files that have already been deleted, -f/--force can be used to
    force deletion, and -Af can be used to remove files from the next
    revision without deleting them from the working directory.

    The following table details the behavior of remove for different
    file states (columns) and option combinations (rows). The file
    states are Added [A], Clean [C], Modified [M] and Missing [!]
    (as reported by hg status). The actions are Warn, Remove (from
    branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))

    def warn(files, reason):
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    repo.remove(remove, unlink=not after)

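# Illustrative sketch (not part of upstream commands.py): the table in the
# remove() docstring above, restated as a lookup from option combination and
# file state to action, where W = warn, R = remove from branch, D = delete
# from disk.
_EXAMPLE_REMOVE_ACTIONS = {
    #            A          C           M           !
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}
# e.g. _EXAMPLE_REMOVE_ACTIONS['-f']['M'] == 'RD': with --force, a modified
# file is removed from the branch and deleted from disk.
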
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wlock.release()

def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to
    resolve all unresolved files, use the -a/--all switch.

    If a conflict is resolved manually, please note that the changes
    will be overwritten if the merge is retried with resolve. The
    -m/--mark switch should be used to mark the file as resolved.

    This command also allows listing resolved files and manually
    indicating whether or not files are resolved. All files must be
    marked as resolved before a commit is permitted.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if m(f):
            if show:
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

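# Illustrative sketch (not part of upstream commands.py): the if/elif chain in
# resolve() above maps the command's switches onto one merge-state operation
# per matched file.  A hypothetical summary helper:
def _example_resolve_action(show, mark, unmark):
    if show:
        return 'list status: U = unresolved, R = resolved'
    elif mark:
        return 'ms.mark(f, "r")   # declare the file resolved'
    elif unmark:
        return 'ms.mark(f, "u")   # declare the file unresolved'
    return 'ms.resolve(f, wctx, mctx)   # re-run the file merge'
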
def revert(ui, repo, *pats, **opts):
    """restore individual files or directories to an earlier state

    (Use update -r to check out earlier revisions, revert does not
    change the working directory parents.)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r/--rev option, revert the given files or directories
    to their contents as of a specific revision. This can be helpful
    to "roll back" some or all of an earlier change. See 'hg help
    dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable mode
    of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            if path in names:
                return
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return
            ui.warn("%s: %s\n" % (m.rel(path), msg))

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        wlock.release()

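# Illustrative sketch (not part of upstream commands.py): the disptable used
# by revert() above can be read as a mapping from working-directory state to
# the action taken, depending on whether the file exists in the target
# revision's manifest.
_EXAMPLE_REVERT_ACTIONS = {
    # state       in target manifest    not in target manifest
    'modified': ('revert (backup)',     'remove (backup)'),
    'added':    ('revert (backup)',     'remove'),
    'removed':  ('undelete',            'no action'),
    'deleted':  ('revert',              'remove'),
}
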
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

    commit
    import
    pull
    push (with this repository as destination)
    unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    repo.rollback()

def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.
    """
    ui.write(repo.root + "\n")

def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.
    """

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    baseui = repo and repo.baseui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate encoding")
    for o in optlist.split():
        if opts.get(o, None):
            baseui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != baseui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service(object):
        def init(self):
            util.set_signal_handler()
            self.httpd = server.create_server(baseui, repo)

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    cmdutil.service(opts, initfn=service.init, runfn=service.run)

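# Illustrative sketch (not part of upstream commands.py): serve() copies any
# recognised command line option into the [web] configuration section before
# the server is created, so the HTTP server reads everything back from that
# section.  A hypothetical helper restating the optlist loop above:
def _example_web_config(baseui, opts):
    # only a few option names are shown here for brevity
    for o in ('port', 'address', 'accesslog', 'errorlog'):
        if opts.get(o, None):
            baseui.setconfig("web", o, str(opts[o]))

# e.g. opts of {'port': 8000} becomes the config value web.port = "8000".
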
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = missing (deleted by non-hg command, but still tracked)
    ? = not tracked
    I = ignored
      = origin of the previous file listed as A (added)
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))

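# Illustrative sketch (not part of upstream commands.py): the status codes
# from the docstring above, paired with the state names used by repo.status().
# The same pairing is what zip(states, 'MAR!?IC', stat) builds in status().
_EXAMPLE_STATUS_CODES = {
    'M': 'modified',
    'A': 'added',
    'R': 'removed',
    '!': 'deleted (missing: removed by a non-hg command, still tracked)',
    '?': 'unknown (not tracked)',
    'I': 'ignored',
    'C': 'clean',
}
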
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is
    used, or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    if len(names) != len(set(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                if expectedtype == 'global':
                    raise util.Abort(_('tag \'%s\' is not a global tag') % n)
                else:
                    raise util.Abort(_('tag \'%s\' is not a local tag') % n)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)

def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    """

    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in reversed(repo.tagslist()):
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            r = "    ?:%s" % hn
        else:
            spaces = " " * (30 - encoding.colwidth(t))
            if ui.verbose:
                if repo.tagtype(t) == 'local':
                    tagtype = " local"
                else:
                    tagtype = ""
            ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))

def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])

def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames

    lock = repo.lock()
    try:
        for fname in fnames:
            f = url.open(ui, fname)
            gen = changegroup.readbundle(f, fname)
            modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
    finally:
        lock.release()

    return postincoming(ui, repo, modheads, opts.get('update'), None)

def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
    """update working directory

    Update the repository's working directory to the specified
    revision, or the tip of the current branch if none is specified.
    Use null as the revision to remove the working copy (like 'hg
    clone -U').

    When the working directory contains no uncommitted changes, it
    will be replaced by the state of the requested revision from the
    repository. When the requested revision is on a different branch,
    the working directory will additionally be switched to that
    branch.

    When there are uncommitted changes, use option -C/--clean to
    discard them, forcibly replacing the state of the working
    directory with the requested revision. Alternately, use -c/--check
    to abort.

    When there are uncommitted changes and option -C/--clean is not
    used, and the parent revision and requested revision are on the
    same branch, and one of them is an ancestor of the other, then the
    new working directory will contain the requested revision merged
    with the uncommitted changes. Otherwise, the update will fail with
    a suggestion to use 'merge' or 'update -C' instead.

    If you want to update just one file to an older revision, use
    revert.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not clean and check:
        # we could use dirty() but we can ignore merge and branch trivia
        c = repo[None]
        if c.modified() or c.added() or c.removed():
            raise util.Abort(_("uncommitted local changes"))

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    else:
        return hg.update(repo, rev)

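# Illustrative sketch (not part of upstream commands.py): a hypothetical
# helper summarising the docstring of update() above: what happens when the
# working directory is updated while uncommitted changes are present.
def _example_update_behaviour(dirty, clean, check, linear):
    if not dirty:
        return 'replace the working directory with the requested revision'
    if clean:
        return 'discard local changes, then update'
    if check:
        return 'abort: uncommitted local changes'
    if linear:
        return 'merge the uncommitted changes into the requested revision'
    return "abort: suggest 'merge' or 'update -C'"
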
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

# Command options and aliases are listed here, alphabetically

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record the specified user as committer')),
]

templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]

3164 table = {
3164 table = {
3165 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3165 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3166 "addremove":
3166 "addremove":
3167 (addremove, similarityopts + walkopts + dryrunopts,
3167 (addremove, similarityopts + walkopts + dryrunopts,
3168 _('[OPTION]... [FILE]...')),
3168 _('[OPTION]... [FILE]...')),
3169 "^annotate|blame":
3169 "^annotate|blame":
3170 (annotate,
3170 (annotate,
3171 [('r', 'rev', '', _('annotate the specified revision')),
3171 [('r', 'rev', '', _('annotate the specified revision')),
3172 ('f', 'follow', None, _('follow file copies and renames')),
3172 ('f', 'follow', None, _('follow file copies and renames')),
3173 ('a', 'text', None, _('treat all files as text')),
3173 ('a', 'text', None, _('treat all files as text')),
3174 ('u', 'user', None, _('list the author (long with -v)')),
3174 ('u', 'user', None, _('list the author (long with -v)')),
3175 ('d', 'date', None, _('list the date (short with -q)')),
3175 ('d', 'date', None, _('list the date (short with -q)')),
3176 ('n', 'number', None, _('list the revision number (default)')),
3176 ('n', 'number', None, _('list the revision number (default)')),
3177 ('c', 'changeset', None, _('list the changeset')),
3177 ('c', 'changeset', None, _('list the changeset')),
3178 ('l', 'line-number', None,
3178 ('l', 'line-number', None,
3179 _('show line number at the first appearance'))
3179 _('show line number at the first appearance'))
3180 ] + walkopts,
3180 ] + walkopts,
3181 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3181 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3182 "archive":
3182 "archive":
3183 (archive,
3183 (archive,
3184 [('', 'no-decode', None, _('do not pass files through decoders')),
3184 [('', 'no-decode', None, _('do not pass files through decoders')),
3185 ('p', 'prefix', '', _('directory prefix for files in archive')),
3185 ('p', 'prefix', '', _('directory prefix for files in archive')),
3186 ('r', 'rev', '', _('revision to distribute')),
3186 ('r', 'rev', '', _('revision to distribute')),
3187 ('t', 'type', '', _('type of distribution to create')),
3187 ('t', 'type', '', _('type of distribution to create')),
3188 ] + walkopts,
3188 ] + walkopts,
3189 _('[OPTION]... DEST')),
3189 _('[OPTION]... DEST')),
3190 "backout":
3190 "backout":
3191 (backout,
3191 (backout,
3192 [('', 'merge', None,
3192 [('', 'merge', None,
3193 _('merge with old dirstate parent after backout')),
3193 _('merge with old dirstate parent after backout')),
3194 ('', 'parent', '', _('parent to choose when backing out merge')),
3194 ('', 'parent', '', _('parent to choose when backing out merge')),
3195 ('r', 'rev', '', _('revision to backout')),
3195 ('r', 'rev', '', _('revision to backout')),
3196 ] + walkopts + commitopts + commitopts2,
3196 ] + walkopts + commitopts + commitopts2,
3197 _('[OPTION]... [-r] REV')),
3197 _('[OPTION]... [-r] REV')),
3198 "bisect":
3198 "bisect":
3199 (bisect,
3199 (bisect,
3200 [('r', 'reset', False, _('reset bisect state')),
3200 [('r', 'reset', False, _('reset bisect state')),
3201 ('g', 'good', False, _('mark changeset good')),
3201 ('g', 'good', False, _('mark changeset good')),
3202 ('b', 'bad', False, _('mark changeset bad')),
3202 ('b', 'bad', False, _('mark changeset bad')),
3203 ('s', 'skip', False, _('skip testing changeset')),
3203 ('s', 'skip', False, _('skip testing changeset')),
3204 ('c', 'command', '', _('use command to check changeset state')),
3204 ('c', 'command', '', _('use command to check changeset state')),
3205 ('U', 'noupdate', False, _('do not update to target'))],
3205 ('U', 'noupdate', False, _('do not update to target'))],
3206 _("[-gbsr] [-c CMD] [REV]")),
3206 _("[-gbsr] [-c CMD] [REV]")),
3207 "branch":
3207 "branch":
3208 (branch,
3208 (branch,
3209 [('f', 'force', None,
3209 [('f', 'force', None,
3210 _('set branch name even if it shadows an existing branch')),
3210 _('set branch name even if it shadows an existing branch')),
3211 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3211 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3212 _('[-fC] [NAME]')),
3212 _('[-fC] [NAME]')),
3213 "branches":
3213 "branches":
3214 (branches,
3214 (branches,
3215 [('a', 'active', False,
3215 [('a', 'active', False,
3216 _('show only branches that have unmerged heads')),
3216 _('show only branches that have unmerged heads')),
3217 ('c', 'closed', False,
3217 ('c', 'closed', False,
3218 _('show normal and closed heads'))],
3218 _('show normal and closed heads'))],
3219 _('[-a]')),
3219 _('[-a]')),
3220 "bundle":
3220 "bundle":
3221 (bundle,
3221 (bundle,
3222 [('f', 'force', None,
3222 [('f', 'force', None,
3223 _('run even when remote repository is unrelated')),
3223 _('run even when remote repository is unrelated')),
3224 ('r', 'rev', [],
3224 ('r', 'rev', [],
3225 _('a changeset up to which you would like to bundle')),
3225 _('a changeset up to which you would like to bundle')),
3226 ('', 'base', [],
3226 ('', 'base', [],
3227 _('a base changeset to specify instead of a destination')),
3227 _('a base changeset to specify instead of a destination')),
3228 ('a', 'all', None, _('bundle all changesets in the repository')),
3228 ('a', 'all', None, _('bundle all changesets in the repository')),
3229 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3229 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3230 ] + remoteopts,
3230 ] + remoteopts,
3231 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3231 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3232 "cat":
3232 "cat":
3233 (cat,
3233 (cat,
3234 [('o', 'output', '', _('print output to file with formatted name')),
3234 [('o', 'output', '', _('print output to file with formatted name')),
3235 ('r', 'rev', '', _('print the given revision')),
3235 ('r', 'rev', '', _('print the given revision')),
3236 ('', 'decode', None, _('apply any matching decode filter')),
3236 ('', 'decode', None, _('apply any matching decode filter')),
3237 ] + walkopts,
3237 ] + walkopts,
3238 _('[OPTION]... FILE...')),
3238 _('[OPTION]... FILE...')),
3239 "^clone":
3239 "^clone":
3240 (clone,
3240 (clone,
3241 [('U', 'noupdate', None,
3241 [('U', 'noupdate', None,
3242 _('the clone will only contain a repository (no working copy)')),
3242 _('the clone will only contain a repository (no working copy)')),
3243 ('r', 'rev', [],
3243 ('r', 'rev', [],
3244 _('a changeset you would like to have after cloning')),
3244 _('a changeset you would like to have after cloning')),
3245 ('', 'pull', None, _('use pull protocol to copy metadata')),
3245 ('', 'pull', None, _('use pull protocol to copy metadata')),
3246 ('', 'uncompressed', None,
3246 ('', 'uncompressed', None,
3247 _('use uncompressed transfer (fast over LAN)')),
3247 _('use uncompressed transfer (fast over LAN)')),
3248 ] + remoteopts,
3248 ] + remoteopts,
3249 _('[OPTION]... SOURCE [DEST]')),
3249 _('[OPTION]... SOURCE [DEST]')),
3250 "^commit|ci":
3250 "^commit|ci":
3251 (commit,
3251 (commit,
3252 [('A', 'addremove', None,
3252 [('A', 'addremove', None,
3253 _('mark new/missing files as added/removed before committing')),
3253 _('mark new/missing files as added/removed before committing')),
3254 ('', 'close-branch', None,
3254 ('', 'close-branch', None,
3255 _('mark a branch as closed, hiding it from the branch list')),
3255 _('mark a branch as closed, hiding it from the branch list')),
3256 ] + walkopts + commitopts + commitopts2,
3256 ] + walkopts + commitopts + commitopts2,
3257 _('[OPTION]... [FILE]...')),
3257 _('[OPTION]... [FILE]...')),
3258 "copy|cp":
3258 "copy|cp":
3259 (copy,
3259 (copy,
3260 [('A', 'after', None, _('record a copy that has already occurred')),
3260 [('A', 'after', None, _('record a copy that has already occurred')),
3261 ('f', 'force', None,
3261 ('f', 'force', None,
3262 _('forcibly copy over an existing managed file')),
3262 _('forcibly copy over an existing managed file')),
3263 ] + walkopts + dryrunopts,
3263 ] + walkopts + dryrunopts,
3264 _('[OPTION]... [SOURCE]... DEST')),
3264 _('[OPTION]... [SOURCE]... DEST')),
3265 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3265 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3266 "debugcheckstate": (debugcheckstate, []),
3266 "debugcheckstate": (debugcheckstate, []),
3267 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3267 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3268 "debugcomplete":
3268 "debugcomplete":
3269 (debugcomplete,
3269 (debugcomplete,
3270 [('o', 'options', None, _('show the command options'))],
3270 [('o', 'options', None, _('show the command options'))],
3271 _('[-o] CMD')),
3271 _('[-o] CMD')),
3272 "debugdate":
3272 "debugdate":
3273 (debugdate,
3273 (debugdate,
3274 [('e', 'extended', None, _('try extended date formats'))],
3274 [('e', 'extended', None, _('try extended date formats'))],
3275 _('[-e] DATE [RANGE]')),
3275 _('[-e] DATE [RANGE]')),
3276 "debugdata": (debugdata, [], _('FILE REV')),
3276 "debugdata": (debugdata, [], _('FILE REV')),
3277 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3277 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3278 "debugindex": (debugindex, [], _('FILE')),
3278 "debugindex": (debugindex, [], _('FILE')),
3279 "debugindexdot": (debugindexdot, [], _('FILE')),
3279 "debugindexdot": (debugindexdot, [], _('FILE')),
3280 "debuginstall": (debuginstall, []),
3280 "debuginstall": (debuginstall, []),
3281 "debugrebuildstate":
3281 "debugrebuildstate":
3282 (debugrebuildstate,
3282 (debugrebuildstate,
3283 [('r', 'rev', '', _('revision to rebuild to'))],
3283 [('r', 'rev', '', _('revision to rebuild to'))],
3284 _('[-r REV] [REV]')),
3284 _('[-r REV] [REV]')),
3285 "debugrename":
3285 "debugrename":
3286 (debugrename,
3286 (debugrename,
3287 [('r', 'rev', '', _('revision to debug'))],
3287 [('r', 'rev', '', _('revision to debug'))],
3288 _('[-r REV] FILE')),
3288 _('[-r REV] FILE')),
3289 "debugsetparents":
3289 "debugsetparents":
3290 (debugsetparents, [], _('REV1 [REV2]')),
3290 (debugsetparents, [], _('REV1 [REV2]')),
3291 "debugstate":
3291 "debugstate":
3292 (debugstate,
3292 (debugstate,
3293 [('', 'nodates', None, _('do not display the saved mtime'))],
3293 [('', 'nodates', None, _('do not display the saved mtime'))],
3294 _('[OPTION]...')),
3294 _('[OPTION]...')),
3295 "debugsub":
3295 "debugsub":
3296 (debugsub,
3296 (debugsub,
3297 [('r', 'rev', '', _('revision to check'))],
3297 [('r', 'rev', '', _('revision to check'))],
3298 _('[-r REV] [REV]')),
3298 _('[-r REV] [REV]')),
3299 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3299 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3300 "^diff":
3300 "^diff":
3301 (diff,
3301 (diff,
3302 [('r', 'rev', [], _('revision')),
3302 [('r', 'rev', [], _('revision')),
3303 ('c', 'change', '', _('change made by revision'))
3303 ('c', 'change', '', _('change made by revision'))
3304 ] + diffopts + diffopts2 + walkopts,
3304 ] + diffopts + diffopts2 + walkopts,
3305 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3305 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3306 "^export":
3306 "^export":
3307 (export,
3307 (export,
3308 [('o', 'output', '', _('print output to file with formatted name')),
3308 [('o', 'output', '', _('print output to file with formatted name')),
3309 ('', 'switch-parent', None, _('diff against the second parent'))
3309 ('', 'switch-parent', None, _('diff against the second parent'))
3310 ] + diffopts,
3310 ] + diffopts,
3311 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3311 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3312 "^forget":
3312 "^forget":
3313 (forget,
3313 (forget,
3314 [] + walkopts,
3314 [] + walkopts,
3315 _('[OPTION]... FILE...')),
3315 _('[OPTION]... FILE...')),
3316 "grep":
3316 "grep":
3317 (grep,
3317 (grep,
3318 [('0', 'print0', None, _('end fields with NUL')),
3318 [('0', 'print0', None, _('end fields with NUL')),
3319 ('', 'all', None, _('print all revisions that match')),
3319 ('', 'all', None, _('print all revisions that match')),
3320 ('f', 'follow', None,
3320 ('f', 'follow', None,
3321 _('follow changeset history, or file history across copies and renames')),
3321 _('follow changeset history, or file history across copies and renames')),
3322 ('i', 'ignore-case', None, _('ignore case when matching')),
3322 ('i', 'ignore-case', None, _('ignore case when matching')),
3323 ('l', 'files-with-matches', None,
3323 ('l', 'files-with-matches', None,
3324 _('print only filenames and revisions that match')),
3324 _('print only filenames and revisions that match')),
3325 ('n', 'line-number', None, _('print matching line numbers')),
3325 ('n', 'line-number', None, _('print matching line numbers')),
3326 ('r', 'rev', [], _('search in given revision range')),
3326 ('r', 'rev', [], _('search in given revision range')),
3327 ('u', 'user', None, _('list the author (long with -v)')),
3327 ('u', 'user', None, _('list the author (long with -v)')),
3328 ('d', 'date', None, _('list the date (short with -q)')),
3328 ('d', 'date', None, _('list the date (short with -q)')),
3329 ] + walkopts,
3329 ] + walkopts,
3330 _('[OPTION]... PATTERN [FILE]...')),
3330 _('[OPTION]... PATTERN [FILE]...')),
3331 "heads":
3331 "heads":
3332 (heads,
3332 (heads,
3333 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3333 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3334 ('a', 'active', False,
3334 ('a', 'active', False,
3335 _('show only the active heads from open branches')),
3335 _('show only the active heads from open branches')),
3336 ('c', 'closed', False,
3336 ('c', 'closed', False,
3337 _('show normal and closed heads')),
3337 _('show normal and closed heads')),
3338 ] + templateopts,
3338 ] + templateopts,
3339 _('[-r STARTREV] [REV]...')),
3339 _('[-r STARTREV] [REV]...')),
3340 "help": (help_, [], _('[TOPIC]')),
3340 "help": (help_, [], _('[TOPIC]')),
3341 "identify|id":
3341 "identify|id":
3342 (identify,
3342 (identify,
3343 [('r', 'rev', '', _('identify the specified revision')),
3343 [('r', 'rev', '', _('identify the specified revision')),
3344 ('n', 'num', None, _('show local revision number')),
3344 ('n', 'num', None, _('show local revision number')),
3345 ('i', 'id', None, _('show global revision id')),
3345 ('i', 'id', None, _('show global revision id')),
3346 ('b', 'branch', None, _('show branch')),
3346 ('b', 'branch', None, _('show branch')),
3347 ('t', 'tags', None, _('show tags'))],
3347 ('t', 'tags', None, _('show tags'))],
3348 _('[-nibt] [-r REV] [SOURCE]')),
3348 _('[-nibt] [-r REV] [SOURCE]')),
3349 "import|patch":
3349 "import|patch":
3350 (import_,
3350 (import_,
3351 [('p', 'strip', 1,
3351 [('p', 'strip', 1,
3352 _('directory strip option for patch. This has the same '
3352 _('directory strip option for patch. This has the same '
3353 'meaning as the corresponding patch option')),
3353 'meaning as the corresponding patch option')),
3354 ('b', 'base', '', _('base path')),
3354 ('b', 'base', '', _('base path')),
3355 ('f', 'force', None,
3355 ('f', 'force', None,
3356 _('skip check for outstanding uncommitted changes')),
3356 _('skip check for outstanding uncommitted changes')),
3357 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3357 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3358 ('', 'exact', None,
3358 ('', 'exact', None,
3359 _('apply patch to the nodes from which it was generated')),
3359 _('apply patch to the nodes from which it was generated')),
3360 ('', 'import-branch', None,
3360 ('', 'import-branch', None,
3361 _('use any branch information in patch (implied by --exact)'))] +
3361 _('use any branch information in patch (implied by --exact)'))] +
3362 commitopts + commitopts2 + similarityopts,
3362 commitopts + commitopts2 + similarityopts,
3363 _('[OPTION]... PATCH...')),
3363 _('[OPTION]... PATCH...')),
3364 "incoming|in":
3364 "incoming|in":
3365 (incoming,
3365 (incoming,
3366 [('f', 'force', None,
3366 [('f', 'force', None,
3367 _('run even when remote repository is unrelated')),
3367 _('run even when remote repository is unrelated')),
3368 ('n', 'newest-first', None, _('show newest record first')),
3368 ('n', 'newest-first', None, _('show newest record first')),
3369 ('', 'bundle', '', _('file to store the bundles into')),
3369 ('', 'bundle', '', _('file to store the bundles into')),
3370 ('r', 'rev', [],
3370 ('r', 'rev', [],
3371 _('a specific revision up to which you would like to pull')),
3371 _('a specific revision up to which you would like to pull')),
3372 ] + logopts + remoteopts,
3372 ] + logopts + remoteopts,
3373 _('[-p] [-n] [-M] [-f] [-r REV]...'
3373 _('[-p] [-n] [-M] [-f] [-r REV]...'
3374 ' [--bundle FILENAME] [SOURCE]')),
3374 ' [--bundle FILENAME] [SOURCE]')),
3375 "^init":
3375 "^init":
3376 (init,
3376 (init,
3377 remoteopts,
3377 remoteopts,
3378 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3378 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3379 "locate":
3379 "locate":
3380 (locate,
3380 (locate,
3381 [('r', 'rev', '', _('search the repository as it stood at REV')),
3381 [('r', 'rev', '', _('search the repository as it stood at REV')),
3382 ('0', 'print0', None,
3382 ('0', 'print0', None,
3383 _('end filenames with NUL, for use with xargs')),
3383 _('end filenames with NUL, for use with xargs')),
3384 ('f', 'fullpath', None,
3384 ('f', 'fullpath', None,
3385 _('print complete paths from the filesystem root')),
3385 _('print complete paths from the filesystem root')),
3386 ] + walkopts,
3386 ] + walkopts,
3387 _('[OPTION]... [PATTERN]...')),
3387 _('[OPTION]... [PATTERN]...')),
3388 "^log|history":
3388 "^log|history":
3389 (log,
3389 (log,
3390 [('f', 'follow', None,
3390 [('f', 'follow', None,
3391 _('follow changeset history, or file history across copies and renames')),
3391 _('follow changeset history, or file history across copies and renames')),
3392 ('', 'follow-first', None,
3392 ('', 'follow-first', None,
3393 _('only follow the first parent of merge changesets')),
3393 _('only follow the first parent of merge changesets')),
3394 ('d', 'date', '', _('show revisions matching date spec')),
3394 ('d', 'date', '', _('show revisions matching date spec')),
3395 ('C', 'copies', None, _('show copied files')),
3395 ('C', 'copies', None, _('show copied files')),
3396 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3396 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3397 ('r', 'rev', [], _('show the specified revision or range')),
3397 ('r', 'rev', [], _('show the specified revision or range')),
3398 ('', 'removed', None, _('include revisions where files were removed')),
3398 ('', 'removed', None, _('include revisions where files were removed')),
3399 ('m', 'only-merges', None, _('show only merges')),
3399 ('m', 'only-merges', None, _('show only merges')),
3400 ('u', 'user', [], _('revisions committed by user')),
3400 ('u', 'user', [], _('revisions committed by user')),
3401 ('b', 'only-branch', [],
3401 ('b', 'only-branch', [],
3402 _('show only changesets within the given named branch')),
3402 _('show only changesets within the given named branch')),
3403 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3403 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3404 ] + logopts + walkopts,
3404 ] + logopts + walkopts,
3405 _('[OPTION]... [FILE]')),
3405 _('[OPTION]... [FILE]')),
3406 "manifest":
3406 "manifest":
3407 (manifest,
3407 (manifest,
3408 [('r', 'rev', '', _('revision to display'))],
3408 [('r', 'rev', '', _('revision to display'))],
3409 _('[-r REV]')),
3409 _('[-r REV]')),
3410 "^merge":
3410 "^merge":
3411 (merge,
3411 (merge,
3412 [('f', 'force', None, _('force a merge with outstanding changes')),
3412 [('f', 'force', None, _('force a merge with outstanding changes')),
3413 ('r', 'rev', '', _('revision to merge')),
3413 ('r', 'rev', '', _('revision to merge')),
3414 ('P', 'preview', None,
3414 ('P', 'preview', None,
3415 _('review revisions to merge (no merge is performed)'))],
3415 _('review revisions to merge (no merge is performed)'))],
3416 _('[-f] [[-r] REV]')),
3416 _('[-f] [[-r] REV]')),
3417 "outgoing|out":
3417 "outgoing|out":
3418 (outgoing,
3418 (outgoing,
3419 [('f', 'force', None,
3419 [('f', 'force', None,
3420 _('run even when remote repository is unrelated')),
3420 _('run even when remote repository is unrelated')),
3421 ('r', 'rev', [],
3421 ('r', 'rev', [],
3422 _('a specific revision up to which you would like to push')),
3422 _('a specific revision up to which you would like to push')),
3423 ('n', 'newest-first', None, _('show newest record first')),
3423 ('n', 'newest-first', None, _('show newest record first')),
3424 ] + logopts + remoteopts,
3424 ] + logopts + remoteopts,
3425 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3425 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3426 "^parents":
3426 "^parents":
3427 (parents,
3427 (parents,
3428 [('r', 'rev', '', _('show parents from the specified revision')),
3428 [('r', 'rev', '', _('show parents from the specified revision')),
3429 ] + templateopts,
3429 ] + templateopts,
3430 _('[-r REV] [FILE]')),
3430 _('[-r REV] [FILE]')),
3431 "paths": (paths, [], _('[NAME]')),
3431 "paths": (paths, [], _('[NAME]')),
3432 "^pull":
3432 "^pull":
3433 (pull,
3433 (pull,
3434 [('u', 'update', None,
3434 [('u', 'update', None,
3435 _('update to new tip if changesets were pulled')),
3435 _('update to new tip if changesets were pulled')),
3436 ('f', 'force', None,
3436 ('f', 'force', None,
3437 _('run even when remote repository is unrelated')),
3437 _('run even when remote repository is unrelated')),
3438 ('r', 'rev', [],
3438 ('r', 'rev', [],
3439 _('a specific revision up to which you would like to pull')),
3439 _('a specific revision up to which you would like to pull')),
3440 ] + remoteopts,
3440 ] + remoteopts,
3441 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3441 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3442 "^push":
3442 "^push":
3443 (push,
3443 (push,
3444 [('f', 'force', None, _('force push')),
3444 [('f', 'force', None, _('force push')),
3445 ('r', 'rev', [],
3445 ('r', 'rev', [],
3446 _('a specific revision up to which you would like to push')),
3446 _('a specific revision up to which you would like to push')),
3447 ] + remoteopts,
3447 ] + remoteopts,
3448 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3448 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3449 "recover": (recover, []),
3449 "recover": (recover, []),
3450 "^remove|rm":
3450 "^remove|rm":
3451 (remove,
3451 (remove,
3452 [('A', 'after', None, _('record delete for missing files')),
3452 [('A', 'after', None, _('record delete for missing files')),
3453 ('f', 'force', None,
3453 ('f', 'force', None,
3454 _('remove (and delete) file even if added or modified')),
3454 _('remove (and delete) file even if added or modified')),
3455 ] + walkopts,
3455 ] + walkopts,
3456 _('[OPTION]... FILE...')),
3456 _('[OPTION]... FILE...')),
3457 "rename|mv":
3457 "rename|mv":
3458 (rename,
3458 (rename,
3459 [('A', 'after', None, _('record a rename that has already occurred')),
3459 [('A', 'after', None, _('record a rename that has already occurred')),
3460 ('f', 'force', None,
3460 ('f', 'force', None,
3461 _('forcibly copy over an existing managed file')),
3461 _('forcibly copy over an existing managed file')),
3462 ] + walkopts + dryrunopts,
3462 ] + walkopts + dryrunopts,
3463 _('[OPTION]... SOURCE... DEST')),
3463 _('[OPTION]... SOURCE... DEST')),
3464 "resolve":
3464 "resolve":
3465 (resolve,
3465 (resolve,
3466 [('a', 'all', None, _('remerge all unresolved files')),
3466 [('a', 'all', None, _('remerge all unresolved files')),
3467 ('l', 'list', None, _('list state of files needing merge')),
3467 ('l', 'list', None, _('list state of files needing merge')),
3468 ('m', 'mark', None, _('mark files as resolved')),
3468 ('m', 'mark', None, _('mark files as resolved')),
3469 ('u', 'unmark', None, _('unmark files as resolved'))]
3469 ('u', 'unmark', None, _('unmark files as resolved'))]
3470 + walkopts,
3470 + walkopts,
3471 _('[OPTION]... [FILE]...')),
3471 _('[OPTION]... [FILE]...')),
3472 "revert":
3472 "revert":
3473 (revert,
3473 (revert,
3474 [('a', 'all', None, _('revert all changes when no arguments given')),
3474 [('a', 'all', None, _('revert all changes when no arguments given')),
3475 ('d', 'date', '', _('tipmost revision matching date')),
3475 ('d', 'date', '', _('tipmost revision matching date')),
3476 ('r', 'rev', '', _('revision to revert to')),
3476 ('r', 'rev', '', _('revision to revert to')),
3477 ('', 'no-backup', None, _('do not save backup copies of files')),
3477 ('', 'no-backup', None, _('do not save backup copies of files')),
3478 ] + walkopts + dryrunopts,
3478 ] + walkopts + dryrunopts,
3479 _('[OPTION]... [-r REV] [NAME]...')),
3479 _('[OPTION]... [-r REV] [NAME]...')),
3480 "rollback": (rollback, []),
3480 "rollback": (rollback, []),
3481 "root": (root, []),
3481 "root": (root, []),
3482 "^serve":
3482 "^serve":
3483 (serve,
3483 (serve,
3484 [('A', 'accesslog', '', _('name of access log file to write to')),
3484 [('A', 'accesslog', '', _('name of access log file to write to')),
3485 ('d', 'daemon', None, _('run server in background')),
3485 ('d', 'daemon', None, _('run server in background')),
3486 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3486 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3487 ('E', 'errorlog', '', _('name of error log file to write to')),
3487 ('E', 'errorlog', '', _('name of error log file to write to')),
3488 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3488 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3489 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3489 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3490 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3490 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3491 ('n', 'name', '',
3491 ('n', 'name', '',
3492 _('name to show in web pages (default: working directory)')),
3492 _('name to show in web pages (default: working directory)')),
3493 ('', 'webdir-conf', '', _('name of the webdir config file'
3493 ('', 'webdir-conf', '', _('name of the webdir config file'
3494 ' (serve more than one repository)')),
3494 ' (serve more than one repository)')),
3495 ('', 'pid-file', '', _('name of file to write process ID to')),
3495 ('', 'pid-file', '', _('name of file to write process ID to')),
3496 ('', 'stdio', None, _('for remote clients')),
3496 ('', 'stdio', None, _('for remote clients')),
3497 ('t', 'templates', '', _('web templates to use')),
3497 ('t', 'templates', '', _('web templates to use')),
3498 ('', 'style', '', _('template style to use')),
3498 ('', 'style', '', _('template style to use')),
3499 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3499 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3500 ('', 'certificate', '', _('SSL certificate file'))],
3500 ('', 'certificate', '', _('SSL certificate file'))],
3501 _('[OPTION]...')),
3501 _('[OPTION]...')),
3502 "showconfig|debugconfig":
3502 "showconfig|debugconfig":
3503 (showconfig,
3503 (showconfig,
3504 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3504 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3505 _('[-u] [NAME]...')),
3505 _('[-u] [NAME]...')),
3506 "^status|st":
3506 "^status|st":
3507 (status,
3507 (status,
3508 [('A', 'all', None, _('show status of all files')),
3508 [('A', 'all', None, _('show status of all files')),
3509 ('m', 'modified', None, _('show only modified files')),
3509 ('m', 'modified', None, _('show only modified files')),
3510 ('a', 'added', None, _('show only added files')),
3510 ('a', 'added', None, _('show only added files')),
3511 ('r', 'removed', None, _('show only removed files')),
3511 ('r', 'removed', None, _('show only removed files')),
3512 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3512 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3513 ('c', 'clean', None, _('show only files without changes')),
3513 ('c', 'clean', None, _('show only files without changes')),
3514 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3514 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3515 ('i', 'ignored', None, _('show only ignored files')),
3515 ('i', 'ignored', None, _('show only ignored files')),
3516 ('n', 'no-status', None, _('hide status prefix')),
3516 ('n', 'no-status', None, _('hide status prefix')),
3517 ('C', 'copies', None, _('show source of copied files')),
3517 ('C', 'copies', None, _('show source of copied files')),
3518 ('0', 'print0', None,
3518 ('0', 'print0', None,
3519 _('end filenames with NUL, for use with xargs')),
3519 _('end filenames with NUL, for use with xargs')),
3520 ('', 'rev', [], _('show difference from revision')),
3520 ('', 'rev', [], _('show difference from revision')),
3521 ] + walkopts,
3521 ] + walkopts,
3522 _('[OPTION]... [FILE]...')),
3522 _('[OPTION]... [FILE]...')),
3523 "tag":
3523 "tag":
3524 (tag,
3524 (tag,
3525 [('f', 'force', None, _('replace existing tag')),
3525 [('f', 'force', None, _('replace existing tag')),
3526 ('l', 'local', None, _('make the tag local')),
3526 ('l', 'local', None, _('make the tag local')),
3527 ('r', 'rev', '', _('revision to tag')),
3527 ('r', 'rev', '', _('revision to tag')),
3528 ('', 'remove', None, _('remove a tag')),
3528 ('', 'remove', None, _('remove a tag')),
3529 # -l/--local is already there, commitopts cannot be used
3529 # -l/--local is already there, commitopts cannot be used
3530 ('m', 'message', '', _('use <text> as commit message')),
3530 ('m', 'message', '', _('use <text> as commit message')),
3531 ] + commitopts2,
3531 ] + commitopts2,
3532 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3532 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3533 "tags": (tags, []),
3533 "tags": (tags, []),
3534 "tip":
3534 "tip":
3535 (tip,
3535 (tip,
3536 [('p', 'patch', None, _('show patch')),
3536 [('p', 'patch', None, _('show patch')),
3537 ('g', 'git', None, _('use git extended diff format')),
3537 ('g', 'git', None, _('use git extended diff format')),
3538 ] + templateopts,
3538 ] + templateopts,
3539 _('[-p]')),
3539 _('[-p]')),
3540 "unbundle":
3540 "unbundle":
3541 (unbundle,
3541 (unbundle,
3542 [('u', 'update', None,
3542 [('u', 'update', None,
3543 _('update to new tip if changesets were unbundled'))],
3543 _('update to new tip if changesets were unbundled'))],
3544 _('[-u] FILE...')),
3544 _('[-u] FILE...')),
3545 "^update|up|checkout|co":
3545 "^update|up|checkout|co":
3546 (update,
3546 (update,
3547 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3547 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3548 ('c', 'check', None, _('check for uncommitted changes')),
3548 ('c', 'check', None, _('check for uncommitted changes')),
3549 ('d', 'date', '', _('tipmost revision matching date')),
3549 ('d', 'date', '', _('tipmost revision matching date')),
3550 ('r', 'rev', '', _('revision'))],
3550 ('r', 'rev', '', _('revision'))],
3551 _('[-C] [-d DATE] [[-r] REV]')),
3551 _('[-C] [-d DATE] [[-r] REV]')),
3552 "verify": (verify, []),
3552 "verify": (verify, []),
3553 "version": (version_, []),
3553 "version": (version_, []),
3554 }
3554 }
3555
3555
3556 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3556 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3557 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3557 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3558 optionalrepo = ("identify paths serve showconfig debugancestor")
3558 optionalrepo = ("identify paths serve showconfig debugancestor")
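The table above is the full command dispatch map: each key is a name spec where a leading "^" marks a command for the short help listing and "|" separates aliases, and each value is a (function, options, synopsis) tuple assembled from the shared option lists defined earlier. Below is a minimal sketch of looking a command up by alias in a table of this shape; the helper name and the demo entries are invented for illustration and are not Mercurial's own lookup code (which additionally resolves unambiguous prefixes).

def findcmd_in_table(name, table):
    # "^" is only a display hint for the short help list; strip it before matching.
    for spec, entry in table.items():
        aliases = spec.lstrip("^").split("|")
        if name in aliases:
            return aliases[0], entry
    return None

# Two entries shaped like the real table, used purely as demo data.
demo_table = {
    "^update|up|checkout|co": ("update-func", [], "[-C] [-d DATE] [[-r] REV]"),
    "tags": ("tags-func", [], ""),
}
print(findcmd_in_table("co", demo_table))
# -> ('update', ('update-func', [], '[-C] [-d DATE] [[-r] REV]'))
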
@@ -1,105 +1,105 b''
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 import errno, mimetypes, os
9 import errno, mimetypes, os
10
10
11 HTTP_OK = 200
11 HTTP_OK = 200
12 HTTP_BAD_REQUEST = 400
12 HTTP_BAD_REQUEST = 400
13 HTTP_UNAUTHORIZED = 401
13 HTTP_UNAUTHORIZED = 401
14 HTTP_FORBIDDEN = 403
14 HTTP_FORBIDDEN = 403
15 HTTP_NOT_FOUND = 404
15 HTTP_NOT_FOUND = 404
16 HTTP_METHOD_NOT_ALLOWED = 405
16 HTTP_METHOD_NOT_ALLOWED = 405
17 HTTP_SERVER_ERROR = 500
17 HTTP_SERVER_ERROR = 500
18
18
19 class ErrorResponse(Exception):
19 class ErrorResponse(Exception):
20 def __init__(self, code, message=None, headers=[]):
20 def __init__(self, code, message=None, headers=[]):
21 Exception.__init__(self)
21 Exception.__init__(self)
22 self.code = code
22 self.code = code
23 self.headers = headers
23 self.headers = headers
24 if message is not None:
24 if message is not None:
25 self.message = message
25 self.message = message
26 else:
26 else:
27 self.message = _statusmessage(code)
27 self.message = _statusmessage(code)
28
28
29 def _statusmessage(code):
29 def _statusmessage(code):
30 from BaseHTTPServer import BaseHTTPRequestHandler
30 from BaseHTTPServer import BaseHTTPRequestHandler
31 responses = BaseHTTPRequestHandler.responses
31 responses = BaseHTTPRequestHandler.responses
32 return responses.get(code, ('Error', 'Unknown error'))[0]
32 return responses.get(code, ('Error', 'Unknown error'))[0]
33
33
34 def statusmessage(code):
34 def statusmessage(code):
35 return '%d %s' % (code, _statusmessage(code))
35 return '%d %s' % (code, _statusmessage(code))
36
36
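statusmessage() simply pairs a numeric code with the reason phrase that BaseHTTPRequestHandler already knows, so the HTTP_* constants defined above render as standard status lines. A small usage sketch, assuming this module is importable as mercurial.hgweb.common:

from mercurial.hgweb.common import statusmessage, HTTP_NOT_FOUND

print(statusmessage(HTTP_NOT_FOUND))   # "404 Not Found"
print(statusmessage(999))              # "999 Error" -- unknown codes fall back to 'Error'
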
37 def get_mtime(repo_path):
37 def get_mtime(repo_path):
38 store_path = os.path.join(repo_path, ".hg")
38 store_path = os.path.join(repo_path, ".hg")
39 if not os.path.isdir(os.path.join(store_path, "data")):
39 if not os.path.isdir(os.path.join(store_path, "data")):
40 store_path = os.path.join(store_path, "store")
40 store_path = os.path.join(store_path, "store")
41 cl_path = os.path.join(store_path, "00changelog.i")
41 cl_path = os.path.join(store_path, "00changelog.i")
42 if os.path.exists(cl_path):
42 if os.path.exists(cl_path):
43 return os.stat(cl_path).st_mtime
43 return os.stat(cl_path).st_mtime
44 else:
44 else:
45 return os.stat(store_path).st_mtime
45 return os.stat(store_path).st_mtime
46
46
47 def staticfile(directory, fname, req):
47 def staticfile(directory, fname, req):
48 """return a file inside directory with guessed Content-Type header
48 """return a file inside directory with guessed Content-Type header
49
49
50 fname always uses '/' as directory separator and isn't allowed to
50 fname always uses '/' as directory separator and isn't allowed to
51 contain unusual path components.
51 contain unusual path components.
52 Content-Type is guessed using the mimetypes module.
52 Content-Type is guessed using the mimetypes module.
53 Return an empty string if fname is illegal or file not found.
53 Return an empty string if fname is illegal or file not found.
54
54
55 """
55 """
56 parts = fname.split('/')
56 parts = fname.split('/')
57 for part in parts:
57 for part in parts:
58 if (part in ('', os.curdir, os.pardir) or
58 if (part in ('', os.curdir, os.pardir) or
59 os.sep in part or os.altsep is not None and os.altsep in part):
59 os.sep in part or os.altsep is not None and os.altsep in part):
60 return ""
60 return ""
61 fpath = os.path.join(*parts)
61 fpath = os.path.join(*parts)
62 if isinstance(directory, str):
62 if isinstance(directory, str):
63 directory = [directory]
63 directory = [directory]
64 for d in directory:
64 for d in directory:
65 path = os.path.join(d, fpath)
65 path = os.path.join(d, fpath)
66 if os.path.exists(path):
66 if os.path.exists(path):
67 break
67 break
68 try:
68 try:
69 os.stat(path)
69 os.stat(path)
70 ct = mimetypes.guess_type(path)[0] or "text/plain"
70 ct = mimetypes.guess_type(path)[0] or "text/plain"
71 req.respond(HTTP_OK, ct, length = os.path.getsize(path))
71 req.respond(HTTP_OK, ct, length = os.path.getsize(path))
72 return file(path, 'rb').read()
72 return open(path, 'rb').read()
73 except TypeError:
73 except TypeError:
74 raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename')
74 raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename')
75 except OSError, err:
75 except OSError, err:
76 if err.errno == errno.ENOENT:
76 if err.errno == errno.ENOENT:
77 raise ErrorResponse(HTTP_NOT_FOUND)
77 raise ErrorResponse(HTTP_NOT_FOUND)
78 else:
78 else:
79 raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
79 raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
80
80
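The only functional change in staticfile() above is the switch from file(path, 'rb') to open(path, 'rb'). The two builtins behave identically here under Python 2, but file() does not exist in Python 3, which is why this commit swaps it out tree-wide. A self-contained sketch of the pattern; the temporary file exists only so the example actually runs:

import os, tempfile

fd, path = tempfile.mkstemp()
os.write(fd, b"hello")
os.close(fd)

# Before (Python 2 only):  data = file(path, 'rb').read()
# After (portable):
data = open(path, 'rb').read()
assert data == b"hello"
os.unlink(path)
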
81 def paritygen(stripecount, offset=0):
81 def paritygen(stripecount, offset=0):
82 """count parity of horizontal stripes for easier reading"""
82 """count parity of horizontal stripes for easier reading"""
83 if stripecount and offset:
83 if stripecount and offset:
84 # account for offset, e.g. due to building the list in reverse
84 # account for offset, e.g. due to building the list in reverse
85 count = (stripecount + offset) % stripecount
85 count = (stripecount + offset) % stripecount
86 parity = (stripecount + offset) / stripecount & 1
86 parity = (stripecount + offset) / stripecount & 1
87 else:
87 else:
88 count = 0
88 count = 0
89 parity = 0
89 parity = 0
90 while True:
90 while True:
91 yield parity
91 yield parity
92 count += 1
92 count += 1
93 if stripecount and count >= stripecount:
93 if stripecount and count >= stripecount:
94 parity = 1 - parity
94 parity = 1 - parity
95 count = 0
95 count = 0
96
96
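Given the implementation above, paritygen(2) yields two rows of parity 0, then two of parity 1, and so on, which hgweb uses to shade alternating groups of table rows; stripecount=0 disables striping entirely. A quick check, assuming the module is importable as mercurial.hgweb.common:

from itertools import islice
from mercurial.hgweb.common import paritygen

print(list(islice(paritygen(2), 8)))   # [0, 0, 1, 1, 0, 0, 1, 1]
print(list(islice(paritygen(0), 4)))   # [0, 0, 0, 0] -- no striping
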
97 def get_contact(config):
97 def get_contact(config):
98 """Return repo contact information or empty string.
98 """Return repo contact information or empty string.
99
99
100 web.contact is the primary source, but if that is not set, try
100 web.contact is the primary source, but if that is not set, try
101 ui.username or $EMAIL as a fallback to display something useful.
101 ui.username or $EMAIL as a fallback to display something useful.
102 """
102 """
103 return (config("web", "contact") or
103 return (config("web", "contact") or
104 config("ui", "username") or
104 config("ui", "username") or
105 os.environ.get("EMAIL") or "")
105 os.environ.get("EMAIL") or "")
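get_contact() accepts any callable with a (section, name) signature, so a dict-backed stub is enough to see the fallback order (web.contact, then ui.username, then $EMAIL). The settings values below are invented for the example:

from mercurial.hgweb.common import get_contact

settings = {("ui", "username"): "Jane Doe <jane@example.com>"}   # web.contact left unset
contact = get_contact(lambda section, name: settings.get((section, name)))
print(contact)   # "Jane Doe <jane@example.com>" -- ui.username wins over the missing web.contact
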
@@ -1,1434 +1,1434 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
12 import cStringIO, email.Parser, os, re, math
12 import cStringIO, email.Parser, os, re, math
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16
16
17 class PatchError(Exception):
17 class PatchError(Exception):
18 pass
18 pass
19
19
20 class NoHunks(PatchError):
20 class NoHunks(PatchError):
21 pass
21 pass
22
22
23 # helper functions
23 # helper functions
24
24
25 def copyfile(src, dst, basedir):
25 def copyfile(src, dst, basedir):
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 if os.path.exists(absdst):
27 if os.path.exists(absdst):
28 raise util.Abort(_("cannot create %s: destination already exists") %
28 raise util.Abort(_("cannot create %s: destination already exists") %
29 dst)
29 dst)
30
30
31 dstdir = os.path.dirname(absdst)
31 dstdir = os.path.dirname(absdst)
32 if dstdir and not os.path.isdir(dstdir):
32 if dstdir and not os.path.isdir(dstdir):
33 try:
33 try:
34 os.makedirs(dstdir)
34 os.makedirs(dstdir)
35 except IOError:
35 except IOError:
36 raise util.Abort(
36 raise util.Abort(
37 _("cannot create %s: unable to create destination directory")
37 _("cannot create %s: unable to create destination directory")
38 % dst)
38 % dst)
39
39
40 util.copyfile(abssrc, absdst)
40 util.copyfile(abssrc, absdst)
41
41
42 # public functions
42 # public functions
43
43
44 def extract(ui, fileobj):
44 def extract(ui, fileobj):
45 '''extract patch from data read from fileobj.
45 '''extract patch from data read from fileobj.
46
46
47 patch can be a normal patch or contained in an email message.
47 patch can be a normal patch or contained in an email message.
48
48
49 return tuple (filename, message, user, date, node, p1, p2).
49 return tuple (filename, message, user, date, node, p1, p2).
50 Any item in the returned tuple can be None. If filename is None,
50 Any item in the returned tuple can be None. If filename is None,
51 fileobj did not contain a patch. Caller must unlink filename when done.'''
51 fileobj did not contain a patch. Caller must unlink filename when done.'''
52
52
53 # attempt to detect the start of a patch
53 # attempt to detect the start of a patch
54 # (this heuristic is borrowed from quilt)
54 # (this heuristic is borrowed from quilt)
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
56 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
56 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
57 r'(---|\*\*\*)[ \t])', re.MULTILINE)
57 r'(---|\*\*\*)[ \t])', re.MULTILINE)
58
58
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
60 tmpfp = os.fdopen(fd, 'w')
60 tmpfp = os.fdopen(fd, 'w')
61 try:
61 try:
62 msg = email.Parser.Parser().parse(fileobj)
62 msg = email.Parser.Parser().parse(fileobj)
63
63
64 subject = msg['Subject']
64 subject = msg['Subject']
65 user = msg['From']
65 user = msg['From']
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
67 # should try to parse msg['Date']
67 # should try to parse msg['Date']
68 date = None
68 date = None
69 nodeid = None
69 nodeid = None
70 branch = None
70 branch = None
71 parents = []
71 parents = []
72
72
73 if subject:
73 if subject:
74 if subject.startswith('[PATCH'):
74 if subject.startswith('[PATCH'):
75 pend = subject.find(']')
75 pend = subject.find(']')
76 if pend >= 0:
76 if pend >= 0:
77 subject = subject[pend+1:].lstrip()
77 subject = subject[pend+1:].lstrip()
78 subject = subject.replace('\n\t', ' ')
78 subject = subject.replace('\n\t', ' ')
79 ui.debug('Subject: %s\n' % subject)
79 ui.debug('Subject: %s\n' % subject)
80 if user:
80 if user:
81 ui.debug('From: %s\n' % user)
81 ui.debug('From: %s\n' % user)
82 diffs_seen = 0
82 diffs_seen = 0
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
84 message = ''
84 message = ''
85 for part in msg.walk():
85 for part in msg.walk():
86 content_type = part.get_content_type()
86 content_type = part.get_content_type()
87 ui.debug('Content-Type: %s\n' % content_type)
87 ui.debug('Content-Type: %s\n' % content_type)
88 if content_type not in ok_types:
88 if content_type not in ok_types:
89 continue
89 continue
90 payload = part.get_payload(decode=True)
90 payload = part.get_payload(decode=True)
91 m = diffre.search(payload)
91 m = diffre.search(payload)
92 if m:
92 if m:
93 hgpatch = False
93 hgpatch = False
94 ignoretext = False
94 ignoretext = False
95
95
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
97 diffs_seen += 1
97 diffs_seen += 1
98 cfp = cStringIO.StringIO()
98 cfp = cStringIO.StringIO()
99 for line in payload[:m.start(0)].splitlines():
99 for line in payload[:m.start(0)].splitlines():
100 if line.startswith('# HG changeset patch'):
100 if line.startswith('# HG changeset patch'):
101 ui.debug(_('patch generated by hg export\n'))
101 ui.debug(_('patch generated by hg export\n'))
102 hgpatch = True
102 hgpatch = True
103 # drop earlier commit message content
103 # drop earlier commit message content
104 cfp.seek(0)
104 cfp.seek(0)
105 cfp.truncate()
105 cfp.truncate()
106 subject = None
106 subject = None
107 elif hgpatch:
107 elif hgpatch:
108 if line.startswith('# User '):
108 if line.startswith('# User '):
109 user = line[7:]
109 user = line[7:]
110 ui.debug('From: %s\n' % user)
110 ui.debug('From: %s\n' % user)
111 elif line.startswith("# Date "):
111 elif line.startswith("# Date "):
112 date = line[7:]
112 date = line[7:]
113 elif line.startswith("# Branch "):
113 elif line.startswith("# Branch "):
114 branch = line[9:]
114 branch = line[9:]
115 elif line.startswith("# Node ID "):
115 elif line.startswith("# Node ID "):
116 nodeid = line[10:]
116 nodeid = line[10:]
117 elif line.startswith("# Parent "):
117 elif line.startswith("# Parent "):
118 parents.append(line[10:])
118 parents.append(line[10:])
119 elif line == '---' and gitsendmail:
119 elif line == '---' and gitsendmail:
120 ignoretext = True
120 ignoretext = True
121 if not line.startswith('# ') and not ignoretext:
121 if not line.startswith('# ') and not ignoretext:
122 cfp.write(line)
122 cfp.write(line)
123 cfp.write('\n')
123 cfp.write('\n')
124 message = cfp.getvalue()
124 message = cfp.getvalue()
125 if tmpfp:
125 if tmpfp:
126 tmpfp.write(payload)
126 tmpfp.write(payload)
127 if not payload.endswith('\n'):
127 if not payload.endswith('\n'):
128 tmpfp.write('\n')
128 tmpfp.write('\n')
129 elif not diffs_seen and message and content_type == 'text/plain':
129 elif not diffs_seen and message and content_type == 'text/plain':
130 message += '\n' + payload
130 message += '\n' + payload
131 except:
131 except:
132 tmpfp.close()
132 tmpfp.close()
133 os.unlink(tmpname)
133 os.unlink(tmpname)
134 raise
134 raise
135
135
136 if subject and not message.startswith(subject):
136 if subject and not message.startswith(subject):
137 message = '%s\n%s' % (subject, message)
137 message = '%s\n%s' % (subject, message)
138 tmpfp.close()
138 tmpfp.close()
139 if not diffs_seen:
139 if not diffs_seen:
140 os.unlink(tmpname)
140 os.unlink(tmpname)
141 return None, message, user, date, branch, None, None, None
141 return None, message, user, date, branch, None, None, None
142 p1 = parents and parents.pop(0) or None
142 p1 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
144 return tmpname, message, user, date, branch, nodeid, p1, p2
144 return tmpname, message, user, date, branch, nodeid, p1, p2
145
145
146 GP_PATCH = 1 << 0 # we have to run patch
146 GP_PATCH = 1 << 0 # we have to run patch
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 GP_BINARY = 1 << 2 # there's a binary patch
148 GP_BINARY = 1 << 2 # there's a binary patch
149
149
150 class patchmeta(object):
150 class patchmeta(object):
151 """Patched file metadata
151 """Patched file metadata
152
152
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
157 'islink' is True if the file is a symlink and 'isexec' is True if
157 'islink' is True if the file is a symlink and 'isexec' is True if
158 the file is executable. Otherwise, 'mode' is None.
158 the file is executable. Otherwise, 'mode' is None.
159 """
159 """
160 def __init__(self, path):
160 def __init__(self, path):
161 self.path = path
161 self.path = path
162 self.oldpath = None
162 self.oldpath = None
163 self.mode = None
163 self.mode = None
164 self.op = 'MODIFY'
164 self.op = 'MODIFY'
165 self.lineno = 0
165 self.lineno = 0
166 self.binary = False
166 self.binary = False
167
167
168 def setmode(self, mode):
168 def setmode(self, mode):
169 islink = mode & 020000
169 islink = mode & 020000
170 isexec = mode & 0100
170 isexec = mode & 0100
171 self.mode = (islink, isexec)
171 self.mode = (islink, isexec)
172
172
173 def readgitpatch(lr):
173 def readgitpatch(lr):
174 """extract git-style metadata about patches from <patchname>"""
174 """extract git-style metadata about patches from <patchname>"""
175
175
176 # Filter patch for git information
176 # Filter patch for git information
177 gp = None
177 gp = None
178 gitpatches = []
178 gitpatches = []
179 # Can have a git patch with only metadata, causing patch to complain
179 # Can have a git patch with only metadata, causing patch to complain
180 dopatch = 0
180 dopatch = 0
181
181
182 lineno = 0
182 lineno = 0
183 for line in lr:
183 for line in lr:
184 lineno += 1
184 lineno += 1
185 if line.startswith('diff --git'):
185 if line.startswith('diff --git'):
186 m = gitre.match(line)
186 m = gitre.match(line)
187 if m:
187 if m:
188 if gp:
188 if gp:
189 gitpatches.append(gp)
189 gitpatches.append(gp)
190 src, dst = m.group(1, 2)
190 src, dst = m.group(1, 2)
191 gp = patchmeta(dst)
191 gp = patchmeta(dst)
192 gp.lineno = lineno
192 gp.lineno = lineno
193 elif gp:
193 elif gp:
194 if line.startswith('--- '):
194 if line.startswith('--- '):
195 if gp.op in ('COPY', 'RENAME'):
195 if gp.op in ('COPY', 'RENAME'):
196 dopatch |= GP_FILTER
196 dopatch |= GP_FILTER
197 gitpatches.append(gp)
197 gitpatches.append(gp)
198 gp = None
198 gp = None
199 dopatch |= GP_PATCH
199 dopatch |= GP_PATCH
200 continue
200 continue
201 if line.startswith('rename from '):
201 if line.startswith('rename from '):
202 gp.op = 'RENAME'
202 gp.op = 'RENAME'
203 gp.oldpath = line[12:].rstrip()
203 gp.oldpath = line[12:].rstrip()
204 elif line.startswith('rename to '):
204 elif line.startswith('rename to '):
205 gp.path = line[10:].rstrip()
205 gp.path = line[10:].rstrip()
206 elif line.startswith('copy from '):
206 elif line.startswith('copy from '):
207 gp.op = 'COPY'
207 gp.op = 'COPY'
208 gp.oldpath = line[10:].rstrip()
208 gp.oldpath = line[10:].rstrip()
209 elif line.startswith('copy to '):
209 elif line.startswith('copy to '):
210 gp.path = line[8:].rstrip()
210 gp.path = line[8:].rstrip()
211 elif line.startswith('deleted file'):
211 elif line.startswith('deleted file'):
212 gp.op = 'DELETE'
212 gp.op = 'DELETE'
213 # is the deleted file a symlink?
213 # is the deleted file a symlink?
214 gp.setmode(int(line.rstrip()[-6:], 8))
214 gp.setmode(int(line.rstrip()[-6:], 8))
215 elif line.startswith('new file mode '):
215 elif line.startswith('new file mode '):
216 gp.op = 'ADD'
216 gp.op = 'ADD'
217 gp.setmode(int(line.rstrip()[-6:], 8))
217 gp.setmode(int(line.rstrip()[-6:], 8))
218 elif line.startswith('new mode '):
218 elif line.startswith('new mode '):
219 gp.setmode(int(line.rstrip()[-6:], 8))
219 gp.setmode(int(line.rstrip()[-6:], 8))
220 elif line.startswith('GIT binary patch'):
220 elif line.startswith('GIT binary patch'):
221 dopatch |= GP_BINARY
221 dopatch |= GP_BINARY
222 gp.binary = True
222 gp.binary = True
223 if gp:
223 if gp:
224 gitpatches.append(gp)
224 gitpatches.append(gp)
225
225
226 if not gitpatches:
226 if not gitpatches:
227 dopatch = GP_PATCH
227 dopatch = GP_PATCH
228
228
229 return (dopatch, gitpatches)
229 return (dopatch, gitpatches)
230
230
231 class linereader(object):
231 class linereader(object):
232 # simple class to allow pushing lines back into the input stream
232 # simple class to allow pushing lines back into the input stream
233 def __init__(self, fp, textmode=False):
233 def __init__(self, fp, textmode=False):
234 self.fp = fp
234 self.fp = fp
235 self.buf = []
235 self.buf = []
236 self.textmode = textmode
236 self.textmode = textmode
237
237
238 def push(self, line):
238 def push(self, line):
239 if line is not None:
239 if line is not None:
240 self.buf.append(line)
240 self.buf.append(line)
241
241
242 def readline(self):
242 def readline(self):
243 if self.buf:
243 if self.buf:
244 l = self.buf[0]
244 l = self.buf[0]
245 del self.buf[0]
245 del self.buf[0]
246 return l
246 return l
247 l = self.fp.readline()
247 l = self.fp.readline()
248 if self.textmode and l.endswith('\r\n'):
248 if self.textmode and l.endswith('\r\n'):
249 l = l[:-2] + '\n'
249 l = l[:-2] + '\n'
250 return l
250 return l
251
251
252 def __iter__(self):
252 def __iter__(self):
253 while 1:
253 while 1:
254 l = self.readline()
254 l = self.readline()
255 if not l:
255 if not l:
256 break
256 break
257 yield l
257 yield l
258
258
259 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
259 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
260 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
260 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
261 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
261 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
262
262
263 class patchfile(object):
263 class patchfile(object):
264 def __init__(self, ui, fname, opener, missing=False, eol=None):
264 def __init__(self, ui, fname, opener, missing=False, eol=None):
265 self.fname = fname
265 self.fname = fname
266 self.eol = eol
266 self.eol = eol
267 self.opener = opener
267 self.opener = opener
268 self.ui = ui
268 self.ui = ui
269 self.lines = []
269 self.lines = []
270 self.exists = False
270 self.exists = False
271 self.missing = missing
271 self.missing = missing
272 if not missing:
272 if not missing:
273 try:
273 try:
274 self.lines = self.readlines(fname)
274 self.lines = self.readlines(fname)
275 self.exists = True
275 self.exists = True
276 except IOError:
276 except IOError:
277 pass
277 pass
278 else:
278 else:
279 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
279 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
280
280
281 self.hash = {}
281 self.hash = {}
282 self.dirty = 0
282 self.dirty = 0
283 self.offset = 0
283 self.offset = 0
284 self.rej = []
284 self.rej = []
285 self.fileprinted = False
285 self.fileprinted = False
286 self.printfile(False)
286 self.printfile(False)
287 self.hunks = 0
287 self.hunks = 0
288
288
289 def readlines(self, fname):
289 def readlines(self, fname):
290 fp = self.opener(fname, 'r')
290 fp = self.opener(fname, 'r')
291 try:
291 try:
292 return list(linereader(fp, self.eol is not None))
292 return list(linereader(fp, self.eol is not None))
293 finally:
293 finally:
294 fp.close()
294 fp.close()
295
295
296 def writelines(self, fname, lines):
296 def writelines(self, fname, lines):
297 fp = self.opener(fname, 'w')
297 fp = self.opener(fname, 'w')
298 try:
298 try:
299 if self.eol and self.eol != '\n':
299 if self.eol and self.eol != '\n':
300 for l in lines:
300 for l in lines:
301 if l and l[-1] == '\n':
301 if l and l[-1] == '\n':
302 l = l[:-1] + self.eol
302 l = l[:-1] + self.eol
303 fp.write(l)
303 fp.write(l)
304 else:
304 else:
305 fp.writelines(lines)
305 fp.writelines(lines)
306 finally:
306 finally:
307 fp.close()
307 fp.close()
308
308
309 def unlink(self, fname):
309 def unlink(self, fname):
310 os.unlink(fname)
310 os.unlink(fname)
311
311
312 def printfile(self, warn):
312 def printfile(self, warn):
313 if self.fileprinted:
313 if self.fileprinted:
314 return
314 return
315 if warn or self.ui.verbose:
315 if warn or self.ui.verbose:
316 self.fileprinted = True
316 self.fileprinted = True
317 s = _("patching file %s\n") % self.fname
317 s = _("patching file %s\n") % self.fname
318 if warn:
318 if warn:
319 self.ui.warn(s)
319 self.ui.warn(s)
320 else:
320 else:
321 self.ui.note(s)
321 self.ui.note(s)
322
322
323
323
324 def findlines(self, l, linenum):
324 def findlines(self, l, linenum):
325 # looks through the hash and finds candidate lines. The
325 # looks through the hash and finds candidate lines. The
326 # result is a list of line numbers sorted based on distance
326 # result is a list of line numbers sorted based on distance
327 # from linenum
327 # from linenum
328 def sorter(a, b):
328 def sorter(a, b):
329 vala = abs(a - linenum)
329 vala = abs(a - linenum)
330 valb = abs(b - linenum)
330 valb = abs(b - linenum)
331 return cmp(vala, valb)
331 return cmp(vala, valb)
332
332
333 try:
333 try:
334 cand = self.hash[l]
334 cand = self.hash[l]
335 except:
335 except:
336 return []
336 return []
337
337
338 if len(cand) > 1:
338 if len(cand) > 1:
339 # resort our list of potentials forward then back.
339 # resort our list of potentials forward then back.
340 cand.sort(sorter)
340 cand.sort(sorter)
341 return cand
341 return cand
342
342
343 def hashlines(self):
343 def hashlines(self):
344 self.hash = {}
344 self.hash = {}
345 for x, s in enumerate(self.lines):
345 for x, s in enumerate(self.lines):
346 self.hash.setdefault(s, []).append(x)
346 self.hash.setdefault(s, []).append(x)
347
347
348 def write_rej(self):
348 def write_rej(self):
349 # our rejects are a little different from patch(1). This always
349 # our rejects are a little different from patch(1). This always
350 # creates rejects in the same form as the original patch. A file
350 # creates rejects in the same form as the original patch. A file
351 # header is inserted so that you can run the reject through patch again
351 # header is inserted so that you can run the reject through patch again
352 # without having to type the filename.
352 # without having to type the filename.
353
353
354 if not self.rej:
354 if not self.rej:
355 return
355 return
356
356
357 fname = self.fname + ".rej"
357 fname = self.fname + ".rej"
358 self.ui.warn(
358 self.ui.warn(
359 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
359 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
360 (len(self.rej), self.hunks, fname))
360 (len(self.rej), self.hunks, fname))
361
361
362 def rejlines():
362 def rejlines():
363 base = os.path.basename(self.fname)
363 base = os.path.basename(self.fname)
364 yield "--- %s\n+++ %s\n" % (base, base)
364 yield "--- %s\n+++ %s\n" % (base, base)
365 for x in self.rej:
365 for x in self.rej:
366 for l in x.hunk:
366 for l in x.hunk:
367 yield l
367 yield l
368 if l[-1] != '\n':
368 if l[-1] != '\n':
369 yield "\n\ No newline at end of file\n"
369 yield "\n\ No newline at end of file\n"
370
370
371 self.writelines(fname, rejlines())
371 self.writelines(fname, rejlines())
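# Editor's illustrative note (not part of the changeset): given the header
# emitted by rejlines() above, a reject file "foo.c.rej" might look like
# this (the hunk content is hypothetical):
#
#     --- foo.c
#     +++ foo.c
#     @@ -10,2 +10,3 @@
#      context line
#     +line that failed to apply
#      more context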
372
372
373 def write(self, dest=None):
373 def write(self, dest=None):
374 if not self.dirty:
374 if not self.dirty:
375 return
375 return
376 if not dest:
376 if not dest:
377 dest = self.fname
377 dest = self.fname
378 self.writelines(dest, self.lines)
378 self.writelines(dest, self.lines)
379
379
380 def close(self):
380 def close(self):
381 self.write()
381 self.write()
382 self.write_rej()
382 self.write_rej()
383
383
384 def apply(self, h, reverse):
384 def apply(self, h, reverse):
385 if not h.complete():
385 if not h.complete():
386 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
386 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
387 (h.number, h.desc, len(h.a), h.lena, len(h.b),
387 (h.number, h.desc, len(h.a), h.lena, len(h.b),
388 h.lenb))
388 h.lenb))
389
389
390 self.hunks += 1
390 self.hunks += 1
391 if reverse:
391 if reverse:
392 h.reverse()
392 h.reverse()
393
393
394 if self.missing:
394 if self.missing:
395 self.rej.append(h)
395 self.rej.append(h)
396 return -1
396 return -1
397
397
398 if self.exists and h.createfile():
398 if self.exists and h.createfile():
399 self.ui.warn(_("file %s already exists\n") % self.fname)
399 self.ui.warn(_("file %s already exists\n") % self.fname)
400 self.rej.append(h)
400 self.rej.append(h)
401 return -1
401 return -1
402
402
403 if isinstance(h, githunk):
403 if isinstance(h, githunk):
404 if h.rmfile():
404 if h.rmfile():
405 self.unlink(self.fname)
405 self.unlink(self.fname)
406 else:
406 else:
407 self.lines[:] = h.new()
407 self.lines[:] = h.new()
408 self.offset += len(h.new())
408 self.offset += len(h.new())
409 self.dirty = 1
409 self.dirty = 1
410 return 0
410 return 0
411
411
412 # fast case first, no offsets, no fuzz
412 # fast case first, no offsets, no fuzz
413 old = h.old()
413 old = h.old()
414 # patch starts counting at 1 unless we are adding the file
414 # patch starts counting at 1 unless we are adding the file
415 if h.starta == 0:
415 if h.starta == 0:
416 start = 0
416 start = 0
417 else:
417 else:
418 start = h.starta + self.offset - 1
418 start = h.starta + self.offset - 1
419 orig_start = start
419 orig_start = start
420 if diffhelpers.testhunk(old, self.lines, start) == 0:
420 if diffhelpers.testhunk(old, self.lines, start) == 0:
421 if h.rmfile():
421 if h.rmfile():
422 self.unlink(self.fname)
422 self.unlink(self.fname)
423 else:
423 else:
424 self.lines[start : start + h.lena] = h.new()
424 self.lines[start : start + h.lena] = h.new()
425 self.offset += h.lenb - h.lena
425 self.offset += h.lenb - h.lena
426 self.dirty = 1
426 self.dirty = 1
427 return 0
427 return 0
428
428
429 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
429 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
430 self.hashlines()
430 self.hashlines()
431 if h.hunk[-1][0] != ' ':
431 if h.hunk[-1][0] != ' ':
432 # if the hunk tried to put something at the bottom of the file
432 # if the hunk tried to put something at the bottom of the file
433 # override the start line and use eof here
433 # override the start line and use eof here
434 search_start = len(self.lines)
434 search_start = len(self.lines)
435 else:
435 else:
436 search_start = orig_start
436 search_start = orig_start
437
437
438 for fuzzlen in xrange(3):
438 for fuzzlen in xrange(3):
439 for toponly in [ True, False ]:
439 for toponly in [ True, False ]:
440 old = h.old(fuzzlen, toponly)
440 old = h.old(fuzzlen, toponly)
441
441
442 cand = self.findlines(old[0][1:], search_start)
442 cand = self.findlines(old[0][1:], search_start)
443 for l in cand:
443 for l in cand:
444 if diffhelpers.testhunk(old, self.lines, l) == 0:
444 if diffhelpers.testhunk(old, self.lines, l) == 0:
445 newlines = h.new(fuzzlen, toponly)
445 newlines = h.new(fuzzlen, toponly)
446 self.lines[l : l + len(old)] = newlines
446 self.lines[l : l + len(old)] = newlines
447 self.offset += len(newlines) - len(old)
447 self.offset += len(newlines) - len(old)
448 self.dirty = 1
448 self.dirty = 1
449 if fuzzlen:
449 if fuzzlen:
450 fuzzstr = "with fuzz %d " % fuzzlen
450 fuzzstr = "with fuzz %d " % fuzzlen
451 f = self.ui.warn
451 f = self.ui.warn
452 self.printfile(True)
452 self.printfile(True)
453 else:
453 else:
454 fuzzstr = ""
454 fuzzstr = ""
455 f = self.ui.note
455 f = self.ui.note
456 offset = l - orig_start - fuzzlen
456 offset = l - orig_start - fuzzlen
457 if offset == 1:
457 if offset == 1:
458 msg = _("Hunk #%d succeeded at %d %s"
458 msg = _("Hunk #%d succeeded at %d %s"
459 "(offset %d line).\n")
459 "(offset %d line).\n")
460 else:
460 else:
461 msg = _("Hunk #%d succeeded at %d %s"
461 msg = _("Hunk #%d succeeded at %d %s"
462 "(offset %d lines).\n")
462 "(offset %d lines).\n")
463 f(msg % (h.number, l+1, fuzzstr, offset))
463 f(msg % (h.number, l+1, fuzzstr, offset))
464 return fuzzlen
464 return fuzzlen
465 self.printfile(True)
465 self.printfile(True)
466 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
466 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
467 self.rej.append(h)
467 self.rej.append(h)
468 return -1
468 return -1
469
469
470 class hunk(object):
470 class hunk(object):
471 def __init__(self, desc, num, lr, context, create=False, remove=False):
471 def __init__(self, desc, num, lr, context, create=False, remove=False):
472 self.number = num
472 self.number = num
473 self.desc = desc
473 self.desc = desc
474 self.hunk = [ desc ]
474 self.hunk = [ desc ]
475 self.a = []
475 self.a = []
476 self.b = []
476 self.b = []
477 if context:
477 if context:
478 self.read_context_hunk(lr)
478 self.read_context_hunk(lr)
479 else:
479 else:
480 self.read_unified_hunk(lr)
480 self.read_unified_hunk(lr)
481 self.create = create
481 self.create = create
482 self.remove = remove and not create
482 self.remove = remove and not create
483
483
484 def read_unified_hunk(self, lr):
484 def read_unified_hunk(self, lr):
485 m = unidesc.match(self.desc)
485 m = unidesc.match(self.desc)
486 if not m:
486 if not m:
487 raise PatchError(_("bad hunk #%d") % self.number)
487 raise PatchError(_("bad hunk #%d") % self.number)
488 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
488 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
489 if self.lena is None:
489 if self.lena is None:
490 self.lena = 1
490 self.lena = 1
491 else:
491 else:
492 self.lena = int(self.lena)
492 self.lena = int(self.lena)
493 if self.lenb is None:
493 if self.lenb is None:
494 self.lenb = 1
494 self.lenb = 1
495 else:
495 else:
496 self.lenb = int(self.lenb)
496 self.lenb = int(self.lenb)
497 self.starta = int(self.starta)
497 self.starta = int(self.starta)
498 self.startb = int(self.startb)
498 self.startb = int(self.startb)
499 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
499 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
500 # if we hit eof before finishing out the hunk, the last line will
500 # if we hit eof before finishing out the hunk, the last line will
501 # be zero length. Let's try to fix it up.
501 # be zero length. Let's try to fix it up.
502 while len(self.hunk[-1]) == 0:
502 while len(self.hunk[-1]) == 0:
503 del self.hunk[-1]
503 del self.hunk[-1]
504 del self.a[-1]
504 del self.a[-1]
505 del self.b[-1]
505 del self.b[-1]
506 self.lena -= 1
506 self.lena -= 1
507 self.lenb -= 1
507 self.lenb -= 1
508
508
509 def read_context_hunk(self, lr):
509 def read_context_hunk(self, lr):
510 self.desc = lr.readline()
510 self.desc = lr.readline()
511 m = contextdesc.match(self.desc)
511 m = contextdesc.match(self.desc)
512 if not m:
512 if not m:
513 raise PatchError(_("bad hunk #%d") % self.number)
513 raise PatchError(_("bad hunk #%d") % self.number)
514 foo, self.starta, foo2, aend, foo3 = m.groups()
514 foo, self.starta, foo2, aend, foo3 = m.groups()
515 self.starta = int(self.starta)
515 self.starta = int(self.starta)
516 if aend is None:
516 if aend is None:
517 aend = self.starta
517 aend = self.starta
518 self.lena = int(aend) - self.starta
518 self.lena = int(aend) - self.starta
519 if self.starta:
519 if self.starta:
520 self.lena += 1
520 self.lena += 1
521 for x in xrange(self.lena):
521 for x in xrange(self.lena):
522 l = lr.readline()
522 l = lr.readline()
523 if l.startswith('---'):
523 if l.startswith('---'):
524 lr.push(l)
524 lr.push(l)
525 break
525 break
526 s = l[2:]
526 s = l[2:]
527 if l.startswith('- ') or l.startswith('! '):
527 if l.startswith('- ') or l.startswith('! '):
528 u = '-' + s
528 u = '-' + s
529 elif l.startswith(' '):
529 elif l.startswith(' '):
530 u = ' ' + s
530 u = ' ' + s
531 else:
531 else:
532 raise PatchError(_("bad hunk #%d old text line %d") %
532 raise PatchError(_("bad hunk #%d old text line %d") %
533 (self.number, x))
533 (self.number, x))
534 self.a.append(u)
534 self.a.append(u)
535 self.hunk.append(u)
535 self.hunk.append(u)
536
536
537 l = lr.readline()
537 l = lr.readline()
538 if l.startswith('\ '):
538 if l.startswith('\ '):
539 s = self.a[-1][:-1]
539 s = self.a[-1][:-1]
540 self.a[-1] = s
540 self.a[-1] = s
541 self.hunk[-1] = s
541 self.hunk[-1] = s
542 l = lr.readline()
542 l = lr.readline()
543 m = contextdesc.match(l)
543 m = contextdesc.match(l)
544 if not m:
544 if not m:
545 raise PatchError(_("bad hunk #%d") % self.number)
545 raise PatchError(_("bad hunk #%d") % self.number)
546 foo, self.startb, foo2, bend, foo3 = m.groups()
546 foo, self.startb, foo2, bend, foo3 = m.groups()
547 self.startb = int(self.startb)
547 self.startb = int(self.startb)
548 if bend is None:
548 if bend is None:
549 bend = self.startb
549 bend = self.startb
550 self.lenb = int(bend) - self.startb
550 self.lenb = int(bend) - self.startb
551 if self.startb:
551 if self.startb:
552 self.lenb += 1
552 self.lenb += 1
553 hunki = 1
553 hunki = 1
554 for x in xrange(self.lenb):
554 for x in xrange(self.lenb):
555 l = lr.readline()
555 l = lr.readline()
556 if l.startswith('\ '):
556 if l.startswith('\ '):
557 s = self.b[-1][:-1]
557 s = self.b[-1][:-1]
558 self.b[-1] = s
558 self.b[-1] = s
559 self.hunk[hunki-1] = s
559 self.hunk[hunki-1] = s
560 continue
560 continue
561 if not l:
561 if not l:
562 lr.push(l)
562 lr.push(l)
563 break
563 break
564 s = l[2:]
564 s = l[2:]
565 if l.startswith('+ ') or l.startswith('! '):
565 if l.startswith('+ ') or l.startswith('! '):
566 u = '+' + s
566 u = '+' + s
567 elif l.startswith(' '):
567 elif l.startswith(' '):
568 u = ' ' + s
568 u = ' ' + s
569 elif len(self.b) == 0:
569 elif len(self.b) == 0:
570 # this can happen when the hunk does not add any lines
570 # this can happen when the hunk does not add any lines
571 lr.push(l)
571 lr.push(l)
572 break
572 break
573 else:
573 else:
574 raise PatchError(_("bad hunk #%d old text line %d") %
574 raise PatchError(_("bad hunk #%d old text line %d") %
575 (self.number, x))
575 (self.number, x))
576 self.b.append(s)
576 self.b.append(s)
577 while True:
577 while True:
578 if hunki >= len(self.hunk):
578 if hunki >= len(self.hunk):
579 h = ""
579 h = ""
580 else:
580 else:
581 h = self.hunk[hunki]
581 h = self.hunk[hunki]
582 hunki += 1
582 hunki += 1
583 if h == u:
583 if h == u:
584 break
584 break
585 elif h.startswith('-'):
585 elif h.startswith('-'):
586 continue
586 continue
587 else:
587 else:
588 self.hunk.insert(hunki-1, u)
588 self.hunk.insert(hunki-1, u)
589 break
589 break
590
590
591 if not self.a:
591 if not self.a:
592 # this happens when lines were only added to the hunk
592 # this happens when lines were only added to the hunk
593 for x in self.hunk:
593 for x in self.hunk:
594 if x.startswith('-') or x.startswith(' '):
594 if x.startswith('-') or x.startswith(' '):
595 self.a.append(x)
595 self.a.append(x)
596 if not self.b:
596 if not self.b:
597 # this happens when lines were only deleted from the hunk
597 # this happens when lines were only deleted from the hunk
598 for x in self.hunk:
598 for x in self.hunk:
599 if x.startswith('+') or x.startswith(' '):
599 if x.startswith('+') or x.startswith(' '):
600 self.b.append(x[1:])
600 self.b.append(x[1:])
601 # @@ -start,len +start,len @@
601 # @@ -start,len +start,len @@
602 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
602 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
603 self.startb, self.lenb)
603 self.startb, self.lenb)
604 self.hunk[0] = self.desc
604 self.hunk[0] = self.desc
605
605
606 def reverse(self):
606 def reverse(self):
607 self.create, self.remove = self.remove, self.create
607 self.create, self.remove = self.remove, self.create
608 origlena = self.lena
608 origlena = self.lena
609 origstarta = self.starta
609 origstarta = self.starta
610 self.lena = self.lenb
610 self.lena = self.lenb
611 self.starta = self.startb
611 self.starta = self.startb
612 self.lenb = origlena
612 self.lenb = origlena
613 self.startb = origstarta
613 self.startb = origstarta
614 self.a = []
614 self.a = []
615 self.b = []
615 self.b = []
616 # self.hunk[0] is the @@ description
616 # self.hunk[0] is the @@ description
617 for x in xrange(1, len(self.hunk)):
617 for x in xrange(1, len(self.hunk)):
618 o = self.hunk[x]
618 o = self.hunk[x]
619 if o.startswith('-'):
619 if o.startswith('-'):
620 n = '+' + o[1:]
620 n = '+' + o[1:]
621 self.b.append(o[1:])
621 self.b.append(o[1:])
622 elif o.startswith('+'):
622 elif o.startswith('+'):
623 n = '-' + o[1:]
623 n = '-' + o[1:]
624 self.a.append(n)
624 self.a.append(n)
625 else:
625 else:
626 n = o
626 n = o
627 self.b.append(o[1:])
627 self.b.append(o[1:])
628 self.a.append(o)
628 self.a.append(o)
629 self.hunk[x] = o
629 self.hunk[x] = o
630
630
631 def fix_newline(self):
631 def fix_newline(self):
632 diffhelpers.fix_newline(self.hunk, self.a, self.b)
632 diffhelpers.fix_newline(self.hunk, self.a, self.b)
633
633
634 def complete(self):
634 def complete(self):
635 return len(self.a) == self.lena and len(self.b) == self.lenb
635 return len(self.a) == self.lena and len(self.b) == self.lenb
636
636
637 def createfile(self):
637 def createfile(self):
638 return self.starta == 0 and self.lena == 0 and self.create
638 return self.starta == 0 and self.lena == 0 and self.create
639
639
640 def rmfile(self):
640 def rmfile(self):
641 return self.startb == 0 and self.lenb == 0 and self.remove
641 return self.startb == 0 and self.lenb == 0 and self.remove
642
642
643 def fuzzit(self, l, fuzz, toponly):
643 def fuzzit(self, l, fuzz, toponly):
644 # this removes context lines from the top and bottom of list 'l'. It
644 # this removes context lines from the top and bottom of list 'l'. It
645 # checks the hunk to make sure only context lines are removed, and then
645 # checks the hunk to make sure only context lines are removed, and then
646 # returns a new shortened list of lines.
646 # returns a new shortened list of lines.
647 fuzz = min(fuzz, len(l)-1)
647 fuzz = min(fuzz, len(l)-1)
648 if fuzz:
648 if fuzz:
649 top = 0
649 top = 0
650 bot = 0
650 bot = 0
651 hlen = len(self.hunk)
651 hlen = len(self.hunk)
652 for x in xrange(hlen-1):
652 for x in xrange(hlen-1):
653 # the hunk starts with the @@ line, so use x+1
653 # the hunk starts with the @@ line, so use x+1
654 if self.hunk[x+1][0] == ' ':
654 if self.hunk[x+1][0] == ' ':
655 top += 1
655 top += 1
656 else:
656 else:
657 break
657 break
658 if not toponly:
658 if not toponly:
659 for x in xrange(hlen-1):
659 for x in xrange(hlen-1):
660 if self.hunk[hlen-bot-1][0] == ' ':
660 if self.hunk[hlen-bot-1][0] == ' ':
661 bot += 1
661 bot += 1
662 else:
662 else:
663 break
663 break
664
664
665 # top and bot now count context in the hunk
665 # top and bot now count context in the hunk
666 # adjust them if either one is short
666 # adjust them if either one is short
667 context = max(top, bot, 3)
667 context = max(top, bot, 3)
668 if bot < context:
668 if bot < context:
669 bot = max(0, fuzz - (context - bot))
669 bot = max(0, fuzz - (context - bot))
670 else:
670 else:
671 bot = min(fuzz, bot)
671 bot = min(fuzz, bot)
672 if top < context:
672 if top < context:
673 top = max(0, fuzz - (context - top))
673 top = max(0, fuzz - (context - top))
674 else:
674 else:
675 top = min(fuzz, top)
675 top = min(fuzz, top)
676
676
677 return l[top:len(l)-bot]
677 return l[top:len(l)-bot]
678 return l
678 return l
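# Editor's illustrative note (not part of the changeset): for a hunk with
# three context lines at both the top and the bottom, fuzzit(l, 1, False)
# computes context = 3 and returns l[1:-1], trimming one context line from
# each end; with toponly=True only the leading context line is dropped.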
679
679
680 def old(self, fuzz=0, toponly=False):
680 def old(self, fuzz=0, toponly=False):
681 return self.fuzzit(self.a, fuzz, toponly)
681 return self.fuzzit(self.a, fuzz, toponly)
682
682
683 def newctrl(self):
683 def newctrl(self):
684 res = []
684 res = []
685 for x in self.hunk:
685 for x in self.hunk:
686 c = x[0]
686 c = x[0]
687 if c == ' ' or c == '+':
687 if c == ' ' or c == '+':
688 res.append(x)
688 res.append(x)
689 return res
689 return res
690
690
691 def new(self, fuzz=0, toponly=False):
691 def new(self, fuzz=0, toponly=False):
692 return self.fuzzit(self.b, fuzz, toponly)
692 return self.fuzzit(self.b, fuzz, toponly)
693
693
694 class githunk(object):
694 class githunk(object):
695 """A git hunk"""
695 """A git hunk"""
696 def __init__(self, gitpatch):
696 def __init__(self, gitpatch):
697 self.gitpatch = gitpatch
697 self.gitpatch = gitpatch
698 self.text = None
698 self.text = None
699 self.hunk = []
699 self.hunk = []
700
700
701 def createfile(self):
701 def createfile(self):
702 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
702 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
703
703
704 def rmfile(self):
704 def rmfile(self):
705 return self.gitpatch.op == 'DELETE'
705 return self.gitpatch.op == 'DELETE'
706
706
707 def complete(self):
707 def complete(self):
708 return self.text is not None
708 return self.text is not None
709
709
710 def new(self):
710 def new(self):
711 return [self.text]
711 return [self.text]
712
712
713 class binhunk(githunk):
713 class binhunk(githunk):
714 'A binary patch file. Only understands literals so far.'
714 'A binary patch file. Only understands literals so far.'
715 def __init__(self, gitpatch):
715 def __init__(self, gitpatch):
716 super(binhunk, self).__init__(gitpatch)
716 super(binhunk, self).__init__(gitpatch)
717 self.hunk = ['GIT binary patch\n']
717 self.hunk = ['GIT binary patch\n']
718
718
719 def extract(self, lr):
719 def extract(self, lr):
720 line = lr.readline()
720 line = lr.readline()
721 self.hunk.append(line)
721 self.hunk.append(line)
722 while line and not line.startswith('literal '):
722 while line and not line.startswith('literal '):
723 line = lr.readline()
723 line = lr.readline()
724 self.hunk.append(line)
724 self.hunk.append(line)
725 if not line:
725 if not line:
726 raise PatchError(_('could not extract binary patch'))
726 raise PatchError(_('could not extract binary patch'))
727 size = int(line[8:].rstrip())
727 size = int(line[8:].rstrip())
728 dec = []
728 dec = []
729 line = lr.readline()
729 line = lr.readline()
730 self.hunk.append(line)
730 self.hunk.append(line)
731 while len(line) > 1:
731 while len(line) > 1:
732 l = line[0]
732 l = line[0]
733 if l <= 'Z' and l >= 'A':
733 if l <= 'Z' and l >= 'A':
734 l = ord(l) - ord('A') + 1
734 l = ord(l) - ord('A') + 1
735 else:
735 else:
736 l = ord(l) - ord('a') + 27
736 l = ord(l) - ord('a') + 27
737 dec.append(base85.b85decode(line[1:-1])[:l])
737 dec.append(base85.b85decode(line[1:-1])[:l])
738 line = lr.readline()
738 line = lr.readline()
739 self.hunk.append(line)
739 self.hunk.append(line)
740 text = zlib.decompress(''.join(dec))
740 text = zlib.decompress(''.join(dec))
741 if len(text) != size:
741 if len(text) != size:
742 raise PatchError(_('binary patch is %d bytes, not %d') %
742 raise PatchError(_('binary patch is %d bytes, not %d') %
743 (len(text), size))
743 (len(text), size))
744 self.text = text
744 self.text = text
745
745
746 class symlinkhunk(githunk):
746 class symlinkhunk(githunk):
747 """A git symlink hunk"""
747 """A git symlink hunk"""
748 def __init__(self, gitpatch, hunk):
748 def __init__(self, gitpatch, hunk):
749 super(symlinkhunk, self).__init__(gitpatch)
749 super(symlinkhunk, self).__init__(gitpatch)
750 self.hunk = hunk
750 self.hunk = hunk
751
751
752 def complete(self):
752 def complete(self):
753 return True
753 return True
754
754
755 def fix_newline(self):
755 def fix_newline(self):
756 return
756 return
757
757
758 def parsefilename(str):
758 def parsefilename(str):
759 # --- filename \t|space stuff
759 # --- filename \t|space stuff
760 s = str[4:].rstrip('\r\n')
760 s = str[4:].rstrip('\r\n')
761 i = s.find('\t')
761 i = s.find('\t')
762 if i < 0:
762 if i < 0:
763 i = s.find(' ')
763 i = s.find(' ')
764 if i < 0:
764 if i < 0:
765 return s
765 return s
766 return s[:i]
766 return s[:i]
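# Editor's illustrative note (not part of the changeset): parsefilename drops
# the leading "--- "/"+++ " marker and anything after the first tab or space,
# e.g. parsefilename('--- a/foo.c\t2009-01-01 00:00:00') -> 'a/foo.c'.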
767
767
768 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
768 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
769 def pathstrip(path, count=1):
769 def pathstrip(path, count=1):
770 pathlen = len(path)
770 pathlen = len(path)
771 i = 0
771 i = 0
772 if count == 0:
772 if count == 0:
773 return '', path.rstrip()
773 return '', path.rstrip()
774 while count > 0:
774 while count > 0:
775 i = path.find('/', i)
775 i = path.find('/', i)
776 if i == -1:
776 if i == -1:
777 raise PatchError(_("unable to strip away %d dirs from %s") %
777 raise PatchError(_("unable to strip away %d dirs from %s") %
778 (count, path))
778 (count, path))
779 i += 1
779 i += 1
780 # consume '//' in the path
780 # consume '//' in the path
781 while i < pathlen - 1 and path[i] == '/':
781 while i < pathlen - 1 and path[i] == '/':
782 i += 1
782 i += 1
783 count -= 1
783 count -= 1
784 return path[:i].lstrip(), path[i:].rstrip()
784 return path[:i].lstrip(), path[i:].rstrip()
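# Editor's illustrative note (not part of the changeset): with the default
# strip of 1, pathstrip('a/dir/file.c') returns ('a/', 'dir/file.c');
# with count=2 it returns ('a/dir/', 'file.c').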
785
785
786 nulla = afile_orig == "/dev/null"
786 nulla = afile_orig == "/dev/null"
787 nullb = bfile_orig == "/dev/null"
787 nullb = bfile_orig == "/dev/null"
788 abase, afile = pathstrip(afile_orig, strip)
788 abase, afile = pathstrip(afile_orig, strip)
789 gooda = not nulla and util.lexists(afile)
789 gooda = not nulla and util.lexists(afile)
790 bbase, bfile = pathstrip(bfile_orig, strip)
790 bbase, bfile = pathstrip(bfile_orig, strip)
791 if afile == bfile:
791 if afile == bfile:
792 goodb = gooda
792 goodb = gooda
793 else:
793 else:
794 goodb = not nullb and os.path.exists(bfile)
794 goodb = not nullb and os.path.exists(bfile)
795 createfunc = hunk.createfile
795 createfunc = hunk.createfile
796 if reverse:
796 if reverse:
797 createfunc = hunk.rmfile
797 createfunc = hunk.rmfile
798 missing = not goodb and not gooda and not createfunc()
798 missing = not goodb and not gooda and not createfunc()
799 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
799 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
800 # diff is between a file and its backup. In this case, the original
800 # diff is between a file and its backup. In this case, the original
801 # file should be patched (see original mpatch code).
801 # file should be patched (see original mpatch code).
802 isbackup = (abase == bbase and bfile.startswith(afile))
802 isbackup = (abase == bbase and bfile.startswith(afile))
803 fname = None
803 fname = None
804 if not missing:
804 if not missing:
805 if gooda and goodb:
805 if gooda and goodb:
806 fname = isbackup and afile or bfile
806 fname = isbackup and afile or bfile
807 elif gooda:
807 elif gooda:
808 fname = afile
808 fname = afile
809
809
810 if not fname:
810 if not fname:
811 if not nullb:
811 if not nullb:
812 fname = isbackup and afile or bfile
812 fname = isbackup and afile or bfile
813 elif not nulla:
813 elif not nulla:
814 fname = afile
814 fname = afile
815 else:
815 else:
816 raise PatchError(_("undefined source and destination files"))
816 raise PatchError(_("undefined source and destination files"))
817
817
818 return fname, missing
818 return fname, missing
819
819
820 def scangitpatch(lr, firstline):
820 def scangitpatch(lr, firstline):
821 """
821 """
822 Git patches can emit:
822 Git patches can emit:
823 - rename a to b
823 - rename a to b
824 - change b
824 - change b
825 - copy a to c
825 - copy a to c
826 - change c
826 - change c
827
827
828 We cannot apply this sequence as-is: the renamed 'a' could not be
828 We cannot apply this sequence as-is: the renamed 'a' could not be
829 found, for it would have been renamed already. And we cannot copy
829 found, for it would have been renamed already. And we cannot copy
830 from 'b' instead because 'b' would have been changed already. So
830 from 'b' instead because 'b' would have been changed already. So
831 we scan the git patch for copy and rename commands so we can
831 we scan the git patch for copy and rename commands so we can
832 perform the copies ahead of time.
832 perform the copies ahead of time.
833 """
833 """
834 pos = 0
834 pos = 0
835 try:
835 try:
836 pos = lr.fp.tell()
836 pos = lr.fp.tell()
837 fp = lr.fp
837 fp = lr.fp
838 except IOError:
838 except IOError:
839 fp = cStringIO.StringIO(lr.fp.read())
839 fp = cStringIO.StringIO(lr.fp.read())
840 gitlr = linereader(fp, lr.textmode)
840 gitlr = linereader(fp, lr.textmode)
841 gitlr.push(firstline)
841 gitlr.push(firstline)
842 (dopatch, gitpatches) = readgitpatch(gitlr)
842 (dopatch, gitpatches) = readgitpatch(gitlr)
843 fp.seek(pos)
843 fp.seek(pos)
844 return dopatch, gitpatches
844 return dopatch, gitpatches
845
845
846 def iterhunks(ui, fp, sourcefile=None, textmode=False):
846 def iterhunks(ui, fp, sourcefile=None, textmode=False):
847 """Read a patch and yield the following events:
847 """Read a patch and yield the following events:
848 - ("file", afile, bfile, firsthunk): select a new target file.
848 - ("file", afile, bfile, firsthunk): select a new target file.
849 - ("hunk", hunk): a new hunk is ready to be applied, follows a
849 - ("hunk", hunk): a new hunk is ready to be applied, follows a
850 "file" event.
850 "file" event.
851 - ("git", gitchanges): current diff is in git format, gitchanges
851 - ("git", gitchanges): current diff is in git format, gitchanges
852 maps filenames to gitpatch records. Unique event.
852 maps filenames to gitpatch records. Unique event.
853
853
854 If textmode is True, input line-endings are normalized to LF.
854 If textmode is True, input line-endings are normalized to LF.
855 """
855 """
856 changed = {}
856 changed = {}
857 current_hunk = None
857 current_hunk = None
858 afile = ""
858 afile = ""
859 bfile = ""
859 bfile = ""
860 state = None
860 state = None
861 hunknum = 0
861 hunknum = 0
862 emitfile = False
862 emitfile = False
863 git = False
863 git = False
864
864
865 # our states
865 # our states
866 BFILE = 1
866 BFILE = 1
867 context = None
867 context = None
868 lr = linereader(fp, textmode)
868 lr = linereader(fp, textmode)
869 dopatch = True
869 dopatch = True
870 # gitworkdone is True if a git operation (copy, rename, ...) was
870 # gitworkdone is True if a git operation (copy, rename, ...) was
871 # performed already for the current file. Useful when the file
871 # performed already for the current file. Useful when the file
872 # section may have no hunk.
872 # section may have no hunk.
873 gitworkdone = False
873 gitworkdone = False
874
874
875 while True:
875 while True:
876 newfile = False
876 newfile = False
877 x = lr.readline()
877 x = lr.readline()
878 if not x:
878 if not x:
879 break
879 break
880 if current_hunk:
880 if current_hunk:
881 if x.startswith('\ '):
881 if x.startswith('\ '):
882 current_hunk.fix_newline()
882 current_hunk.fix_newline()
883 yield 'hunk', current_hunk
883 yield 'hunk', current_hunk
884 current_hunk = None
884 current_hunk = None
885 gitworkdone = False
885 gitworkdone = False
886 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
886 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
887 ((context is not False) and x.startswith('***************')))):
887 ((context is not False) and x.startswith('***************')))):
888 try:
888 try:
889 if context is None and x.startswith('***************'):
889 if context is None and x.startswith('***************'):
890 context = True
890 context = True
891 gpatch = changed.get(bfile)
891 gpatch = changed.get(bfile)
892 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
892 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
893 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
893 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
894 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
894 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
895 if remove:
895 if remove:
896 gpatch = changed.get(afile[2:])
896 gpatch = changed.get(afile[2:])
897 if gpatch and gpatch.mode[0]:
897 if gpatch and gpatch.mode[0]:
898 current_hunk = symlinkhunk(gpatch, current_hunk)
898 current_hunk = symlinkhunk(gpatch, current_hunk)
899 except PatchError, err:
899 except PatchError, err:
900 ui.debug(err)
900 ui.debug(err)
901 current_hunk = None
901 current_hunk = None
902 continue
902 continue
903 hunknum += 1
903 hunknum += 1
904 if emitfile:
904 if emitfile:
905 emitfile = False
905 emitfile = False
906 yield 'file', (afile, bfile, current_hunk)
906 yield 'file', (afile, bfile, current_hunk)
907 elif state == BFILE and x.startswith('GIT binary patch'):
907 elif state == BFILE and x.startswith('GIT binary patch'):
908 current_hunk = binhunk(changed[bfile])
908 current_hunk = binhunk(changed[bfile])
909 hunknum += 1
909 hunknum += 1
910 if emitfile:
910 if emitfile:
911 emitfile = False
911 emitfile = False
912 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
912 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
913 current_hunk.extract(lr)
913 current_hunk.extract(lr)
914 elif x.startswith('diff --git'):
914 elif x.startswith('diff --git'):
915 # check for git diff, scanning the whole patch file if needed
915 # check for git diff, scanning the whole patch file if needed
916 m = gitre.match(x)
916 m = gitre.match(x)
917 if m:
917 if m:
918 afile, bfile = m.group(1, 2)
918 afile, bfile = m.group(1, 2)
919 if not git:
919 if not git:
920 git = True
920 git = True
921 dopatch, gitpatches = scangitpatch(lr, x)
921 dopatch, gitpatches = scangitpatch(lr, x)
922 yield 'git', gitpatches
922 yield 'git', gitpatches
923 for gp in gitpatches:
923 for gp in gitpatches:
924 changed[gp.path] = gp
924 changed[gp.path] = gp
925 # else error?
925 # else error?
926 # copy/rename + modify should modify target, not source
926 # copy/rename + modify should modify target, not source
927 gp = changed.get(bfile)
927 gp = changed.get(bfile)
928 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
928 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
929 afile = bfile
929 afile = bfile
930 gitworkdone = True
930 gitworkdone = True
931 newfile = True
931 newfile = True
932 elif x.startswith('---'):
932 elif x.startswith('---'):
933 # check for a unified diff
933 # check for a unified diff
934 l2 = lr.readline()
934 l2 = lr.readline()
935 if not l2.startswith('+++'):
935 if not l2.startswith('+++'):
936 lr.push(l2)
936 lr.push(l2)
937 continue
937 continue
938 newfile = True
938 newfile = True
939 context = False
939 context = False
940 afile = parsefilename(x)
940 afile = parsefilename(x)
941 bfile = parsefilename(l2)
941 bfile = parsefilename(l2)
942 elif x.startswith('***'):
942 elif x.startswith('***'):
943 # check for a context diff
943 # check for a context diff
944 l2 = lr.readline()
944 l2 = lr.readline()
945 if not l2.startswith('---'):
945 if not l2.startswith('---'):
946 lr.push(l2)
946 lr.push(l2)
947 continue
947 continue
948 l3 = lr.readline()
948 l3 = lr.readline()
949 lr.push(l3)
949 lr.push(l3)
950 if not l3.startswith("***************"):
950 if not l3.startswith("***************"):
951 lr.push(l2)
951 lr.push(l2)
952 continue
952 continue
953 newfile = True
953 newfile = True
954 context = True
954 context = True
955 afile = parsefilename(x)
955 afile = parsefilename(x)
956 bfile = parsefilename(l2)
956 bfile = parsefilename(l2)
957
957
958 if newfile:
958 if newfile:
959 emitfile = True
959 emitfile = True
960 state = BFILE
960 state = BFILE
961 hunknum = 0
961 hunknum = 0
962 if current_hunk:
962 if current_hunk:
963 if current_hunk.complete():
963 if current_hunk.complete():
964 yield 'hunk', current_hunk
964 yield 'hunk', current_hunk
965 else:
965 else:
966 raise PatchError(_("malformed patch %s %s") % (afile,
966 raise PatchError(_("malformed patch %s %s") % (afile,
967 current_hunk.desc))
967 current_hunk.desc))
968
968
969 if hunknum == 0 and dopatch and not gitworkdone:
969 if hunknum == 0 and dopatch and not gitworkdone:
970 raise NoHunks
970 raise NoHunks
971
971
972 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
972 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
973 eol=None):
973 eol=None):
974 """
974 """
975 Reads a patch from fp and tries to apply it.
975 Reads a patch from fp and tries to apply it.
976
976
977 The dict 'changed' is filled in with all of the filenames changed
977 The dict 'changed' is filled in with all of the filenames changed
978 by the patch. Returns 0 for a clean patch, -1 if any rejects were
978 by the patch. Returns 0 for a clean patch, -1 if any rejects were
979 found and 1 if there was any fuzz.
979 found and 1 if there was any fuzz.
980
980
981 If 'eol' is None, the patch content and patched file are read in
981 If 'eol' is None, the patch content and patched file are read in
982 binary mode. Otherwise, line endings are ignored when patching then
982 binary mode. Otherwise, line endings are ignored when patching then
983 normalized to 'eol' (usually '\n' or '\r\n').
983 normalized to 'eol' (usually '\n' or '\r\n').
984 """
984 """
985 rejects = 0
985 rejects = 0
986 err = 0
986 err = 0
987 current_file = None
987 current_file = None
988 gitpatches = None
988 gitpatches = None
989 opener = util.opener(os.getcwd())
989 opener = util.opener(os.getcwd())
990 textmode = eol is not None
990 textmode = eol is not None
991
991
992 def closefile():
992 def closefile():
993 if not current_file:
993 if not current_file:
994 return 0
994 return 0
995 current_file.close()
995 current_file.close()
996 return len(current_file.rej)
996 return len(current_file.rej)
997
997
998 for state, values in iterhunks(ui, fp, sourcefile, textmode):
998 for state, values in iterhunks(ui, fp, sourcefile, textmode):
999 if state == 'hunk':
999 if state == 'hunk':
1000 if not current_file:
1000 if not current_file:
1001 continue
1001 continue
1002 current_hunk = values
1002 current_hunk = values
1003 ret = current_file.apply(current_hunk, reverse)
1003 ret = current_file.apply(current_hunk, reverse)
1004 if ret >= 0:
1004 if ret >= 0:
1005 changed.setdefault(current_file.fname, None)
1005 changed.setdefault(current_file.fname, None)
1006 if ret > 0:
1006 if ret > 0:
1007 err = 1
1007 err = 1
1008 elif state == 'file':
1008 elif state == 'file':
1009 rejects += closefile()
1009 rejects += closefile()
1010 afile, bfile, first_hunk = values
1010 afile, bfile, first_hunk = values
1011 try:
1011 try:
1012 if sourcefile:
1012 if sourcefile:
1013 current_file = patchfile(ui, sourcefile, opener, eol=eol)
1013 current_file = patchfile(ui, sourcefile, opener, eol=eol)
1014 else:
1014 else:
1015 current_file, missing = selectfile(afile, bfile, first_hunk,
1015 current_file, missing = selectfile(afile, bfile, first_hunk,
1016 strip, reverse)
1016 strip, reverse)
1017 current_file = patchfile(ui, current_file, opener, missing, eol)
1017 current_file = patchfile(ui, current_file, opener, missing, eol)
1018 except PatchError, err:
1018 except PatchError, err:
1019 ui.warn(str(err) + '\n')
1019 ui.warn(str(err) + '\n')
1020 current_file, current_hunk = None, None
1020 current_file, current_hunk = None, None
1021 rejects += 1
1021 rejects += 1
1022 continue
1022 continue
1023 elif state == 'git':
1023 elif state == 'git':
1024 gitpatches = values
1024 gitpatches = values
1025 cwd = os.getcwd()
1025 cwd = os.getcwd()
1026 for gp in gitpatches:
1026 for gp in gitpatches:
1027 if gp.op in ('COPY', 'RENAME'):
1027 if gp.op in ('COPY', 'RENAME'):
1028 copyfile(gp.oldpath, gp.path, cwd)
1028 copyfile(gp.oldpath, gp.path, cwd)
1029 changed[gp.path] = gp
1029 changed[gp.path] = gp
1030 else:
1030 else:
1031 raise util.Abort(_('unsupported parser state: %s') % state)
1031 raise util.Abort(_('unsupported parser state: %s') % state)
1032
1032
1033 rejects += closefile()
1033 rejects += closefile()
1034
1034
1035 if rejects:
1035 if rejects:
1036 return -1
1036 return -1
1037 return err
1037 return err
1038
1038
1039 def diffopts(ui, opts={}, untrusted=False):
1039 def diffopts(ui, opts={}, untrusted=False):
1040 def get(key, name=None, getter=ui.configbool):
1040 def get(key, name=None, getter=ui.configbool):
1041 return (opts.get(key) or
1041 return (opts.get(key) or
1042 getter('diff', name or key, None, untrusted=untrusted))
1042 getter('diff', name or key, None, untrusted=untrusted))
1043 return mdiff.diffopts(
1043 return mdiff.diffopts(
1044 text=opts.get('text'),
1044 text=opts.get('text'),
1045 git=get('git'),
1045 git=get('git'),
1046 nodates=get('nodates'),
1046 nodates=get('nodates'),
1047 showfunc=get('show_function', 'showfunc'),
1047 showfunc=get('show_function', 'showfunc'),
1048 ignorews=get('ignore_all_space', 'ignorews'),
1048 ignorews=get('ignore_all_space', 'ignorews'),
1049 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1049 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1050 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1050 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1051 context=get('unified', getter=ui.config))
1051 context=get('unified', getter=ui.config))
1052
1052
1053 def updatedir(ui, repo, patches, similarity=0):
1053 def updatedir(ui, repo, patches, similarity=0):
1054 '''Update dirstate after patch application according to metadata'''
1054 '''Update dirstate after patch application according to metadata'''
1055 if not patches:
1055 if not patches:
1056 return
1056 return
1057 copies = []
1057 copies = []
1058 removes = set()
1058 removes = set()
1059 cfiles = patches.keys()
1059 cfiles = patches.keys()
1060 cwd = repo.getcwd()
1060 cwd = repo.getcwd()
1061 if cwd:
1061 if cwd:
1062 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1062 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1063 for f in patches:
1063 for f in patches:
1064 gp = patches[f]
1064 gp = patches[f]
1065 if not gp:
1065 if not gp:
1066 continue
1066 continue
1067 if gp.op == 'RENAME':
1067 if gp.op == 'RENAME':
1068 copies.append((gp.oldpath, gp.path))
1068 copies.append((gp.oldpath, gp.path))
1069 removes.add(gp.oldpath)
1069 removes.add(gp.oldpath)
1070 elif gp.op == 'COPY':
1070 elif gp.op == 'COPY':
1071 copies.append((gp.oldpath, gp.path))
1071 copies.append((gp.oldpath, gp.path))
1072 elif gp.op == 'DELETE':
1072 elif gp.op == 'DELETE':
1073 removes.add(gp.path)
1073 removes.add(gp.path)
1074 for src, dst in copies:
1074 for src, dst in copies:
1075 repo.copy(src, dst)
1075 repo.copy(src, dst)
1076 if (not similarity) and removes:
1076 if (not similarity) and removes:
1077 repo.remove(sorted(removes), True)
1077 repo.remove(sorted(removes), True)
1078 for f in patches:
1078 for f in patches:
1079 gp = patches[f]
1079 gp = patches[f]
1080 if gp and gp.mode:
1080 if gp and gp.mode:
1081 islink, isexec = gp.mode
1081 islink, isexec = gp.mode
1082 dst = repo.wjoin(gp.path)
1082 dst = repo.wjoin(gp.path)
1083 # patch won't create empty files
1083 # patch won't create empty files
1084 if gp.op == 'ADD' and not os.path.exists(dst):
1084 if gp.op == 'ADD' and not os.path.exists(dst):
1085 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1085 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1086 repo.wwrite(gp.path, '', flags)
1086 repo.wwrite(gp.path, '', flags)
1087 elif gp.op != 'DELETE':
1087 elif gp.op != 'DELETE':
1088 util.set_flags(dst, islink, isexec)
1088 util.set_flags(dst, islink, isexec)
1089 cmdutil.addremove(repo, cfiles, similarity=similarity)
1089 cmdutil.addremove(repo, cfiles, similarity=similarity)
1090 files = patches.keys()
1090 files = patches.keys()
1091 files.extend([r for r in removes if r not in files])
1091 files.extend([r for r in removes if r not in files])
1092 return sorted(files)
1092 return sorted(files)
1093
1093
1094 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1094 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1095 """use <patcher> to apply <patchname> to the working directory.
1095 """use <patcher> to apply <patchname> to the working directory.
1096 returns whether patch was applied with fuzz factor."""
1096 returns whether patch was applied with fuzz factor."""
1097
1097
1098 fuzz = False
1098 fuzz = False
1099 if cwd:
1099 if cwd:
1100 args.append('-d %s' % util.shellquote(cwd))
1100 args.append('-d %s' % util.shellquote(cwd))
1101 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1101 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1102 util.shellquote(patchname)))
1102 util.shellquote(patchname)))
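# Editor's illustrative note (not part of the changeset): the command
# assembled above is roughly "<patcher> -d '<cwd>' -p<strip> < '<patchname>'",
# e.g. "patch -d '/repo' -p1 < '/tmp/fix.diff'"; its output is scanned below
# for the "patching file", "with fuzz", "saving rejects" and "FAILED" markers.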
1103
1103
1104 for line in fp:
1104 for line in fp:
1105 line = line.rstrip()
1105 line = line.rstrip()
1106 ui.note(line + '\n')
1106 ui.note(line + '\n')
1107 if line.startswith('patching file '):
1107 if line.startswith('patching file '):
1108 pf = util.parse_patch_output(line)
1108 pf = util.parse_patch_output(line)
1109 printed_file = False
1109 printed_file = False
1110 files.setdefault(pf, None)
1110 files.setdefault(pf, None)
1111 elif line.find('with fuzz') >= 0:
1111 elif line.find('with fuzz') >= 0:
1112 fuzz = True
1112 fuzz = True
1113 if not printed_file:
1113 if not printed_file:
1114 ui.warn(pf + '\n')
1114 ui.warn(pf + '\n')
1115 printed_file = True
1115 printed_file = True
1116 ui.warn(line + '\n')
1116 ui.warn(line + '\n')
1117 elif line.find('saving rejects to file') >= 0:
1117 elif line.find('saving rejects to file') >= 0:
1118 ui.warn(line + '\n')
1118 ui.warn(line + '\n')
1119 elif line.find('FAILED') >= 0:
1119 elif line.find('FAILED') >= 0:
1120 if not printed_file:
1120 if not printed_file:
1121 ui.warn(pf + '\n')
1121 ui.warn(pf + '\n')
1122 printed_file = True
1122 printed_file = True
1123 ui.warn(line + '\n')
1123 ui.warn(line + '\n')
1124 code = fp.close()
1124 code = fp.close()
1125 if code:
1125 if code:
1126 raise PatchError(_("patch command failed: %s") %
1126 raise PatchError(_("patch command failed: %s") %
1127 util.explain_exit(code)[0])
1127 util.explain_exit(code)[0])
1128 return fuzz
1128 return fuzz
1129
1129
1130 def internalpatch(patchobj, ui, strip, cwd, files={}, eolmode='strict'):
1130 def internalpatch(patchobj, ui, strip, cwd, files={}, eolmode='strict'):
1131 """use builtin patch to apply <patchobj> to the working directory.
1131 """use builtin patch to apply <patchobj> to the working directory.
1132 returns whether patch was applied with fuzz factor."""
1132 returns whether patch was applied with fuzz factor."""
1133
1133
1134 if eolmode is None:
1134 if eolmode is None:
1135 eolmode = ui.config('patch', 'eol', 'strict')
1135 eolmode = ui.config('patch', 'eol', 'strict')
1136 try:
1136 try:
1137 eol = {'strict': None, 'crlf': '\r\n', 'lf': '\n'}[eolmode.lower()]
1137 eol = {'strict': None, 'crlf': '\r\n', 'lf': '\n'}[eolmode.lower()]
1138 except KeyError:
1138 except KeyError:
1139 raise util.Abort(_('Unsupported line endings type: %s') % eolmode)
1139 raise util.Abort(_('Unsupported line endings type: %s') % eolmode)
1140
1140
1141 try:
1141 try:
1142 fp = file(patchobj, 'rb')
1142 fp = open(patchobj, 'rb')
1143 except TypeError:
1143 except TypeError:
1144 fp = patchobj
1144 fp = patchobj
1145 if cwd:
1145 if cwd:
1146 curdir = os.getcwd()
1146 curdir = os.getcwd()
1147 os.chdir(cwd)
1147 os.chdir(cwd)
1148 try:
1148 try:
1149 ret = applydiff(ui, fp, files, strip=strip, eol=eol)
1149 ret = applydiff(ui, fp, files, strip=strip, eol=eol)
1150 finally:
1150 finally:
1151 if cwd:
1151 if cwd:
1152 os.chdir(curdir)
1152 os.chdir(curdir)
1153 if ret < 0:
1153 if ret < 0:
1154 raise PatchError
1154 raise PatchError
1155 return ret > 0
1155 return ret > 0
1156
1156
1157 def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
1157 def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
1158 """Apply <patchname> to the working directory.
1158 """Apply <patchname> to the working directory.
1159
1159
1160 'eolmode' specifies how end of lines should be handled. It can be:
1160 'eolmode' specifies how end of lines should be handled. It can be:
1161 - 'strict': inputs are read in binary mode, EOLs are preserved
1161 - 'strict': inputs are read in binary mode, EOLs are preserved
1162 - 'crlf': EOLs are ignored when patching and reset to CRLF
1162 - 'crlf': EOLs are ignored when patching and reset to CRLF
1163 - 'lf': EOLs are ignored when patching and reset to LF
1163 - 'lf': EOLs are ignored when patching and reset to LF
1164 - None: get it from user settings, default to 'strict'
1164 - None: get it from user settings, default to 'strict'
1165 'eolmode' is ignored when using an external patcher program.
1165 'eolmode' is ignored when using an external patcher program.
1166
1166
1167 Returns whether patch was applied with fuzz factor.
1167 Returns whether patch was applied with fuzz factor.
1168 """
1168 """
1169 patcher = ui.config('ui', 'patch')
1169 patcher = ui.config('ui', 'patch')
1170 args = []
1170 args = []
1171 try:
1171 try:
1172 if patcher:
1172 if patcher:
1173 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1173 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1174 files)
1174 files)
1175 else:
1175 else:
1176 try:
1176 try:
1177 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1177 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1178 except NoHunks:
1178 except NoHunks:
1179 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1179 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1180 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1180 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1181 patcher)
1181 patcher)
1182 if util.needbinarypatch():
1182 if util.needbinarypatch():
1183 args.append('--binary')
1183 args.append('--binary')
1184 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1184 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1185 files)
1185 files)
1186 except PatchError, err:
1186 except PatchError, err:
1187 s = str(err)
1187 s = str(err)
1188 if s:
1188 if s:
1189 raise util.Abort(s)
1189 raise util.Abort(s)
1190 else:
1190 else:
1191 raise util.Abort(_('patch failed to apply'))
1191 raise util.Abort(_('patch failed to apply'))
1192
1192
1193 def b85diff(to, tn):
1193 def b85diff(to, tn):
1194 '''print base85-encoded binary diff'''
1194 '''print base85-encoded binary diff'''
1195 def gitindex(text):
1195 def gitindex(text):
1196 if not text:
1196 if not text:
1197 return '0' * 40
1197 return '0' * 40
1198 l = len(text)
1198 l = len(text)
1199 s = util.sha1('blob %d\0' % l)
1199 s = util.sha1('blob %d\0' % l)
1200 s.update(text)
1200 s.update(text)
1201 return s.hexdigest()
1201 return s.hexdigest()
1202
1202
1203 def fmtline(line):
1203 def fmtline(line):
1204 l = len(line)
1204 l = len(line)
1205 if l <= 26:
1205 if l <= 26:
1206 l = chr(ord('A') + l - 1)
1206 l = chr(ord('A') + l - 1)
1207 else:
1207 else:
1208 l = chr(l - 26 + ord('a') - 1)
1208 l = chr(l - 26 + ord('a') - 1)
1209 return '%c%s\n' % (l, base85.b85encode(line, True))
1209 return '%c%s\n' % (l, base85.b85encode(line, True))
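# Editor's illustrative note (not part of the changeset): the length prefix
# follows the git binary-patch convention also assumed by binhunk.extract
# above: lengths 1..26 map to 'A'..'Z' and 27..52 to 'a'..'z', so a full
# 52-byte chunk (the csize below) is prefixed with chr(52 - 26 + ord('a') - 1),
# i.e. 'z'.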
1210
1210
1211 def chunk(text, csize=52):
1211 def chunk(text, csize=52):
1212 l = len(text)
1212 l = len(text)
1213 i = 0
1213 i = 0
1214 while i < l:
1214 while i < l:
1215 yield text[i:i+csize]
1215 yield text[i:i+csize]
1216 i += csize
1216 i += csize
1217
1217
1218 tohash = gitindex(to)
1218 tohash = gitindex(to)
1219 tnhash = gitindex(tn)
1219 tnhash = gitindex(tn)
1220 if tohash == tnhash:
1220 if tohash == tnhash:
1221 return ""
1221 return ""
1222
1222
1223 # TODO: deltas
1223 # TODO: deltas
1224 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1224 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1225 (tohash, tnhash, len(tn))]
1225 (tohash, tnhash, len(tn))]
1226 for l in chunk(zlib.compress(tn)):
1226 for l in chunk(zlib.compress(tn)):
1227 ret.append(fmtline(l))
1227 ret.append(fmtline(l))
1228 ret.append('\n')
1228 ret.append('\n')
1229 return ''.join(ret)
1229 return ''.join(ret)
1230
1230
1231 def _addmodehdr(header, omode, nmode):
1231 def _addmodehdr(header, omode, nmode):
1232 if omode != nmode:
1232 if omode != nmode:
1233 header.append('old mode %s\n' % omode)
1233 header.append('old mode %s\n' % omode)
1234 header.append('new mode %s\n' % nmode)
1234 header.append('new mode %s\n' % nmode)
1235
1235
1236 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1236 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1237 '''yields diff of changes to files between two nodes, or node and
1237 '''yields diff of changes to files between two nodes, or node and
1238 working directory.
1238 working directory.
1239
1239
1240 if node1 is None, use first dirstate parent instead.
1240 if node1 is None, use first dirstate parent instead.
1241 if node2 is None, compare node1 with working directory.'''
1241 if node2 is None, compare node1 with working directory.'''
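# Editor's illustrative sketch (not part of the changeset): the generator is
# consumed chunk by chunk, as export() does further down:
#
#     for chunk in diff(repo, prev, node, opts=opts):
#         fp.write(chunk)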
1242
1242
1243 if opts is None:
1243 if opts is None:
1244 opts = mdiff.defaultopts
1244 opts = mdiff.defaultopts
1245
1245
1246 if not node1:
1246 if not node1:
1247 node1 = repo.dirstate.parents()[0]
1247 node1 = repo.dirstate.parents()[0]
1248
1248
1249 flcache = {}
1249 flcache = {}
1250 def getfilectx(f, ctx):
1250 def getfilectx(f, ctx):
1251 flctx = ctx.filectx(f, filelog=flcache.get(f))
1251 flctx = ctx.filectx(f, filelog=flcache.get(f))
1252 if f not in flcache:
1252 if f not in flcache:
1253 flcache[f] = flctx._filelog
1253 flcache[f] = flctx._filelog
1254 return flctx
1254 return flctx
1255
1255
1256 ctx1 = repo[node1]
1256 ctx1 = repo[node1]
1257 ctx2 = repo[node2]
1257 ctx2 = repo[node2]
1258
1258
1259 if not changes:
1259 if not changes:
1260 changes = repo.status(ctx1, ctx2, match=match)
1260 changes = repo.status(ctx1, ctx2, match=match)
1261 modified, added, removed = changes[:3]
1261 modified, added, removed = changes[:3]
1262
1262
1263 if not modified and not added and not removed:
1263 if not modified and not added and not removed:
1264 return
1264 return
1265
1265
1266 date1 = util.datestr(ctx1.date())
1266 date1 = util.datestr(ctx1.date())
1267 man1 = ctx1.manifest()
1267 man1 = ctx1.manifest()
1268
1268
1269 if repo.ui.quiet:
1269 if repo.ui.quiet:
1270 r = None
1270 r = None
1271 else:
1271 else:
1272 hexfunc = repo.ui.debugflag and hex or short
1272 hexfunc = repo.ui.debugflag and hex or short
1273 r = [hexfunc(node) for node in [node1, node2] if node]
1273 r = [hexfunc(node) for node in [node1, node2] if node]
1274
1274
1275 if opts.git:
1275 if opts.git:
1276 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1276 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1277 copy = copy.copy()
1277 copy = copy.copy()
1278 for k, v in copy.items():
1278 for k, v in copy.items():
1279 copy[v] = k
1279 copy[v] = k
1280
1280
1281 gone = set()
1281 gone = set()
1282 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1282 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1283
1283
1284 for f in sorted(modified + added + removed):
1284 for f in sorted(modified + added + removed):
1285 to = None
1285 to = None
1286 tn = None
1286 tn = None
1287 dodiff = True
1287 dodiff = True
1288 header = []
1288 header = []
1289 if f in man1:
1289 if f in man1:
1290 to = getfilectx(f, ctx1).data()
1290 to = getfilectx(f, ctx1).data()
1291 if f not in removed:
1291 if f not in removed:
1292 tn = getfilectx(f, ctx2).data()
1292 tn = getfilectx(f, ctx2).data()
1293 a, b = f, f
1293 a, b = f, f
1294 if opts.git:
1294 if opts.git:
1295 if f in added:
1295 if f in added:
1296 mode = gitmode[ctx2.flags(f)]
1296 mode = gitmode[ctx2.flags(f)]
1297 if f in copy:
1297 if f in copy:
1298 a = copy[f]
1298 a = copy[f]
1299 omode = gitmode[man1.flags(a)]
1299 omode = gitmode[man1.flags(a)]
1300 _addmodehdr(header, omode, mode)
1300 _addmodehdr(header, omode, mode)
1301 if a in removed and a not in gone:
1301 if a in removed and a not in gone:
1302 op = 'rename'
1302 op = 'rename'
1303 gone.add(a)
1303 gone.add(a)
1304 else:
1304 else:
1305 op = 'copy'
1305 op = 'copy'
1306 header.append('%s from %s\n' % (op, a))
1306 header.append('%s from %s\n' % (op, a))
1307 header.append('%s to %s\n' % (op, f))
1307 header.append('%s to %s\n' % (op, f))
1308 to = getfilectx(a, ctx1).data()
1308 to = getfilectx(a, ctx1).data()
1309 else:
1309 else:
1310 header.append('new file mode %s\n' % mode)
1310 header.append('new file mode %s\n' % mode)
1311 if util.binary(tn):
1311 if util.binary(tn):
1312 dodiff = 'binary'
1312 dodiff = 'binary'
1313 elif f in removed:
1313 elif f in removed:
1314 # have we already reported a copy above?
1314 # have we already reported a copy above?
1315 if f in copy and copy[f] in added and copy[copy[f]] == f:
1315 if f in copy and copy[f] in added and copy[copy[f]] == f:
1316 dodiff = False
1316 dodiff = False
1317 else:
1317 else:
1318 header.append('deleted file mode %s\n' %
1318 header.append('deleted file mode %s\n' %
1319 gitmode[man1.flags(f)])
1319 gitmode[man1.flags(f)])
1320 else:
1320 else:
1321 omode = gitmode[man1.flags(f)]
1321 omode = gitmode[man1.flags(f)]
1322 nmode = gitmode[ctx2.flags(f)]
1322 nmode = gitmode[ctx2.flags(f)]
1323 _addmodehdr(header, omode, nmode)
1323 _addmodehdr(header, omode, nmode)
1324 if util.binary(to) or util.binary(tn):
1324 if util.binary(to) or util.binary(tn):
1325 dodiff = 'binary'
1325 dodiff = 'binary'
1326 r = None
1326 r = None
1327 header.insert(0, mdiff.diffline(r, a, b, opts))
1327 header.insert(0, mdiff.diffline(r, a, b, opts))
1328 if dodiff:
1328 if dodiff:
1329 if dodiff == 'binary':
1329 if dodiff == 'binary':
1330 text = b85diff(to, tn)
1330 text = b85diff(to, tn)
1331 else:
1331 else:
1332 text = mdiff.unidiff(to, date1,
1332 text = mdiff.unidiff(to, date1,
1333 # ctx2 date may be dynamic
1333 # ctx2 date may be dynamic
1334 tn, util.datestr(ctx2.date()),
1334 tn, util.datestr(ctx2.date()),
1335 a, b, r, opts=opts)
1335 a, b, r, opts=opts)
1336 if header and (text or len(header) > 1):
1336 if header and (text or len(header) > 1):
1337 yield ''.join(header)
1337 yield ''.join(header)
1338 if text:
1338 if text:
1339 yield text
1339 yield text
1340
1340
1341 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1341 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1342 opts=None):
1342 opts=None):
1343 '''export changesets as hg patches.'''
1343 '''export changesets as hg patches.'''
1344
1344
1345 total = len(revs)
1345 total = len(revs)
1346 revwidth = max([len(str(rev)) for rev in revs])
1346 revwidth = max([len(str(rev)) for rev in revs])
1347
1347
1348 def single(rev, seqno, fp):
1348 def single(rev, seqno, fp):
1349 ctx = repo[rev]
1349 ctx = repo[rev]
1350 node = ctx.node()
1350 node = ctx.node()
1351 parents = [p.node() for p in ctx.parents() if p]
1351 parents = [p.node() for p in ctx.parents() if p]
1352 branch = ctx.branch()
1352 branch = ctx.branch()
1353 if switch_parent:
1353 if switch_parent:
1354 parents.reverse()
1354 parents.reverse()
1355 prev = (parents and parents[0]) or nullid
1355 prev = (parents and parents[0]) or nullid
1356
1356
1357 if not fp:
1357 if not fp:
1358 fp = cmdutil.make_file(repo, template, node, total=total,
1358 fp = cmdutil.make_file(repo, template, node, total=total,
1359 seqno=seqno, revwidth=revwidth,
1359 seqno=seqno, revwidth=revwidth,
1360 mode='ab')
1360 mode='ab')
1361 if fp != sys.stdout and hasattr(fp, 'name'):
1361 if fp != sys.stdout and hasattr(fp, 'name'):
1362 repo.ui.note("%s\n" % fp.name)
1362 repo.ui.note("%s\n" % fp.name)
1363
1363
1364 fp.write("# HG changeset patch\n")
1364 fp.write("# HG changeset patch\n")
1365 fp.write("# User %s\n" % ctx.user())
1365 fp.write("# User %s\n" % ctx.user())
1366 fp.write("# Date %d %d\n" % ctx.date())
1366 fp.write("# Date %d %d\n" % ctx.date())
1367 if branch and (branch != 'default'):
1367 if branch and (branch != 'default'):
1368 fp.write("# Branch %s\n" % branch)
1368 fp.write("# Branch %s\n" % branch)
1369 fp.write("# Node ID %s\n" % hex(node))
1369 fp.write("# Node ID %s\n" % hex(node))
1370 fp.write("# Parent %s\n" % hex(prev))
1370 fp.write("# Parent %s\n" % hex(prev))
1371 if len(parents) > 1:
1371 if len(parents) > 1:
1372 fp.write("# Parent %s\n" % hex(parents[1]))
1372 fp.write("# Parent %s\n" % hex(parents[1]))
1373 fp.write(ctx.description().rstrip())
1373 fp.write(ctx.description().rstrip())
1374 fp.write("\n\n")
1374 fp.write("\n\n")
1375
1375
1376 for chunk in diff(repo, prev, node, opts=opts):
1376 for chunk in diff(repo, prev, node, opts=opts):
1377 fp.write(chunk)
1377 fp.write(chunk)
1378
1378
1379 for seqno, rev in enumerate(revs):
1379 for seqno, rev in enumerate(revs):
1380 single(rev, seqno+1, fp)
1380 single(rev, seqno+1, fp)
1381
1381
1382 def diffstatdata(lines):
1382 def diffstatdata(lines):
1383 filename, adds, removes = None, 0, 0
1383 filename, adds, removes = None, 0, 0
1384 for line in lines:
1384 for line in lines:
1385 if line.startswith('diff'):
1385 if line.startswith('diff'):
1386 if filename:
1386 if filename:
1387 yield (filename, adds, removes)
1387 yield (filename, adds, removes)
1388 # set numbers to 0 anyway when starting new file
1388 # set numbers to 0 anyway when starting new file
1389 adds, removes = 0, 0
1389 adds, removes = 0, 0
1390 if line.startswith('diff --git'):
1390 if line.startswith('diff --git'):
1391 filename = gitre.search(line).group(1)
1391 filename = gitre.search(line).group(1)
1392 else:
1392 else:
1393 # format: "diff -r ... -r ... filename"
1393 # format: "diff -r ... -r ... filename"
1394 filename = line.split(None, 5)[-1]
1394 filename = line.split(None, 5)[-1]
1395 elif line.startswith('+') and not line.startswith('+++'):
1395 elif line.startswith('+') and not line.startswith('+++'):
1396 adds += 1
1396 adds += 1
1397 elif line.startswith('-') and not line.startswith('---'):
1397 elif line.startswith('-') and not line.startswith('---'):
1398 removes += 1
1398 removes += 1
1399 if filename:
1399 if filename:
1400 yield (filename, adds, removes)
1400 yield (filename, adds, removes)
1401
1401
1402 def diffstat(lines, width=80):
1402 def diffstat(lines, width=80):
1403 output = []
1403 output = []
1404 stats = list(diffstatdata(lines))
1404 stats = list(diffstatdata(lines))
1405
1405
1406 maxtotal, maxname = 0, 0
1406 maxtotal, maxname = 0, 0
1407 totaladds, totalremoves = 0, 0
1407 totaladds, totalremoves = 0, 0
1408 for filename, adds, removes in stats:
1408 for filename, adds, removes in stats:
1409 totaladds += adds
1409 totaladds += adds
1410 totalremoves += removes
1410 totalremoves += removes
1411 maxname = max(maxname, len(filename))
1411 maxname = max(maxname, len(filename))
1412 maxtotal = max(maxtotal, adds+removes)
1412 maxtotal = max(maxtotal, adds+removes)
1413
1413
1414 countwidth = len(str(maxtotal))
1414 countwidth = len(str(maxtotal))
1415 graphwidth = width - countwidth - maxname
1415 graphwidth = width - countwidth - maxname
1416 if graphwidth < 10:
1416 if graphwidth < 10:
1417 graphwidth = 10
1417 graphwidth = 10
1418
1418
1419 factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
1419 factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
1420
1420
1421 for filename, adds, removes in stats:
1421 for filename, adds, removes in stats:
1422 # If diffstat runs out of room it doesn't print anything, which
1422 # If diffstat runs out of room it doesn't print anything, which
1423 # isn't very useful, so always print at least one + or - if there
1423 # isn't very useful, so always print at least one + or - if there
1424 # were at least some changes
1424 # were at least some changes
1425 pluses = '+' * max(adds // factor, int(bool(adds)))
1425 pluses = '+' * max(adds // factor, int(bool(adds)))
1426 minuses = '-' * max(removes // factor, int(bool(removes)))
1426 minuses = '-' * max(removes // factor, int(bool(removes)))
1427 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1427 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1428 adds+removes, pluses, minuses))
1428 adds+removes, pluses, minuses))
1429
1429
1430 if stats:
1430 if stats:
1431 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
1431 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
1432 % (len(stats), totaladds, totalremoves))
1432 % (len(stats), totaladds, totalremoves))
1433
1433
1434 return ''.join(output)
1434 return ''.join(output)
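
A minimal standalone sketch of the histogram scaling that diffstat() performs above: each '+' or '-' represents a fixed number of changed lines (the factor computed from the available width), and any file with changes always gets at least one symbol. The file names and counts below are invented for illustration.

# Sketch of diffstat()'s scaling logic; sample data is made up.
import math

def sketch_diffstat(stats, width=80):
    maxtotal = max(adds + removes for _, adds, removes in stats)
    maxname = max(len(name) for name, _, _ in stats)
    countwidth = len(str(maxtotal))
    graphwidth = max(width - countwidth - maxname, 10)
    # one histogram symbol represents 'factor' changed lines
    factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
    for name, adds, removes in stats:
        # always print at least one symbol when there were any changes
        pluses = '+' * max(adds // factor, int(bool(adds)))
        minuses = '-' * max(removes // factor, int(bool(removes)))
        print(' %-*s | %*d %s%s' % (maxname, name, countwidth,
                                    adds + removes, pluses, minuses))

sketch_diffstat([('mercurial/posix.py', 4, 4), ('setup.py', 1, 1)])
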
@@ -1,214 +1,214 b''
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil
9 import osutil
10 import os, sys, errno, stat, getpass, pwd, grp
10 import os, sys, errno, stat, getpass, pwd, grp
11
11
12 posixfile = file
12 posixfile = open
13 nulldev = '/dev/null'
13 nulldev = '/dev/null'
14 normpath = os.path.normpath
14 normpath = os.path.normpath
15 samestat = os.path.samestat
15 samestat = os.path.samestat
16 expandglobs = False
16 expandglobs = False
17
17
18 umask = os.umask(0)
18 umask = os.umask(0)
19 os.umask(umask)
19 os.umask(umask)
20
20
21 def openhardlinks():
21 def openhardlinks():
22 '''return true if it is safe to hold open file handles to hardlinks'''
22 '''return true if it is safe to hold open file handles to hardlinks'''
23 return True
23 return True
24
24
25 def rcfiles(path):
25 def rcfiles(path):
26 rcs = [os.path.join(path, 'hgrc')]
26 rcs = [os.path.join(path, 'hgrc')]
27 rcdir = os.path.join(path, 'hgrc.d')
27 rcdir = os.path.join(path, 'hgrc.d')
28 try:
28 try:
29 rcs.extend([os.path.join(rcdir, f)
29 rcs.extend([os.path.join(rcdir, f)
30 for f, kind in osutil.listdir(rcdir)
30 for f, kind in osutil.listdir(rcdir)
31 if f.endswith(".rc")])
31 if f.endswith(".rc")])
32 except OSError:
32 except OSError:
33 pass
33 pass
34 return rcs
34 return rcs
35
35
36 def system_rcpath():
36 def system_rcpath():
37 path = []
37 path = []
38 # old mod_python does not set sys.argv
38 # old mod_python does not set sys.argv
39 if len(getattr(sys, 'argv', [])) > 0:
39 if len(getattr(sys, 'argv', [])) > 0:
40 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
40 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
41 '/../etc/mercurial'))
41 '/../etc/mercurial'))
42 path.extend(rcfiles('/etc/mercurial'))
42 path.extend(rcfiles('/etc/mercurial'))
43 return path
43 return path
44
44
45 def user_rcpath():
45 def user_rcpath():
46 return [os.path.expanduser('~/.hgrc')]
46 return [os.path.expanduser('~/.hgrc')]
47
47
48 def parse_patch_output(output_line):
48 def parse_patch_output(output_line):
49 """parses the output produced by patch and returns the filename"""
49 """parses the output produced by patch and returns the filename"""
50 pf = output_line[14:]
50 pf = output_line[14:]
51 if os.sys.platform == 'OpenVMS':
51 if os.sys.platform == 'OpenVMS':
52 if pf[0] == '`':
52 if pf[0] == '`':
53 pf = pf[1:-1] # Remove the quotes
53 pf = pf[1:-1] # Remove the quotes
54 else:
54 else:
55 if pf.startswith("'") and pf.endswith("'") and " " in pf:
55 if pf.startswith("'") and pf.endswith("'") and " " in pf:
56 pf = pf[1:-1] # Remove the quotes
56 pf = pf[1:-1] # Remove the quotes
57 return pf
57 return pf
58
58
59 def sshargs(sshcmd, host, user, port):
59 def sshargs(sshcmd, host, user, port):
60 '''Build argument list for ssh'''
60 '''Build argument list for ssh'''
61 args = user and ("%s@%s" % (user, host)) or host
61 args = user and ("%s@%s" % (user, host)) or host
62 return port and ("%s -p %s" % (args, port)) or args
62 return port and ("%s -p %s" % (args, port)) or args
63
63
64 def is_exec(f):
64 def is_exec(f):
65 """check whether a file is executable"""
65 """check whether a file is executable"""
66 return (os.lstat(f).st_mode & 0100 != 0)
66 return (os.lstat(f).st_mode & 0100 != 0)
67
67
68 def set_flags(f, l, x):
68 def set_flags(f, l, x):
69 s = os.lstat(f).st_mode
69 s = os.lstat(f).st_mode
70 if l:
70 if l:
71 if not stat.S_ISLNK(s):
71 if not stat.S_ISLNK(s):
72 # switch file to link
72 # switch file to link
73 data = file(f).read()
73 data = open(f).read()
74 os.unlink(f)
74 os.unlink(f)
75 try:
75 try:
76 os.symlink(data, f)
76 os.symlink(data, f)
77 except:
77 except:
78 # failed to make a link, rewrite file
78 # failed to make a link, rewrite file
79 file(f, "w").write(data)
79 open(f, "w").write(data)
80 # no chmod needed at this point
80 # no chmod needed at this point
81 return
81 return
82 if stat.S_ISLNK(s):
82 if stat.S_ISLNK(s):
83 # switch link to file
83 # switch link to file
84 data = os.readlink(f)
84 data = os.readlink(f)
85 os.unlink(f)
85 os.unlink(f)
86 file(f, "w").write(data)
86 open(f, "w").write(data)
87 s = 0666 & ~umask # avoid restatting for chmod
87 s = 0666 & ~umask # avoid restatting for chmod
88
88
89 sx = s & 0100
89 sx = s & 0100
90 if x and not sx:
90 if x and not sx:
91 # Turn on +x for every +r bit when making a file executable
91 # Turn on +x for every +r bit when making a file executable
92 # and obey umask.
92 # and obey umask.
93 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
93 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
94 elif not x and sx:
94 elif not x and sx:
95 # Turn off all +x bits
95 # Turn off all +x bits
96 os.chmod(f, s & 0666)
96 os.chmod(f, s & 0666)
97
97
98 def set_binary(fd):
98 def set_binary(fd):
99 pass
99 pass
100
100
101 def pconvert(path):
101 def pconvert(path):
102 return path
102 return path
103
103
104 def localpath(path):
104 def localpath(path):
105 return path
105 return path
106
106
107 def shellquote(s):
107 def shellquote(s):
108 if os.sys.platform == 'OpenVMS':
108 if os.sys.platform == 'OpenVMS':
109 return '"%s"' % s
109 return '"%s"' % s
110 else:
110 else:
111 return "'%s'" % s.replace("'", "'\\''")
111 return "'%s'" % s.replace("'", "'\\''")
112
112
113 def quotecommand(cmd):
113 def quotecommand(cmd):
114 return cmd
114 return cmd
115
115
116 def popen(command, mode='r'):
116 def popen(command, mode='r'):
117 return os.popen(command, mode)
117 return os.popen(command, mode)
118
118
119 def testpid(pid):
119 def testpid(pid):
120 '''return False if pid dead, True if running or not sure'''
120 '''return False if pid dead, True if running or not sure'''
121 if os.sys.platform == 'OpenVMS':
121 if os.sys.platform == 'OpenVMS':
122 return True
122 return True
123 try:
123 try:
124 os.kill(pid, 0)
124 os.kill(pid, 0)
125 return True
125 return True
126 except OSError, inst:
126 except OSError, inst:
127 return inst.errno != errno.ESRCH
127 return inst.errno != errno.ESRCH
128
128
129 def explain_exit(code):
129 def explain_exit(code):
130 """return a 2-tuple (desc, code) describing a process's status"""
130 """return a 2-tuple (desc, code) describing a process's status"""
131 if os.WIFEXITED(code):
131 if os.WIFEXITED(code):
132 val = os.WEXITSTATUS(code)
132 val = os.WEXITSTATUS(code)
133 return _("exited with status %d") % val, val
133 return _("exited with status %d") % val, val
134 elif os.WIFSIGNALED(code):
134 elif os.WIFSIGNALED(code):
135 val = os.WTERMSIG(code)
135 val = os.WTERMSIG(code)
136 return _("killed by signal %d") % val, val
136 return _("killed by signal %d") % val, val
137 elif os.WIFSTOPPED(code):
137 elif os.WIFSTOPPED(code):
138 val = os.WSTOPSIG(code)
138 val = os.WSTOPSIG(code)
139 return _("stopped by signal %d") % val, val
139 return _("stopped by signal %d") % val, val
140 raise ValueError(_("invalid exit code"))
140 raise ValueError(_("invalid exit code"))
141
141
142 def isowner(st):
142 def isowner(st):
143 """Return True if the stat object st is from the current user."""
143 """Return True if the stat object st is from the current user."""
144 return st.st_uid == os.getuid()
144 return st.st_uid == os.getuid()
145
145
146 def find_exe(command):
146 def find_exe(command):
147 '''Find executable for command searching like which does.
147 '''Find executable for command searching like which does.
148 If command is a basename then PATH is searched for command.
148 If command is a basename then PATH is searched for command.
149 PATH isn't searched if command is an absolute or relative path.
149 PATH isn't searched if command is an absolute or relative path.
150 If command isn't found None is returned.'''
150 If command isn't found None is returned.'''
151 if sys.platform == 'OpenVMS':
151 if sys.platform == 'OpenVMS':
152 return command
152 return command
153
153
154 def findexisting(executable):
154 def findexisting(executable):
155 'Will return executable if existing file'
155 'Will return executable if existing file'
156 if os.path.exists(executable):
156 if os.path.exists(executable):
157 return executable
157 return executable
158 return None
158 return None
159
159
160 if os.sep in command:
160 if os.sep in command:
161 return findexisting(command)
161 return findexisting(command)
162
162
163 for path in os.environ.get('PATH', '').split(os.pathsep):
163 for path in os.environ.get('PATH', '').split(os.pathsep):
164 executable = findexisting(os.path.join(path, command))
164 executable = findexisting(os.path.join(path, command))
165 if executable is not None:
165 if executable is not None:
166 return executable
166 return executable
167 return None
167 return None
168
168
169 def set_signal_handler():
169 def set_signal_handler():
170 pass
170 pass
171
171
172 def statfiles(files):
172 def statfiles(files):
173 'Stat each file in files and yield stat or None if file does not exist.'
173 'Stat each file in files and yield stat or None if file does not exist.'
174 lstat = os.lstat
174 lstat = os.lstat
175 for nf in files:
175 for nf in files:
176 try:
176 try:
177 st = lstat(nf)
177 st = lstat(nf)
178 except OSError, err:
178 except OSError, err:
179 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
179 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
180 raise
180 raise
181 st = None
181 st = None
182 yield st
182 yield st
183
183
184 def getuser():
184 def getuser():
185 '''return name of current user'''
185 '''return name of current user'''
186 return getpass.getuser()
186 return getpass.getuser()
187
187
188 def expand_glob(pats):
188 def expand_glob(pats):
189 '''On Windows, expand the implicit globs in a list of patterns'''
189 '''On Windows, expand the implicit globs in a list of patterns'''
190 return list(pats)
190 return list(pats)
191
191
192 def username(uid=None):
192 def username(uid=None):
193 """Return the name of the user with the given uid.
193 """Return the name of the user with the given uid.
194
194
195 If uid is None, return the name of the current user."""
195 If uid is None, return the name of the current user."""
196
196
197 if uid is None:
197 if uid is None:
198 uid = os.getuid()
198 uid = os.getuid()
199 try:
199 try:
200 return pwd.getpwuid(uid)[0]
200 return pwd.getpwuid(uid)[0]
201 except KeyError:
201 except KeyError:
202 return str(uid)
202 return str(uid)
203
203
204 def groupname(gid=None):
204 def groupname(gid=None):
205 """Return the name of the group with the given gid.
205 """Return the name of the group with the given gid.
206
206
207 If gid is None, return the name of the current group."""
207 If gid is None, return the name of the current group."""
208
208
209 if gid is None:
209 if gid is None:
210 gid = os.getgid()
210 gid = os.getgid()
211 try:
211 try:
212 return grp.getgrgid(gid)[0]
212 return grp.getgrgid(gid)[0]
213 except KeyError:
213 except KeyError:
214 return str(gid)
214 return str(gid)
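
The changes in this file replace the file() builtin with open() in posixfile and the symlink helpers. The two are interchangeable in Python 2, but the file builtin no longer exists in Python 3, so open() is the portable spelling. A minimal sketch, using a throwaway temporary file purely for illustration:

# open() and file() behave the same on Python 2, but only open() exists on
# Python 3, so it is the forward-compatible spelling used above.
import os, tempfile

fd, path = tempfile.mkstemp()       # throwaway file, for illustration only
os.close(fd)
open(path, 'w').write('hello\n')    # works wherever file(path, 'w') did
print(open(path).read())
os.unlink(path)
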
@@ -1,52 +1,52 b''
1 # osutil.py - pure Python version of osutil.c
1 # osutil.py - pure Python version of osutil.c
2 #
2 #
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import os
8 import os
9 import stat as _stat
9 import stat as _stat
10
10
11 posixfile = file
11 posixfile = open
12
12
13 def _mode_to_kind(mode):
13 def _mode_to_kind(mode):
14 if _stat.S_ISREG(mode): return _stat.S_IFREG
14 if _stat.S_ISREG(mode): return _stat.S_IFREG
15 if _stat.S_ISDIR(mode): return _stat.S_IFDIR
15 if _stat.S_ISDIR(mode): return _stat.S_IFDIR
16 if _stat.S_ISLNK(mode): return _stat.S_IFLNK
16 if _stat.S_ISLNK(mode): return _stat.S_IFLNK
17 if _stat.S_ISBLK(mode): return _stat.S_IFBLK
17 if _stat.S_ISBLK(mode): return _stat.S_IFBLK
18 if _stat.S_ISCHR(mode): return _stat.S_IFCHR
18 if _stat.S_ISCHR(mode): return _stat.S_IFCHR
19 if _stat.S_ISFIFO(mode): return _stat.S_IFIFO
19 if _stat.S_ISFIFO(mode): return _stat.S_IFIFO
20 if _stat.S_ISSOCK(mode): return _stat.S_IFSOCK
20 if _stat.S_ISSOCK(mode): return _stat.S_IFSOCK
21 return mode
21 return mode
22
22
23 def listdir(path, stat=False, skip=None):
23 def listdir(path, stat=False, skip=None):
24 '''listdir(path, stat=False) -> list_of_tuples
24 '''listdir(path, stat=False) -> list_of_tuples
25
25
26 Return a sorted list containing information about the entries
26 Return a sorted list containing information about the entries
27 in the directory.
27 in the directory.
28
28
29 If stat is True, each element is a 3-tuple:
29 If stat is True, each element is a 3-tuple:
30
30
31 (name, type, stat object)
31 (name, type, stat object)
32
32
33 Otherwise, each element is a 2-tuple:
33 Otherwise, each element is a 2-tuple:
34
34
35 (name, type)
35 (name, type)
36 '''
36 '''
37 result = []
37 result = []
38 prefix = path
38 prefix = path
39 if not prefix.endswith(os.sep):
39 if not prefix.endswith(os.sep):
40 prefix += os.sep
40 prefix += os.sep
41 names = os.listdir(path)
41 names = os.listdir(path)
42 names.sort()
42 names.sort()
43 for fn in names:
43 for fn in names:
44 st = os.lstat(prefix + fn)
44 st = os.lstat(prefix + fn)
45 if fn == skip and _stat.S_ISDIR(st.st_mode):
45 if fn == skip and _stat.S_ISDIR(st.st_mode):
46 return []
46 return []
47 if stat:
47 if stat:
48 result.append((fn, _mode_to_kind(st.st_mode), st))
48 result.append((fn, _mode_to_kind(st.st_mode), st))
49 else:
49 else:
50 result.append((fn, _mode_to_kind(st.st_mode)))
50 result.append((fn, _mode_to_kind(st.st_mode)))
51 return result
51 return result
52
52
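
A usage sketch for the pure-Python listdir() above, showing the two return shapes its docstring describes. It assumes the module is importable as mercurial.pure.osutil; under Python 2 the print() calls display tuples, which is fine for the illustration.

from mercurial.pure import osutil   # assumed import path for the pure module

for name, kind in osutil.listdir('.'):                   # (name, type) pairs
    print(name, kind)

for name, kind, st in osutil.listdir('.', stat=True):    # plus the stat object
    print(name, kind, st.st_size)
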
@@ -1,271 +1,271 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # This is the mercurial setup script.
3 # This is the mercurial setup script.
4 #
4 #
5 # 'python setup.py install', or
5 # 'python setup.py install', or
6 # 'python setup.py --help' for more options
6 # 'python setup.py --help' for more options
7
7
8 import sys
8 import sys
9 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
9 if not hasattr(sys, 'version_info') or sys.version_info < (2, 4, 0, 'final'):
10 raise SystemExit("Mercurial requires Python 2.4 or later.")
10 raise SystemExit("Mercurial requires Python 2.4 or later.")
11
11
12 # Solaris Python packaging brain damage
12 # Solaris Python packaging brain damage
13 try:
13 try:
14 import hashlib
14 import hashlib
15 sha = hashlib.sha1()
15 sha = hashlib.sha1()
16 except:
16 except:
17 try:
17 try:
18 import sha
18 import sha
19 except:
19 except:
20 raise SystemExit(
20 raise SystemExit(
21 "Couldn't import standard hashlib (incomplete Python install).")
21 "Couldn't import standard hashlib (incomplete Python install).")
22
22
23 try:
23 try:
24 import zlib
24 import zlib
25 except:
25 except:
26 raise SystemExit(
26 raise SystemExit(
27 "Couldn't import standard zlib (incomplete Python install).")
27 "Couldn't import standard zlib (incomplete Python install).")
28
28
29 import os, subprocess, time
29 import os, subprocess, time
30 import shutil
30 import shutil
31 import tempfile
31 import tempfile
32 from distutils.core import setup, Extension
32 from distutils.core import setup, Extension
33 from distutils.dist import Distribution
33 from distutils.dist import Distribution
34 from distutils.command.install_data import install_data
34 from distutils.command.install_data import install_data
35 from distutils.command.build import build
35 from distutils.command.build import build
36 from distutils.command.build_py import build_py
36 from distutils.command.build_py import build_py
37 from distutils.spawn import spawn, find_executable
37 from distutils.spawn import spawn, find_executable
38 from distutils.ccompiler import new_compiler
38 from distutils.ccompiler import new_compiler
39
39
40 extra = {}
40 extra = {}
41 scripts = ['hg']
41 scripts = ['hg']
42 if os.name == 'nt':
42 if os.name == 'nt':
43 scripts.append('contrib/win32/hg.bat')
43 scripts.append('contrib/win32/hg.bat')
44
44
45 # simplified version of distutils.ccompiler.CCompiler.has_function
45 # simplified version of distutils.ccompiler.CCompiler.has_function
46 # that actually removes its temporary files.
46 # that actually removes its temporary files.
47 def has_function(cc, funcname):
47 def has_function(cc, funcname):
48 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
48 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
49 devnull = oldstderr = None
49 devnull = oldstderr = None
50 try:
50 try:
51 try:
51 try:
52 fname = os.path.join(tmpdir, 'funcname.c')
52 fname = os.path.join(tmpdir, 'funcname.c')
53 f = open(fname, 'w')
53 f = open(fname, 'w')
54 f.write('int main(void) {\n')
54 f.write('int main(void) {\n')
55 f.write(' %s();\n' % funcname)
55 f.write(' %s();\n' % funcname)
56 f.write('}\n')
56 f.write('}\n')
57 f.close()
57 f.close()
58 # Redirect stderr to /dev/null to hide any error messages
58 # Redirect stderr to /dev/null to hide any error messages
59 # from the compiler.
59 # from the compiler.
60 # This will have to be changed if we ever have to check
60 # This will have to be changed if we ever have to check
61 # for a function on Windows.
61 # for a function on Windows.
62 devnull = open('/dev/null', 'w')
62 devnull = open('/dev/null', 'w')
63 oldstderr = os.dup(sys.stderr.fileno())
63 oldstderr = os.dup(sys.stderr.fileno())
64 os.dup2(devnull.fileno(), sys.stderr.fileno())
64 os.dup2(devnull.fileno(), sys.stderr.fileno())
65 objects = cc.compile([fname])
65 objects = cc.compile([fname])
66 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
66 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
67 except:
67 except:
68 return False
68 return False
69 return True
69 return True
70 finally:
70 finally:
71 if oldstderr is not None:
71 if oldstderr is not None:
72 os.dup2(oldstderr, sys.stderr.fileno())
72 os.dup2(oldstderr, sys.stderr.fileno())
73 if devnull is not None:
73 if devnull is not None:
74 devnull.close()
74 devnull.close()
75 shutil.rmtree(tmpdir)
75 shutil.rmtree(tmpdir)
76
76
77 # py2exe needs to be installed to work
77 # py2exe needs to be installed to work
78 try:
78 try:
79 import py2exe
79 import py2exe
80
80
81 # Help py2exe to find win32com.shell
81 # Help py2exe to find win32com.shell
82 try:
82 try:
83 import modulefinder
83 import modulefinder
84 import win32com
84 import win32com
85 for p in win32com.__path__[1:]: # Take the path to win32comext
85 for p in win32com.__path__[1:]: # Take the path to win32comext
86 modulefinder.AddPackagePath("win32com", p)
86 modulefinder.AddPackagePath("win32com", p)
87 pn = "win32com.shell"
87 pn = "win32com.shell"
88 __import__(pn)
88 __import__(pn)
89 m = sys.modules[pn]
89 m = sys.modules[pn]
90 for p in m.__path__[1:]:
90 for p in m.__path__[1:]:
91 modulefinder.AddPackagePath(pn, p)
91 modulefinder.AddPackagePath(pn, p)
92 except ImportError:
92 except ImportError:
93 pass
93 pass
94
94
95 extra['console'] = ['hg']
95 extra['console'] = ['hg']
96
96
97 except ImportError:
97 except ImportError:
98 pass
98 pass
99
99
100 version = None
100 version = None
101
101
102 if os.path.isdir('.hg'):
102 if os.path.isdir('.hg'):
103 # Execute hg out of this directory with a custom environment which
103 # Execute hg out of this directory with a custom environment which
104 # includes the pure Python modules in mercurial/pure. We also take
104 # includes the pure Python modules in mercurial/pure. We also take
105 # care to not use any hgrc files and do no localization.
105 # care to not use any hgrc files and do no localization.
106 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
106 pypath = ['mercurial', os.path.join('mercurial', 'pure')]
107 env = {'PYTHONPATH': os.pathsep.join(pypath),
107 env = {'PYTHONPATH': os.pathsep.join(pypath),
108 'HGRCPATH': '',
108 'HGRCPATH': '',
109 'LANGUAGE': 'C'}
109 'LANGUAGE': 'C'}
110 if 'SystemRoot' in os.environ:
110 if 'SystemRoot' in os.environ:
111 # Copy SystemRoot into the custom environment for Python 2.6
111 # Copy SystemRoot into the custom environment for Python 2.6
112 # under Windows. Otherwise, the subprocess will fail with
112 # under Windows. Otherwise, the subprocess will fail with
113 # error 0xc0150004. See: http://bugs.python.org/issue3440
113 # error 0xc0150004. See: http://bugs.python.org/issue3440
114 env['SystemRoot'] = os.environ['SystemRoot']
114 env['SystemRoot'] = os.environ['SystemRoot']
115 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
115 cmd = [sys.executable, 'hg', 'id', '-i', '-t']
116
116
117 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
117 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
118 stderr=subprocess.PIPE, env=env)
118 stderr=subprocess.PIPE, env=env)
119 out, err = p.communicate()
119 out, err = p.communicate()
120
120
121 # If root is executing setup.py, but the repository is owned by
121 # If root is executing setup.py, but the repository is owned by
122 # another user (as in "sudo python setup.py install") we will get
122 # another user (as in "sudo python setup.py install") we will get
123 # trust warnings since the .hg/hgrc file is untrusted. That is
123 # trust warnings since the .hg/hgrc file is untrusted. That is
124 # fine, we don't want to load it anyway.
124 # fine, we don't want to load it anyway.
125 err = [e for e in err.splitlines()
125 err = [e for e in err.splitlines()
126 if not e.startswith('Not trusting file')]
126 if not e.startswith('Not trusting file')]
127 if err:
127 if err:
128 sys.stderr.write('warning: could not establish Mercurial '
128 sys.stderr.write('warning: could not establish Mercurial '
129 'version:\n%s\n' % '\n'.join(err))
129 'version:\n%s\n' % '\n'.join(err))
130 else:
130 else:
131 l = out.split()
131 l = out.split()
132 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
132 while len(l) > 1 and l[-1][0].isalpha(): # remove non-numbered tags
133 l.pop()
133 l.pop()
134 if l:
134 if l:
135 version = l[-1] # latest tag or revision number
135 version = l[-1] # latest tag or revision number
136 if version.endswith('+'):
136 if version.endswith('+'):
137 version += time.strftime('%Y%m%d')
137 version += time.strftime('%Y%m%d')
138 elif os.path.exists('.hg_archival.txt'):
138 elif os.path.exists('.hg_archival.txt'):
139 hgarchival = open('.hg_archival.txt')
139 hgarchival = open('.hg_archival.txt')
140 for line in hgarchival:
140 for line in hgarchival:
141 if line.startswith('node:'):
141 if line.startswith('node:'):
142 version = line.split(':')[1].strip()[:12]
142 version = line.split(':')[1].strip()[:12]
143 break
143 break
144
144
145 if version:
145 if version:
146 f = file("mercurial/__version__.py", "w")
146 f = open("mercurial/__version__.py", "w")
147 f.write('# this file is autogenerated by setup.py\n')
147 f.write('# this file is autogenerated by setup.py\n')
148 f.write('version = "%s"\n' % version)
148 f.write('version = "%s"\n' % version)
149 f.close()
149 f.close()
150
150
151
151
152 try:
152 try:
153 from mercurial import __version__
153 from mercurial import __version__
154 version = __version__.version
154 version = __version__.version
155 except ImportError:
155 except ImportError:
156 version = 'unknown'
156 version = 'unknown'
157
157
158 class install_package_data(install_data):
158 class install_package_data(install_data):
159 def finalize_options(self):
159 def finalize_options(self):
160 self.set_undefined_options('install',
160 self.set_undefined_options('install',
161 ('install_lib', 'install_dir'))
161 ('install_lib', 'install_dir'))
162 install_data.finalize_options(self)
162 install_data.finalize_options(self)
163
163
164 class build_mo(build):
164 class build_mo(build):
165
165
166 description = "build translations (.mo files)"
166 description = "build translations (.mo files)"
167
167
168 def run(self):
168 def run(self):
169 if not find_executable('msgfmt'):
169 if not find_executable('msgfmt'):
170 self.warn("could not find msgfmt executable, no translations "
170 self.warn("could not find msgfmt executable, no translations "
171 "will be built")
171 "will be built")
172 return
172 return
173
173
174 podir = 'i18n'
174 podir = 'i18n'
175 if not os.path.isdir(podir):
175 if not os.path.isdir(podir):
176 self.warn("could not find %s/ directory" % podir)
176 self.warn("could not find %s/ directory" % podir)
177 return
177 return
178
178
179 join = os.path.join
179 join = os.path.join
180 for po in os.listdir(podir):
180 for po in os.listdir(podir):
181 if not po.endswith('.po'):
181 if not po.endswith('.po'):
182 continue
182 continue
183 pofile = join(podir, po)
183 pofile = join(podir, po)
184 modir = join('locale', po[:-3], 'LC_MESSAGES')
184 modir = join('locale', po[:-3], 'LC_MESSAGES')
185 mofile = join(modir, 'hg.mo')
185 mofile = join(modir, 'hg.mo')
186 cmd = ['msgfmt', '-v', '-o', mofile, pofile]
186 cmd = ['msgfmt', '-v', '-o', mofile, pofile]
187 if sys.platform != 'sunos5':
187 if sys.platform != 'sunos5':
188 # msgfmt on Solaris does not know about -c
188 # msgfmt on Solaris does not know about -c
189 cmd.append('-c')
189 cmd.append('-c')
190 self.mkpath(modir)
190 self.mkpath(modir)
191 self.make_file([pofile], mofile, spawn, (cmd,))
191 self.make_file([pofile], mofile, spawn, (cmd,))
192 self.distribution.data_files.append((join('mercurial', modir),
192 self.distribution.data_files.append((join('mercurial', modir),
193 [mofile]))
193 [mofile]))
194
194
195 build.sub_commands.append(('build_mo', None))
195 build.sub_commands.append(('build_mo', None))
196
196
197 Distribution.pure = 0
197 Distribution.pure = 0
198 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
198 Distribution.global_options.append(('pure', None, "use pure (slow) Python "
199 "code instead of C extensions"))
199 "code instead of C extensions"))
200
200
201 class hg_build_py(build_py):
201 class hg_build_py(build_py):
202
202
203 def finalize_options(self):
203 def finalize_options(self):
204 build_py.finalize_options(self)
204 build_py.finalize_options(self)
205
205
206 if self.distribution.pure:
206 if self.distribution.pure:
207 if self.py_modules is None:
207 if self.py_modules is None:
208 self.py_modules = []
208 self.py_modules = []
209 for ext in self.distribution.ext_modules:
209 for ext in self.distribution.ext_modules:
210 if ext.name.startswith("mercurial."):
210 if ext.name.startswith("mercurial."):
211 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
211 self.py_modules.append("mercurial.pure.%s" % ext.name[10:])
212 self.distribution.ext_modules = []
212 self.distribution.ext_modules = []
213
213
214 def find_modules(self):
214 def find_modules(self):
215 modules = build_py.find_modules(self)
215 modules = build_py.find_modules(self)
216 for module in modules:
216 for module in modules:
217 if module[0] == "mercurial.pure":
217 if module[0] == "mercurial.pure":
218 if module[1] != "__init__":
218 if module[1] != "__init__":
219 yield ("mercurial", module[1], module[2])
219 yield ("mercurial", module[1], module[2])
220 else:
220 else:
221 yield module
221 yield module
222
222
223 cmdclass = {'install_data': install_package_data,
223 cmdclass = {'install_data': install_package_data,
224 'build_mo': build_mo,
224 'build_mo': build_mo,
225 'build_py': hg_build_py}
225 'build_py': hg_build_py}
226
226
227 ext_modules=[
227 ext_modules=[
228 Extension('mercurial.base85', ['mercurial/base85.c']),
228 Extension('mercurial.base85', ['mercurial/base85.c']),
229 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
229 Extension('mercurial.bdiff', ['mercurial/bdiff.c']),
230 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
230 Extension('mercurial.diffhelpers', ['mercurial/diffhelpers.c']),
231 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
231 Extension('mercurial.mpatch', ['mercurial/mpatch.c']),
232 Extension('mercurial.parsers', ['mercurial/parsers.c']),
232 Extension('mercurial.parsers', ['mercurial/parsers.c']),
233 Extension('mercurial.osutil', ['mercurial/osutil.c']),
233 Extension('mercurial.osutil', ['mercurial/osutil.c']),
234 ]
234 ]
235
235
236 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
236 packages = ['mercurial', 'mercurial.hgweb', 'hgext', 'hgext.convert',
237 'hgext.highlight', 'hgext.zeroconf', ]
237 'hgext.highlight', 'hgext.zeroconf', ]
238
238
239 if sys.platform == 'linux2' and os.uname()[2] > '2.6':
239 if sys.platform == 'linux2' and os.uname()[2] > '2.6':
240 # The inotify extension is only usable with Linux 2.6 kernels.
240 # The inotify extension is only usable with Linux 2.6 kernels.
241 # You also need a reasonably recent C library.
241 # You also need a reasonably recent C library.
242 cc = new_compiler()
242 cc = new_compiler()
243 if has_function(cc, 'inotify_add_watch'):
243 if has_function(cc, 'inotify_add_watch'):
244 ext_modules.append(Extension('hgext.inotify.linux._inotify',
244 ext_modules.append(Extension('hgext.inotify.linux._inotify',
245 ['hgext/inotify/linux/_inotify.c']))
245 ['hgext/inotify/linux/_inotify.c']))
246 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
246 packages.extend(['hgext.inotify', 'hgext.inotify.linux'])
247
247
248 datafiles = []
248 datafiles = []
249 for root in ('templates', 'i18n'):
249 for root in ('templates', 'i18n'):
250 for dir, dirs, files in os.walk(root):
250 for dir, dirs, files in os.walk(root):
251 datafiles.append((os.path.join('mercurial', dir),
251 datafiles.append((os.path.join('mercurial', dir),
252 [os.path.join(dir, file_) for file_ in files]))
252 [os.path.join(dir, file_) for file_ in files]))
253
253
254 setup(name='mercurial',
254 setup(name='mercurial',
255 version=version,
255 version=version,
256 author='Matt Mackall',
256 author='Matt Mackall',
257 author_email='mpm@selenic.com',
257 author_email='mpm@selenic.com',
258 url='http://mercurial.selenic.com/',
258 url='http://mercurial.selenic.com/',
259 description='Scalable distributed SCM',
259 description='Scalable distributed SCM',
260 license='GNU GPL',
260 license='GNU GPL',
261 scripts=scripts,
261 scripts=scripts,
262 packages=packages,
262 packages=packages,
263 ext_modules=ext_modules,
263 ext_modules=ext_modules,
264 data_files=datafiles,
264 data_files=datafiles,
265 cmdclass=cmdclass,
265 cmdclass=cmdclass,
266 options=dict(py2exe=dict(packages=['hgext', 'email']),
266 options=dict(py2exe=dict(packages=['hgext', 'email']),
267 bdist_mpkg=dict(zipdist=True,
267 bdist_mpkg=dict(zipdist=True,
268 license='COPYING',
268 license='COPYING',
269 readme='contrib/macosx/Readme.html',
269 readme='contrib/macosx/Readme.html',
270 welcome='contrib/macosx/Welcome.html')),
270 welcome='contrib/macosx/Welcome.html')),
271 **extra)
271 **extra)
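
A minimal sketch of the version cleanup that setup.py applies above to the output of 'hg id -i -t': trailing non-numbered tags are dropped, and a date stamp is appended when the working copy has local modifications. The sample id string and date are invented.

import time

out = 'deadbeefcafe+ tip'                 # hypothetical "hg id -i -t" output
l = out.split()
while len(l) > 1 and l[-1][0].isalpha():  # remove non-numbered tags
    l.pop()
version = l[-1]                           # latest tag or revision number
if version.endswith('+'):                 # '+' means local modifications
    version += time.strftime('%Y%m%d')
print(version)                            # e.g. deadbeefcafe+20090625
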
@@ -1,25 +1,25 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 import os, sys, time, errno, signal
3 import os, sys, time, errno, signal
4
4
5 # Kill off any leftover daemon processes
5 # Kill off any leftover daemon processes
6 try:
6 try:
7 fp = file(os.environ['DAEMON_PIDS'])
7 fp = open(os.environ['DAEMON_PIDS'])
8 for line in fp:
8 for line in fp:
9 try:
9 try:
10 pid = int(line)
10 pid = int(line)
11 except ValueError:
11 except ValueError:
12 continue
12 continue
13 try:
13 try:
14 os.kill(pid, 0)
14 os.kill(pid, 0)
15 os.kill(pid, signal.SIGTERM)
15 os.kill(pid, signal.SIGTERM)
16 for i in range(10):
16 for i in range(10):
17 time.sleep(0.05)
17 time.sleep(0.05)
18 os.kill(pid, 0)
18 os.kill(pid, 0)
19 os.kill(pid, signal.SIGKILL)
19 os.kill(pid, signal.SIGKILL)
20 except OSError, err:
20 except OSError, err:
21 if err.errno != errno.ESRCH:
21 if err.errno != errno.ESRCH:
22 raise
22 raise
23 fp.close()
23 fp.close()
24 except IOError:
24 except IOError:
25 pass
25 pass
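
The loop above follows a common escalation pattern: send SIGTERM, poll with signal 0 to see whether the process is gone, and fall back to SIGKILL if it lingers. A distilled sketch of that pattern as a standalone helper (not part of the script itself):

import errno, os, signal, time

def killdaemon(pid, grace=0.5, step=0.05):
    try:
        os.kill(pid, signal.SIGTERM)      # ask the daemon to exit
        deadline = time.time() + grace
        while time.time() < deadline:
            time.sleep(step)
            os.kill(pid, 0)               # raises ESRCH once the pid is gone
        os.kill(pid, signal.SIGKILL)      # still running after the grace period
    except OSError as err:
        if err.errno != errno.ESRCH:      # "no such process" means success here
            raise

# killdaemon(12345)                       # hypothetical pid
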
@@ -1,824 +1,824 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install
34 # 8) parallel, coverage, local install
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 #
36 #
37 # (You could use any subset of the tests: test-s* happens to match
37 # (You could use any subset of the tests: test-s* happens to match
38 # enough that it's worth doing parallel runs, few enough that it
38 # enough that it's worth doing parallel runs, few enough that it
39 # completes fairly quickly, includes both shell and Python scripts, and
39 # completes fairly quickly, includes both shell and Python scripts, and
40 # includes some scripts that run daemon processes.)
40 # includes some scripts that run daemon processes.)
41
41
42 import difflib
42 import difflib
43 import errno
43 import errno
44 import optparse
44 import optparse
45 import os
45 import os
46 import subprocess
46 import subprocess
47 import shutil
47 import shutil
48 import signal
48 import signal
49 import sys
49 import sys
50 import tempfile
50 import tempfile
51 import time
51 import time
52
52
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54 def Popen4(cmd, bufsize=-1):
54 def Popen4(cmd, bufsize=-1):
55 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
55 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
56 close_fds=closefds,
56 close_fds=closefds,
57 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
57 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
58 stderr=subprocess.STDOUT)
58 stderr=subprocess.STDOUT)
59 p.fromchild = p.stdout
59 p.fromchild = p.stdout
60 p.tochild = p.stdin
60 p.tochild = p.stdin
61 p.childerr = p.stderr
61 p.childerr = p.stderr
62 return p
62 return p
63
63
64 # reserved exit code to skip test (used by hghave)
64 # reserved exit code to skip test (used by hghave)
65 SKIPPED_STATUS = 80
65 SKIPPED_STATUS = 80
66 SKIPPED_PREFIX = 'skipped: '
66 SKIPPED_PREFIX = 'skipped: '
67 FAILED_PREFIX = 'hghave check failed: '
67 FAILED_PREFIX = 'hghave check failed: '
68 PYTHON = sys.executable
68 PYTHON = sys.executable
69
69
70 requiredtools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"]
70 requiredtools = ["python", "diff", "grep", "unzip", "gunzip", "bunzip2", "sed"]
71
71
72 defaults = {
72 defaults = {
73 'jobs': ('HGTEST_JOBS', 1),
73 'jobs': ('HGTEST_JOBS', 1),
74 'timeout': ('HGTEST_TIMEOUT', 180),
74 'timeout': ('HGTEST_TIMEOUT', 180),
75 'port': ('HGTEST_PORT', 20059),
75 'port': ('HGTEST_PORT', 20059),
76 }
76 }
77
77
78 def parseargs():
78 def parseargs():
79 parser = optparse.OptionParser("%prog [options] [tests]")
79 parser = optparse.OptionParser("%prog [options] [tests]")
80 parser.add_option("-C", "--annotate", action="store_true",
80 parser.add_option("-C", "--annotate", action="store_true",
81 help="output files annotated with coverage")
81 help="output files annotated with coverage")
82 parser.add_option("--child", type="int",
82 parser.add_option("--child", type="int",
83 help="run as child process, summary to given fd")
83 help="run as child process, summary to given fd")
84 parser.add_option("-c", "--cover", action="store_true",
84 parser.add_option("-c", "--cover", action="store_true",
85 help="print a test coverage report")
85 help="print a test coverage report")
86 parser.add_option("-f", "--first", action="store_true",
86 parser.add_option("-f", "--first", action="store_true",
87 help="exit on the first test failure")
87 help="exit on the first test failure")
88 parser.add_option("-i", "--interactive", action="store_true",
88 parser.add_option("-i", "--interactive", action="store_true",
89 help="prompt to accept changed output")
89 help="prompt to accept changed output")
90 parser.add_option("-j", "--jobs", type="int",
90 parser.add_option("-j", "--jobs", type="int",
91 help="number of jobs to run in parallel"
91 help="number of jobs to run in parallel"
92 " (default: $%s or %d)" % defaults['jobs'])
92 " (default: $%s or %d)" % defaults['jobs'])
93 parser.add_option("--keep-tmpdir", action="store_true",
93 parser.add_option("--keep-tmpdir", action="store_true",
94 help="keep temporary directory after running tests"
94 help="keep temporary directory after running tests"
95 " (best used with --tmpdir)")
95 " (best used with --tmpdir)")
96 parser.add_option("-R", "--restart", action="store_true",
96 parser.add_option("-R", "--restart", action="store_true",
97 help="restart at last error")
97 help="restart at last error")
98 parser.add_option("-p", "--port", type="int",
98 parser.add_option("-p", "--port", type="int",
99 help="port on which servers should listen"
99 help="port on which servers should listen"
100 " (default: $%s or %d)" % defaults['port'])
100 " (default: $%s or %d)" % defaults['port'])
101 parser.add_option("-r", "--retest", action="store_true",
101 parser.add_option("-r", "--retest", action="store_true",
102 help="retest failed tests")
102 help="retest failed tests")
103 parser.add_option("-s", "--cover_stdlib", action="store_true",
103 parser.add_option("-s", "--cover_stdlib", action="store_true",
104 help="print a test coverage report inc. standard libraries")
104 help="print a test coverage report inc. standard libraries")
105 parser.add_option("-t", "--timeout", type="int",
105 parser.add_option("-t", "--timeout", type="int",
106 help="kill errant tests after TIMEOUT seconds"
106 help="kill errant tests after TIMEOUT seconds"
107 " (default: $%s or %d)" % defaults['timeout'])
107 " (default: $%s or %d)" % defaults['timeout'])
108 parser.add_option("--tmpdir", type="string",
108 parser.add_option("--tmpdir", type="string",
109 help="run tests in the given temporary directory")
109 help="run tests in the given temporary directory")
110 parser.add_option("-v", "--verbose", action="store_true",
110 parser.add_option("-v", "--verbose", action="store_true",
111 help="output verbose messages")
111 help="output verbose messages")
112 parser.add_option("-n", "--nodiff", action="store_true",
112 parser.add_option("-n", "--nodiff", action="store_true",
113 help="skip showing test changes")
113 help="skip showing test changes")
114 parser.add_option("--with-hg", type="string",
114 parser.add_option("--with-hg", type="string",
115 metavar="HG",
115 metavar="HG",
116 help="test using specified hg script rather than a "
116 help="test using specified hg script rather than a "
117 "temporary installation")
117 "temporary installation")
118 parser.add_option("--local", action="store_true",
118 parser.add_option("--local", action="store_true",
119 help="shortcut for --with-hg=<testdir>/../hg")
119 help="shortcut for --with-hg=<testdir>/../hg")
120 parser.add_option("--pure", action="store_true",
120 parser.add_option("--pure", action="store_true",
121 help="use pure Python code instead of C extensions")
121 help="use pure Python code instead of C extensions")
122 parser.add_option("-3", "--py3k-warnings", action="store_true",
122 parser.add_option("-3", "--py3k-warnings", action="store_true",
123 help="enable Py3k warnings on Python 2.6+")
123 help="enable Py3k warnings on Python 2.6+")
124
124
125 for option, default in defaults.items():
125 for option, default in defaults.items():
126 defaults[option] = int(os.environ.get(*default))
126 defaults[option] = int(os.environ.get(*default))
127 parser.set_defaults(**defaults)
127 parser.set_defaults(**defaults)
128 (options, args) = parser.parse_args()
128 (options, args) = parser.parse_args()
129
129
130 if options.with_hg:
130 if options.with_hg:
131 if not (os.path.isfile(options.with_hg) and
131 if not (os.path.isfile(options.with_hg) and
132 os.access(options.with_hg, os.X_OK)):
132 os.access(options.with_hg, os.X_OK)):
133 parser.error('--with-hg must specify an executable hg script')
133 parser.error('--with-hg must specify an executable hg script')
134 if not os.path.basename(options.with_hg) == 'hg':
134 if not os.path.basename(options.with_hg) == 'hg':
135 sys.stderr.write('warning: --with-hg should specify an hg script')
135 sys.stderr.write('warning: --with-hg should specify an hg script')
136 if options.local:
136 if options.local:
137 testdir = os.path.dirname(os.path.realpath(sys.argv[0]))
137 testdir = os.path.dirname(os.path.realpath(sys.argv[0]))
138 hgbin = os.path.join(os.path.dirname(testdir), 'hg')
138 hgbin = os.path.join(os.path.dirname(testdir), 'hg')
139 if not os.access(hgbin, os.X_OK):
139 if not os.access(hgbin, os.X_OK):
140 parser.error('--local specified, but %r not found or not executable'
140 parser.error('--local specified, but %r not found or not executable'
141 % hgbin)
141 % hgbin)
142 options.with_hg = hgbin
142 options.with_hg = hgbin
143
143
144 options.anycoverage = (options.cover or
144 options.anycoverage = (options.cover or
145 options.cover_stdlib or
145 options.cover_stdlib or
146 options.annotate)
146 options.annotate)
147
147
148 if options.anycoverage and options.with_hg:
148 if options.anycoverage and options.with_hg:
149 # I'm not sure if this is a fundamental limitation or just a
149 # I'm not sure if this is a fundamental limitation or just a
150 # bug. But I don't want to waste people's time and energy doing
150 # bug. But I don't want to waste people's time and energy doing
151 # test runs that don't give the results they want.
151 # test runs that don't give the results they want.
152 parser.error("sorry, coverage options do not work when --with-hg "
152 parser.error("sorry, coverage options do not work when --with-hg "
153 "or --local specified")
153 "or --local specified")
154
154
155 global vlog
155 global vlog
156 if options.verbose:
156 if options.verbose:
157 if options.jobs > 1 or options.child is not None:
157 if options.jobs > 1 or options.child is not None:
158 pid = "[%d]" % os.getpid()
158 pid = "[%d]" % os.getpid()
159 else:
159 else:
160 pid = None
160 pid = None
161 def vlog(*msg):
161 def vlog(*msg):
162 if pid:
162 if pid:
163 print pid,
163 print pid,
164 for m in msg:
164 for m in msg:
165 print m,
165 print m,
166 print
166 print
167 else:
167 else:
168 vlog = lambda *msg: None
168 vlog = lambda *msg: None
169
169
170 if options.jobs < 1:
170 if options.jobs < 1:
171 print >> sys.stderr, 'ERROR: -j/--jobs must be positive'
171 print >> sys.stderr, 'ERROR: -j/--jobs must be positive'
172 sys.exit(1)
172 sys.exit(1)
173 if options.interactive and options.jobs > 1:
173 if options.interactive and options.jobs > 1:
174 print '(--interactive overrides --jobs)'
174 print '(--interactive overrides --jobs)'
175 options.jobs = 1
175 options.jobs = 1
176 if options.py3k_warnings:
176 if options.py3k_warnings:
177 if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
177 if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0):
178 print 'ERROR: Py3k warnings switch can only be used on Python 2.6+'
178 print 'ERROR: Py3k warnings switch can only be used on Python 2.6+'
179 sys.exit(1)
179 sys.exit(1)
180
180
181 return (options, args)
181 return (options, args)
182
182
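
parseargs() above seeds its option defaults from the environment: each entry in the defaults dict is an (ENV_VAR, fallback) pair that is unpacked straight into os.environ.get(). A small sketch of the same pattern with the pairs written out explicitly:

import os

defaults = {
    'jobs':    ('HGTEST_JOBS', 1),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'port':    ('HGTEST_PORT', 20059),
}
for option, (envvar, fallback) in defaults.items():
    defaults[option] = int(os.environ.get(envvar, fallback))
print(defaults)      # env vars win when set, otherwise the fallbacks remain
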
183 def rename(src, dst):
183 def rename(src, dst):
184 """Like os.rename(), trade atomicity and opened files friendliness
184 """Like os.rename(), trade atomicity and opened files friendliness
185 for existing destination support.
185 for existing destination support.
186 """
186 """
187 shutil.copy(src, dst)
187 shutil.copy(src, dst)
188 os.remove(src)
188 os.remove(src)
189
189
190 def splitnewlines(text):
190 def splitnewlines(text):
191 '''like str.splitlines, but only split on newlines.
191 '''like str.splitlines, but only split on newlines.
192 keep line endings.'''
192 keep line endings.'''
193 i = 0
193 i = 0
194 lines = []
194 lines = []
195 while True:
195 while True:
196 n = text.find('\n', i)
196 n = text.find('\n', i)
197 if n == -1:
197 if n == -1:
198 last = text[i:]
198 last = text[i:]
199 if last:
199 if last:
200 lines.append(last)
200 lines.append(last)
201 return lines
201 return lines
202 lines.append(text[i:n+1])
202 lines.append(text[i:n+1])
203 i = n + 1
203 i = n + 1
204
204
205 def parsehghaveoutput(lines):
205 def parsehghaveoutput(lines):
206 '''Parse hghave log lines.
206 '''Parse hghave log lines.
207 Return tuple of lists (missing, failed):
207 Return tuple of lists (missing, failed):
208 * the missing/unknown features
208 * the missing/unknown features
209 * the features for which existence check failed'''
209 * the features for which existence check failed'''
210 missing = []
210 missing = []
211 failed = []
211 failed = []
212 for line in lines:
212 for line in lines:
213 if line.startswith(SKIPPED_PREFIX):
213 if line.startswith(SKIPPED_PREFIX):
214 line = line.splitlines()[0]
214 line = line.splitlines()[0]
215 missing.append(line[len(SKIPPED_PREFIX):])
215 missing.append(line[len(SKIPPED_PREFIX):])
216 elif line.startswith(FAILED_PREFIX):
216 elif line.startswith(FAILED_PREFIX):
217 line = line.splitlines()[0]
217 line = line.splitlines()[0]
218 failed.append(line[len(FAILED_PREFIX):])
218 failed.append(line[len(FAILED_PREFIX):])
219
219
220 return missing, failed
220 return missing, failed
221
221
222 def showdiff(expected, output):
222 def showdiff(expected, output):
223 for line in difflib.unified_diff(expected, output,
223 for line in difflib.unified_diff(expected, output,
224 "Expected output", "Test output"):
224 "Expected output", "Test output"):
225 sys.stdout.write(line)
225 sys.stdout.write(line)
226
226
227 def findprogram(program):
227 def findprogram(program):
228 """Search PATH for a executable program"""
228 """Search PATH for a executable program"""
229 for p in os.environ.get('PATH', os.defpath).split(os.pathsep):
229 for p in os.environ.get('PATH', os.defpath).split(os.pathsep):
230 name = os.path.join(p, program)
230 name = os.path.join(p, program)
231 if os.access(name, os.X_OK):
231 if os.access(name, os.X_OK):
232 return name
232 return name
233 return None
233 return None
234
234
235 def checktools():
235 def checktools():
236 # Before we go any further, check for pre-requisite tools
236 # Before we go any further, check for pre-requisite tools
237 # stuff from coreutils (cat, rm, etc) are not tested
237 # stuff from coreutils (cat, rm, etc) are not tested
238 for p in requiredtools:
238 for p in requiredtools:
239 if os.name == 'nt':
239 if os.name == 'nt':
240 p += '.exe'
240 p += '.exe'
241 found = findprogram(p)
241 found = findprogram(p)
242 if found:
242 if found:
243 vlog("# Found prerequisite", p, "at", found)
243 vlog("# Found prerequisite", p, "at", found)
244 else:
244 else:
245 print "WARNING: Did not find prerequisite tool: "+p
245 print "WARNING: Did not find prerequisite tool: "+p
246
246
247 def cleanup(options):
247 def cleanup(options):
248 if not options.keep_tmpdir:
248 if not options.keep_tmpdir:
249 vlog("# Cleaning up HGTMP", HGTMP)
249 vlog("# Cleaning up HGTMP", HGTMP)
250 shutil.rmtree(HGTMP, True)
250 shutil.rmtree(HGTMP, True)
251
251
252 def usecorrectpython():
252 def usecorrectpython():
253 # some tests run the python interpreter. they must use the same
253 # some tests run the python interpreter. they must use the same
254 # interpreter we use or bad things will happen.
254 # interpreter we use or bad things will happen.
255 exedir, exename = os.path.split(sys.executable)
255 exedir, exename = os.path.split(sys.executable)
256 if exename == 'python':
256 if exename == 'python':
257 path = findprogram('python')
257 path = findprogram('python')
258 if os.path.dirname(path) == exedir:
258 if os.path.dirname(path) == exedir:
259 return
259 return
260 vlog('# Making python executable in test path use correct Python')
260 vlog('# Making python executable in test path use correct Python')
261 mypython = os.path.join(BINDIR, 'python')
261 mypython = os.path.join(BINDIR, 'python')
262 try:
262 try:
263 os.symlink(sys.executable, mypython)
263 os.symlink(sys.executable, mypython)
264 except AttributeError:
264 except AttributeError:
265 # windows fallback
265 # windows fallback
266 shutil.copyfile(sys.executable, mypython)
266 shutil.copyfile(sys.executable, mypython)
267 shutil.copymode(sys.executable, mypython)
267 shutil.copymode(sys.executable, mypython)
268
268
269 def installhg(options):
269 def installhg(options):
270 vlog("# Performing temporary installation of HG")
270 vlog("# Performing temporary installation of HG")
271 installerrs = os.path.join("tests", "install.err")
271 installerrs = os.path.join("tests", "install.err")
272 pure = options.pure and "--pure" or ""
272 pure = options.pure and "--pure" or ""
273
273
274 # Run installer in hg root
274 # Run installer in hg root
275 script = os.path.realpath(sys.argv[0])
275 script = os.path.realpath(sys.argv[0])
276 hgroot = os.path.dirname(os.path.dirname(script))
276 hgroot = os.path.dirname(os.path.dirname(script))
277 os.chdir(hgroot)
277 os.chdir(hgroot)
278 cmd = ('%s setup.py %s clean --all'
278 cmd = ('%s setup.py %s clean --all'
279 ' install --force --prefix="%s" --install-lib="%s"'
279 ' install --force --prefix="%s" --install-lib="%s"'
280 ' --install-scripts="%s" >%s 2>&1'
280 ' --install-scripts="%s" >%s 2>&1'
281 % (sys.executable, pure, INST, PYTHONDIR, BINDIR, installerrs))
281 % (sys.executable, pure, INST, PYTHONDIR, BINDIR, installerrs))
282 vlog("# Running", cmd)
282 vlog("# Running", cmd)
283 if os.system(cmd) == 0:
283 if os.system(cmd) == 0:
284 if not options.verbose:
284 if not options.verbose:
285 os.remove(installerrs)
285 os.remove(installerrs)
286 else:
286 else:
287 f = open(installerrs)
287 f = open(installerrs)
288 for line in f:
288 for line in f:
289 print line,
289 print line,
290 f.close()
290 f.close()
291 sys.exit(1)
291 sys.exit(1)
292 os.chdir(TESTDIR)
292 os.chdir(TESTDIR)
293
293
294 usecorrectpython()
294 usecorrectpython()
295
295
296 vlog("# Installing dummy diffstat")
296 vlog("# Installing dummy diffstat")
297 f = open(os.path.join(BINDIR, 'diffstat'), 'w')
297 f = open(os.path.join(BINDIR, 'diffstat'), 'w')
298 f.write('#!' + sys.executable + '\n'
298 f.write('#!' + sys.executable + '\n'
299 'import sys\n'
299 'import sys\n'
300 'files = 0\n'
300 'files = 0\n'
301 'for line in sys.stdin:\n'
301 'for line in sys.stdin:\n'
302 ' if line.startswith("diff "):\n'
302 ' if line.startswith("diff "):\n'
303 ' files += 1\n'
303 ' files += 1\n'
304 'sys.stdout.write("files patched: %d\\n" % files)\n')
304 'sys.stdout.write("files patched: %d\\n" % files)\n')
305 f.close()
305 f.close()
306 os.chmod(os.path.join(BINDIR, 'diffstat'), 0700)
306 os.chmod(os.path.join(BINDIR, 'diffstat'), 0700)
307
307
308 if options.py3k_warnings and not options.anycoverage:
308 if options.py3k_warnings and not options.anycoverage:
309 vlog("# Updating hg command to enable Py3k Warnings switch")
309 vlog("# Updating hg command to enable Py3k Warnings switch")
310 f = open(os.path.join(BINDIR, 'hg'), 'r')
310 f = open(os.path.join(BINDIR, 'hg'), 'r')
311 lines = [line.rstrip() for line in f]
311 lines = [line.rstrip() for line in f]
312 lines[0] += ' -3'
312 lines[0] += ' -3'
313 f.close()
313 f.close()
314 f = open(os.path.join(BINDIR, 'hg'), 'w')
314 f = open(os.path.join(BINDIR, 'hg'), 'w')
315 for line in lines:
315 for line in lines:
316 f.write(line + '\n')
316 f.write(line + '\n')
317 f.close()
317 f.close()
318
318
319 if options.anycoverage:
319 if options.anycoverage:
320 vlog("# Installing coverage wrapper")
320 vlog("# Installing coverage wrapper")
321 os.environ['COVERAGE_FILE'] = COVERAGE_FILE
321 os.environ['COVERAGE_FILE'] = COVERAGE_FILE
322 if os.path.exists(COVERAGE_FILE):
322 if os.path.exists(COVERAGE_FILE):
323 os.unlink(COVERAGE_FILE)
323 os.unlink(COVERAGE_FILE)
324 # Create a wrapper script to invoke hg via coverage.py
324 # Create a wrapper script to invoke hg via coverage.py
325 os.rename(os.path.join(BINDIR, "hg"), os.path.join(BINDIR, "_hg.py"))
325 os.rename(os.path.join(BINDIR, "hg"), os.path.join(BINDIR, "_hg.py"))
326 f = open(os.path.join(BINDIR, 'hg'), 'w')
326 f = open(os.path.join(BINDIR, 'hg'), 'w')
327 f.write('#!' + sys.executable + '\n')
327 f.write('#!' + sys.executable + '\n')
328 f.write('import sys, os; os.execv(sys.executable, [sys.executable, '
328 f.write('import sys, os; os.execv(sys.executable, [sys.executable, '
329 '"%s", "-x", "-p", "%s"] + sys.argv[1:])\n' %
329 '"%s", "-x", "-p", "%s"] + sys.argv[1:])\n' %
330 (os.path.join(TESTDIR, 'coverage.py'),
330 (os.path.join(TESTDIR, 'coverage.py'),
331 os.path.join(BINDIR, '_hg.py')))
331 os.path.join(BINDIR, '_hg.py')))
332 f.close()
332 f.close()
333 os.chmod(os.path.join(BINDIR, 'hg'), 0700)
333 os.chmod(os.path.join(BINDIR, 'hg'), 0700)
334
334
335 def outputcoverage(options):
335 def outputcoverage(options):
336
336
337 vlog('# Producing coverage report')
337 vlog('# Producing coverage report')
338 os.chdir(PYTHONDIR)
338 os.chdir(PYTHONDIR)
339
339
340 def covrun(*args):
340 def covrun(*args):
341 start = sys.executable, os.path.join(TESTDIR, 'coverage.py')
341 start = sys.executable, os.path.join(TESTDIR, 'coverage.py')
342 cmd = '"%s" "%s" %s' % (start[0], start[1], ' '.join(args))
342 cmd = '"%s" "%s" %s' % (start[0], start[1], ' '.join(args))
343 vlog('# Running: %s' % cmd)
343 vlog('# Running: %s' % cmd)
344 os.system(cmd)
344 os.system(cmd)
345
345
346 omit = [BINDIR, TESTDIR, PYTHONDIR]
346 omit = [BINDIR, TESTDIR, PYTHONDIR]
347 if not options.cover_stdlib:
347 if not options.cover_stdlib:
348 # Exclude as system paths (ignoring empty strings seen on win)
348 # Exclude as system paths (ignoring empty strings seen on win)
349 omit += [x for x in sys.path if x != '']
349 omit += [x for x in sys.path if x != '']
350 omit = ','.join(omit)
350 omit = ','.join(omit)
351
351
352 covrun('-c') # combine from parallel processes
352 covrun('-c') # combine from parallel processes
353 for fn in os.listdir(TESTDIR):
353 for fn in os.listdir(TESTDIR):
354 if fn.startswith('.coverage.'):
354 if fn.startswith('.coverage.'):
355 os.unlink(os.path.join(TESTDIR, fn))
355 os.unlink(os.path.join(TESTDIR, fn))
356
356
357 covrun('-i', '-r', '"--omit=%s"' % omit) # report
357 covrun('-i', '-r', '"--omit=%s"' % omit) # report
358 if options.annotate:
358 if options.annotate:
359 adir = os.path.join(TESTDIR, 'annotated')
359 adir = os.path.join(TESTDIR, 'annotated')
360 if not os.path.isdir(adir):
360 if not os.path.isdir(adir):
361 os.mkdir(adir)
361 os.mkdir(adir)
362 covrun('-i', '-a', '"--directory=%s"' % adir, '"--omit=%s"' % omit)
362 covrun('-i', '-a', '"--directory=%s"' % adir, '"--omit=%s"' % omit)
363
363
364 class Timeout(Exception):
364 class Timeout(Exception):
365 pass
365 pass
366
366
367 def alarmed(signum, frame):
367 def alarmed(signum, frame):
368 raise Timeout
368 raise Timeout
369
369
370 def run(cmd, options):
370 def run(cmd, options):
371 """Run command in a sub-process, capturing the output (stdout and stderr).
371 """Run command in a sub-process, capturing the output (stdout and stderr).
372 Return the exit code and output."""
372 Return the exit code and output."""
373 # TODO: Use subprocess.Popen if we're running on Python 2.4
373 # TODO: Use subprocess.Popen if we're running on Python 2.4
374 if os.name == 'nt' or sys.platform.startswith('java'):
374 if os.name == 'nt' or sys.platform.startswith('java'):
375 tochild, fromchild = os.popen4(cmd)
375 tochild, fromchild = os.popen4(cmd)
376 tochild.close()
376 tochild.close()
377 output = fromchild.read()
377 output = fromchild.read()
378 ret = fromchild.close()
378 ret = fromchild.close()
379 if ret == None:
379 if ret == None:
380 ret = 0
380 ret = 0
381 else:
381 else:
382 proc = Popen4(cmd)
382 proc = Popen4(cmd)
383 try:
383 try:
384 output = ''
384 output = ''
385 proc.tochild.close()
385 proc.tochild.close()
386 output = proc.fromchild.read()
386 output = proc.fromchild.read()
387 ret = proc.wait()
387 ret = proc.wait()
388 if os.WIFEXITED(ret):
388 if os.WIFEXITED(ret):
389 ret = os.WEXITSTATUS(ret)
389 ret = os.WEXITSTATUS(ret)
390 except Timeout:
390 except Timeout:
391 vlog('# Process %d timed out - killing it' % proc.pid)
391 vlog('# Process %d timed out - killing it' % proc.pid)
392 os.kill(proc.pid, signal.SIGTERM)
392 os.kill(proc.pid, signal.SIGTERM)
393 ret = proc.wait()
393 ret = proc.wait()
394 if ret == 0:
394 if ret == 0:
395 ret = signal.SIGTERM << 8
395 ret = signal.SIGTERM << 8
396 output += ("\n### Abort: timeout after %d seconds.\n"
396 output += ("\n### Abort: timeout after %d seconds.\n"
397 % options.timeout)
397 % options.timeout)
398 return ret, splitnewlines(output)
398 return ret, splitnewlines(output)
399
399
400 def runone(options, test, skips, fails):
400 def runone(options, test, skips, fails):
401 '''tristate output:
401 '''tristate output:
402 None -> skipped
402 None -> skipped
403 True -> passed
403 True -> passed
404 False -> failed'''
404 False -> failed'''
405
405
406 def skip(msg):
406 def skip(msg):
407 if not options.verbose:
407 if not options.verbose:
408 skips.append((test, msg))
408 skips.append((test, msg))
409 else:
409 else:
410 print "\nSkipping %s: %s" % (test, msg)
410 print "\nSkipping %s: %s" % (test, msg)
411 return None
411 return None
412
412
413 def fail(msg):
413 def fail(msg):
414 fails.append((test, msg))
414 fails.append((test, msg))
415 if not options.nodiff:
415 if not options.nodiff:
416 print "\nERROR: %s %s" % (test, msg)
416 print "\nERROR: %s %s" % (test, msg)
417 return None
417 return None
418
418
419 vlog("# Test", test)
419 vlog("# Test", test)
420
420
421 # create a fresh hgrc
421 # create a fresh hgrc
422 hgrc = file(HGRCPATH, 'w+')
422 hgrc = open(HGRCPATH, 'w+')
423 hgrc.write('[ui]\n')
423 hgrc.write('[ui]\n')
424 hgrc.write('slash = True\n')
424 hgrc.write('slash = True\n')
425 hgrc.write('[defaults]\n')
425 hgrc.write('[defaults]\n')
426 hgrc.write('backout = -d "0 0"\n')
426 hgrc.write('backout = -d "0 0"\n')
427 hgrc.write('commit = -d "0 0"\n')
427 hgrc.write('commit = -d "0 0"\n')
428 hgrc.write('tag = -d "0 0"\n')
428 hgrc.write('tag = -d "0 0"\n')
429 hgrc.close()
429 hgrc.close()
430
430
431 err = os.path.join(TESTDIR, test+".err")
431 err = os.path.join(TESTDIR, test+".err")
432 ref = os.path.join(TESTDIR, test+".out")
432 ref = os.path.join(TESTDIR, test+".out")
433 testpath = os.path.join(TESTDIR, test)
433 testpath = os.path.join(TESTDIR, test)
434
434
435 if os.path.exists(err):
435 if os.path.exists(err):
436 os.remove(err) # Remove any previous output files
436 os.remove(err) # Remove any previous output files
437
437
438 # Make a tmp subdirectory to work in
438 # Make a tmp subdirectory to work in
439 tmpd = os.path.join(HGTMP, test)
439 tmpd = os.path.join(HGTMP, test)
440 os.mkdir(tmpd)
440 os.mkdir(tmpd)
441 os.chdir(tmpd)
441 os.chdir(tmpd)
442
442
443 try:
443 try:
444 tf = open(testpath)
444 tf = open(testpath)
445 firstline = tf.readline().rstrip()
445 firstline = tf.readline().rstrip()
446 tf.close()
446 tf.close()
447 except:
447 except:
448 firstline = ''
448 firstline = ''
449 lctest = test.lower()
449 lctest = test.lower()
450
450
451 if lctest.endswith('.py') or firstline == '#!/usr/bin/env python':
451 if lctest.endswith('.py') or firstline == '#!/usr/bin/env python':
452 py3kswitch = options.py3k_warnings and ' -3' or ''
452 py3kswitch = options.py3k_warnings and ' -3' or ''
453 cmd = '%s%s "%s"' % (PYTHON, py3kswitch, testpath)
453 cmd = '%s%s "%s"' % (PYTHON, py3kswitch, testpath)
454 elif lctest.endswith('.bat'):
454 elif lctest.endswith('.bat'):
455 # do not run batch scripts on non-windows
455 # do not run batch scripts on non-windows
456 if os.name != 'nt':
456 if os.name != 'nt':
457 return skip("batch script")
457 return skip("batch script")
458 # To reliably get the error code from batch files on WinXP,
458 # To reliably get the error code from batch files on WinXP,
459 # the "cmd /c call" prefix is needed. Grrr
459 # the "cmd /c call" prefix is needed. Grrr
460 cmd = 'cmd /c call "%s"' % testpath
460 cmd = 'cmd /c call "%s"' % testpath
461 else:
461 else:
462 # do not run shell scripts on windows
462 # do not run shell scripts on windows
463 if os.name == 'nt':
463 if os.name == 'nt':
464 return skip("shell script")
464 return skip("shell script")
465 # do not try to run non-executable programs
465 # do not try to run non-executable programs
466 if not os.path.exists(testpath):
466 if not os.path.exists(testpath):
467 return fail("does not exist")
467 return fail("does not exist")
468 elif not os.access(testpath, os.X_OK):
468 elif not os.access(testpath, os.X_OK):
469 return skip("not executable")
469 return skip("not executable")
470 cmd = '"%s"' % testpath
470 cmd = '"%s"' % testpath
471
471
472 if options.timeout > 0:
472 if options.timeout > 0:
473 signal.alarm(options.timeout)
473 signal.alarm(options.timeout)
474
474
475 vlog("# Running", cmd)
475 vlog("# Running", cmd)
476 ret, out = run(cmd, options)
476 ret, out = run(cmd, options)
477 vlog("# Ret was:", ret)
477 vlog("# Ret was:", ret)
478
478
479 if options.timeout > 0:
479 if options.timeout > 0:
480 signal.alarm(0)
480 signal.alarm(0)
481
481
482 mark = '.'
482 mark = '.'
483
483
484 skipped = (ret == SKIPPED_STATUS)
484 skipped = (ret == SKIPPED_STATUS)
485 # If reference output file exists, check test output against it
485 # If reference output file exists, check test output against it
486 if os.path.exists(ref):
486 if os.path.exists(ref):
487 f = open(ref, "r")
487 f = open(ref, "r")
488 refout = splitnewlines(f.read())
488 refout = splitnewlines(f.read())
489 f.close()
489 f.close()
490 else:
490 else:
491 refout = []
491 refout = []
492 if skipped:
492 if skipped:
493 mark = 's'
493 mark = 's'
494 missing, failed = parsehghaveoutput(out)
494 missing, failed = parsehghaveoutput(out)
495 if not missing:
495 if not missing:
496 missing = ['irrelevant']
496 missing = ['irrelevant']
497 if failed:
497 if failed:
498 fail("hghave failed checking for %s" % failed[-1])
498 fail("hghave failed checking for %s" % failed[-1])
499 skipped = False
499 skipped = False
500 else:
500 else:
501 skip(missing[-1])
501 skip(missing[-1])
502 elif out != refout:
502 elif out != refout:
503 mark = '!'
503 mark = '!'
504 if ret:
504 if ret:
505 fail("output changed and returned error code %d" % ret)
505 fail("output changed and returned error code %d" % ret)
506 else:
506 else:
507 fail("output changed")
507 fail("output changed")
508 if not options.nodiff:
508 if not options.nodiff:
509 showdiff(refout, out)
509 showdiff(refout, out)
510 ret = 1
510 ret = 1
511 elif ret:
511 elif ret:
512 mark = '!'
512 mark = '!'
513 fail("returned error code %d" % ret)
513 fail("returned error code %d" % ret)
514
514
515 if not options.verbose:
515 if not options.verbose:
516 sys.stdout.write(mark)
516 sys.stdout.write(mark)
517 sys.stdout.flush()
517 sys.stdout.flush()
518
518
519 if ret != 0 and not skipped:
519 if ret != 0 and not skipped:
520 # Save errors to a file for diagnosis
520 # Save errors to a file for diagnosis
521 f = open(err, "wb")
521 f = open(err, "wb")
522 for line in out:
522 for line in out:
523 f.write(line)
523 f.write(line)
524 f.close()
524 f.close()
525
525
526 # Kill off any leftover daemon processes
526 # Kill off any leftover daemon processes
527 try:
527 try:
528 fp = file(DAEMON_PIDS)
528 fp = open(DAEMON_PIDS)
529 for line in fp:
529 for line in fp:
530 try:
530 try:
531 pid = int(line)
531 pid = int(line)
532 except ValueError:
532 except ValueError:
533 continue
533 continue
534 try:
534 try:
535 os.kill(pid, 0)
535 os.kill(pid, 0)
536 vlog('# Killing daemon process %d' % pid)
536 vlog('# Killing daemon process %d' % pid)
537 os.kill(pid, signal.SIGTERM)
537 os.kill(pid, signal.SIGTERM)
538 time.sleep(0.25)
538 time.sleep(0.25)
539 os.kill(pid, 0)
539 os.kill(pid, 0)
540 vlog('# Daemon process %d is stuck - really killing it' % pid)
540 vlog('# Daemon process %d is stuck - really killing it' % pid)
541 os.kill(pid, signal.SIGKILL)
541 os.kill(pid, signal.SIGKILL)
542 except OSError, err:
542 except OSError, err:
543 if err.errno != errno.ESRCH:
543 if err.errno != errno.ESRCH:
544 raise
544 raise
545 fp.close()
545 fp.close()
546 os.unlink(DAEMON_PIDS)
546 os.unlink(DAEMON_PIDS)
547 except IOError:
547 except IOError:
548 pass
548 pass
549
549
550 os.chdir(TESTDIR)
550 os.chdir(TESTDIR)
551 if not options.keep_tmpdir:
551 if not options.keep_tmpdir:
552 shutil.rmtree(tmpd, True)
552 shutil.rmtree(tmpd, True)
553 if skipped:
553 if skipped:
554 return None
554 return None
555 return ret == 0
555 return ret == 0
556
556
557 _hgpath = None
557 _hgpath = None
558
558
559 def _gethgpath():
559 def _gethgpath():
560 """Return the path to the mercurial package that is actually found by
560 """Return the path to the mercurial package that is actually found by
561 the current Python interpreter."""
561 the current Python interpreter."""
562 global _hgpath
562 global _hgpath
563 if _hgpath is not None:
563 if _hgpath is not None:
564 return _hgpath
564 return _hgpath
565
565
566 cmd = '%s -c "import mercurial; print mercurial.__path__[0]"'
566 cmd = '%s -c "import mercurial; print mercurial.__path__[0]"'
567 pipe = os.popen(cmd % PYTHON)
567 pipe = os.popen(cmd % PYTHON)
568 try:
568 try:
569 _hgpath = pipe.read().strip()
569 _hgpath = pipe.read().strip()
570 finally:
570 finally:
571 pipe.close()
571 pipe.close()
572 return _hgpath
572 return _hgpath
573
573
574 def _checkhglib(verb):
574 def _checkhglib(verb):
575 """Ensure that the 'mercurial' package imported by python is
575 """Ensure that the 'mercurial' package imported by python is
576 the one we expect it to be. If not, print a warning to stderr."""
576 the one we expect it to be. If not, print a warning to stderr."""
577 expecthg = os.path.join(PYTHONDIR, 'mercurial')
577 expecthg = os.path.join(PYTHONDIR, 'mercurial')
578 actualhg = _gethgpath()
578 actualhg = _gethgpath()
579 if actualhg != expecthg:
579 if actualhg != expecthg:
580 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
580 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
581 ' (expected %s)\n'
581 ' (expected %s)\n'
582 % (verb, actualhg, expecthg))
582 % (verb, actualhg, expecthg))
583
583
584 def runchildren(options, tests):
584 def runchildren(options, tests):
585 if INST:
585 if INST:
586 installhg(options)
586 installhg(options)
587 _checkhglib("Testing")
587 _checkhglib("Testing")
588
588
589 optcopy = dict(options.__dict__)
589 optcopy = dict(options.__dict__)
590 optcopy['jobs'] = 1
590 optcopy['jobs'] = 1
591 if optcopy['with_hg'] is None:
591 if optcopy['with_hg'] is None:
592 optcopy['with_hg'] = os.path.join(BINDIR, "hg")
592 optcopy['with_hg'] = os.path.join(BINDIR, "hg")
593 opts = []
593 opts = []
594 for opt, value in optcopy.iteritems():
594 for opt, value in optcopy.iteritems():
595 name = '--' + opt.replace('_', '-')
595 name = '--' + opt.replace('_', '-')
596 if value is True:
596 if value is True:
597 opts.append(name)
597 opts.append(name)
598 elif value is not None:
598 elif value is not None:
599 opts.append(name + '=' + str(value))
599 opts.append(name + '=' + str(value))
600
600
601 tests.reverse()
601 tests.reverse()
602 jobs = [[] for j in xrange(options.jobs)]
602 jobs = [[] for j in xrange(options.jobs)]
603 while tests:
603 while tests:
604 for job in jobs:
604 for job in jobs:
605 if not tests: break
605 if not tests: break
606 job.append(tests.pop())
606 job.append(tests.pop())
607 fps = {}
607 fps = {}
608 for j, job in enumerate(jobs):
608 for j, job in enumerate(jobs):
609 if not job:
609 if not job:
610 continue
610 continue
611 rfd, wfd = os.pipe()
611 rfd, wfd = os.pipe()
612 childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
612 childopts = ['--child=%d' % wfd, '--port=%d' % (options.port + j * 3)]
613 cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
613 cmdline = [PYTHON, sys.argv[0]] + opts + childopts + job
614 vlog(' '.join(cmdline))
614 vlog(' '.join(cmdline))
615 fps[os.spawnvp(os.P_NOWAIT, cmdline[0], cmdline)] = os.fdopen(rfd, 'r')
615 fps[os.spawnvp(os.P_NOWAIT, cmdline[0], cmdline)] = os.fdopen(rfd, 'r')
616 os.close(wfd)
616 os.close(wfd)
617 failures = 0
617 failures = 0
618 tested, skipped, failed = 0, 0, 0
618 tested, skipped, failed = 0, 0, 0
619 skips = []
619 skips = []
620 fails = []
620 fails = []
621 while fps:
621 while fps:
622 pid, status = os.wait()
622 pid, status = os.wait()
623 fp = fps.pop(pid)
623 fp = fps.pop(pid)
624 l = fp.read().splitlines()
624 l = fp.read().splitlines()
625 test, skip, fail = map(int, l[:3])
625 test, skip, fail = map(int, l[:3])
626 split = -fail or len(l)
626 split = -fail or len(l)
627 for s in l[3:split]:
627 for s in l[3:split]:
628 skips.append(s.split(" ", 1))
628 skips.append(s.split(" ", 1))
629 for s in l[split:]:
629 for s in l[split:]:
630 fails.append(s.split(" ", 1))
630 fails.append(s.split(" ", 1))
631 tested += test
631 tested += test
632 skipped += skip
632 skipped += skip
633 failed += fail
633 failed += fail
634 vlog('pid %d exited, status %d' % (pid, status))
634 vlog('pid %d exited, status %d' % (pid, status))
635 failures |= status
635 failures |= status
636 print
636 print
637 for s in skips:
637 for s in skips:
638 print "Skipped %s: %s" % (s[0], s[1])
638 print "Skipped %s: %s" % (s[0], s[1])
639 for s in fails:
639 for s in fails:
640 print "Failed %s: %s" % (s[0], s[1])
640 print "Failed %s: %s" % (s[0], s[1])
641
641
642 _checkhglib("Tested")
642 _checkhglib("Tested")
643 print "# Ran %d tests, %d skipped, %d failed." % (
643 print "# Ran %d tests, %d skipped, %d failed." % (
644 tested, skipped, failed)
644 tested, skipped, failed)
645 sys.exit(failures != 0)
645 sys.exit(failures != 0)
646
646
647 def runtests(options, tests):
647 def runtests(options, tests):
648 global DAEMON_PIDS, HGRCPATH
648 global DAEMON_PIDS, HGRCPATH
649 DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
649 DAEMON_PIDS = os.environ["DAEMON_PIDS"] = os.path.join(HGTMP, 'daemon.pids')
650 HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
650 HGRCPATH = os.environ["HGRCPATH"] = os.path.join(HGTMP, '.hgrc')
651
651
652 try:
652 try:
653 if INST:
653 if INST:
654 installhg(options)
654 installhg(options)
655 _checkhglib("Testing")
655 _checkhglib("Testing")
656
656
657 if options.timeout > 0:
657 if options.timeout > 0:
658 try:
658 try:
659 signal.signal(signal.SIGALRM, alarmed)
659 signal.signal(signal.SIGALRM, alarmed)
660 vlog('# Running each test with %d second timeout' %
660 vlog('# Running each test with %d second timeout' %
661 options.timeout)
661 options.timeout)
662 except AttributeError:
662 except AttributeError:
663 print 'WARNING: cannot run tests with timeouts'
663 print 'WARNING: cannot run tests with timeouts'
664 options.timeout = 0
664 options.timeout = 0
665
665
666 tested = 0
666 tested = 0
667 failed = 0
667 failed = 0
668 skipped = 0
668 skipped = 0
669
669
670 if options.restart:
670 if options.restart:
671 orig = list(tests)
671 orig = list(tests)
672 while tests:
672 while tests:
673 if os.path.exists(tests[0] + ".err"):
673 if os.path.exists(tests[0] + ".err"):
674 break
674 break
675 tests.pop(0)
675 tests.pop(0)
676 if not tests:
676 if not tests:
677 print "running all tests"
677 print "running all tests"
678 tests = orig
678 tests = orig
679
679
680 skips = []
680 skips = []
681 fails = []
681 fails = []
682 for test in tests:
682 for test in tests:
683 if options.retest and not os.path.exists(test + ".err"):
683 if options.retest and not os.path.exists(test + ".err"):
684 skipped += 1
684 skipped += 1
685 continue
685 continue
686 ret = runone(options, test, skips, fails)
686 ret = runone(options, test, skips, fails)
687 if ret is None:
687 if ret is None:
688 skipped += 1
688 skipped += 1
689 elif not ret:
689 elif not ret:
690 if options.interactive:
690 if options.interactive:
691 print "Accept this change? [n] ",
691 print "Accept this change? [n] ",
692 answer = sys.stdin.readline().strip()
692 answer = sys.stdin.readline().strip()
693 if answer.lower() in "y yes".split():
693 if answer.lower() in "y yes".split():
694 rename(test + ".err", test + ".out")
694 rename(test + ".err", test + ".out")
695 tested += 1
695 tested += 1
696 fails.pop()
696 fails.pop()
697 continue
697 continue
698 failed += 1
698 failed += 1
699 if options.first:
699 if options.first:
700 break
700 break
701 tested += 1
701 tested += 1
702
702
703 if options.child:
703 if options.child:
704 fp = os.fdopen(options.child, 'w')
704 fp = os.fdopen(options.child, 'w')
705 fp.write('%d\n%d\n%d\n' % (tested, skipped, failed))
705 fp.write('%d\n%d\n%d\n' % (tested, skipped, failed))
706 for s in skips:
706 for s in skips:
707 fp.write("%s %s\n" % s)
707 fp.write("%s %s\n" % s)
708 for s in fails:
708 for s in fails:
709 fp.write("%s %s\n" % s)
709 fp.write("%s %s\n" % s)
710 fp.close()
710 fp.close()
711 else:
711 else:
712 print
712 print
713 for s in skips:
713 for s in skips:
714 print "Skipped %s: %s" % s
714 print "Skipped %s: %s" % s
715 for s in fails:
715 for s in fails:
716 print "Failed %s: %s" % s
716 print "Failed %s: %s" % s
717 _checkhglib("Tested")
717 _checkhglib("Tested")
718 print "# Ran %d tests, %d skipped, %d failed." % (
718 print "# Ran %d tests, %d skipped, %d failed." % (
719 tested, skipped, failed)
719 tested, skipped, failed)
720
720
721 if options.anycoverage:
721 if options.anycoverage:
722 outputcoverage(options)
722 outputcoverage(options)
723 except KeyboardInterrupt:
723 except KeyboardInterrupt:
724 failed = True
724 failed = True
725 print "\ninterrupted!"
725 print "\ninterrupted!"
726
726
727 if failed:
727 if failed:
728 sys.exit(1)
728 sys.exit(1)
729
729
730 def main():
730 def main():
731 (options, args) = parseargs()
731 (options, args) = parseargs()
732 if not options.child:
732 if not options.child:
733 os.umask(022)
733 os.umask(022)
734
734
735 checktools()
735 checktools()
736
736
737 # Reset some environment variables to well-known values so that
737 # Reset some environment variables to well-known values so that
738 # the tests produce repeatable output.
738 # the tests produce repeatable output.
739 os.environ['LANG'] = os.environ['LC_ALL'] = 'C'
739 os.environ['LANG'] = os.environ['LC_ALL'] = 'C'
740 os.environ['TZ'] = 'GMT'
740 os.environ['TZ'] = 'GMT'
741 os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
741 os.environ["EMAIL"] = "Foo Bar <foo.bar@example.com>"
742 os.environ['CDPATH'] = ''
742 os.environ['CDPATH'] = ''
743
743
744 global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
744 global TESTDIR, HGTMP, INST, BINDIR, PYTHONDIR, COVERAGE_FILE
745 TESTDIR = os.environ["TESTDIR"] = os.getcwd()
745 TESTDIR = os.environ["TESTDIR"] = os.getcwd()
746 HGTMP = os.environ['HGTMP'] = os.path.realpath(tempfile.mkdtemp('', 'hgtests.',
746 HGTMP = os.environ['HGTMP'] = os.path.realpath(tempfile.mkdtemp('', 'hgtests.',
747 options.tmpdir))
747 options.tmpdir))
748 DAEMON_PIDS = None
748 DAEMON_PIDS = None
749 HGRCPATH = None
749 HGRCPATH = None
750
750
751 os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
751 os.environ["HGEDITOR"] = sys.executable + ' -c "import sys; sys.exit(0)"'
752 os.environ["HGMERGE"] = "internal:merge"
752 os.environ["HGMERGE"] = "internal:merge"
753 os.environ["HGUSER"] = "test"
753 os.environ["HGUSER"] = "test"
754 os.environ["HGENCODING"] = "ascii"
754 os.environ["HGENCODING"] = "ascii"
755 os.environ["HGENCODINGMODE"] = "strict"
755 os.environ["HGENCODINGMODE"] = "strict"
756 os.environ["HGPORT"] = str(options.port)
756 os.environ["HGPORT"] = str(options.port)
757 os.environ["HGPORT1"] = str(options.port + 1)
757 os.environ["HGPORT1"] = str(options.port + 1)
758 os.environ["HGPORT2"] = str(options.port + 2)
758 os.environ["HGPORT2"] = str(options.port + 2)
759
759
760 if options.with_hg:
760 if options.with_hg:
761 INST = None
761 INST = None
762 BINDIR = os.path.dirname(os.path.realpath(options.with_hg))
762 BINDIR = os.path.dirname(os.path.realpath(options.with_hg))
763
763
764 # This looks redundant with how Python initializes sys.path from
764 # This looks redundant with how Python initializes sys.path from
765 # the location of the script being executed. Needed because the
765 # the location of the script being executed. Needed because the
766 # "hg" specified by --with-hg is not the only Python script
766 # "hg" specified by --with-hg is not the only Python script
767 # executed in the test suite that needs to import 'mercurial'
767 # executed in the test suite that needs to import 'mercurial'
768 # ... which means it's not really redundant at all.
768 # ... which means it's not really redundant at all.
769 PYTHONDIR = BINDIR
769 PYTHONDIR = BINDIR
770 else:
770 else:
771 INST = os.path.join(HGTMP, "install")
771 INST = os.path.join(HGTMP, "install")
772 BINDIR = os.environ["BINDIR"] = os.path.join(INST, "bin")
772 BINDIR = os.environ["BINDIR"] = os.path.join(INST, "bin")
773 PYTHONDIR = os.path.join(INST, "lib", "python")
773 PYTHONDIR = os.path.join(INST, "lib", "python")
774
774
775 os.environ["BINDIR"] = BINDIR
775 os.environ["BINDIR"] = BINDIR
776 os.environ["PYTHON"] = PYTHON
776 os.environ["PYTHON"] = PYTHON
777
777
778 if not options.child:
778 if not options.child:
779 path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
779 path = [BINDIR] + os.environ["PATH"].split(os.pathsep)
780 os.environ["PATH"] = os.pathsep.join(path)
780 os.environ["PATH"] = os.pathsep.join(path)
781
781
782 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
782 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
783 # can run .../tests/run-tests.py test-foo where test-foo
783 # can run .../tests/run-tests.py test-foo where test-foo
784 # adds an extension to HGRC
784 # adds an extension to HGRC
785 pypath = [PYTHONDIR, TESTDIR]
785 pypath = [PYTHONDIR, TESTDIR]
786 # We have to augment PYTHONPATH, rather than simply replacing
786 # We have to augment PYTHONPATH, rather than simply replacing
787 # it, in case external libraries are only available via current
787 # it, in case external libraries are only available via current
788 # PYTHONPATH. (In particular, the Subversion bindings on OS X
788 # PYTHONPATH. (In particular, the Subversion bindings on OS X
789 # are in /opt/subversion.)
789 # are in /opt/subversion.)
790 oldpypath = os.environ.get('PYTHONPATH')
790 oldpypath = os.environ.get('PYTHONPATH')
791 if oldpypath:
791 if oldpypath:
792 pypath.append(oldpypath)
792 pypath.append(oldpypath)
793 os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
793 os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
794
794
795 COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
795 COVERAGE_FILE = os.path.join(TESTDIR, ".coverage")
796
796
797 if len(args) == 0:
797 if len(args) == 0:
798 args = os.listdir(".")
798 args = os.listdir(".")
799 args.sort()
799 args.sort()
800
800
801 tests = []
801 tests = []
802 for test in args:
802 for test in args:
803 if (test.startswith("test-") and '~' not in test and
803 if (test.startswith("test-") and '~' not in test and
804 ('.' not in test or test.endswith('.py') or
804 ('.' not in test or test.endswith('.py') or
805 test.endswith('.bat'))):
805 test.endswith('.bat'))):
806 tests.append(test)
806 tests.append(test)
807 if not tests:
807 if not tests:
808 print "# Ran 0 tests, 0 skipped, 0 failed."
808 print "# Ran 0 tests, 0 skipped, 0 failed."
809 return
809 return
810
810
811 vlog("# Using TESTDIR", TESTDIR)
811 vlog("# Using TESTDIR", TESTDIR)
812 vlog("# Using HGTMP", HGTMP)
812 vlog("# Using HGTMP", HGTMP)
813 vlog("# Using PATH", os.environ["PATH"])
813 vlog("# Using PATH", os.environ["PATH"])
814 vlog("# Using PYTHONPATH", os.environ["PYTHONPATH"])
814 vlog("# Using PYTHONPATH", os.environ["PYTHONPATH"])
815
815
816 try:
816 try:
817 if len(tests) > 1 and options.jobs > 1:
817 if len(tests) > 1 and options.jobs > 1:
818 runchildren(options, tests)
818 runchildren(options, tests)
819 else:
819 else:
820 runtests(options, tests)
820 runtests(options, tests)
821 finally:
821 finally:
822 cleanup(options)
822 cleanup(options)
823
823
824 main()
824 main()
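
One detail of run-tests.py worth spelling out: a child process reports back over a pipe by writing three integer counts (tested, skipped, failed) followed by one line per skip and one line per failure, and the parent in runchildren() slices that list apart with split = -fail or len(l). The lines below are a minimal, standalone sketch of that decoding with an invented sample payload; they are an illustration, not part of the patch.

# Sketch only: decoding a child's report the way the parent does above.
# The payload below is invented sample data.
payload = "5\n1\n2\ntest-a missing feature\ntest-b output changed\ntest-c returned error code 1\n"
l = payload.splitlines()
test, skip, fail = map(int, l[:3])
# -fail is falsy when fail == 0, so everything after the counts is a skip;
# otherwise the last 'fail' lines are the failure messages.
split = -fail or len(l)
skips = [s.split(" ", 1) for s in l[3:split]]
fails = [s.split(" ", 1) for s in l[split:]]
print "tested %d, skipped %d, failed %d" % (test, skip, fail)
print skips   # [['test-a', 'missing feature']]
print fails   # [['test-b', 'output changed'], ['test-c', 'returned error code 1']]
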
@@ -1,19 +1,19 b''
1 import os
1 import os
2 from mercurial import hg, ui
2 from mercurial import hg, ui
3
3
4 u = ui.ui()
4 u = ui.ui()
5
5
6 repo = hg.repository(u, 'test1', create=1)
6 repo = hg.repository(u, 'test1', create=1)
7 os.chdir('test1')
7 os.chdir('test1')
8
8
9 # create 'foo' with fixed time stamp
9 # create 'foo' with fixed time stamp
10 f = file('foo', 'w')
10 f = open('foo', 'w')
11 f.write('foo\n')
11 f.write('foo\n')
12 f.close()
12 f.close()
13 os.utime('foo', (1000, 1000))
13 os.utime('foo', (1000, 1000))
14
14
15 # add+commit 'foo'
15 # add+commit 'foo'
16 repo.add(['foo'])
16 repo.add(['foo'])
17 repo.commit(text='commit1', date="0 0")
17 repo.commit(text='commit1', date="0 0")
18
18
19 print "workingfilectx.date =", repo[None]['foo'].date()
19 print "workingfilectx.date =", repo[None]['foo'].date()
@@ -1,32 +1,32 b''
1 import os
1 import os
2 from mercurial import dispatch
2 from mercurial import dispatch
3
3
4 def testdispatch(cmd):
4 def testdispatch(cmd):
5 """Simple wrapper around dispatch.dispatch()
5 """Simple wrapper around dispatch.dispatch()
6
6
7 Prints command and result value, but does not handle quoting.
7 Prints command and result value, but does not handle quoting.
8 """
8 """
9 print "running: %s" % (cmd,)
9 print "running: %s" % (cmd,)
10 result = dispatch.dispatch(cmd.split())
10 result = dispatch.dispatch(cmd.split())
11 print "result: %r" % (result,)
11 print "result: %r" % (result,)
12
12
13
13
14 testdispatch("init test1")
14 testdispatch("init test1")
15 os.chdir('test1')
15 os.chdir('test1')
16
16
17 # create file 'foo', add and commit
17 # create file 'foo', add and commit
18 f = file('foo', 'wb')
18 f = open('foo', 'wb')
19 f.write('foo\n')
19 f.write('foo\n')
20 f.close()
20 f.close()
21 testdispatch("add foo")
21 testdispatch("add foo")
22 testdispatch("commit -m commit1 -d 2000-01-01 foo")
22 testdispatch("commit -m commit1 -d 2000-01-01 foo")
23
23
24 # append to file 'foo' and commit
24 # append to file 'foo' and commit
25 f = file('foo', 'ab')
25 f = open('foo', 'ab')
26 f.write('bar\n')
26 f.write('bar\n')
27 f.close()
27 f.close()
28 testdispatch("commit -m commit2 -d 2000-01-02 foo")
28 testdispatch("commit -m commit2 -d 2000-01-02 foo")
29
29
30 # check 88803a69b24 (fancyopts modified command table)
30 # check 88803a69b24 (fancyopts modified command table)
31 testdispatch("log -r 0")
31 testdispatch("log -r 0")
32 testdispatch("log -r tip")
32 testdispatch("log -r tip")
@@ -1,73 +1,73 b''
1 import os
1 import os
2 from mercurial import hg, ui, merge
2 from mercurial import hg, ui, merge
3
3
4 u = ui.ui()
4 u = ui.ui()
5
5
6 repo = hg.repository(u, 'test1', create=1)
6 repo = hg.repository(u, 'test1', create=1)
7 os.chdir('test1')
7 os.chdir('test1')
8
8
9 def commit(text, time):
9 def commit(text, time):
10 repo.commit(text=text, date="%d 0" % time)
10 repo.commit(text=text, date="%d 0" % time)
11
11
12 def addcommit(name, time):
12 def addcommit(name, time):
13 f = file(name, 'w')
13 f = open(name, 'w')
14 f.write('%s\n' % name)
14 f.write('%s\n' % name)
15 f.close()
15 f.close()
16 repo.add([name])
16 repo.add([name])
17 commit(name, time)
17 commit(name, time)
18
18
19 def update(rev):
19 def update(rev):
20 merge.update(repo, rev, False, True, False)
20 merge.update(repo, rev, False, True, False)
21
21
22 def merge_(rev):
22 def merge_(rev):
23 merge.update(repo, rev, True, False, False)
23 merge.update(repo, rev, True, False, False)
24
24
25 if __name__ == '__main__':
25 if __name__ == '__main__':
26 addcommit("A", 0)
26 addcommit("A", 0)
27 addcommit("B", 1)
27 addcommit("B", 1)
28
28
29 update(0)
29 update(0)
30 addcommit("C", 2)
30 addcommit("C", 2)
31
31
32 merge_(1)
32 merge_(1)
33 commit("D", 3)
33 commit("D", 3)
34
34
35 update(2)
35 update(2)
36 addcommit("E", 4)
36 addcommit("E", 4)
37 addcommit("F", 5)
37 addcommit("F", 5)
38
38
39 update(3)
39 update(3)
40 addcommit("G", 6)
40 addcommit("G", 6)
41
41
42 merge_(5)
42 merge_(5)
43 commit("H", 7)
43 commit("H", 7)
44
44
45 update(5)
45 update(5)
46 addcommit("I", 8)
46 addcommit("I", 8)
47
47
48 # Ancestors
48 # Ancestors
49 print 'Ancestors of 5'
49 print 'Ancestors of 5'
50 for r in repo.changelog.ancestors(5):
50 for r in repo.changelog.ancestors(5):
51 print r,
51 print r,
52
52
53 print '\nAncestors of 6 and 5'
53 print '\nAncestors of 6 and 5'
54 for r in repo.changelog.ancestors(6, 5):
54 for r in repo.changelog.ancestors(6, 5):
55 print r,
55 print r,
56
56
57 print '\nAncestors of 5 and 4'
57 print '\nAncestors of 5 and 4'
58 for r in repo.changelog.ancestors(5, 4):
58 for r in repo.changelog.ancestors(5, 4):
59 print r,
59 print r,
60
60
61 # Descendants
61 # Descendants
62 print '\n\nDescendants of 5'
62 print '\n\nDescendants of 5'
63 for r in repo.changelog.descendants(5):
63 for r in repo.changelog.descendants(5):
64 print r,
64 print r,
65
65
66 print '\nDescendants of 5 and 3'
66 print '\nDescendants of 5 and 3'
67 for r in repo.changelog.descendants(5, 3):
67 for r in repo.changelog.descendants(5, 3):
68 print r,
68 print r,
69
69
70 print '\nDescendants of 5 and 4'
70 print '\nDescendants of 5 and 4'
71 for r in repo.changelog.descendants(5, 4):
71 for r in repo.changelog.descendants(5, 4):
72 print r,
72 print r,
73
73
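
Across the test scripts above, the conversion itself is mechanical: each call to the file() builtin becomes open() with the same arguments. A minimal sketch of the equivalence follows; the file name is a throwaway chosen purely for illustration.

# Python 2: open() and the file() builtin behave the same way here,
# but only open() still exists on Python 3.
f = open('scratch.txt', 'w')       # previously: f = file('scratch.txt', 'w')
f.write('foo\n')
f.close()
print open('scratch.txt').read(),  # prints "foo"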